├── .coveragerc
├── .devcontainer
│   └── devcontainer.json
├── .gitattributes
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── python-package.yml
│       ├── python-publish.yml
│       └── readme-sync.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pre-commit-hooks
│   └── prevent-readme-edits.sh
├── .readthedocs.yml
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── README.py
├── docs
│   ├── _config.yml
│   ├── _toc.yml
│   ├── conf.py
│   ├── intro.md
│   ├── logo.png
│   ├── markdown-notebooks.md
│   ├── markdown.md
│   ├── notebooks.ipynb
│   ├── reference
│   │   ├── array.md
│   │   ├── block.md
│   │   ├── compose.md
│   │   ├── curry.md
│   │   ├── mailbox.md
│   │   ├── map.md
│   │   ├── misc.md
│   │   ├── option.md
│   │   ├── pipe.md
│   │   ├── pipeline.md
│   │   ├── reference.md
│   │   ├── result.md
│   │   ├── seq.md
│   │   ├── try.md
│   │   └── union.md
│   ├── requirements.txt
│   └── tutorial
│       ├── containers.md
│       ├── continuations.md
│       ├── data_modelling.md
│       ├── effects.md
│       ├── introduction.md
│       ├── lambda_calculus.md
│       ├── optional_values.md
│       └── railway.md
├── expression
│   ├── __init__.py
│   ├── _version.py
│   ├── collections
│   │   ├── __init__.py
│   │   ├── array.py
│   │   ├── asyncseq.py
│   │   ├── block.py
│   │   ├── map.py
│   │   ├── maptree.py
│   │   └── seq.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── aiotools.py
│   │   ├── async_builder.py
│   │   ├── builder.py
│   │   ├── compose.py
│   │   ├── curry.py
│   │   ├── error.py
│   │   ├── fn.py
│   │   ├── mailbox.py
│   │   ├── misc.py
│   │   ├── option.py
│   │   ├── pipe.py
│   │   ├── result.py
│   │   ├── tagged_union.py
│   │   ├── try_.py
│   │   └── typing.py
│   ├── effect
│   │   ├── __init__.py
│   │   ├── async_option.py
│   │   ├── async_result.py
│   │   ├── option.py
│   │   ├── result.py
│   │   └── seq.py
│   ├── extra
│   │   ├── __init__.py
│   │   ├── option
│   │   │   ├── __init__.py
│   │   │   └── pipeline.py
│   │   ├── parser.py
│   │   └── result
│   │       ├── __init__.py
│   │       ├── catch.py
│   │       ├── pipeline.py
│   │       └── traversable.py
│   ├── py.typed
│   └── system
│       ├── __init__.py
│       ├── cancellation.py
│       ├── disposable.py
│       └── error.py
├── make_readme.sh
├── poetry.lock
├── pyproject.toml
├── pyrightconfig.json
├── renovate.json
└── tests
    ├── __init__.py
    ├── test_array.py
    ├── test_async_option_builder.py
    ├── test_async_result_builder.py
    ├── test_asyncseq.py
    ├── test_block.py
    ├── test_cancellation.py
    ├── test_catch.py
    ├── test_compose.py
    ├── test_curried.py
    ├── test_disposable.py
    ├── test_fn.py
    ├── test_gen.py
    ├── test_mailbox.py
    ├── test_map.py
    ├── test_option.py
    ├── test_option_builder.py
    ├── test_parser.py
    ├── test_pipe.py
    ├── test_result.py
    ├── test_result_builder.py
    ├── test_seq.py
    ├── test_seq_builder.py
    ├── test_tagged_union.py
    ├── test_try.py
    ├── test_typing.py
    └── utils.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | omit =
3 | tests/*
4 | */python?.?/*
5 | */site-packages/nose/*
6 | expression/_version.py
7 | exclude_lines =
8 | pragma: no cover
9 | return NotImplemented
10 | raise NotImplementedError
11 | \.\.\.
12 | [xml]
13 | output = coverage.xml
14 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the
2 | // README at: https://github.com/devcontainers/templates/tree/main/src/python
3 | {
4 | "name": "Python 3",
5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
6 | "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
7 | "features": {
8 | "ghcr.io/devcontainers-contrib/features/ruff:1": {},
9 | "ghcr.io/devcontainers-contrib/features/poetry:2": {}
10 | }
11 |
12 | // Features to add to the dev container. More info: https://containers.dev/features.
13 | // "features": {},
14 |
15 | // Use 'forwardPorts' to make a list of ports inside the container available locally.
16 | // "forwardPorts": [],
17 |
18 | // Use 'postCreateCommand' to run commands after the container is created.
19 | // "postCreateCommand": "pip3 install --user -r requirements.txt",
20 |
21 | // Configure tool-specific properties.
22 | // "customizations": {},
23 |
24 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
25 | // "remoteUser": "root"
26 | }
27 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | expression/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | **Code or Screenshots**
20 | If applicable, add a minimal code example or screenshots to help explain your problem.
21 |
22 | ```python
23 | def foo(self) -> str:
24 | return 3
25 | ```
26 |
27 | **Additional context**
28 | Add any other context about the problem here.
29 |
30 | - OS [e.g. Windows]
31 | - Expression version [e.g 1.0.0]
32 | - Python version [e.g. 3.10.2]
33 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen. Also include minimal code examples if applicable.
15 |
16 | ```python
17 | def foo(self) -> str:
18 | return 3
19 | ```
20 |
21 | **Describe alternatives you've considered**
22 | A clear and concise description of any alternative solutions or features you've considered.
23 |
24 | **Additional context**
25 | Add any other context or screenshots about the feature request here.
26 |
--------------------------------------------------------------------------------
/.github/workflows/python-package.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python package
5 |
6 | on:
7 | push:
8 | branches: [main]
9 | pull_request:
10 | branches: [main]
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 | if: ${{ github.actor != 'github-actions[bot]' }}
16 | strategy:
17 | matrix:
18 | python-version: ["3.10", "3.11", "3.12", "3.13"]
19 |
20 | steps:
21 | - uses: actions/checkout@v4
22 | with:
23 | fetch-depth: 0
24 |
25 | - name: Set up Python ${{ matrix.python-version }}
26 | uses: actions/setup-python@v4
27 | with:
28 | python-version: ${{ matrix.python-version }}
29 |
30 | - name: Install dependencies
31 | run: |
32 | pipx install poetry
33 | poetry install --all-extras
34 |
35 | - name: Code checks
36 | run: |
37 | poetry run pre-commit run --all-files --show-diff-on-failure
38 |
39 | - name: Check README modifications
40 | run: |
41 | # Get the list of changed files in the last commit
42 | CHANGED_FILES=$(git diff --name-only HEAD^)
43 |
44 | # Check if README.md was modified but README.py wasn't
45 | if echo "$CHANGED_FILES" | grep -q "README.md" && ! echo "$CHANGED_FILES" | grep -q "README.py"; then
46 | echo "ERROR: README.md was modified directly"
47 | echo "Please make all documentation updates in README.py instead"
48 | exit 1
49 | fi
50 |
51 | - name: Test with pytest
52 | run: |
53 | poetry run pytest
54 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python package using Poetry when a release is created
2 |
3 | name: Publish Package
4 |
5 | on:
6 | release:
7 | types: [created]
8 |
9 | jobs:
10 | publish:
11 | runs-on: ubuntu-latest
12 | name: "Publish library"
13 | steps:
14 | - name: Check out
15 | uses: actions/checkout@v3
16 | with:
17 | token: "${{ secrets.GITHUB_TOKEN }}"
18 | fetch-depth: 0
19 |
20 | - name: Setup Python Env
21 | uses: actions/setup-python@v4
22 | with:
23 | python-version: '3.11'
24 |
25 | - name: Install dependencies
26 | run: pip install poetry dunamai
27 |
28 | - name: Set version
29 | run: |
30 | VERSION=$(dunamai from any --no-metadata --style pep440)
31 | poetry version $VERSION
32 | echo "__version__ = \"$VERSION\"" > expression/_version.py
33 |
34 | - name: Build package
35 | run: poetry build
36 |
37 | - name: Release to PyPI
38 | run: |
39 | poetry publish -u __token__ -p ${{ secrets.PYPI_API_TOKEN }} || echo 'Version exists'
40 |
--------------------------------------------------------------------------------
/.github/workflows/readme-sync.yml:
--------------------------------------------------------------------------------
1 | name: README Sync
2 |
3 | on:
4 | push:
5 | paths:
6 | - 'README.py'
7 |
8 | jobs:
9 | create-readme-pr:
10 | runs-on: ubuntu-latest
11 | if: ${{ github.actor != 'github-actions[bot]' }}
12 |
13 | steps:
14 | - uses: actions/checkout@v4
15 | with:
16 | fetch-depth: 0
17 |
18 | - name: Set up Python
19 | uses: actions/setup-python@v4
20 | with:
21 | python-version: "3.10"
22 |
23 | - name: Install dependencies
24 | run: |
25 | pipx install poetry
26 | poetry install --all-extras
27 |
28 | - name: Generate README.md
29 | run: |
30 | # Generate new README.md
31 | poetry run ./make_readme.sh
32 |
33 | - name: Create Pull Request
34 | uses: peter-evans/create-pull-request@v7
35 | with:
36 | commit-message: "[auto] Sync README.md from README.py"
37 | title: '[auto] Sync README.md from README.py'
38 | body: 'Auto-generated PR to sync README.md with changes from README.py'
39 | branch: "update-readme"
40 | delete-branch: true
41 | base: ${{ github.ref }}
42 | labels: automated,documentation
43 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # mkdocs documentation
118 | /site
119 |
120 | # mypy
121 | .mypy_cache/
122 | .dmypy.json
123 | dmypy.json
124 |
125 | # Pyre type checker
126 | .pyre/
127 | .ionide
128 |
129 | .idea/
130 |
131 | notebooks/images/.ztr-directory
132 |
133 | notebooks/.ztr-directory
134 |
135 | .vscode/
136 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | exclude: "_version.py|versioneer.py"
2 | repos:
3 | - repo: local
4 | hooks:
5 | - id: prevent-readme-edits
6 | name: Prevent direct README.md edits
7 | entry: .pre-commit-hooks/prevent-readme-edits.sh
8 | language: script
9 | files: README\.md$
10 |
11 | - hooks:
12 | - id: ruff
13 | args: ["--fix"]
14 | - id: ruff-format
15 | args: [--check]
16 | repo: https://github.com/astral-sh/ruff-pre-commit
17 | rev: v0.5.6
18 | - hooks:
19 | - id: pyright
20 | name: pyright
21 | entry: pyright
22 | language: node
23 | pass_filenames: false
24 | types: [python]
25 | additional_dependencies: ["pyright@1.1.394"]
26 | repo: local
27 |
--------------------------------------------------------------------------------
/.pre-commit-hooks/prevent-readme-edits.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check if README.md was modified
4 | if git diff --cached --name-only | grep -q "README.md"; then
5 | # Check if README.py was also modified
6 | if ! git diff --cached --name-only | grep -q "README.py"; then
7 | echo "ERROR: Direct modifications to README.md are not allowed"
8 | echo "Please make all documentation updates in README.py instead"
9 | exit 1
10 | fi
11 | fi
12 |
13 | exit 0
14 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 | version: 2
5 |
6 | build:
7 | os: ubuntu-20.04
8 | tools:
9 | python: "3.11"
10 |
11 | sphinx:
12 |   configuration: docs/conf.py
13 |
14 | python:
15 | install:
16 | - requirements: docs/requirements.txt
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to make participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies within all project spaces, and it also applies when
49 | an individual is representing the project or its community in public spaces.
50 | Examples of representing a project or community include using an official
51 | project e-mail address, posting via an official social media account, or acting
52 | as an appointed representative at an online or offline event. Representation of
53 | a project may be further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at dag.brattli@cognite.com. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Dag Brattli
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | # Book settings
2 | # Learn more at https://jupyterbook.org/customize/config.html
3 |
4 | title: Expression
5 | author: Cognite
6 | logo: logo.png
7 |
8 | # Force re-execution of notebooks on each build.
9 | # See https://jupyterbook.org/content/execute.html
10 | execute:
11 | execute_notebooks: force
12 |
13 | # Define the name of the latex output file for PDF builds
14 | latex:
15 | latex_documents:
16 | targetname: book.tex
17 |
18 | # Information about where the book exists on the web
19 | repository:
20 | url: https://github.com/cognitedata/expression # Online location of your book
21 | path_to_book: docs # Optional path to your book, relative to the repository root
22 | branch: main # Which branch of the repository should be used when creating links (optional)
23 |
24 | # Add GitHub buttons to your book
25 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository
26 | html:
27 | use_issues_button: true
28 | use_repository_button: true
29 |
30 | sphinx:
31 | extra_extensions: ["sphinx.ext.autodoc", "sphinx.ext.napoleon"]
32 | config:
33 | add_module_names: false
34 | autodoc_default_options: { 'member-order': 'alphabetical', 'undoc-members': True }
--------------------------------------------------------------------------------
/docs/_toc.yml:
--------------------------------------------------------------------------------
1 | # Table of contents
2 | # Learn more at https://jupyterbook.org/customize/toc.html
3 |
4 | format: jb-book
5 | root: intro
6 | chapters:
7 | - file: tutorial/introduction
8 | - file: tutorial/containers
9 | - file: tutorial/lambda_calculus
10 | - file: tutorial/optional_values
11 | - file: tutorial/data_modelling
12 | - file: tutorial/railway
13 | - file: tutorial/effects
14 | - file: tutorial/continuations
15 | - file: reference/reference
16 | sections:
17 | - file: reference/array
18 | - file: reference/block
19 | - file: reference/compose
20 | - file: reference/curry
21 | - file: reference/mailbox
22 | - file: reference/map
23 | - file: reference/misc
24 | - file: reference/option
25 | - file: reference/pipe
26 | - file: reference/pipeline
27 | - file: reference/result
28 | - file: reference/seq
29 | - file: reference/try
30 | - file: reference/union
31 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Auto-generated by `jupyter-book config`
3 | # If you wish to continue using _config.yml, make edits to that file and
4 | # re-generate this one.
5 | ###############################################################################
6 | add_module_names = False
7 | author = 'Cognite'
8 | comments_config = {'hypothesis': False, 'utterances': False}
9 | copyright = '2021'
10 | exclude_patterns = ['**.ipynb_checkpoints', '.DS_Store', 'Thumbs.db', '_build']
11 | execution_allow_errors = False
12 | execution_excludepatterns = []
13 | execution_in_temp = False
14 | execution_timeout = 30
15 | extensions = ['sphinx_togglebutton', 'sphinx_copybutton', 'myst_nb', 'jupyter_book', 'sphinx_thebe', 'sphinx_comments', 'sphinx_external_toc', 'sphinx.ext.intersphinx', 'sphinx_panels', 'sphinx_book_theme', 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx_jupyterbook_latex']
16 | external_toc_exclude_missing = False
17 | external_toc_path = '_toc.yml'
18 | html_baseurl = ''
19 | html_favicon = ''
20 | html_logo = 'logo.png'
21 | html_sourcelink_suffix = ''
22 | html_theme = 'sphinx_book_theme'
23 | html_theme_options = {'search_bar_text': 'Search this book...', 'launch_buttons': {'notebook_interface': 'classic', 'binderhub_url': 'https://mybinder.org', 'jupyterhub_url': '', 'thebe': False, 'colab_url': ''}, 'path_to_docs': 'docs', 'repository_url': 'https://github.com/cognitedata/expression', 'repository_branch': 'main', 'google_analytics_id': '', 'extra_navbar': 'Powered by Jupyter Book', 'extra_footer': '', 'home_page_in_toc': True, 'use_repository_button': True, 'use_edit_page_button': False, 'use_issues_button': True}
24 | html_title = 'Expression'
25 | jupyter_cache = ''
26 | jupyter_execute_notebooks = 'force'
27 | language = None
28 | latex_engine = 'pdflatex'
29 | myst_enable_extensions = ['colon_fence', 'dollarmath', 'linkify', 'substitution', 'tasklist']
30 | myst_url_schemes = ['mailto', 'http', 'https']
31 | nb_output_stderr = 'show'
32 | numfig = True
33 | panels_add_bootstrap_css = False
34 | pygments_style = 'sphinx'
35 | suppress_warnings = ['myst.domains']
36 | use_jupyterbook_latex = True
37 | use_multitoc_numbering = True
38 |
--------------------------------------------------------------------------------
/docs/intro.md:
--------------------------------------------------------------------------------
1 | # Welcome to Expression
2 |
3 | Expression aims to be a solid, type-safe, pragmatic, and high-performance
4 | library for frictionless and practical functional programming in Python 3.10+.
5 |
6 | By pragmatic, we mean that the goal of the library is to use simple abstractions
7 | to enable you to do practical and productive functional programming in Python
8 | (instead of being a [Monad tutorial](https://github.com/dbrattli/OSlash)).
9 |
10 | Python is a multi-paradigm programming language that supports functional
11 | programming constructs such as functions, higher-order functions, and lambdas, and
12 | in many ways it favors composition over inheritance.
13 |
14 | ```{tableofcontents}
15 | ```
16 |
--------------------------------------------------------------------------------
/docs/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbrattli/Expression/3981fd3a3ad60a13d00a4a3c170bdac76c1943fe/docs/logo.png
--------------------------------------------------------------------------------
/docs/markdown-notebooks.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 |
16 | # Notebooks with MyST Markdown
17 |
18 | Jupyter Book also lets you write text-based notebooks using MyST Markdown.
19 | See [the Notebooks with MyST Markdown documentation](https://jupyterbook.org/file-types/myst-notebooks.html) for more detailed instructions.
20 | This page shows off a notebook written in MyST Markdown.
21 |
22 | ## An example cell
23 |
24 | With MyST Markdown, you can define code cells with a directive like so:
25 |
26 | ```{code-cell}
27 | print(2 + 2)
28 | ```
29 |
30 | When your book is built, the contents of any `{code-cell}` blocks will be
31 | executed with your default Jupyter kernel, and their outputs will be displayed
32 | in-line with the rest of your content.
33 |
34 | ```{seealso}
35 | Jupyter Book uses [Jupytext](https://jupytext.readthedocs.io/en/latest/) to convert text-based files to notebooks, and can support [many other text-based notebook files](https://jupyterbook.org/file-types/jupytext.html).
36 | ```
37 |
38 | ## Create a notebook with MyST Markdown
39 |
40 | MyST Markdown notebooks are defined by two things:
41 |
42 | 1. YAML metadata that is needed to understand if / how it should convert text files to notebooks (including information about the kernel needed).
43 | See the YAML at the top of this page for example.
44 | 2. The presence of `{code-cell}` directives, which will be executed with your book.
45 |
46 | That's all that is needed to get started!
47 |
48 | ## Quickly add YAML metadata for MyST Notebooks
49 |
50 | If you have a markdown file and you'd like to quickly add YAML metadata to it, so that Jupyter Book will treat it as a MyST Markdown Notebook, run the following command:
51 |
52 | ```
53 | jupyter-book myst init path/to/markdownfile.md
54 | ```
55 |
--------------------------------------------------------------------------------
/docs/markdown.md:
--------------------------------------------------------------------------------
1 | # Markdown Files
2 |
3 | Whether you write your book's content in Jupyter Notebooks (`.ipynb`) or
4 | in regular markdown files (`.md`), you'll write in the same flavor of markdown
5 | called **MyST Markdown**.
6 | This is a simple file to help you get started and show off some syntax.
7 |
8 | ## What is MyST?
9 |
10 | MyST stands for "Markedly Structured Text". It
11 | is a slight variation on a flavor of markdown called "CommonMark" markdown,
12 | with small syntax extensions to allow you to write **roles** and **directives**
13 | in the Sphinx ecosystem.
14 |
15 | For more about MyST, see [the MyST Markdown Overview](https://jupyterbook.org/content/myst.html).
16 |
17 | ## Sample Roles and Directives
18 |
19 | Roles and directives are two of the most powerful tools in Jupyter Book. They
20 | are kind of like functions, but written in a markup language. They both
21 | serve a similar purpose, but **roles are written in one line**, whereas
22 | **directives span many lines**. They both accept different kinds of inputs,
23 | and what they do with those inputs depends on the specific role or directive
24 | that is being called.
25 |
26 | Here is a "note" directive:
27 |
28 | ```{note}
29 | Here is a note
30 | ```
31 |
32 | It will be rendered in a special box when you build your book.
33 |
34 | Here is an inline role to refer to a document: {doc}`markdown-notebooks`.
35 |
36 |
37 | ## Learn more
38 |
39 | This is just a simple starter to get you going.
40 | You can learn a lot more at [jupyterbook.org](https://jupyterbook.org).
41 |
--------------------------------------------------------------------------------
/docs/notebooks.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Content with notebooks\n",
8 | "\n",
9 | "You can also create content with Jupyter Notebooks. This means that you can include\n",
10 | "code blocks and their outputs in your book.\n",
11 | "\n",
12 | "## Markdown + notebooks\n",
13 | "\n",
14 | "As it is markdown, you can embed images, HTML, etc into your posts!\n",
15 | "\n",
16 | "\n",
17 | "\n",
18 | "You can also $add_{math}$ and\n",
19 | "\n",
20 | "$$\n",
21 | "math^{blocks}\n",
22 | "$$\n",
23 | "\n",
24 | "or\n",
25 | "\n",
26 | "$$\n",
27 | "\\begin{aligned}\n",
28 | "\\mbox{mean} la_{tex} \\\\ \\\\\n",
29 | "math blocks\n",
30 | "\\end{aligned}\n",
31 | "$$\n",
32 | "\n",
33 | "But make sure you \\$Escape \\$your \\$dollar signs \\$you want to keep!\n",
34 | "\n",
35 | "## MyST markdown\n",
36 | "\n",
37 | "MyST markdown works in Jupyter Notebooks as well. For more information about MyST markdown, check\n",
38 | "out [the MyST guide in Jupyter Book](https://jupyterbook.org/content/myst.html),\n",
39 | "or see [the MyST markdown documentation](https://myst-parser.readthedocs.io/en/latest/).\n",
40 | "\n",
41 | "## Code blocks and outputs\n",
42 | "\n",
43 | "Jupyter Book will also embed your code blocks and output in your book.\n",
44 | "For example, here's some sample Matplotlib code:"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "from matplotlib import rcParams, cycler\n",
54 | "import matplotlib.pyplot as plt\n",
55 | "import numpy as np\n",
56 | "plt.ion()"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": null,
62 | "metadata": {},
63 | "outputs": [],
64 | "source": [
65 | "# Fixing random state for reproducibility\n",
66 | "np.random.seed(19680801)\n",
67 | "\n",
68 | "N = 10\n",
69 | "data = [np.logspace(0, 1, 100) + np.random.randn(100) + ii for ii in range(N)]\n",
70 | "data = np.array(data).T\n",
71 | "cmap = plt.cm.coolwarm\n",
72 | "rcParams['axes.prop_cycle'] = cycler(color=cmap(np.linspace(0, 1, N)))\n",
73 | "\n",
74 | "\n",
75 | "from matplotlib.lines import Line2D\n",
76 | "custom_lines = [Line2D([0], [0], color=cmap(0.), lw=4),\n",
77 | " Line2D([0], [0], color=cmap(.5), lw=4),\n",
78 | " Line2D([0], [0], color=cmap(1.), lw=4)]\n",
79 | "\n",
80 | "fig, ax = plt.subplots(figsize=(10, 5))\n",
81 | "lines = ax.plot(data)\n",
82 | "ax.legend(custom_lines, ['Cold', 'Medium', 'Hot']);"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "metadata": {},
88 | "source": [
89 | "There is a lot more that you can do with outputs (such as including interactive outputs)\n",
90 | "with your book. For more information about this, see [the Jupyter Book documentation](https://jupyterbook.org)"
91 | ]
92 | }
93 | ],
94 | "metadata": {
95 | "kernelspec": {
96 | "display_name": "Python 3",
97 | "language": "python",
98 | "name": "python3"
99 | },
100 | "language_info": {
101 | "codemirror_mode": {
102 | "name": "ipython",
103 | "version": 3
104 | },
105 | "file_extension": ".py",
106 | "mimetype": "text/x-python",
107 | "name": "python",
108 | "nbconvert_exporter": "python",
109 | "pygments_lexer": "ipython3",
110 | "version": "3.8.0"
111 | },
112 | "widgets": {
113 | "application/vnd.jupyter.widget-state+json": {
114 | "state": {},
115 | "version_major": 2,
116 | "version_minor": 0
117 | }
118 | }
119 | },
120 | "nbformat": 4,
121 | "nbformat_minor": 4
122 | }
123 |
--------------------------------------------------------------------------------
/docs/reference/array.md:
--------------------------------------------------------------------------------
1 | (reference_array)=
2 |
3 | # Typed Array
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.collections.array
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/block.md:
--------------------------------------------------------------------------------
1 | (reference_block)=
2 |
3 | # Block
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.collections.block
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/compose.md:
--------------------------------------------------------------------------------
1 | (reference_compose)=
2 |
3 | # Compose
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.compose
7 | :members:
8 | ```
9 |
--------------------------------------------------------------------------------
/docs/reference/curry.md:
--------------------------------------------------------------------------------
1 | (reference_curry)=
2 |
3 | # Curry
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.curry
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/mailbox.md:
--------------------------------------------------------------------------------
1 | (reference_mailbox)=
2 |
3 | # Mailbox
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.mailbox
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/map.md:
--------------------------------------------------------------------------------
1 | (reference_map)=
2 |
3 | # Map
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.collections.map
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/misc.md:
--------------------------------------------------------------------------------
1 | (reference_misc)=
2 |
3 | # Misc
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.misc
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/option.md:
--------------------------------------------------------------------------------
1 | (reference_option)=
2 |
3 | # Option
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.option
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/pipe.md:
--------------------------------------------------------------------------------
1 | (reference_pipe)=
2 |
3 | # Pipe
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.pipe
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/pipeline.md:
--------------------------------------------------------------------------------
1 | (reference_pipeline)=
2 |
3 | # Pipeline
4 |
5 | ## Option
6 | ```{eval-rst}
7 | .. automodule:: expression.extra.option.pipeline
8 | :members:
9 | ```
10 |
11 | ## Result
12 | ```{eval-rst}
13 | .. automodule:: expression.extra.result.pipeline
14 | :members:
15 | ```
--------------------------------------------------------------------------------
/docs/reference/reference.md:
--------------------------------------------------------------------------------
1 | # Reference
2 |
3 | ## Core Types
4 |
5 | - [Option](reference_option)
6 | - [Result](reference_result)
7 | - [Try](reference_try)
8 | - [Tagged Union](reference_union)
9 |
10 | ## Functional Helpers
11 |
12 | - [Misc](reference_misc)
13 | - [Pipe](reference_pipe)
14 | - [Pipeline](reference_pipeline)
15 | - [Compose](reference_compose)
16 | - [Curry](reference_curry)
17 | - [Mailbox](reference_mailbox)
18 |
19 | ## Collections
20 |
21 | - [Typed Array](reference_array)
22 | - [Map](reference_map)
23 | - [Block](reference_block)
24 | - [Seq](reference_seq)
25 |
--------------------------------------------------------------------------------
/docs/reference/result.md:
--------------------------------------------------------------------------------
1 | (reference_result)=
2 |
3 | # Result
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.result
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/seq.md:
--------------------------------------------------------------------------------
1 | (reference_seq)=
2 |
3 | # Seq
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.collections.seq
7 | :members:
8 | ```
--------------------------------------------------------------------------------
/docs/reference/try.md:
--------------------------------------------------------------------------------
1 | (reference_try)=
2 |
3 | # Try
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.try_
7 | :members:
8 | :show-inheritance:
9 |
10 | ```
--------------------------------------------------------------------------------
/docs/reference/union.md:
--------------------------------------------------------------------------------
1 | (reference_union)=
2 |
3 | # Tagged Unions
4 |
5 | ```{eval-rst}
6 | .. automodule:: expression.core.tagged_union
7 | :members:
8 | ```
9 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | jupyter-book
2 | sphinx>=5.0.0
3 | sphinx-autodoc-typehints>=1.17.0
4 | sphinx-panels
5 | expression>=5.0.0
6 |
--------------------------------------------------------------------------------
/docs/tutorial/containers.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_containers)=
16 |
17 | # Containers
18 |
19 | In Python, a container is something that contains something. Containers may be sequences,
20 | sets, or mappings. Thus a container is an **abstraction** of **"something"** that:
21 |
22 | - May contain **something**
23 | - Sequences are iterable
24 | - Collections have a size
25 |
26 | We usually talk about generic container types such as `List[T]`, `Set[T]`,
27 | `Tuple[T, ...]`. But we can also imagine taking the abstraction to a higher order, making
28 | the left side generic as well, e.g. `Something[T]`. What do types of `Something` have in
29 | common?
30 |
31 | > *A something within a something*
32 |
33 | A container is really just some kind of box that you can pull values out of. Can values
34 | be pushed out of a container?
35 |
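As a rough sketch (the `Box` type below is made up purely for illustration, it is not
part of Expression), a generic `Box[T]` is one such `Something[T]`: a value inside a box
that we can pull back out:

```python
from dataclasses import dataclass
from typing import Generic, TypeVar

_T = TypeVar("_T")


@dataclass
class Box(Generic[_T]):
    """A minimal 'something within a something'."""

    value: _T

    def pull(self) -> _T:
        # Pull the contained value back out of the box
        return self.value


assert Box(42).pull() == 42
assert Box("rabbit").pull() == "rabbit"
```
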
36 | ## Mapping
37 |
38 | A mapping object maps hashable keys to arbitrary objects. There are both `Mapping` and
39 | `MutableMapping` abstract base classes. The best-known mutable mapping is the `dict` type.
40 |
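For instance (a small sketch using the standard-library abstract base classes):

```python
from collections.abc import Mapping, MutableMapping

d = {"a": 10, "b": 20}
assert isinstance(d, MutableMapping)  # dict can be modified in place
assert isinstance(d, Mapping)         # every MutableMapping is also a Mapping
```
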
41 | ## Sequence
42 |
43 | A sequence is an iterable container such as `List`, `Tuple`, `str`, ...
44 |
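All of these are iterable, support indexing, and have a size (a quick sketch):

```python
for xs in ([1, 2, 3], (1, 2, 3), "123"):
    assert len(xs) == 3       # collections have a size
    assert xs[0] in (1, "1")  # sequences support indexing
    assert [x for x in xs]    # sequences are iterable
```
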
45 | ## Immutable data types
46 |
47 | Immutable data types are important in functional programming. Immutable means that it's
48 | not possible to make any changes after the value has been created. Most data structures
49 | in Python are mutable, such as `List` and `Dict`, but Python also has a few immutable
50 | data types:
51 |
52 | * Strings
53 | * Tuples
54 | * Iterable
55 |
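To see the difference in practice (a minimal sketch):

```python
xs = [1, 2, 3]  # list: mutable
xs[0] = 10      # fine

ys = (1, 2, 3)  # tuple: immutable
try:
    ys[0] = 10  # type: ignore
except TypeError as ex:
    print("tuples cannot be changed:", ex)
```
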
56 | The advantages of immutable data types are:
57 |
58 | * Thread-safe. Multiple threads cannot modify or corrupt the state.
59 | * Safe to share and reuse
60 | * Easy to reason about. Reduces the cognitive load
61 | * Easier to debug
62 |
63 | Expression extends Python with a couple more immutable data types:
64 |
65 | ## Block
66 |
67 | A Block is an immutable list type. The implementation is based on the already
68 | immutable tuple type, but gives it a list-like feel and plenty of functions and methods
69 | to work with it.
70 |
71 | ```{code-cell} python
72 | from expression.collections import Block
73 |
74 | xs = Block.of_seq(range(10))
75 | print(xs)
76 |
77 | ys = xs.cons(10)
78 | print(ys)
79 |
80 | zs = xs.tail()
81 | print(zs)
82 | ```
83 |
84 | ## Map
85 |
86 | The Expression Map module is an immutable dict type. The implementation is based on
87 | the Map type from F# and uses a balanced binary tree.
88 |
89 | ```{code-cell} python
90 | from expression.collections import Map
91 |
92 | items = dict(a=10, b=20).items()
93 | xs = Map.of_seq(items)
94 | print(xs)
95 |
96 | ys = xs.filter(lambda k, v: v>10)
97 | print(ys)
98 | ```
99 |
100 | ## Functions are Containers
101 |
102 | It might not be obvious at first, but functions can also be containers. This is because
103 | values might be stored in function closures. That means that a value might be visible in
104 | the scope of the function.
105 |
106 | > A closure is a poor man's object. An object is a poor man's closure.
107 |
108 | In functional programming we often use function arguments to store values instead of
109 | objects:
110 |
111 | ```{code-cell} python
112 | def hat(item):
113 | def pull():
114 | return item
115 | return pull
116 |
117 | small_hat = lambda item: lambda pull: item
118 |
119 | pull = hat("rabbit")
120 | pull()
121 | ```
122 |
123 | ## List out of lambda (LOL)
124 |
125 | We can even create a fully functional list implementation using only functions:
126 |
127 | ```python
128 | empty_list = None
129 |
130 | def prepend(el, lst):
131 | return lambda selector: selector(el, lst)
132 |
133 | def head(lst):
134 | return lst(lambda h, t: h)
135 |
136 | def tail(lst):
137 | return lst(lambda h, t: t)
138 |
139 | def is_empty(lst):
140 | return (lst == empty_list)
141 | ```
142 |
143 | ```python
144 | a = prepend("a", prepend("b", empty_list))
145 |
146 | assert("a" == head(a))
147 | assert("b" == head(tail(a)))
148 | assert(tail(tail(a))==empty_list)
149 | assert(not is_empty(a))
150 | assert(is_empty(empty_list))
151 |
152 | print("all tests are green!")
153 | ```
154 |
155 | ## LOL (more compact)
156 |
157 | A list can be created using only lambda functions:
158 |
159 | ```python
160 | empty_list = None
161 | prepend = lambda el, lst: lambda selector: selector(el, lst)
162 | head = lambda lst: lst(lambda h, t: h)
163 | tail = lambda lst: lst(lambda h, t: t)
164 | is_empty = lambda lst: lst is empty_list
165 | ```
166 |
167 | ```python
168 | a = prepend("a", prepend("b", empty_list))
169 |
170 | assert("a" == head(a))
171 | assert("b" == head(tail(a)))
172 | assert(tail(tail(a))==empty_list)
173 | assert(not is_empty(a))
174 | assert(is_empty(empty_list))
175 |
176 | print("all tests are green!")
177 | ```
178 |
179 | ## Pull vs Push
180 |
181 | Lists, iterables, mappings, strings, etc. are what we call "pull" collections. This is
182 | because we actively pull the values out of the collection by calling the `next()`
183 | function on the iterator.
184 |
185 | ```python
186 | iterable = [1, 2, 3]
187 | iterator = iter(iterable) # get iterator
188 |
189 | value = next(iterator)
190 | print(value)
191 |
192 | value = next(iterator)
193 | print(value)
194 |
195 | value = next(iterator)
196 | print(value)
197 |
198 | # value = next(iterator)
199 | ```
200 |
201 | ## Push Collections
202 |
203 | A push collection is something that pushes values out of the collection. This can be
204 | seen as temporal (push) containers vs spatial (pull) collections. This collection is
205 | called an Observable and is the dual (or the opposite) of an Iterable.
206 |
207 | An `Iterable` has a getter for getting an `Iterator` (`__iter__`)
208 | An `Observable` has a setter for setting an `Observer` (`subscribe`)
209 |
210 | An `Iterator` has a getter for getting the next value (`__next__`)
211 | An `Observer` has a setter for setting the next value (`on_next`, or `send`)
212 |
213 | Summarized:
214 |
215 | * Iterable is a getter-getter function
216 | * Observable is a setter-setter function
217 |
218 | Let's try to implement an Observable using only functions:
219 |
220 | ```python
221 | import sys
222 |
223 | def observer(value):
224 | print(f"got value: {value}")
225 |
226 | def infinite():
227 | def subscribe(obv):
228 | for x in range(1000):
229 | obv(x)
230 |
231 | return subscribe
232 |
233 | def take(count):
234 | def obs(source):
235 | def subscribe(obv):
236 | n = count
237 | def observer(value):
238 | nonlocal n
239 | if n > 0:
240 | obv(value)
241 | n -= 1
242 |
243 | source(observer)
244 | return subscribe
245 | return obs
246 |
247 | take(10)(infinite())(observer)
248 | ```
249 |
250 | ```python
251 | def pipe(arg, *fns):
252 | for fn in fns:
253 | arg = fn(arg)
254 | return arg
255 |
256 |
257 | observable = pipe(
258 | infinite(), # infinite sequence of values
259 | take(10) # take the first 10
260 | )
261 |
262 | observable(observer)
263 | ```
264 |
265 | [RxPY](https://github.com/ReactiveX/RxPY) is an implementation of `Observable` and
266 | [aioreactive](https://github.com/dbrattli/aioreactive) project is an implementation of
267 | `AsyncObservable`.
268 |
--------------------------------------------------------------------------------
/docs/tutorial/continuations.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_continuations)=
16 |
17 | # Callbacks and Continuations
18 |
19 | > Don't call me. I'll call you.
20 |
21 | ```python
22 | import threading
23 |
24 | def long_running(callback):
25 | def done():
26 | result = 42
27 | callback(result)
28 | timer = threading.Timer(5.0, done)
29 | timer.start()
30 |
31 | long_running(print)
32 | ```
33 |
34 | ## Continuation Passing Style (CPS)
35 |
36 | This is a functional programming style where you don’t return any values from your
37 | functions. Instead of returning the result, you pass a continuation function that will
38 | be applied to the result.
39 |
40 | ```python
41 | import math
42 |
43 | def add(a, b):
44 | return a + b
45 |
46 | def square(x):
47 | return x * x
48 |
49 | def sqrt(x):
50 | return math.sqrt(x)
51 |
52 | def pythagoras(a, b):
53 | return sqrt(add(square(a), square(b)))
54 | ```
55 |
56 | ```python
57 | result = pythagoras(2,3)
58 | print(result)
59 | ```
60 |
61 | ```python
62 | import math
63 |
64 | def add(a, b, cont):
65 | cont(a + b)
66 |
67 | def square(x, cont):
68 | cont(x * x)
69 |
70 | def sqrt(x, cont):
71 | cont(math.sqrt(x))
72 |
73 | # Pythagoras rewritten in CPS
74 | def pythagoras(a, b, cont):
75 | square(a, (lambda aa:
76 | square(b, (lambda bb:
77 | add(aa, bb, (lambda aabb:
78 | sqrt(aabb, (lambda result:
79 | cont(result)
80 | ))
81 | ))
82 | ))
83 | ))
84 | ```
85 |
86 | ```python
87 | pythagoras(2, 3, print)
88 | ```
89 |
90 | ## Nice, but unreadable. Kind of ....
91 |
92 | How can we do better? We're not really used to passing in continuations. We like to return our results!
93 |
94 | > Could we perhaps use currying?
95 |
96 | So instead of taking a continuation, we could return a function that takes a continuation. It's basically the exact same thing ... just moving the `:`.
97 |
98 | ```python
99 | import math
100 |
101 | def add(a, b):
102 | return lambda cont: cont(a + b)
103 |
104 | def square(x):
105 | return lambda cont: cont(x * x)
106 |
107 | def sqrt(x):
108 | return lambda cont: cont(math.sqrt(x))
109 |
110 | def pythagoras(a, b):
111 | def then(cont):
112 | then = square(a)
113 | def next(aa):
114 | then = square(b)
115 | def next(bb):
116 | then = add(aa, bb)
117 | def next(aabb):
118 | then = sqrt(aabb)
119 | def next(result):
120 | cont(result)
121 | then(next)
122 | then(next)
123 | then(next)
124 | then(next)
125 | return then
126 |
127 | result = pythagoras(2,3)
128 | result(print)
129 | ```
130 |
131 | ## Now what? Looks slightly better, kind of ...
132 |
133 | > Could we perhaps use types to make better abstractions?
134 |
135 | In Python we create new types using classes, so let's create a class to encapsulate the CPS function `(a -> r) -> r`.
136 |
137 | Ref: https://wiki.haskell.org/MonadCont_under_the_hood
138 |
139 | ```python
140 | class Cont:
141 | def __init__(self, cps):
142 | self.__cps = cps # fn: ('a -> 'r) -> 'r
143 |
144 | @staticmethod
145 | def rtn(a):
146 | return Cont(lambda cont: cont(a))
147 |
148 | def run(self, cont):
149 | self.__cps(cont)
150 |
151 | def then(self, fn):
152 | # Cont <| fun c -> run cont (fun a -> run (fn a) c )
153 | return Cont(lambda c: self.run(lambda a: fn(a).run(c)))
154 | ```
155 |
156 | ```python
157 | import math
158 |
159 | def add(a, b):
160 | return Cont.rtn(a + b)
161 |
162 | def square(x):
163 | return Cont.rtn(x * x)
164 |
165 | def sqrt(x):
166 | return Cont.rtn(math.sqrt(x))
167 |
168 | def pythagoras(a, b):
169 | return square(a).then(
170 | lambda aa: square(b).then(
171 | lambda bb: add(aa, bb).then(
172 | lambda aabb: sqrt(aabb)
173 | )
174 | )
175 | )
176 | ```
177 |
178 | ```python
179 | result = pythagoras(2, 3)
180 | result.run(print)
181 | ```
182 |
183 | ```python
184 | import asyncio
185 |
186 | class Cont:
187 | def __init__(self, cps):
188 | self.__cps = cps # fn: ('a -> 'r) -> 'r
189 |
190 | @staticmethod
191 | def rtn(a):
192 | return Cont(lambda cont: cont(a))
193 |
194 | def run(self, cont):
195 | self.__cps(cont)
196 |
197 | def __await__(self):
198 | loop = asyncio.get_event_loop()
199 | future = loop.create_future()
200 | def done(value):
201 | future.set_result(value)
202 | self.run(done)
203 | return iter(future)
204 |
205 | def then(self, fn):
206 | # Cont <| fun c -> run cont (fun a -> run (fn a) c )
207 | return Cont(lambda c: self.run(lambda a: (fn(a).run(c))))
208 | ```
209 |
210 | ```python
211 | import math
212 |
213 | def add(a, b):
214 | return Cont.rtn(a + b)
215 |
216 | def square(x):
217 | return Cont.rtn(x * x)
218 |
219 | def sqrt(x):
220 | return Cont.rtn(math.sqrt(x))
221 |
222 | async def pythagoras(a, b):
223 | aa = await square(a)
224 | bb = await square(b)
225 | aabb = await add(aa, bb)
226 | ab = await sqrt(aabb)
227 |
228 | return ab
229 |
230 | result = await pythagoras(2,3)
231 | print(result)
232 | ```
233 |
234 | ## Conclusion
235 |
236 | Async / await is basically just syntactic sugar for working with effects such as
237 | callbacks and continuations.
238 |
239 | How do we know that the "normal" syntax of a programming language is not compiled to
240 | continuations under the hood? Maybe we have been programming with continuations all
241 | along?
242 |
243 |
244 | ## The Mother of all Monads
245 |
246 | > https://www.schoolofhaskell.com/school/to-infinity-and-beyond/pick-of-the-week/the-mother-of-all-monads
247 |
248 | ```python
249 | from typing_extensions import Protocol, runtime_checkable
250 | from abc import abstractmethod
251 |
252 | @runtime_checkable
253 | class Monad(Protocol):
254 | @staticmethod
255 | @abstractmethod
256 | def rtn(a):
257 | raise NotImplementedError
258 |
259 | @abstractmethod
260 | def then(self, fn):
261 | raise NotImplementedError
262 | ```
263 |
264 | ```python
265 | assert issubclass(Cont, Monad)
266 | print("Yey!!")
267 | ```
268 |
--------------------------------------------------------------------------------
/docs/tutorial/data_modelling.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_data_modelling)=
16 |
17 | # Data Modelling
18 |
19 | With Expression and Python you can model your types using data-classes and
20 | tagged-unions. Let's start by importing some types we need before we begin.
21 |
22 | ```{code-cell} python
23 | from __future__ import annotations
24 |
25 | from dataclasses import dataclass
26 | from typing import Generic, Literal, Tuple, TypeVar, final
27 |
28 | from expression import case, tag, tagged_union
29 |
30 | _T = TypeVar("_T")
31 | ```
32 |
33 | You define your record types using normal Python data-classes e.g:
34 |
35 | ```{code-cell} python
36 | @dataclass
37 | class Rectangle:
38 | width: float
39 | length: float
40 |
41 |
42 | @dataclass
43 | class Circle:
44 | radius: float
45 | ```
46 |
47 | You can use tagged unions to create sum-types. Tagged unions are similar to untagged
48 | unions (or just unions) but are safer and allow nesting in ways that normal unions
49 | cannot. With tagged unions each of the union cases produces the same type, which is why
50 | we use a static create method for each of the union cases.
51 |
52 | ```{code-cell} python
53 | @tagged_union
54 | class Shape:
55 | tag: Literal["rectangle", "circle"] = tag()
56 |
57 | rectangle: Rectangle = case()
58 | circle: Circle = case()
59 |
60 | @staticmethod
61 | def Rectangle(width: float, length: float) -> Shape:
62 | return Shape(rectangle=Rectangle(width, length))
63 |
64 | @staticmethod
65 | def Circle(radius: float) -> Shape:
66 | return Shape(circle=Circle(radius))
67 | ```
68 |
69 | A more complex type modelling example:
70 |
71 | ```{code-cell} python
72 | from __future__ import annotations
73 | from typing import Literal, Tuple, final
74 |
75 | from expression import case, tag, tagged_union
76 |
77 |
78 | @tagged_union
79 | class Suit:
80 | tag: Literal["spades", "hearts", "clubs", "diamonds"] = tag()
81 |
82 | spades: Literal[True] = case()
83 | hearts: Literal[True] = case()
84 | clubs: Literal[True] = case()
85 | diamonds: Literal[True] = case()
86 |
87 | @staticmethod
88 | def Spades() -> Suit:
89 | return Suit(spades=True)
90 |
91 | @staticmethod
92 | def Hearts() -> Suit:
93 | return Suit(hearts=True)
94 |
95 | @staticmethod
96 | def Clubs() -> Suit:
97 | return Suit(clubs=True)
98 |
99 | @staticmethod
100 | def Diamonds() -> Suit:
101 | return Suit(diamonds=True)
102 |
103 | @tagged_union
104 | class Face:
105 | tag: Literal["jack", "queen", "king", "ace"] = tag()
106 |
107 | jack: Literal[True] = case()
108 | queen: Literal[True] = case()
109 | king: Literal[True] = case()
110 | ace: Literal[True] = case()
111 |
112 | @staticmethod
113 | def Jack() -> Face:
114 | return Face(jack=True)
115 |
116 | @staticmethod
117 | def Queen() -> Face:
118 | return Face(queen=True)
119 |
120 | @staticmethod
121 | def King() -> Face:
122 | return Face(king=True)
123 |
124 | @staticmethod
125 | def Ace() -> Face:
126 | return Face(ace=True)
127 |
128 |
129 | @tagged_union
130 | class Card:
131 | tag: Literal["value", "face", "joker"] = tag()
132 |
133 | face: tuple[Suit, Face] = case()
134 | value: tuple[Suit, int] = case()
135 | joker: Literal[True] = case()
136 |
137 | @staticmethod
138 | def Face(suit: Suit, face: Face) -> Card:
139 | return Card(face=(suit, face))
140 |
141 | @staticmethod
142 | def Value(suit: Suit, value: int) -> Card:
143 | if value < 1 or value > 10:
144 | raise ValueError("Value must be between 1 and 10")
145 | return Card(value=(suit, value))
146 |
147 | @staticmethod
148 | def Joker() -> Card:
149 | return Card(joker=True)
150 |
151 |
152 | jack_of_hearts = Card.Face(suit=Suit.Hearts(), face=Face.Jack())
153 | three_of_clubs = Card.Value(suit=Suit.Clubs(), value=3)
154 | joker = Card.Joker()
155 | ```
156 |
157 | We can now use our types with pattern matching to create our domain logic:
158 |
159 | ```{code-cell} python
160 | def calculate_value(card: Card) -> int:
161 | match card:
162 | case Card(tag="face", face=(Suit(spades=True), Face(queen=True))):
163 | return 40
164 | case Card(tag="face", face=(_suit, Face(ace=True))):
165 | return 15
166 | case Card(tag="face", face=(_suit, _face)):
167 | return 10
168 | case Card(tag="value", value=(_suit, value)):
169 | return value
170 | case Card(tag="joker", joker=True):
171 | return 0
172 | case _:
173 | raise AssertionError("Should not match")
174 |
175 |
176 | rummy_score = calculate_value(jack_of_hearts)
177 | print("Score: ", rummy_score)
178 |
179 | rummy_score = calculate_value(three_of_clubs)
180 | print("Score: ", rummy_score)
181 |
182 | rummy_score = calculate_value(joker)
183 | print("Score: ", rummy_score)
184 | ```
185 |
186 | ## Single case tagged unions
187 |
188 | You can also use tagged unions to create single case tagged unions. This is useful
189 | when you want a type that is distinct from its underlying type. For example, you may
190 | want a type that wraps a string but cannot be used interchangeably with a normal
191 | string.
192 |
193 | For single case tagged unions we don't need to define the tag, just the single case.
194 |
195 | ```{code-cell} python
196 | @tagged_union(frozen=True, repr=False)
197 | class SecurePassword:
198 | password: str = case()
199 |
200 | # Override __str__ and __repr__ to make sure we don't leak the password in logs
201 | def __str__(self) -> str:
202 | return "********"
203 |
204 | def __repr__(self) -> str:
205 | return "SecurePassword(password='********')"
206 |
207 | password = SecurePassword(password="secret")
208 | match password:
209 | case SecurePassword(password=p):
210 | assert p == "secret"
211 |
212 | ```
213 |
214 | This will make sure that the password is not leaked in logs or when printed to the
215 | console, and that we don't accidentally store the password in a plain string anywhere in our code (see the check below).
216 |
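217 | As a quick check (a small sketch using the class above), printing the value does not
218 | reveal the secret:
219 | 
220 | ```python
221 | password = SecurePassword(password="secret")
222 | 
223 | print(password)        # ********
224 | print(repr(password))  # SecurePassword(password='********')
225 | ```
226 | 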
217 |
--------------------------------------------------------------------------------
/docs/tutorial/effects.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_effects)=
16 |
17 | # Effects and Side-effects
18 |
19 | What are effects? What are side-effects?
20 |
21 |
22 | ## Referential Transparency
23 |
24 | Is the result of an expression the same every time you evaluate it? Can you substitute an expression with its value? In functional programming the answer is always yes!
25 |
26 | What about Python?
27 |
28 | ```python
29 | z = [42]
30 |
31 | def expr(a):
32 |     # return a + 1    # pure: same input, same output (try the variants below)
33 | 
34 |     a += int(input())  # impure: the result depends on user input
35 |     return a
36 |     # print(a)         # impure: writes to the console
37 |     # z[0] += a        # impure: mutates shared state
38 |     # return z[0]
39 | ```
40 |
41 | Are these programs the same?
42 |
43 | ```python
44 | a = expr(42)
45 | a, a
46 | ```
47 |
48 | ```python
49 | expr(42), expr(42)
50 | ```
51 |
52 | We need to be very careful with non-pure functions. Always look out for these code smells:
53 | 
54 | 1. Functions or methods that take no arguments, i.e. `Callable[[], Result]`
55 | 2. Functions or methods that return nothing, i.e. `Callable[..., None]`
56 | 3. Functions that take nothing and return nothing, i.e. `Callable[[], None]`
57 |
58 |
59 | ## Side Effects
60 |
61 | Functions that are not referentially transparent are said to have side effects.
62 |
63 | Look out for functions that either take or return `None`. They are not composable. What do these two functions do?
64 |
65 | ```python
66 | def get() -> str:
67 | ...
68 |
69 |
70 | def put(text: str) -> None:
71 | ...
72 | ```
73 |
74 | How can we fix the problem? The solution is to make the functions take and return something, so that they become pure:
75 |
76 | ```python
77 | from typing import Callable, Generic, Tuple, TypeVar
78 |
79 | TSource = TypeVar("TSource")
80 |
81 | class Io(Generic[TSource]):
82 | def __init__(self, fn):
83 | self.__fn = fn # a world changing function
84 |     @staticmethod
85 | def rtn(a) -> "Io[TSource]":
86 | return Io(lambda world: (a, world + 1))
87 |
88 | def run(self, world: int=0) -> Tuple[TSource, int]:
89 | return self.__fn(world)
90 |
91 | def bind(self, fn: Callable[[TSource], "Io[TSource]"]) -> "Io[TSource]":
92 | def run(world):
93 | a, newWorld = self.run(world)
94 | return fn(a).run(newWorld)
95 | return Io(run)
96 |
97 | def __repr__(self):
98 | return "Io"
99 | ```
100 |
101 | ```python
102 | from typing import Callable
103 |
104 | def put(string) -> Io[str]:
105 | def side_effect(_):
106 | return Io.rtn(print(string))
107 |
108 | return Io.rtn(None).bind(side_effect)
109 |
110 | def get(fn: Callable[[str], Io[str]]) -> Io[str]:
111 | def side_effect(_):
112 | return fn(input())
113 | return Io.rtn(None).bind(side_effect)
114 | ```
115 |
116 | ```python
117 | io = put("Hello, what is your name?").bind(
118 | lambda _: get(
119 | lambda name: put("What is your age?").bind(
120 | lambda _: get(
121 | lambda age: put("Hello %s, your age is %d." % (name, int(age)))
122 | )
123 | )
124 | ))
125 |
126 | (io, io)
127 | ```
128 |
129 | Are they the same? We really don't know. We are not allowed to look inside the box. But we can run the effect:
130 |
131 | ```python
132 | io.run(world=0)
133 | ```
134 |
135 | ## Effects
136 |
137 | Effects are not the same as side-effects. Effects are just values with a context. The context is different for every effect.
138 |
139 | * Option
140 | * Result
141 | * Block
142 | * Observable
143 | * Async
144 | * AsyncObservable
145 | * Io
146 | * ...
147 |
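148 | Each of these wraps a plain value in some context. A small sketch using two of the
149 | effects above, just to make the idea concrete:
150 | 
151 | ```python
152 | from expression import Some, Ok
153 | 
154 | some_value = Some(42)  # a value in the Option context: it could have been Nothing
155 | ok_value = Ok(42)      # a value in the Result context: it could have been an Error
156 | ```
157 | 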
148 | ## Effects in Expression
149 |
150 | Expression has a nice way of dealing with effects and lets you safely work with wrapped values without having to error-check at every step:
151 |
152 | ```python
153 | from expression import effect, Option, Some, Nothing
154 |
155 |
156 | def divide(a: float, divisor: float) -> Option[float]:
157 | try:
158 | return Some(a / divisor)
159 | except ZeroDivisionError:
160 | return Nothing
161 |
162 |
163 | @effect.option[float]()
164 | def comp(x: float):
165 | result: float = yield from divide(42, x)
166 | result += 32
167 | print(f"The result is {result}")
168 | return result
169 |
170 |
171 | comp(42)
172 | ```
173 |
174 | ## Living on the edge ...
175 |
176 | We have seen that we can create other wrapped worlds such as sequences, lists, results
177 | and options. On the edge of such a world you will find other objects that we usually do
178 | not want to work with:
179 |
180 | * `None`
181 | * Exceptions
182 | * Callbacks, continuations and `run`
183 | * Iterators and `__iter__`
184 | * Observers and `subscribe`
185 |
186 | ## Summary
187 |
188 | - Effects are what we call *elevated* worlds.
189 | - An elevated world is a strange place where basically anything is possible.
190 | - Two elevated worlds, e.g. `Result`, `Option`, `Map` and `Io`, may be completely
191 |   different, but they still have the same basic structure.
192 | - Still, every normal value has a corresponding elevated value.
193 | - Every function has a corresponding elevated function.
194 |
195 |
--------------------------------------------------------------------------------
/docs/tutorial/lambda_calculus.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_lambda_calculus)=
16 |
17 | # Lambda Calculus
18 |
19 | Lambda calculus was introduced by mathematician Alonzo Church in the 1930s as part of an
20 | investigation into the foundations of mathematics.
21 |
22 | - Lambda calculus is a formal language
23 | - The expressions of the language are called lambda terms
24 | - Everything is a function, there are no literals
25 |
26 | In lambda calculus, we write `(f x)` instead of the more traditional `f(x)`.
27 |
28 | Many real-world programming languages can be regarded as extensions of the lambda
29 | calculus. This is true for all functional programming languages, a class that includes
30 | Lisp, Scheme, Haskell, and ML (OCaml, F#).
31 |
32 |
33 | ## Lambda calculus in Python
34 |
35 | ```python
36 | identity = lambda x: x
37 |
38 | zero = lambda f: identity
39 | one = lambda f: lambda x: f(x)
40 | two = lambda f: lambda x: f(f(x))
41 | three = lambda f: lambda x: f(f(f(x)))
42 | ```
43 |
44 | ```python
45 | # Don't repeat yourself (DRY)
46 | succ = lambda numeral: lambda f: lambda x: f(numeral(f)(x))
47 | two = succ(one)
48 | three = succ(two)
49 |
50 | three(lambda x: x+1)(0)
51 | ```
52 |
53 | ## Tools of lambda calculus
54 |
55 | Lambda calculus gives us the substitution rules of programming:
56 | 
57 | - **α-conversion:** changing bound variables (alpha);
58 | - **β-reduction:** applying functions to their arguments (beta);
59 | - **η-conversion:** capturing a notion of extensionality (eta).
60 |
61 |
62 | ## Alpha Conversion
63 |
64 | Alpha-conversion is about renaming bound variables:
65 |
66 | ```python
67 | (lambda x: x)(42) == (lambda y: y)(42) # Renaming
68 | ```
69 |
70 | ## Beta Reduction
71 |
72 | A beta reduction (also written β reduction) is the process of calculating a result from
73 | the application of a function to an expression.
74 |
75 | ((λn.n×2) 7) → 7×2.
76 |
77 |
78 | ```python
79 | (lambda n: n*2)(7) == 7*2
80 | ```
81 |
82 | ## Eta-conversion
83 |
84 | An eta conversion (also written η-conversion) is adding or dropping an abstraction over
85 | a function.
86 |
87 | ```python
88 | # Eta-conversion
89 | # λx.(f x) and f
90 | f = lambda x: x
91 |
92 | (lambda x: f(x)) == f  # False in Python: functions are compared by identity, not extensionally
93 | ```
94 |
95 | Extensive use of η-*reduction* can lead to what's called *point-free* programming.
96 |
97 | > Extensive use of point-free programming can lead to *point-less* programming.
98 |
99 | ```python
100 | from functools import reduce
101 |
102 | xs = reduce(lambda acc, x: max(acc, x), range(10))
103 | print(xs)
104 |
105 | xs = reduce(max, range(10))
106 | print(xs)
107 | ```
108 |
109 | ## Do we need to know about lambda calculus?
110 |
111 | You usually do not need to know about lambda calculus. But look out for point-free
112 | programming, which may either simplify or over-complicate your code. Lambda calculus is
113 | must-have knowledge when dealing with compilers and expression trees (ASTs).
114 |
--------------------------------------------------------------------------------
/docs/tutorial/optional_values.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_optional_values)=
16 |
17 | # Optional Values
18 |
19 | Sometimes we don't have a value for a given variable. Perhaps the value is not known or
20 | available yet. In Python we represent the absence of a value with the special value
21 | `None`. In other languages there is usually a `null` value.
22 |
23 | ```{code-cell} python
24 | xs = None
25 | print(xs)
26 | ```
27 |
28 | Without type hints we don't really know if the value is supposed to be `None` or
29 | something else.
30 |
31 |
32 | ## Null Reference Exceptions
33 |
34 | > The billion-dollar mistake
35 |
36 | Speaking at a software conference in 2009, Tony Hoare apologized for inventing the null
37 | reference:
38 |
39 | > I call it my billion-dollar mistake. It was the invention of the null reference in
40 | > 1965. At that time, I was designing the first comprehensive type system for references
41 | > in an object-oriented language (ALGOL W). My goal was to ensure that all use of
42 | > references should be absolutely safe, with checking performed automatically by the
43 | > compiler. But I couldn't resist the temptation to put in a null reference, simply
44 | > because it was so easy to implement. This has led to innumerable errors,
45 | > vulnerabilities, and system crashes, which have probably caused a billion dollars of
46 | > pain and damage in the last forty years.
47 |
48 | We don't have null-values in Python, but we have `None` values. Accessing an attribute on
49 | a `None` value will lead to an `AttributeError`:
50 |
51 | ```{code-cell} python
52 | :tags: ["raises-exception"]
53 | xs.run()
54 | ```
55 |
56 | With type hints we can say that this is supposed to be an integer, but the value is
57 | missing, so we don't know which integer just yet:
58 |
59 | ```{code-cell} python
60 | from typing import Optional
61 |
62 | xs: Optional[int] = None
63 |
64 | print(xs)
65 | ```
66 |
67 | ## Testing for optional values
68 |
69 | We can test for optional values using `is None` or `is not None`:
70 |
71 | ```{code-cell} python
72 | xs = None
73 | assert xs is None
74 | y = 42
75 | assert y is not None
76 | ```
77 |
78 | In addition we have a number of *falsy* values:
79 |
80 | ```{code-cell} python
81 | assert not None
82 | assert not 0
83 | assert not []
84 | assert not {}
85 | assert not ()
86 | assert not ""
87 | ```
88 |
89 | ## Problems with Nullable Types
90 |
91 | Using `Optional` and nullable types in general has a lot of advantages since a compiler
92 | or static type checker can help us avoid using optional values before we have tested
93 | them properly. The type `Optional[A]` is the same as `Union[A, None]`, which means that
94 | there are still a few problems:
95 |
96 | * It's easy to forget to check for `None`, but a static type checker will help
97 | * Extensive `None` checking can create a lot of noise in the code, increasing the
98 | cognitive load
99 | * Optional types cannot be nested. How do we distinguish between `None` being a proper value
100 |   and `None` signalling that the value is missing, i.e. `Union[None, None]`? There is no
101 |   equivalent of e.g. a list containing an empty list, i.e. `[[]]`.
102 |
103 | **Example:** for dictionaries, how do we know if the key is missing or if the value is
104 | `None`?
105 |
106 | ```{code-cell} python
107 | mapping = dict(a=None)
108 | mapping.get("a")
109 | ```
110 |
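111 | Both a missing key and a key whose value is `None` give the same answer, which is
112 | exactly the problem (plain Python, no Expression needed):
113 | 
114 | ```python
115 | mapping = dict(a=None)
116 | 
117 | # Both return None, so we cannot tell "key present with value None"
118 | # apart from "key not present at all".
119 | assert mapping.get("a") is None
120 | assert mapping.get("b") is None
121 | ```
122 | 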
111 | ## Options
112 |
113 | In functional programming we use the Option (or Maybe) type instead of `None` and
114 | `null`. The Option type is used when a value could be missing, that is when an actual
115 | value might not exist for a named value or variable.
116 |
117 | An Option has an underlying type and can hold a value of that type `Some(value)`, or it
118 | might not have the value and be `Nothing`.
119 |
120 |
121 | The Expression library provides an `option` module in the `expression` package:
122 |
123 | ```{code-cell} python
124 | from expression import Option, option, Some, Nothing
125 | ```
126 |
127 | ## Create option values
128 |
129 | Create some values using the `Some` constructor:
130 |
131 | ```{code-cell} python
132 | from expression import Some
133 |
134 | xs = Some(42)
135 | print(xs)
136 | ```
137 |
138 | You should not normally retrieve the value of an option, since you do not know whether
139 | it holds a value or not. But if you are sure it's a `Some` value, then you can get the
140 | value back using the `value` property:
141 |
142 | ```python
143 | from expression import Some
144 |
145 | xs = Some(42)
146 | assert isinstance(xs, Some) # important!
147 | xs.value
148 | ```
149 |
150 | To create the `Nothing` case, you should use the `Nothing` singleton value. In the same
151 | way as with `None`, this value will never change, so it's safe to re-use it for all the
152 | code you write.
153 |
154 | ```{code-cell} python
155 | from expression import Nothing
156 |
157 | xs = Nothing
158 | print(xs)
159 | ```
160 |
161 | To test if an option is `Nothing` you use the `is` operator:
162 |
163 | ```{code-cell} python
164 | xs = Nothing
165 | assert xs is Nothing
166 | ```
167 |
168 | ## Option returning functions
169 |
170 | Values are great, but the real power of options comes when you create option-returning
171 | functions:
172 |
173 | ```{code-cell} python
174 | def keep_positive(a: int) -> Option[int]:
175 | if a > 0:
176 | return Some(a)
177 |
178 | return Nothing
179 | ```
180 |
181 | ```{code-cell} python
182 | keep_positive(42)
183 | ```
184 |
185 | ```{code-cell} python
186 | keep_positive(-1)
187 | ```
188 |
189 | We can now make pure functions out of potentially unsafe operations, i.e. no more exceptions:
190 |
191 | ```{code-cell} python
192 | def divide(a: float, divisor: float) -> Option[float]:
193 | try:
194 | return Some(a/divisor)
195 | except ZeroDivisionError:
196 | return Nothing
197 | ```
198 |
199 | ```{code-cell} python
200 | divide(42, 2)
201 | ```
202 |
203 | ```{code-cell} python
204 | divide(10, 0)
205 | ```
206 |
207 | ## Transforming option values
208 |
209 | The great thing with options is that we can transform them without looking into the box.
210 | This eliminates the need for error checking at every step.
211 |
212 | ```{code-cell} python
213 | from expression import Some, option, pipe, Nothing
214 |
215 | xs = Some(42)
216 | ys = pipe(
217 | xs,
218 | option.map(lambda x: x*10)
219 | )
220 | print(ys)
221 | ```
222 |
223 | If we map a value that is `Nothing` then the result is also `Nothing`. Nothing in,
224 | nothing out:
225 |
226 | ```{code-cell} python
227 | xs = Nothing
228 | ys = pipe(
229 | xs,
230 | option.map(lambda x: x*10)
231 | )
232 | print(ys)
233 | ```
234 |
235 | ## Option as an effect
236 |
237 | Effects in Expression are implemented as specially decorated coroutines ([enhanced
238 | generators](https://www.python.org/dev/peps/pep-0342/)) using `yield`, `yield from` and
239 | `return` to consume or generate optional values:
240 |
241 | ```{code-cell} python
242 | from expression import effect, Some
243 |
244 | @effect.option[int]()
245 | def fn():
246 | x = yield 42
247 | y = yield from Some(43)
248 |
249 | return x + y
250 |
251 | fn()
252 | ```
253 |
254 | This enables ["railway oriented programming"](https://fsharpforfunandprofit.com/rop/),
255 | e.g., if one part of the function yields from `Nothing` then the function is
256 | side-tracked (short-circuited) and the following statements will never be executed. The
257 | end result of the expression will be `Nothing`. Thus results from such an option
258 | decorated function can either be `Some(value)` or `Nothing`.
259 |
260 | ```{code-cell} python
261 | from expression import effect, Some, Nothing
262 |
263 | @effect.option[int]()
264 | def fn():
265 | x = yield from Nothing # or a function returning Nothing
266 |
267 | # -- The rest of the function will never be executed --
268 | y = yield from Some(43)
269 |
270 | return x + y
271 |
272 | fn()
273 | ```
274 |
275 | For more information about options:
276 |
277 | - [API reference](https://expression.readthedocs.io/en/latest/reference/option.html)
278 |
--------------------------------------------------------------------------------
/docs/tutorial/railway.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | cell_metadata_filter: -all
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | format_version: 0.13
9 | jupytext_version: 1.11.5
10 | kernelspec:
11 | display_name: Python 3
12 | language: python
13 | name: python3
14 | ---
15 | (tutorial_railway)=
16 |
17 | # Railway Oriented Programming (ROP)
18 |
19 | - We don't really want to raise exceptions since they make the code bloated with error
20 |   checking
21 | - It's easy to forget to handle exceptions, or handle the wrong type of exception
22 | - Dependencies might even change the kind of exceptions they throw
23 | - Let's model errors using types instead
24 |
25 | ```{code-cell} python
26 | class Result:
27 | pass
28 |
29 | class Ok(Result):
30 | def __init__(self, value):
31 | self._value = value
32 |
33 | def __str__(self):
34 | return "Ok %s" % str(self._value)
35 |
36 | class Error(Result):
37 | def __init__(self, exn):
38 | self._exn = exn
39 |
40 | def __str__(self):
41 | return "Error %s" % str(self._exn)
42 | ```
43 |
44 | The Expression library contains a similar but more feature-complete Result class we can
45 | use:
46 |
47 | ```{code-cell} python
48 | from expression import Ok, Error
49 |
50 | def fetch(url):
51 | try:
52 |         if "http://" not in url:
53 | raise Exception("Error: unable to fetch from: '%s'" % url)
54 |
55 | value = url.replace("http://", "")
56 | return Ok(value)
57 | except Exception as exn:
58 | return Error(exn)
59 | ```
60 |
61 | ```{code-cell} python
62 | result = fetch("http://42")
63 |
64 | print(result)
65 | ```
66 |
67 | ```{code-cell} python
68 | def parse(string):
69 | try:
70 | value = float(string)
71 | return Ok(value)
72 | except Exception as exn:
73 | return Error(exn)
74 | ```
75 |
76 | ```{code-cell} python
77 | result = parse("42")
78 |
79 | print(result)
80 | ```
81 |
82 | ## Composition
83 |
84 | How should we compose Result-returning functions? How can we make a `fetch_parse` from
85 | `fetch` and `parse`?
86 |
87 | We cannot use functional composition here since signatures don't match.
88 |
89 | ```python
90 | def compose(fn: Callable[[A], Result[B, TError]], gn: Callable[[B], Result[C, TError]]) -> Callable[[A], Result[C, TError]]:
91 |     return lambda x: ...  # how do we connect fn and gn here?
92 | ```
93 |
94 | First we can try to solve this with an "imperative" implementation using type-checks and
95 | `if-else` statements:
96 |
97 | ```{code-cell} python
98 | def fetch_parse(url):
99 | b = fetch(url)
100 | if isinstance(b, Ok):
101 | val_b = b._value # <--- Don't look inside the box!!!
102 | return parse(val_b)
103 | else: # Must be error
104 | return b
105 |
106 | result = fetch_parse("http://42")
107 | print(result)
108 | ```
109 |
110 | This works, but the code is not easy to read. We have also hard-coded the logic, so it's
111 | not possible to reuse it without copy/paste. Here is a nice example of how to solve
112 | this by mixing object-oriented code with functional thinking:
113 |
114 | ```{code-cell} python
115 | class Ok(Result):
116 | def __init__(self, value):
117 | self._value = value
118 |
119 | def bind(self, fn):
120 | return fn(self._value)
121 |
122 | def __str__(self):
123 | return "Ok %s" % str(self._value)
124 |
125 | class Error(Result):
126 | def __init__(self, exn):
127 | self._exn = exn
128 |
129 | def bind(self, fn):
130 | return self
131 |
132 | def __str__(self):
133 | return "Error %s" % str(self._exn)
134 |
135 | def bind(fn, result):
136 | """We don't want method chaining in Python."""
137 | return result.bind(fn)
138 | ```
139 |
140 | ```{code-cell} python
141 | result = bind(parse, fetch("http://42"))
142 | print(result)
143 | ```
144 |
145 | ```{code-cell} python
146 | def compose(f, g):
147 | return lambda x: f(x).bind(g)
148 |
149 | fetch_parse = compose(fetch, parse)
150 | result = fetch_parse("http://123.0")
151 | print(result)
152 | ```
153 |
154 | ```{code-cell} python
155 | result = fetch("http://invalid").bind(parse)
156 | print(result)
157 | ```
158 |
159 | ### But what if we wanted to call fetch 10 times in a row?
160 |
161 | This is what's called the "Pyramid of Doom":
162 |
163 | ```{code-cell} python
164 | from expression.core import result
165 |
166 | result = bind(parse,
167 | bind(lambda x: fetch("http://%s" % x),
168 | bind(lambda x: fetch("http://%s" % x),
169 | bind(lambda x: fetch("http://%s" % x),
170 | bind(lambda x: fetch("http://%s" % x),
171 | bind(lambda x: fetch("http://%s" % x),
172 | bind(lambda x: fetch("http://%s" % x),
173 | fetch("http://123")
174 | )
175 | )
176 | )
177 | )
178 | )
179 | )
180 | )
181 | print(result)
182 | ```
183 |
184 | ## Can we make a more generic compose?
185 |
186 | Let's try to make a general compose function that composes two result-returning functions:
187 |
188 | ```{code-cell} python
189 | def compose(f, g):
190 | return lambda x: f(x).bind(g)
191 |
192 | fetch_parse = compose(fetch, parse)
193 | result = fetch_parse("http://42")
194 | print(result)
195 | ```
196 |
197 | ## Pipelining
198 |
199 | Functional composition of functions that return wrapped values is called pipelining in
200 | the Expression library. Other languages call this "Kleisli composition". Using a reducer
201 | we can compose any number of functions:
202 |
203 | ```{code-cell} python
204 | from functools import reduce
205 |
206 | def pipeline(*fns):
207 | return reduce(lambda res, fn: lambda x: res(x).bind(fn), fns)
208 | ```
209 |
210 | Now, let's make `fetch_and_parse` using Kleisli composition:
211 |
212 | ```{code-cell} python
213 | fetch_and_parse = pipeline(fetch, parse)
214 | result = fetch_and_parse("http://123")
215 | print(result)
216 | ```
217 |
218 | ### What if we wanted to call fetch 10 times in a row?
219 |
220 | ```{code-cell} python
221 | from expression.extra.result import pipeline
222 |
223 | fetch_with_value = lambda x: fetch("http://%s" % x)
224 |
225 | request = pipeline(
226 | fetch,
227 | fetch_with_value,
228 | fetch_with_value,
229 | fetch_with_value,
230 | fetch_with_value,
231 | fetch_with_value,
232 | fetch_with_value,
233 | parse
234 | )
235 |
236 | result = request("http://123")
237 | print(result)
238 | ```
239 |
240 | ## Result in Expression
241 |
242 | The `Result[T, TError]` type in Expression lets you write error-tolerant code that can
243 | be composed. A Result works similarly to `Option`, but lets you define the value used for
244 | errors, e.g., an exception type or similar. This is great when you want to know why some
245 | operation failed (not just `Nothing`). This type serves the same purpose as an `Either`
246 | type where `Left` is used for the error condition and `Right` for a success value.
247 |
248 | ```python
249 | from expression import effect, Ok, Result
250 |
251 | @effect.result[int, Exception]()
252 | def fn():
253 | x = yield from Ok(42)
254 | y = yield from Ok(10)
255 | return x + y
256 |
257 | xs = fn()
258 | assert isinstance(xs, Result)
259 | ```
260 |
261 | A simplified type called [`Try`](reference_try) is also available. It's a result type
262 | that is pinned to `Exception`, i.e. `Result[TSource, Exception]`. This makes the code
263 | simpler since you don't have to specify the error type every time you declare the type
264 | of your result. A short sketch of how it might be used follows below.
265 |
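266 | A minimal sketch of how `Try` might be used, assuming `Success` and `Failure` are the
267 | success and error constructors pinned to `Exception` (see the reference for details):
268 | 
269 | ```python
270 | from expression import Success, Failure
271 | 
272 | def parse_float(text: str):  # returns a Try, i.e. Result[float, Exception]
273 |     try:
274 |         return Success(float(text))
275 |     except ValueError as exn:
276 |         return Failure(exn)
277 | 
278 | print(parse_float("3.14"))  # the success case
279 | print(parse_float("oops"))  # the failure case, wrapping the ValueError
280 | ```
281 | 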
--------------------------------------------------------------------------------
/expression/__init__.py:
--------------------------------------------------------------------------------
1 | """Expression library.
2 |
3 | Expression aims to be a solid, type-safe, pragmatic, and high
4 | performance library for practical functional programming in Python 3.9+.
5 | By pragmatic we mean that the goal of the library is to use simple
6 | abstractions to enable you to do practical and productive functional
7 | programming in Python.
8 |
9 | GitHub: https://github.com/cognitedata/Expression
10 | """
11 |
12 | from . import collections, core, effect
13 | from ._version import __version__
14 | from .core import (
15 | AsyncReplyChannel,
16 | Builder,
17 | EffectError,
18 | Error,
19 | Failure,
20 | MailboxProcessor,
21 | Nothing,
22 | Ok,
23 | Option,
24 | Result,
25 | Some,
26 | Success,
27 | TailCall,
28 | TailCallResult,
29 | Try,
30 | case,
31 | compose,
32 | curry,
33 | curry_flip,
34 | default_arg,
35 | downcast,
36 | failwith,
37 | flip,
38 | fst,
39 | identity,
40 | is_error,
41 | is_none,
42 | is_ok,
43 | is_some,
44 | option,
45 | pipe,
46 | pipe2,
47 | pipe3,
48 | result,
49 | snd,
50 | tag,
51 | tagged_union,
52 | tailrec,
53 | tailrec_async,
54 | try_downcast,
55 | upcast,
56 | )
57 |
58 |
59 | curry_flipped = curry_flip
60 | """Deprecated.
61 |
62 | Will be removed in next major version. Use curry_flip instead.
63 | """
64 |
65 | __all__ = [
66 | "AsyncReplyChannel",
67 | "Builder",
68 | "EffectError",
69 | "Error",
70 | "Failure",
71 | "MailboxProcessor",
72 | "Nothing",
73 | "Ok",
74 | "Option",
75 | "Result",
76 | "Some",
77 | "Success",
78 | "TailCall",
79 | "TailCallResult",
80 | "Try",
81 | "__version__",
82 | "case",
83 | "collections",
84 | "compose",
85 | "core",
86 | "curry",
87 | "curry_flip",
88 | "curry_flipped",
89 | "default_arg",
90 | "downcast",
91 | "effect",
92 | "failwith",
93 | "flip",
94 | "fst",
95 | "identity",
96 | "is_error",
97 | "is_none",
98 | "is_ok",
99 | "is_some",
100 | "option",
101 | "pipe",
102 | "pipe2",
103 | "pipe3",
104 | "result",
105 | "snd",
106 | "tag",
107 | "tagged_union",
108 | "tailrec",
109 | "tailrec_async",
110 | "try_downcast",
111 | "upcast",
112 | ]
113 |
--------------------------------------------------------------------------------
/expression/_version.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.0.0"
2 |
--------------------------------------------------------------------------------
/expression/collections/__init__.py:
--------------------------------------------------------------------------------
1 | """Collection abstractions."""
2 |
3 | from . import array, asyncseq, block, map, seq
4 | from .array import TypedArray
5 | from .block import Block
6 | from .map import Map
7 | from .seq import Seq
8 |
9 |
10 | __all__ = [
11 | "Block",
12 | "Map",
13 | "Seq",
14 | "TypedArray",
15 | "array",
16 | "asyncseq",
17 | "block",
18 | "map",
19 | "seq",
20 | ]
21 |
--------------------------------------------------------------------------------
/expression/collections/asyncseq.py:
--------------------------------------------------------------------------------
1 | import builtins
2 | import itertools
3 | from collections.abc import AsyncIterable, AsyncIterator, Callable
4 | from typing import Any, TypeVar, overload
5 |
6 | from expression.core import pipe
7 |
8 |
9 | TSource = TypeVar("TSource")
10 | TResult = TypeVar("TResult")
11 |
12 |
13 | class AsyncSeq(AsyncIterable[TSource]):
14 | def __init__(self, ai: AsyncIterable[TSource]):
15 | self._ai = ai
16 |
17 | async def map(self, mapper: Callable[[TSource], TResult]) -> AsyncIterable[TResult]:
18 | return pipe(self, map(mapper))
19 |
20 | @classmethod
21 | def empty(cls) -> AsyncIterable[Any]:
22 | return AsyncSeq(empty())
23 |
24 | @overload
25 | @classmethod
26 | def range(cls, stop: int) -> AsyncIterable[int]: ...
27 |
28 | @overload
29 | @classmethod
30 | def range(cls, start: int, stop: int) -> AsyncIterable[int]: ...
31 |
32 | @overload
33 | @classmethod
34 | def range(cls, start: int, stop: int, step: int) -> AsyncIterable[int]: ...
35 |
36 | @classmethod
37 | def range(cls, *args: Any, **kw: Any) -> AsyncIterable[int]:
38 | return AsyncSeq(range(*args, **kw))
39 |
40 | def __aiter__(self) -> AsyncIterator[TSource]:
41 | return self._ai.__aiter__()
42 |
43 |
44 | def append(
45 | other: AsyncIterable[TSource],
46 | ) -> Callable[[AsyncIterable[TSource]], AsyncIterable[TSource]]:
47 | async def _append(source: AsyncIterable[TSource]) -> AsyncIterable[TSource]:
48 | async for value in source:
49 | yield value
50 | async for value in other:
51 | yield value
52 |
53 | return _append
54 |
55 |
56 | async def empty() -> AsyncIterable[Any]:
57 | while False:
58 | yield
59 |
60 |
61 | async def repeat(value: TSource, times: int | None = None) -> AsyncIterable[TSource]:
62 | for value in itertools.repeat(value, times or 0):
63 | yield value
64 |
65 |
66 | @overload
67 | def range(stop: int) -> AsyncIterable[int]: ...
68 |
69 |
70 | @overload
71 | def range(start: int, stop: int) -> AsyncIterable[int]: ...
72 |
73 |
74 | @overload
75 | def range(start: int, stop: int, step: int) -> AsyncIterable[int]: ...
76 |
77 |
78 | async def range(*args: int, **kw: int) -> AsyncIterable[int]:
79 | for value in builtins.range(*args, **kw):
80 | yield value
81 |
82 |
83 | def filter(predicate: Callable[[TSource], bool]) -> Callable[[AsyncIterable[TSource]], AsyncIterable[TSource]]:
84 | async def _filter(source: AsyncIterable[TSource]) -> AsyncIterable[TSource]:
85 | async for value in source:
86 | if predicate(value):
87 | yield value
88 |
89 | return _filter
90 |
91 |
92 | def map(mapper: Callable[[TSource], TResult]) -> Callable[[AsyncIterable[TSource]], AsyncIterable[TResult]]:
93 | async def _map(source: AsyncIterable[TSource]) -> AsyncIterable[TResult]:
94 | async for value in source:
95 | yield mapper(value)
96 |
97 | return _map
98 |
99 |
100 | __all__ = [
101 | "AsyncSeq",
102 | "filter",
103 | "map",
104 | "range",
105 | "repeat",
106 | ]
107 |
--------------------------------------------------------------------------------
/expression/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Core abstractions such as pipes, options and results."""
2 |
3 | from . import aiotools, option, result
4 | from .builder import Builder
5 | from .compose import compose
6 | from .curry import curry, curry_flip
7 | from .error import EffectError, failwith
8 | from .fn import TailCall, TailCallResult, tailrec, tailrec_async
9 | from .mailbox import AsyncReplyChannel, MailboxProcessor
10 | from .misc import flip, fst, identity, snd
11 | from .option import Nothing, Option, Some, default_arg, is_none, is_some
12 | from .pipe import PipeMixin, pipe, pipe2, pipe3
13 | from .result import Error, Ok, Result, is_error, is_ok
14 | from .tagged_union import case, tag, tagged_union
15 | from .try_ import Failure, Success, Try
16 | from .typing import (
17 | SupportsGreaterThan,
18 | SupportsLessThan,
19 | SupportsMatch,
20 | SupportsSum,
21 | downcast,
22 | try_downcast,
23 | upcast,
24 | )
25 |
26 |
27 | __all__ = [
28 | "AsyncReplyChannel",
29 | "Builder",
30 | "EffectError",
31 | "Error",
32 | "Failure",
33 | "MailboxProcessor",
34 | "Nothing",
35 | "Ok",
36 | "Option",
37 | "PipeMixin",
38 | "Result",
39 | "Some",
40 | "Success",
41 | "SupportsGreaterThan",
42 | "SupportsLessThan",
43 | "SupportsMatch",
44 | "SupportsSum",
45 | "TailCall",
46 | "TailCallResult",
47 | "Try",
48 | "aiotools",
49 | "case",
50 | "compose",
51 | "curry",
52 | "curry_flip",
53 | "default_arg",
54 | "downcast",
55 | "failwith",
56 | "flip",
57 | "fst",
58 | "identity",
59 | "is_error",
60 | "is_none",
61 | "is_ok",
62 | "is_some",
63 | "option",
64 | "pipe",
65 | "pipe2",
66 | "pipe3",
67 | "result",
68 | "snd",
69 |     "tag",
71 | "tagged_union",
72 | "tailrec",
73 | "tailrec_async",
74 | "try_downcast",
75 | "upcast",
76 | ]
77 |
--------------------------------------------------------------------------------
/expression/core/aiotools.py:
--------------------------------------------------------------------------------
1 | """The aiotools (async) module.
2 |
3 | The aio (asyncio) module contains asynchronous utility functions for
4 | working with async / await.
5 |
6 | The module is inspired by the F# `Async` module, but builds on top of
7 | Python async / await instead of providing an asynchronous IO mechanism
8 | by itself.
9 | """
10 |
11 | import asyncio
12 | from asyncio import Future, Task
13 | from collections.abc import Awaitable, Callable
14 | from typing import Any, TypeVar
15 |
16 | from expression.system import CancellationToken, OperationCanceledError
17 |
18 |
19 | _TSource = TypeVar("_TSource")
20 |
21 | Continuation = Callable[[_TSource], None]
22 | Callbacks = Callable[
23 | [
24 | Continuation[_TSource],
25 | Continuation[Exception],
26 | Continuation[OperationCanceledError],
27 | ],
28 | None,
29 | ]
30 |
31 |
32 | def from_continuations(callback: Callbacks[_TSource]) -> Awaitable[_TSource]:
33 | """Create async computation from continuations.
34 |
35 | Creates an asynchronous computation that captures the current
36 | success, exception and cancellation continuations. The callback must
37 | eventually call exactly one of the given continuations.
38 |
39 | Args:
40 | callback: The function that accepts the current success,
41 | exception, and cancellation continuations.
42 |
43 | Returns:
44 | An asynchronous computation that provides the callback with the
45 | current continuations.
46 | """
47 | future: Future[_TSource] = asyncio.Future()
48 |
49 | def done(value: _TSource) -> None:
50 | if not future.done():
51 | future.set_result(value)
52 |
53 | def error(err: Exception) -> None:
54 | if not future.done():
55 | future.set_exception(err)
56 |
57 | def cancel(_: OperationCanceledError) -> None:
58 | if not future.done():
59 | future.cancel()
60 |
61 | callback(done, error, cancel)
62 | return future
63 |
64 |
65 | # Tasks that are scheduled on the main event loop. The main event loop keeps
66 | # a weak reference to the tasks, so we need to keep a strong reference to them until
67 | # they are completed.
68 | __running_tasks: set[Task[Any]] = set()
69 |
70 |
71 | def start(computation: Awaitable[Any], token: CancellationToken | None = None) -> None:
72 | """Start computation.
73 |
74 | Starts the asynchronous computation in the event loop. Do not await
75 | its result.
76 |
77 | If no cancellation token is provided then the default cancellation
78 | token is used.
79 | """
80 |
81 | async def runner() -> Any:
82 | result = await computation
83 | __running_tasks.remove(task)
84 | return result
85 |
86 | task = asyncio.create_task(runner())
87 | __running_tasks.add(task)
88 |
89 | def cb():
90 | task.cancel()
91 |
92 | if token:
93 | token.register(cb)
94 | return None
95 |
96 |
97 | def start_immediate(computation: Awaitable[Any], token: CancellationToken | None = None) -> None:
98 | """Start computation immediately.
99 |
100 | Runs an asynchronous computation, starting immediately on the
101 | current operating system thread.
102 | """
103 |
104 | async def runner() -> Any:
105 | result = await computation
106 | __running_tasks.remove(task)
107 | return result
108 |
109 | task = asyncio.create_task(runner())
110 | __running_tasks.add(task)
111 |
112 | def cb() -> None:
113 | task.cancel()
114 |
115 | if token:
116 | token.register(cb)
117 | return None
118 |
119 |
120 | def run_synchronously(computation: Awaitable[_TSource]) -> _TSource:
121 |     """Run the asynchronous computation and await its result."""
122 |
123 | async def runner() -> _TSource:
124 | return await computation
125 |
126 | return asyncio.run(runner())
127 |
128 |
129 | async def singleton(value: _TSource) -> _TSource:
130 | """Async function that returns a single value."""
131 | return value
132 |
133 |
134 | async def sleep(msecs: int) -> None:
135 | """Sleep.
136 |
137 | Creates an asynchronous computation that will sleep for the given
138 |     time. The sleep is scheduled on the asyncio event loop, so the
139 |     operation will not block operating system threads for the duration
140 | of the wait.
141 | """
142 | await asyncio.sleep(msecs / 1000.0)
143 |
144 |
145 | async def empty() -> None:
146 | """Async no-op."""
147 |
148 |
149 | def from_result(result: _TSource) -> Awaitable[_TSource]:
150 | """Awaitable from result.
151 |
152 |     Creates an async operation that's completed successfully with the
153 | specified result.
154 | """
155 |
156 | async def from_result(result: _TSource) -> _TSource:
157 | """Async return value."""
158 | return result
159 |
160 | return from_result(result)
161 |
162 |
163 | __all__ = [
164 | "Continuation",
165 | "empty",
166 | "from_continuations",
167 | "run_synchronously",
168 | "singleton",
169 | "sleep",
170 | "start",
171 | "start_immediate",
172 | ]
173 |
--------------------------------------------------------------------------------
/expression/core/async_builder.py:
--------------------------------------------------------------------------------
1 | """Async builder module.
2 |
3 | This module provides the base class for async builders, which are used to
4 | create computational expressions for async operations.
5 | """
6 |
7 | from abc import ABC
8 | from collections.abc import AsyncGenerator, Awaitable, Callable
9 | from functools import wraps
10 | from typing import Any, Generic, TypeVar, cast
11 |
12 | from typing_extensions import ParamSpec
13 |
14 | from .error import EffectError
15 |
16 |
17 | _T = TypeVar("_T") # The container item type
18 | _M = TypeVar("_M") # for container type
19 | _P = ParamSpec("_P")
20 |
21 |
22 | class AsyncBuilderState(Generic[_T]):
23 | """Encapsulates the state of an async builder computation."""
24 |
25 | def __init__(self):
26 | self.is_done = False
27 |
28 |
29 | class AsyncBuilder(Generic[_T, _M], ABC): # Corrected Generic definition
30 | """Async effect builder."""
31 |
32 | # Required methods
33 | async def bind(
34 | self, xs: _M, fn: Callable[[_T], Awaitable[_M]]
35 | ) -> _M: # Use concrete types for Callable input and output
36 | raise NotImplementedError("AsyncBuilder does not implement a `bind` method")
37 |
38 | async def return_(self, x: _T) -> _M:
39 | raise NotImplementedError("AsyncBuilder does not implement a `return` method")
40 |
41 | async def return_from(self, xs: _M) -> _M:
42 | raise NotImplementedError("AsyncBuilder does not implement a `return` from method")
43 |
44 | async def combine(self, xs: _M, ys: _M) -> _M:
45 | """Used for combining multiple statements in the effect."""
46 | raise NotImplementedError("AsyncBuilder does not implement a `combine` method")
47 |
48 | async def zero(self) -> _M:
49 | """Zero effect.
50 |
51 | Called if the effect raises StopAsyncIteration without a value, i.e
52 | returns None.
53 | """
54 | raise NotImplementedError("AsyncBuilder does not implement a `zero` method")
55 |
56 | # Optional methods for control flow
57 | async def delay(self, fn: Callable[[], _M]) -> _M:
58 | """Delay the computation.
59 |
60 | Default implementation is to return the result of the function.
61 | """
62 | return fn()
63 |
64 | async def run(self, computation: _M) -> _M:
65 | """Run a computation.
66 |
67 | Default implementation is to return the computation as is.
68 | """
69 | return computation
70 |
71 | # Internal implementation
72 | async def _send(
73 | self,
74 | gen: AsyncGenerator[_T, Any],
75 | state: AsyncBuilderState[_T], # Use AsyncBuilderState
76 | value: _T,
77 | ) -> _M:
78 | try:
79 | yielded = await gen.asend(value)
80 | return await self.return_(yielded)
81 | except EffectError as error:
82 | # Effect errors (Nothing, Error, etc) short circuits
83 | state.is_done = True
84 | return await self.return_from(cast("_M", error.args[0]))
85 | except StopAsyncIteration:
86 | state.is_done = True
87 | raise
88 | except Exception:
89 | state.is_done = True
90 | raise
91 |
92 | def __call__(
93 | self,
94 | fn: Callable[
95 | _P,
96 | AsyncGenerator[_T, Any],
97 | ],
98 | ) -> Callable[_P, Awaitable[_M]]:
99 | """The builder decorator."""
100 |
101 | @wraps(fn)
102 | async def wrapper(*args: _P.args, **kw: _P.kwargs) -> _M:
103 | gen = fn(*args, **kw)
104 | state = AsyncBuilderState[_T]() # Initialize AsyncBuilderState
105 | result: _M = await self.zero() # Initialize result
106 | value: _M
107 |
108 | async def binder(value: Any) -> _M:
109 | ret = await self._send(gen, state, value) # Pass state to _send
110 | return await self.delay(lambda: ret) # Delay every bind call
111 |
112 | try:
113 | # Initialize co-routine with None to start the generator and get the
114 | # first value
115 | result = value = await binder(None)
116 |
117 | while not state.is_done: # Loop until coroutine is exhausted
118 | value = await self.bind(value, binder) # Send value to coroutine
119 | result = await self.combine(result, value) # Combine previous result with new value
120 |
121 | except StopAsyncIteration:
122 |                 # This will happen if the generator exits by returning None
123 | pass
124 |
125 | return await self.run(result) # Run the result
126 |
127 | return wrapper
128 |
129 |
130 | __all__ = ["AsyncBuilder", "AsyncBuilderState"]
131 |
--------------------------------------------------------------------------------
/expression/core/builder.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from collections.abc import Callable, Generator
3 | from functools import wraps
4 | from typing import Any, Generic, TypeVar, cast
5 |
6 | from typing_extensions import ParamSpec
7 |
8 | from .error import EffectError
9 |
10 |
11 | _T = TypeVar("_T") # for value type
12 | _M = TypeVar("_M") # for monadic type
13 | _P = ParamSpec("_P")
14 |
15 |
16 | class BuilderState(Generic[_T]):
17 | """Encapsulates the state of a builder computation."""
18 |
19 | def __init__(self):
20 | self.is_done = False
21 |
22 |
23 | class Builder(Generic[_T, _M], ABC): # Corrected Generic definition
24 | """Effect builder."""
25 |
26 | # Required methods
27 | def bind(self, xs: _M, fn: Callable[[_T], _M]) -> _M: # Use concrete types for Callable input and output
28 | raise NotImplementedError("Builder does not implement a `bind` method")
29 |
30 | def return_(self, x: _T) -> _M:
31 | raise NotImplementedError("Builder does not implement a `return` method")
32 |
33 | def return_from(self, xs: _M) -> _M:
34 | raise NotImplementedError("Builder does not implement a `return` from method")
35 |
36 | def combine(self, xs: _M, ys: _M) -> _M:
37 | """Used for combining multiple statements in the effect."""
38 | raise NotImplementedError("Builder does not implement a `combine` method")
39 |
40 | def zero(self) -> _M:
41 | """Zero effect.
42 |
43 | Called if the effect raises StopIteration without a value, i.e
44 | returns None.
45 | """
46 | raise NotImplementedError("Builder does not implement a `zero` method")
47 |
48 | # Optional methods for control flow
49 | def delay(self, fn: Callable[[], _M]) -> _M:
50 | """Delay the computation.
51 |
52 | Default implementation is to return the result of the function.
53 | """
54 | return fn()
55 |
56 | def run(self, computation: _M) -> _M:
57 | """Run a computation.
58 |
59 | Default implementation is to return the computation as is.
60 | """
61 | return computation
62 |
63 | # Internal implementation
64 | def _send(
65 | self,
66 | gen: Generator[Any, Any, Any],
67 | state: BuilderState[_T], # Use BuilderState
68 | value: _T,
69 | ) -> _M:
70 | try:
71 | yielded = gen.send(value)
72 | return self.return_(yielded)
73 | except EffectError as error:
74 | # Effect errors (Nothing, Error, etc) short circuits
75 | state.is_done = True
76 | return self.return_from(cast("_M", error.args[0]))
77 | except StopIteration as ex:
78 | state.is_done = True
79 |
80 | # Return of a value in the generator produces StopIteration with a value
81 | if ex.value is not None:
82 | return self.return_(ex.value)
83 |
84 | raise # Raise StopIteration with no value
85 |
86 | except RuntimeError:
87 | state.is_done = True
88 | return self.zero() # Return zero() to handle generator runtime errors instead of raising StopIteration
89 |
90 | def __call__(
91 | self,
92 | fn: Callable[
93 | _P,
94 | Generator[_T | None, _T, _T | None] | Generator[_T | None, None, _T | None],
95 | ],
96 | ) -> Callable[_P, _M]:
97 | """The builder decorator."""
98 |
99 | @wraps(fn)
100 | def wrapper(*args: _P.args, **kw: _P.kwargs) -> _M:
101 | gen = fn(*args, **kw)
102 | state = BuilderState[_T]() # Initialize BuilderState
103 | result: _M = self.zero() # Initialize result
104 | value: _M
105 |
106 | def binder(value: Any) -> _M:
107 | ret = self._send(gen, state, value) # Pass state to _send
108 | return self.delay(lambda: ret) # Delay every bind call
109 |
110 | try:
111 | # Initialize co-routine with None to start the generator and get the
112 | # first value
113 | result = value = binder(None)
114 |
115 | while not state.is_done: # Loop until coroutine is exhausted
116 | value: _M = self.bind(value, binder) # Send value to coroutine
117 | result = self.combine(result, value) # Combine previous result with new value
118 |
119 | except StopIteration:
120 |                 # This will happen if the generator exits by returning None
121 | pass
122 |
123 | return self.run(result) # Run the result
124 |
125 | return wrapper
126 |
--------------------------------------------------------------------------------
/expression/core/compose.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from functools import reduce
3 | from typing import Any, TypeVar, overload
4 |
5 | from typing_extensions import TypeVarTuple, Unpack
6 |
7 |
8 | _A = TypeVar("_A")
9 | _B = TypeVar("_B")
10 | _C = TypeVar("_C")
11 | _D = TypeVar("_D")
12 | _E = TypeVar("_E")
13 | _F = TypeVar("_F")
14 | _G = TypeVar("_G")
15 | _H = TypeVar("_H")
16 | _T = TypeVar("_T")
17 | _J = TypeVar("_J")
18 |
19 | _P = TypeVarTuple("_P")
20 | _X = TypeVarTuple("_X")
21 | _Y = TypeVarTuple("_Y")
22 | _Z = TypeVarTuple("_Z")
23 |
24 |
25 | @overload
26 | def compose() -> Callable[[_A], _A]: ...
27 |
28 |
29 | @overload
30 | def compose(__fn1: Callable[[_A], _B]) -> Callable[[_A], _B]: ...
31 |
32 |
33 | @overload
34 | def compose(__fn1: Callable[[_A], _B], __fn2: Callable[[_B], _C]) -> Callable[[_A], _C]: ...
35 |
36 |
37 | @overload
38 | def compose(__fn1: Callable[[_A], _B], __fn2: Callable[[_B], _C], __fn3: Callable[[_C], _D]) -> Callable[[_A], _D]: ...
39 |
40 |
41 | @overload
42 | def compose(
43 | fn1: Callable[[_A], _B],
44 | fn2: Callable[[_B], _C],
45 | fn3: Callable[[_C], _D],
46 | fn4: Callable[[_D], _E],
47 | /,
48 | ) -> Callable[[_A], _E]: ...
49 |
50 |
51 | @overload
52 | def compose(
53 | fn1: Callable[[_A], _B],
54 | fn2: Callable[[_B], _C],
55 | fn3: Callable[[_C], _D],
56 | fn4: Callable[[_D], _E],
57 | fn5: Callable[[_E], _F],
58 | /,
59 | ) -> Callable[[_A], _F]: ...
60 |
61 |
62 | @overload
63 | def compose(
64 | fn1: Callable[[_A], _B],
65 | fn2: Callable[[_B], _C],
66 | fn3: Callable[[_C], _D],
67 | fn4: Callable[[_D], _E],
68 | fn5: Callable[[_E], _F],
69 | fn6: Callable[[_F], _G],
70 | /,
71 | ) -> Callable[[_A], _G]: ...
72 |
73 |
74 | @overload
75 | def compose(
76 | fn1: Callable[[_A], _B],
77 | fn2: Callable[[_B], _C],
78 | fn3: Callable[[_C], _D],
79 | fn4: Callable[[_D], _E],
80 | fn5: Callable[[_E], _F],
81 | fn6: Callable[[_F], _G],
82 | fn7: Callable[[_G], _H],
83 | /,
84 | ) -> Callable[[_A], _H]: ...
85 |
86 |
87 | @overload
88 | def compose(
89 | fn1: Callable[[_A], _B],
90 | fn2: Callable[[_B], _C],
91 | fn3: Callable[[_C], _D],
92 | fn4: Callable[[_D], _E],
93 | fn5: Callable[[_E], _F],
94 | fn6: Callable[[_F], _G],
95 | fn7: Callable[[_G], _H],
96 | fn8: Callable[[_H], _T],
97 | /,
98 | ) -> Callable[[_A], _T]: ...
99 |
100 |
101 | @overload
102 | def compose(
103 | fn1: Callable[[_A], _B],
104 | fn2: Callable[[_B], _C],
105 | fn3: Callable[[_C], _D],
106 | fn4: Callable[[_D], _E],
107 | fn5: Callable[[_E], _F],
108 | fn6: Callable[[_F], _G],
109 | fn7: Callable[[_G], _H],
110 | fn8: Callable[[_H], _T],
111 | fn9: Callable[[_T], _J],
112 | /,
113 | ) -> Callable[[_A], _J]: ...
114 |
115 |
116 | def compose(*fns: Callable[[Any], Any]) -> Callable[[Any], Any]:
117 | """Compose multiple functions left to right.
118 |
119 | Composes zero or more functions into a functional composition. The
120 | functions are composed left to right. A composition of zero
121 | functions gives back the identity function.
122 |
123 | >>> x = 42
124 | >>> f = lambda a: a * 10
125 | >>> g = lambda b: b + 3
126 | >>> h = lambda c: c / 2
127 | >>> compose()(x) == x
128 | >>> compose(f)(x) == f(x)
129 | >>> compose(f, g)(x) == g(f(x))
130 | >>> compose(f, g, h)(x) == h(g(f(x)))
131 | ...
132 |
133 | Returns:
134 | The composed function.
135 | """
136 |
137 | def _compose(source: Any) -> Any:
138 | """Return a pipeline of composed functions."""
139 | return reduce(lambda acc, f: f(acc), fns, source)
140 |
141 | return _compose
142 |
143 |
144 | @overload
145 | def starcompose() -> Callable[[Any], Any]: ...
146 |
147 |
148 | @overload
149 | def starcompose(fn1: Callable[[Unpack[_P]], _A], /) -> Callable[[Unpack[_P]], _A]: ...
150 |
151 |
152 | @overload
153 | def starcompose(
154 | fn1: Callable[[Unpack[_P]], tuple[Unpack[_Y]]], fn2: Callable[[Unpack[_Y]], _B], /
155 | ) -> Callable[[Unpack[_P]], _B]: ...
156 |
157 |
158 | @overload
159 | def starcompose(
160 | fn1: Callable[[Unpack[_P]], tuple[Unpack[_Y]]],
161 | fn2: Callable[[Unpack[_Y]], tuple[Unpack[_Z]]],
162 | fn3: Callable[[Unpack[_Z]], _C],
163 | /,
164 | ) -> Callable[[Unpack[_P]], _C]: ...
165 |
166 |
167 | @overload
168 | def starcompose(
169 | fn1: Callable[[Unpack[_P]], tuple[Unpack[_Y]]],
170 | fn2: Callable[[Unpack[_Y]], tuple[Unpack[_Z]]],
171 | fn3: Callable[[Unpack[_Z]], tuple[Unpack[_X]]],
172 | fn4: Callable[[Unpack[_X]], _D],
173 | /,
174 | ) -> Callable[[Unpack[_P]], _D]: ...
175 |
176 |
177 | def starcompose(*fns: Callable[..., Any]) -> Callable[..., Any]:
178 | """Compose multiple functions left to right.
179 |
180 | Composes zero or more functions into a functional composition. The
181 | functions are composed left to right. A composition of zero
182 | functions gives back the identity function.
183 |
184 | The first function must accept a variable number of positional
185 | arguments and if it returns a tuple, the subsequent functions must
186 | accept the same number of arguments as the length of the tuple of
187 | the previous function.
188 | """
189 |
190 | def _compose(source: Any) -> Any:
191 | """Return a pipeline of composed functions."""
192 | return reduce(lambda fields, f: f(*fields), fns, source)
193 |
194 | return _compose
195 |
196 |
197 | __all__ = ["compose"]
198 |
--------------------------------------------------------------------------------
/expression/core/curry.py:
--------------------------------------------------------------------------------
1 | import functools
2 | from collections.abc import Callable
3 | from typing import Any, Concatenate, Literal, TypeVar, overload
4 |
5 | from typing_extensions import ParamSpec
6 |
7 |
8 | _P = ParamSpec("_P")
9 | _A = TypeVar("_A")
10 | _B = TypeVar("_B")
11 | _C = TypeVar("_C")
12 | _D = TypeVar("_D")
13 | _E = TypeVar("_E")
14 |
15 | _Arity = Literal[0, 1, 2, 3, 4]
16 |
17 |
18 | def _curry(args: tuple[Any, ...], arity: int, fun: Callable[..., Any]) -> Callable[..., Any]:
19 | def wrapper(*args_: Any, **kw: Any) -> Any:
20 | if arity == 1:
21 | return fun(*args, *args_, **kw)
22 | return _curry(args + args_, arity - 1, fun)
23 |
24 | return wrapper
25 |
26 |
27 | @overload
28 | def curry(num_args: Literal[0]) -> Callable[[Callable[_P, _B]], Callable[_P, _B]]: ...
29 |
30 |
31 | @overload
32 | def curry(
33 | num_args: Literal[1],
34 | ) -> Callable[[Callable[Concatenate[_A, _P], _B]], Callable[[_A], Callable[_P, _B]]]: ...
35 |
36 |
37 | @overload
38 | def curry(
39 | num_args: Literal[2],
40 | ) -> Callable[
41 | [Callable[Concatenate[_A, _B, _P], _C]],
42 | Callable[
43 | [_A],
44 | Callable[
45 | [_B],
46 | Callable[_P, _C],
47 | ],
48 | ],
49 | ]: ...
50 |
51 |
52 | @overload
53 | def curry(
54 | num_args: Literal[3],
55 | ) -> Callable[
56 | [Callable[Concatenate[_A, _B, _C, _P], _D]],
57 | Callable[
58 | [_A],
59 | Callable[
60 | [_B],
61 | Callable[
62 | [_C],
63 | Callable[_P, _D],
64 | ],
65 | ],
66 | ],
67 | ]: ...
68 |
69 |
70 | @overload
71 | def curry(
72 | num_args: Literal[4],
73 | ) -> Callable[
74 | [Callable[Concatenate[_A, _B, _C, _D, _P], _E]],
75 | Callable[
76 | [_A],
77 | Callable[
78 | [_B],
79 | Callable[
80 | [_C],
81 | Callable[[_D], Callable[_P, _E]],
82 | ],
83 | ],
84 | ],
85 | ]: ...
86 |
87 |
88 | def curry(num_args: _Arity) -> Callable[..., Any]:
89 | """A curry decorator.
90 |
91 | Makes a function curried.
92 |
93 | Args:
94 | num_args: The number of args to curry from the start of the
95 | function
96 |
97 | Example:
98 | >>> @curry(1)
99 | ... def add(a: int, b: int) -> int:
100 | ... return a + b
101 | >>>
102 | >>> assert add(3)(4) == 7
103 | """
104 |
105 | def wrapper(fun: Callable[..., Any]) -> Callable[..., Any]:
106 | return functools.wraps(fun)(_curry((), num_args + 1, fun))
107 |
108 | return wrapper
109 |
110 |
111 | @overload
112 | def curry_flip(
113 | num_args: Literal[0],
114 | ) -> Callable[[Callable[_P, _A]], Callable[_P, _A]]: ...
115 |
116 |
117 | @overload
118 | def curry_flip(
119 | num_args: Literal[1],
120 | ) -> Callable[[Callable[Concatenate[_A, _P], _B]], Callable[_P, Callable[[_A], _B]]]: ...
121 |
122 |
123 | @overload
124 | def curry_flip(
125 | num_args: Literal[2],
126 | ) -> Callable[
127 | [Callable[Concatenate[_A, _B, _P], _C]],
128 | Callable[
129 | _P,
130 | Callable[
131 | [_A],
132 | Callable[[_B], _C],
133 | ],
134 | ],
135 | ]: ...
136 |
137 |
138 | @overload
139 | def curry_flip(
140 | num_args: Literal[3],
141 | ) -> Callable[
142 | [Callable[Concatenate[_A, _B, _C, _P], _D]],
143 | Callable[
144 | _P,
145 | Callable[
146 | [_A],
147 | Callable[
148 | [_B],
149 | Callable[[_C], _D],
150 | ],
151 | ],
152 | ],
153 | ]: ...
154 |
155 |
156 | @overload
157 | def curry_flip(
158 | num_args: Literal[4],
159 | ) -> Callable[
160 | [Callable[Concatenate[_A, _B, _C, _D, _P], _E]],
161 | Callable[
162 | _P,
163 | Callable[
164 | [_A],
165 | Callable[
166 | [_B],
167 | Callable[[_C], Callable[[_D], _E]],
168 | ],
169 | ],
170 | ],
171 | ]: ...
172 |
173 |
174 | def curry_flip(
175 | num_args: _Arity,
176 | ) -> Callable[..., Any]:
177 | """A flipped curry decorator.
178 |
179 | Makes a function curried, but flips the curried arguments to become
180 | the last arguments. This is very useful when e.g. optional
181 | arguments follow a source argument that will be piped.
182 |
183 | Args:
184 | num_args: The number of args to curry from the start of the
185 | function
186 |
187 | Example:
188 | >>> @curry_flip(1)
189 | ... def map(source: list[int], mapper: Callable[[int], int]):
190 | ... return [mapper(x) for x in source]
191 | >>>
192 | >>> ys = pipe(xs, map(lambda x: x * 10))
193 | """
194 |
195 | def _wrap_fun(fun: Callable[..., Any]) -> Callable[..., Any]:
196 | @functools.wraps(fun)
197 | def _wrap_args(*args: Any, **kwargs: Any) -> Callable[..., Any]:
198 | def _wrap_curried(*curry_args: Any) -> Any:
199 | return fun(*curry_args, *args, **kwargs)
200 |
201 | return _curry((), num_args, _wrap_curried)
202 |
203 | return _wrap_args if num_args else fun
204 |
205 | return _wrap_fun
206 |
207 |
208 | __all__ = ["curry", "curry_flip"]
209 |
--------------------------------------------------------------------------------
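A short usage sketch for `curry` and `curry_flip`, derived from the docstrings above; `take` is a throwaway helper used only for illustration.

```python
from expression.core.curry import curry, curry_flip


@curry(1)
def add(a: int, b: int) -> int:
    return a + b


# The first argument is applied on its own and returns a new function.
assert add(3)(4) == 7


@curry_flip(1)
def take(source: list[int], count: int) -> list[int]:
    return source[:count]


# The curried argument is flipped to the end, so take(2) is a function
# that still expects the source list, which makes it pipe-friendly.
take_two = take(2)
assert take_two([1, 2, 3]) == [1, 2]
```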
/expression/core/error.py:
--------------------------------------------------------------------------------
1 | from typing import NoReturn
2 |
3 |
4 | class EffectError(Exception):
5 | """An error that will exit any computational expression.
6 |
7 | We use this to detect if sub-generators cause an exit, since
8 | yielding nothing will be silently ignored.
9 | """
10 |
11 |
12 | def failwith(message: str) -> NoReturn:
13 | """Raise exception with the given message string."""
14 | raise Exception(message)
15 |
16 |
17 | __all__ = ["EffectError", "failwith"]
18 |
--------------------------------------------------------------------------------
/expression/core/fn.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import functools
4 | from collections.abc import Awaitable, Callable
5 | from typing import Generic, TypeVar, cast
6 |
7 | from typing_extensions import ParamSpec
8 |
9 |
10 | _TResult = TypeVar("_TResult")
11 | _P = ParamSpec("_P")
12 |
13 |
14 | class TailCall(Generic[_P]):
15 | """Returns a tail call.
16 |
17 | If a `tailrec` decorated function return a `TailCall` then the
18 | function will be called again with the new arguments provided.
19 | """
20 |
21 | def __init__(self, *args: _P.args, **kw: _P.kwargs):
22 | self.args = args
23 | self.kw = kw
24 |
25 |
26 | TailCallResult = _TResult | TailCall[_P]
27 |
28 |
29 | def tailrec(fn: Callable[_P, TailCallResult[_TResult, _P]]) -> Callable[_P, _TResult]:
30 | """Tail call recursive function decorator.
31 |
32 | Can be used to create tail call recursive functions that will not
33 | stack overflow. To recurse the function needs to return an instance
34 | of `TailCall` with the next arguments to be used for the next call.
35 | """
36 |
37 | def trampoline(bouncer: TailCallResult[_TResult, _P]) -> _TResult:
38 | while isinstance(bouncer, TailCall):
39 | bouncer = cast(TailCall[_P], bouncer)
40 |
41 | args, kw = bouncer.args, bouncer.kw
42 | bouncer = fn(*args, **kw)
43 |
44 | return bouncer
45 |
46 | @functools.wraps(fn)
47 | def wrapper(*args: _P.args, **kw: _P.kwargs) -> _TResult:
48 | return trampoline(fn(*args, **kw))
49 |
50 | return wrapper
51 |
52 |
53 | def tailrec_async(fn: Callable[_P, Awaitable[TailCallResult[_TResult, _P]]]) -> Callable[_P, Awaitable[_TResult]]:
54 | """Tail call recursive async function decorator."""
55 |
56 | async def trampoline(bouncer: TailCallResult[_TResult, _P]) -> _TResult:
57 | while isinstance(bouncer, TailCall):
58 | bouncer = cast(TailCall[_P], bouncer)
59 | args, kw = bouncer.args, bouncer.kw
60 | bouncer = await fn(*args, **kw)
61 |
62 | return bouncer
63 |
64 | @functools.wraps(fn)
65 | async def wrapper(*args: _P.args, **kw: _P.kwargs) -> _TResult:
66 | result = await fn(*args, **kw)
67 | return await trampoline(result)
68 |
69 | return wrapper
70 |
71 |
72 | __all__ = ["TailCall", "TailCallResult", "tailrec", "tailrec_async"]
73 |
--------------------------------------------------------------------------------
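A minimal sketch of the trampoline in action: the decorated function returns a `TailCall` instead of recursing, so deep "recursion" runs in constant stack space. The `factorial` function is invented for illustration; `from __future__ import annotations` is used so the `TailCallResult` annotation is not evaluated at runtime.

```python
from __future__ import annotations

from expression.core.fn import TailCall, TailCallResult, tailrec


@tailrec
def factorial(n: int, acc: int = 1) -> TailCallResult[int, ...]:
    if n <= 1:
        return acc
    # Instead of calling factorial(n - 1, acc * n) directly, return a
    # TailCall so the trampoline performs the next step iteratively.
    return TailCall(n - 1, acc * n)


assert factorial(5) == 120
# Far beyond the default recursion limit, yet no RecursionError.
assert factorial(10_000) > 0
```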
/expression/core/mailbox.py:
--------------------------------------------------------------------------------
1 | # Attribution to original authors of this code
2 | # --------------------------------------------
3 | # This code has been originally ported from the Fable project (https://fable.io)
4 | # Copyright (c) Alfonso García-Caro and contributors.
5 | #
6 | # The original code was authored by
7 | # - Alfonso Garcia-Caro (https://github.com/alfonsogarciacaro)
8 | # - ncave (https://github.com/ncave)
9 | #
10 | # You can find the original implementation here:
11 | # - https://github.com/fable-compiler/Fable/blob/nagareyama/src/fable-library/MailboxProcessor.ts
12 |
13 | from __future__ import annotations
14 |
15 | import asyncio
16 | from collections.abc import Awaitable, Callable
17 | from queue import SimpleQueue
18 | from threading import RLock
19 | from typing import Any, Generic, TypeVar
20 |
21 | from expression.system import CancellationToken, OperationCanceledError
22 |
23 | from .aiotools import Continuation, from_continuations, start_immediate
24 |
25 |
26 | _Msg = TypeVar("_Msg")
27 | _Reply = TypeVar("_Reply")
28 |
29 |
30 | class AsyncReplyChannel(Generic[_Reply]):
31 | def __init__(self, fn: Callable[[_Reply], None]) -> None:
32 | self.fn = fn
33 |
34 | def reply(self, r: _Reply) -> None:
35 | self.fn(r)
36 |
37 |
38 | class MailboxProcessor(Generic[_Msg]):
39 | def __init__(self, cancellation_token: CancellationToken | None) -> None:
40 | self.messages: SimpleQueue[_Msg] = SimpleQueue()
41 | self.token = cancellation_token or CancellationToken.none()
42 | self.loop = asyncio.get_event_loop()
43 | self.lock = RLock()
44 |
45 | # Holds the continuation, i.e. the `done` callback of Async.from_continuations
46 | # returned by `receive`.
47 | self.continuation: Continuation[_Msg] | None = None
48 | self.cancel: Continuation[OperationCanceledError] | None = None
49 |
50 | def post(self, msg: _Msg) -> None:
51 | """Post a message synchronously to the mailbox processor.
52 |
53 | This method is not asynchronous since it's very fast to execute.
54 | It simply adds the message to the message queue of the mailbox
55 | processor and returns.
56 |
57 | Args:
58 | msg: Message to post.
59 |
60 | Returns:
61 | None
62 | """
63 | self.messages.put(msg)
64 | self.loop.call_soon_threadsafe(self.__process_events)
65 |
66 | def post_and_async_reply(self, build_message: Callable[[AsyncReplyChannel[_Reply]], _Msg]) -> Awaitable[_Reply]:
67 | """Post with async reply.
68 |
69 | Post a message asynchronously to the mailbox processor and wait
70 | for the reply.
71 |
72 | Args:
73 | build_message: A function that takes a reply channel
74 | (`AsyncReplyChannel[Reply]`) and returns a message to send
75 | to the mailbox processor. The message should contain the
76 | reply channel, e.g. as an element of a tuple.
77 |
78 | Returns:
79 | The reply from mailbox processor.
80 | """
81 | result: _Reply | None = None
82 | # This is the continuation for the `done` callback of the awaiting poster.
83 | continuation: Continuation[_Reply] | None = None
84 |
85 | def check_completion() -> None:
86 | if result is not None and continuation is not None:
87 | continuation(result)
88 |
89 | def reply_callback(res: _Reply):
90 | nonlocal result
91 | result = res
92 | check_completion()
93 |
94 | reply_channel = AsyncReplyChannel(reply_callback)
95 | self.messages.put(build_message(reply_channel))
96 | self.__process_events()
97 |
98 | def callback(
99 | done: Continuation[_Reply],
100 | _: Continuation[Exception],
101 | __: Continuation[OperationCanceledError],
102 | ):
103 | nonlocal continuation
104 | continuation = done
105 | check_completion()
106 |
107 | return from_continuations(callback)
108 |
109 | async def receive(self) -> _Msg:
110 | """Receive message from mailbox.
111 |
112 | Returns:
113 | An asynchronous computation which will consume the
114 | first message in arrival order. No thread is blocked while
115 | waiting for further messages. Raises a TimeoutException if
116 | the timeout is exceeded.
117 | """
118 |
119 | def callback(
120 | done: Continuation[_Msg],
121 | error: Continuation[Exception],
122 | cancel: Continuation[OperationCanceledError],
123 | ):
124 | if self.continuation:
125 | raise Exception("Receive can only be called once!")
126 |
127 | self.continuation = done
128 | self.cancel = cancel
129 |
130 | self.__process_events()
131 |
132 | return await from_continuations(callback)
133 |
134 | def __process_events(self):
135 | # Cancellation of async workflows is trickier in Python than
136 | # in F#, so we check the cancellation token on every pass.
137 | if self.token.is_cancellation_requested:
138 | self.cancel, cancel = None, self.cancel
139 | if cancel is not None:
140 | cancel(OperationCanceledError("Mailbox was cancelled"))
141 | return
142 |
143 | if self.continuation is None:
144 | return
145 |
146 | with self.lock:
147 | if self.messages.empty():
148 | return
149 | msg = self.messages.get()
150 | self.continuation, cont = None, self.continuation
151 |
152 | if cont is not None: # type: ignore
153 | cont(msg)
154 |
155 | @staticmethod
156 | def start(
157 | body: Callable[[MailboxProcessor[Any]], Awaitable[None]],
158 | cancellation_token: CancellationToken | None = None,
159 | ) -> MailboxProcessor[Any]:
160 | mbox: MailboxProcessor[Any] = MailboxProcessor(cancellation_token)
161 | start_immediate(body(mbox), cancellation_token)
162 | return mbox
163 |
164 |
165 | __all__ = ["AsyncReplyChannel", "MailboxProcessor"]
166 |
--------------------------------------------------------------------------------
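A minimal sketch of a request/reply agent built on `MailboxProcessor`, assuming it runs inside an asyncio event loop (the processor grabs the running loop in its constructor). The message shape, a `(text, reply_channel)` tuple, is purely illustrative.

```python
import asyncio

from expression.core.mailbox import AsyncReplyChannel, MailboxProcessor

Msg = tuple[str, AsyncReplyChannel[str]]


async def main() -> None:
    async def body(inbox: MailboxProcessor[Msg]) -> None:
        # Consume messages one at a time and reply on the channel
        # that was carried inside the message.
        while True:
            text, channel = await inbox.receive()
            channel.reply(text.upper())

    mbox = MailboxProcessor.start(body)
    reply = await mbox.post_and_async_reply(lambda channel: ("hello", channel))
    assert reply == "HELLO"


asyncio.run(main())
```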
/expression/core/misc.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from typing import Any, TypeVar
3 |
4 | from typing_extensions import TypeVarTuple, Unpack
5 |
6 |
7 | _A = TypeVar("_A")
8 | _B = TypeVar("_B")
9 | _P = TypeVarTuple("_P")
10 | _TSource = TypeVar("_TSource")
11 | _TResult = TypeVar("_TResult")
12 |
13 |
14 | def identity(value: _A) -> _A:
15 | """Identity function.
16 |
17 | Returns value given as argument.
18 | """
19 | return value
20 |
21 |
22 | def starid(*value: Unpack[_P]) -> tuple[Unpack[_P]]:
23 | return value
24 |
25 |
26 | def flip(fn: Callable[[_A, _B], _TResult]) -> Callable[[_B, _A], _TResult]:
27 | """Flips the arguments for a function taking two arguments.
28 |
29 | Example:
30 | >>> fn(a, b) == flip(fn)(b, a)
31 | """
32 |
33 | def _flip(b: _B, a: _A) -> Any:
34 | return fn(a, b)
35 |
36 | return _flip
37 |
38 |
39 | def snd(value: tuple[Any, _TSource]) -> _TSource:
40 | """Return second argument of the tuple."""
41 | _, b = value
42 | return b
43 |
44 |
45 | def fst(value: tuple[_TSource, Any]) -> _TSource:
46 | """Return first argument of the tuple."""
47 | a, _ = value
48 | return a
49 |
50 |
51 | __all__ = ["flip", "fst", "identity", "snd", "starid"]
52 |
--------------------------------------------------------------------------------
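A few one-liners showing the helpers above; `div` is a throwaway function defined only for the example.

```python
from expression.core.misc import flip, fst, identity, snd, starid


def div(a: int, b: int) -> float:
    return a / b


assert identity(42) == 42
assert flip(div)(2, 10) == 5.0      # arguments are swapped: div(10, 2)
assert fst((1, "a")) == 1
assert snd((1, "a")) == "a"
assert starid(1, 2, 3) == (1, 2, 3)
```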
/expression/core/tagged_union.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from copy import deepcopy
3 | from dataclasses import dataclass, field, fields
4 | from typing import Any, TypeVar, overload
5 |
6 | from typing_extensions import dataclass_transform
7 |
8 |
9 | _T = TypeVar("_T")
10 |
11 |
12 | @overload
13 | def tagged_union(
14 | *, frozen: bool = False, repr: bool = True, eq: bool = True, order: bool = False
15 | ) -> Callable[[type[_T]], type[_T]]: ...
16 |
17 |
18 | @overload
19 | def tagged_union(
20 | _cls: type[_T], *, frozen: bool = False, repr: bool = True, eq: bool = True, order: bool = False
21 | ) -> type[_T]: ...
22 |
23 |
24 | @dataclass_transform()
25 | def tagged_union(
26 | _cls: Any = None, *, frozen: bool = False, repr: bool = True, eq: bool = True, order: bool = False
27 | ) -> Any:
28 | """Tagged union decorator.
29 |
30 | A decorator that turns a dataclass into a tagged union.
31 |
32 | Arguments:
33 | frozen: Whether the tagged union should be frozen. If True,
34 | the __setattr__ and __delattr__ methods will be generated.
35 | repr: If True, the __repr__ method will be generated.
36 | order: If True, the __lt__ method will be generated. The first
37 | case will be considered the smallest with index 0 and the
38 | items will be compared as the tuple (index, value)
39 | eq: If True, the __eq__ method will be generated.
40 | """
41 |
42 | def transform(cls: Any) -> Any:
43 | cls = dataclass(init=False, repr=False, order=False, eq=False, kw_only=True)(cls)
44 | fields_ = fields(cls)
45 | field_names = tuple(f.name for f in fields_)
46 | original_init = cls.__init__
47 |
48 | def tagged_union_getstate(self: Any) -> dict[str, Any]:
49 | return {f.name: getattr(self, f.name) for f in fields(self)}
50 |
51 | def tagged_union_setstate(self: Any, state: dict[str, Any]):
52 | self.__init__(**state)
53 |
54 | cls.__setstate__ = tagged_union_setstate
55 | cls.__getstate__ = tagged_union_getstate
56 |
57 | def __init__(self: Any, **kwargs: Any) -> None:
58 | tag = kwargs.pop("tag", None)
59 |
60 | name, value = next(iter(kwargs.items()))
61 | if name not in field_names:
62 | raise TypeError(f"Unknown case name: {name}")
63 |
64 | if len(kwargs) != 1:
65 | raise TypeError(f"One and only one case can be specified. Not {kwargs}")
66 |
67 | match tag or name, name:
68 | case str(tag), name if tag == name:
69 | object.__setattr__(self, "tag", name)
70 | object.__setattr__(self, name, value)
71 | object.__setattr__(self, "_index", field_names.index(name))
72 | case tag, name:
73 | raise TypeError(f"Tag {tag} does not match case name {name}")
74 |
75 | # Enables the use of dataclasses.asdict
76 | union_fields = dict((f.name, f) for f in fields_ if f.name in [name, "tag"])
77 | object.__setattr__(self, "__dataclass_fields__", union_fields)
78 | original_init(self)
79 |
80 | def __repr__(self: Any) -> str:
81 | return f"{cls.__name__}({self.tag}={getattr(self, self.tag)})"
82 |
83 | if order:
84 |
85 | def __lt__(self: Any, other: Any) -> bool:
86 | if not isinstance(other, cls):
87 | return False
88 |
89 | return (self._index, getattr(self, self.tag)) < (other._index, getattr(other, other.tag))
90 |
91 | cls.__lt__ = __lt__
92 |
93 | if frozen:
94 |
95 | def __hash__(self: Any) -> int:
96 | return hash((cls.__name__, self.tag, getattr(self, self.tag)))
97 |
98 | def __setattr__(self: Any, name: str, value: Any) -> None:
99 | if name in field_names:
100 | raise TypeError("Cannot change the value of a tagged union case")
101 | object.__setattr__(self, name, value)
102 |
103 | def __delattr__(self: Any, name: str) -> None:
104 | if name in field_names:
105 | raise TypeError("Cannot delete a tagged union case")
106 |
107 | object.__delattr__(self, name)
108 |
109 | cls.__setattr__ = __setattr__
110 | cls.__delattr__ = __delattr__
111 | cls.__hash__ = __hash__
112 | if eq:
113 |
114 | def __eq__(self: Any, other: Any) -> bool:
115 | return (
116 | isinstance(other, cls)
117 | and self.tag == getattr(other, "tag")
118 | and getattr(self, self.tag) == getattr(other, self.tag)
119 | )
120 |
121 | cls.__eq__ = __eq__
122 |
123 | def __copy__(self: Any) -> Any:
124 | mapping = {self.tag: getattr(self, self.tag)}
125 | return cls(**mapping)
126 |
127 | def __deepcopy__(self: Any, memo: Any) -> Any:
128 | value = deepcopy(getattr(self, self.tag), memo)
129 | mapping = {self.tag: value}
130 | return cls(**mapping)
131 |
132 | cls.__init__ = __init__
133 | if repr:
134 | cls.__repr__ = __repr__
135 | cls.__match_args__ = field_names
136 |
137 | # We need to handle copy and deepcopy ourselves because they are needed by Pydantic
138 | cls.__copy__ = __copy__
139 | cls.__deepcopy__ = __deepcopy__
140 |
141 | return cls
142 |
143 | return transform if _cls is None else transform(_cls)
144 |
145 |
146 | def case() -> Any:
147 | """A case in a tagged union."""
148 | return field(init=False, kw_only=True)
149 |
150 |
151 | def tag() -> Any:
152 | """The tag of a tagged union."""
153 | return field(init=False, kw_only=True)
154 |
--------------------------------------------------------------------------------
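A sketch of declaring and matching a tagged union with the decorator above. The `Shape` union and its payload dataclasses are invented for illustration; the layout (one `tag()` field plus one `case()` field per variant) follows the conventions this decorator expects.

```python
from dataclasses import dataclass
from typing import Literal

from expression.core.tagged_union import case, tag, tagged_union


@dataclass
class Circle:
    radius: float


@dataclass
class Rectangle:
    width: float
    length: float


@tagged_union
class Shape:
    tag: Literal["circle", "rectangle"] = tag()

    circle: Circle = case()
    rectangle: Rectangle = case()


def area(shape: Shape) -> float:
    match shape:
        case Shape(tag="circle", circle=Circle(radius=r)):
            return 3.14159 * r * r
        case Shape(tag="rectangle", rectangle=Rectangle(width=w, length=l)):
            return w * l
        case _:
            raise AssertionError("unreachable")


assert area(Shape(rectangle=Rectangle(width=10, length=5))) == 50
```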
/expression/core/try_.py:
--------------------------------------------------------------------------------
1 | """Try result class.
2 |
3 | The `Try` type is a simpler `Result` type that pins the error type to
4 | Exception.
5 |
6 | Everything else is the same as `Result`, just simpler to use.
7 | """
8 |
9 | from typing import Any, TypeVar
10 |
11 | from .result import Result
12 |
13 |
14 | _TSource = TypeVar("_TSource")
15 |
16 |
17 | class Try(Result[_TSource, Exception]):
18 | """A try result.
19 |
20 | Same as `Result`, but with the error type pinned to `Exception`, making
21 | it simpler to use when the error type is an exception.
22 | """
23 |
24 | def __str__(self) -> str:
25 | """Return a string representation of the Try."""
26 | match self:
27 | case Try(tag="ok", ok=ok):
28 | return f"Success {ok}"
29 | case Try(error=error):
30 | return f"Failure {error}"
31 |
32 |
33 | def Success(value: _TSource) -> Try[_TSource]:
34 | """The successful Try case.
35 |
36 | Same as result `Ok` but with error type pinned to an exception, i.e:
37 | `Ok[TSource, Exception]`
38 | """
39 | return Try[_TSource](ok=value)
40 |
41 |
42 | def Failure(error: Exception) -> Try[Any]:
43 | """The failure Try case.
44 |
45 | Same as result `Error` but with error type pinned to an exception,
46 | i.e: `Error[TSource, Exception]`
47 | """
48 | return Try[Any](error=error)
49 |
--------------------------------------------------------------------------------
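A small sketch of the `Try` aliases; `parse` is a made-up helper and the import goes through the module path shown above.

```python
from expression.core.try_ import Failure, Success, Try


def parse(text: str) -> Try[int]:
    try:
        return Success(int(text))
    except ValueError as exn:
        return Failure(exn)


assert str(parse("42")) == "Success 42"
assert str(parse("oops")).startswith("Failure")
```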
/expression/core/typing.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from abc import abstractmethod
4 | from collections.abc import Iterable
5 | from typing import Any, Protocol, TypeVar, get_origin
6 |
7 |
8 | _T_co = TypeVar("_T_co", covariant=True)
9 |
10 | _Base = TypeVar("_Base")
11 | _Derived = TypeVar("_Derived")
12 |
13 |
14 | class SupportsLessThan(Protocol):
15 | @abstractmethod
16 | def __lt__(self, __other: Any) -> bool:
17 | raise NotImplementedError
18 |
19 |
20 | class SupportsSum(Protocol):
21 | @abstractmethod
22 | def __radd__(self, __other: Any) -> Any:
23 | raise NotImplementedError
24 |
25 | def __add__(self, __other: Any) -> Any:
26 | raise NotImplementedError
27 |
28 |
29 | class SupportsGreaterThan(Protocol):
30 | @abstractmethod
31 | def __gt__(self, __other: Any) -> bool:
32 | raise NotImplementedError
33 |
34 |
35 | class SupportsMatch(Protocol[_T_co]):
36 | """Pattern matching protocol."""
37 |
38 | @abstractmethod
39 | def __match__(self, pattern: Any) -> Iterable[_T_co]:
40 | """Match pattern with value.
41 |
42 | Return a singleton iterable item (e.g `[ value ]`) if pattern
43 | matches value, else an empty iterable (e.g. `[]`).
44 | """
45 | raise NotImplementedError
46 |
47 |
48 | class ModelField:
49 | """Type mock to avoid taking a hard dependency on pydantic."""
50 |
51 | sub_fields: list[ModelField]
52 |
53 | def validate(self, value: Any, values: dict[str, str], loc: str) -> tuple[Any, Any]: ...
54 |
55 |
56 | def upcast(type: type[_Base], expr: _Base) -> _Base:
57 | """Upcast expression from a `Derived` to `Base`.
58 |
59 | Note: F# `:>` or `upcast`.
60 | """
61 | return expr
62 |
63 |
64 | def downcast(type: type[_Derived], expr: Any) -> _Derived:
65 | """Downcast expression `Derived` to `Base`.
66 |
67 | Checks at compile time that the type of expression Base is a
68 | supertype of Derived, and checks at runtime that Base is in fact an
69 | instance of Derived.
70 |
71 | Note: F# `:?>` or `downcast`.
72 | """
73 | assert isinstance(expr, type), f"The expression {expr} is not an instance of {type}"
74 | return expr
75 |
76 |
77 | def try_downcast(type_: type[_Derived], expr: Any) -> _Derived | None:
78 | """Downcast expression `Base` to `Derived`.
79 |
80 | Check that the `Derived` type is a supertype of `Base`.
81 |
82 | NOTE: Supports generic types.
83 |
84 | Returns:
85 | None if `expr` is not an instance of `Derived`.
86 | """
87 | origin: type[_Derived] | None = get_origin(type_) or type_
88 | if origin is not None and isinstance(expr, origin):
89 | return expr
90 |
91 | return None
92 |
93 |
94 | __all__ = [
95 | "SupportsGreaterThan",
96 | "SupportsLessThan",
97 | "SupportsMatch",
98 | "SupportsSum",
99 | "downcast",
100 | "try_downcast",
101 | "upcast",
102 | ]
103 |
--------------------------------------------------------------------------------
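A quick sketch of the casting helpers; `Base` and `Derived` are throwaway classes defined only for the example.

```python
from expression.core.typing import downcast, try_downcast, upcast


class Base: ...


class Derived(Base): ...


d = Derived()

# upcast is purely a typing aid; the object is returned unchanged.
assert upcast(Base, d) is d

# downcast asserts at runtime that the value really is a Derived.
assert downcast(Derived, d) is d

# try_downcast returns None instead of raising when the cast fails,
# and also understands generic aliases like list[int].
assert try_downcast(Derived, Base()) is None
assert try_downcast(list[int], [1, 2, 3]) == [1, 2, 3]
```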
/expression/effect/__init__.py:
--------------------------------------------------------------------------------
1 | """A collection of computational expression effects."""
2 |
3 | from .async_option import AsyncOptionBuilder as async_option
4 | from .async_result import AsyncResultBuilder as async_result
5 | from .async_result import AsyncTryBuilder as async_try
6 | from .option import OptionBuilder as option
7 | from .result import ResultBuilder as result
8 | from .result import TryBuilder as try_
9 | from .seq import SeqBuilder as seq_builder
10 |
11 |
12 | seq = seq_builder
13 |
14 |
15 | __all__ = ["async_option", "async_result", "async_try", "option", "result", "seq", "try_"]
16 |
--------------------------------------------------------------------------------
/expression/effect/async_option.py:
--------------------------------------------------------------------------------
1 | """AsyncOption builder module.
2 |
3 | The AsyncOption builder allows for composing asynchronous operations that
4 | may return an optional value, using the Option type. It's similar to the Option builder but
5 | works with async operations.
6 | """
7 |
8 | from collections.abc import AsyncGenerator, Awaitable, Callable
9 | from typing import Any, ParamSpec, TypeVar
10 |
11 | from expression.core import Nothing, Option, Some
12 | from expression.core.async_builder import AsyncBuilder
13 |
14 |
15 | _TSource = TypeVar("_TSource")
16 | _TResult = TypeVar("_TResult")
17 | _P = ParamSpec("_P")
18 |
19 |
20 | class AsyncOptionBuilder(AsyncBuilder[_TSource, Option[Any]]):
21 | """AsyncOption builder.
22 |
23 | The AsyncOption builder allows for composing asynchronous operations that
24 | may return an optional value, using the Option type.
25 | """
26 |
27 | async def bind(
28 | self,
29 | xs: Option[_TSource],
30 | fn: Callable[[Any], Awaitable[Option[_TResult]]],
31 | ) -> Option[_TResult]:
32 | """Bind a function to an async option value.
33 |
34 | In F# computation expressions, this corresponds to ``let!`` and enables
35 | sequencing of computations.
36 |
37 | Args:
38 | xs: The async option value to bind
39 | fn: The function to apply to the value if Some
40 |
41 | Returns:
42 | The result of applying fn to the value if Some, otherwise Nothing
43 | """
44 | match xs:
45 | case Option(tag="some", some=value):
46 | return await fn(value)
47 | case _:
48 | return Nothing
49 |
50 | async def return_(self, x: _TSource) -> Option[_TSource]:
51 | """Wrap a value in an async option.
52 |
53 | In F# computation expressions, this corresponds to ``return`` and lifts
54 | a value into the option context.
55 |
56 | Args:
57 | x: The value to wrap
58 |
59 | Returns:
60 | Some containing the value
61 | """
62 | return Some(x)
63 |
64 | async def return_from(self, xs: Option[_TSource]) -> Option[_TSource]:
65 | """Return an async option value directly.
66 |
67 | In F# computation expressions, this corresponds to ``return!`` and allows
68 | returning an already wrapped value.
69 |
70 | Args:
71 | xs: The async option value to return
72 |
73 | Returns:
74 | The async option value unchanged
75 | """
76 | return xs
77 |
78 | async def combine(self, xs: Option[_TSource], ys: Option[_TSource]) -> Option[_TSource]:
79 | """Combine two async option computations.
80 |
81 | In F# computation expressions, this enables sequencing multiple
82 | expressions where we only care about the final result.
83 |
84 | Args:
85 | xs: First async option computation
86 | ys: Second async option computation
87 |
88 | Returns:
89 | The second computation if first is Some, otherwise Nothing
90 | """
91 | match xs:
92 | case Option(tag="some"):
93 | return ys
94 | case _:
95 | return Nothing
96 |
97 | async def zero(self) -> Option[Any]:
98 | """Return the zero value for async options.
99 |
100 | In F# computation expressions, this is used when no value is returned,
101 | corresponding to None in F#.
102 |
103 | Returns:
104 | Nothing
105 | """
106 | return Nothing
107 |
108 | async def delay(self, fn: Callable[[], Option[_TSource]]) -> Option[_TSource]:
109 | """Delay the computation.
110 |
111 | Default implementation is to return the result of the function.
112 | """
113 | return fn()
114 |
115 | async def run(self, computation: Option[_TSource]) -> Option[_TSource]:
116 | """Run a computation.
117 |
118 | Default implementation is to return the computation as is.
119 | """
120 | return computation
121 |
122 | def __call__(
123 | self,
124 | fn: Callable[
125 | _P,
126 | AsyncGenerator[_TSource, _TSource] | AsyncGenerator[_TSource, None],
127 | ],
128 | ) -> Callable[_P, Awaitable[Option[_TSource]]]:
129 | """The builder decorator."""
130 | return super().__call__(fn)
131 |
132 |
133 | # Create singleton instance
134 | async_option: AsyncOptionBuilder[Any] = AsyncOptionBuilder()
135 |
136 |
137 | __all__ = ["AsyncOptionBuilder", "async_option"]
138 |
--------------------------------------------------------------------------------
/expression/effect/async_result.py:
--------------------------------------------------------------------------------
1 | """AsyncResult builder module.
2 |
3 | The AsyncResult builder allows for composing asynchronous operations that
4 | may fail, using the Result type. It's similar to the Result builder but
5 | works with async operations.
6 | """
7 |
8 | from collections.abc import AsyncGenerator, Awaitable, Callable
9 | from typing import Any, ParamSpec, TypeVar
10 |
11 | from expression.core import Ok, Result
12 | from expression.core.async_builder import AsyncBuilder
13 |
14 |
15 | _TSource = TypeVar("_TSource")
16 | _TResult = TypeVar("_TResult")
17 | _TError = TypeVar("_TError")
18 | _P = ParamSpec("_P")
19 |
20 |
21 | class AsyncResultBuilder(AsyncBuilder[_TSource, Result[Any, _TError]]):
22 | """AsyncResult builder.
23 |
24 | The AsyncResult builder allows for composing asynchronous operations that
25 | may fail, using the Result type.
26 | """
27 |
28 | async def bind(
29 | self,
30 | xs: Result[_TSource, _TError],
31 | fn: Callable[[Any], Awaitable[Result[_TResult, _TError]]],
32 | ) -> Result[_TResult, _TError]:
33 | """Bind a function to an async result value.
34 |
35 | In F# computation expressions, this corresponds to ``let!`` and enables
36 | sequencing of computations.
37 |
38 | Args:
39 | xs: The async result value to bind
40 | fn: The function to apply to the value if Ok
41 |
42 | Returns:
43 | The result of applying fn to the value if Ok, otherwise Error
44 | """
45 | match xs:
46 | case Result(tag="ok", ok=value):
47 | return await fn(value)
48 | case Result(error=error):
49 | return Result[_TResult, _TError].Error(error)
50 |
51 | async def return_(self, x: _TSource) -> Result[_TSource, _TError]:
52 | """Wrap a value in an async result.
53 |
54 | In F# computation expressions, this corresponds to ``return`` and lifts
55 | a value into the result context.
56 |
57 | Args:
58 | x: The value to wrap
59 |
60 | Returns:
61 | Ok containing the value
62 | """
63 | return Ok(x)
64 |
65 | async def return_from(self, xs: Result[_TSource, _TError]) -> Result[_TSource, _TError]:
66 | """Return an async result value directly.
67 |
68 | In F# computation expressions, this corresponds to ``return!`` and allows
69 | returning an already wrapped value.
70 |
71 | Args:
72 | xs: The async result value to return
73 |
74 | Returns:
75 | The async result value unchanged
76 | """
77 | return xs
78 |
79 | async def combine(self, xs: Result[_TSource, _TError], ys: Result[_TSource, _TError]) -> Result[_TSource, _TError]:
80 | """Combine two async result computations.
81 |
82 | In F# computation expressions, this enables sequencing multiple
83 | expressions where we only care about the final result.
84 |
85 | Args:
86 | xs: First async result computation
87 | ys: Second async result computation
88 |
89 | Returns:
90 | The second computation if first is Ok, otherwise Error
91 | """
92 | match xs:
93 | case Result(tag="ok", ok=_):
94 | return ys
95 | case Result(error=error):
96 | return Result[_TSource, _TError].Error(error)
97 |
98 | async def zero(self) -> Result[Any, _TError]:
99 | """Return the zero value for async results.
100 |
101 | In F# computation expressions, this is used when no value is returned,
102 | corresponding to Ok(()) in F#.
103 |
104 | Returns:
105 | Ok(None)
106 | """
107 | return Ok(None)
108 |
109 | async def delay(self, fn: Callable[[], Result[_TSource, _TError]]) -> Result[_TSource, _TError]:
110 | """Delay the computation.
111 |
112 | Default implementation is to return the result of the function.
113 | """
114 | return fn()
115 |
116 | async def run(self, computation: Result[_TSource, _TError]) -> Result[_TSource, _TError]:
117 | """Run a computation.
118 |
119 | Default implementation is to return the computation as is.
120 | """
121 | return computation
122 |
123 | def __call__(
124 | self,
125 | fn: Callable[
126 | _P,
127 | AsyncGenerator[_TSource, _TSource] | AsyncGenerator[_TSource, None],
128 | ],
129 | ) -> Callable[_P, Awaitable[Result[_TSource, _TError]]]:
130 | """The builder decorator."""
131 | return super().__call__(fn)
132 |
133 |
134 | # Create singleton instances
135 | async_result: AsyncResultBuilder[Any, Any] = AsyncResultBuilder()
136 |
137 |
138 | class AsyncTryBuilder(AsyncResultBuilder[_TSource, Exception]):
139 | """AsyncTry builder.
140 |
141 | The AsyncTry builder allows for composing asynchronous operations that
142 | may throw exceptions, using the Result type with Exception as the error type.
143 | """
144 |
145 | pass
146 |
147 |
148 | # Create singleton instance
149 | async_try: AsyncTryBuilder[Any] = AsyncTryBuilder()
150 |
151 |
152 | __all__ = ["AsyncResultBuilder", "AsyncTryBuilder", "async_result", "async_try"]
153 |
--------------------------------------------------------------------------------
/expression/effect/option.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable, Generator
2 | from typing import Any, TypeVar
3 |
4 | from typing_extensions import ParamSpec
5 |
6 | from expression.core import Builder, Nothing, Option, Some, option
7 |
8 |
9 | _TSource = TypeVar("_TSource")
10 | _TResult = TypeVar("_TResult")
11 | _P = ParamSpec("_P")
12 |
13 |
14 | class OptionBuilder(Builder[_TSource, Option[Any]]):
15 | def bind(self, xs: Option[_TSource], fn: Callable[[_TSource], Option[_TResult]]) -> Option[_TResult]:
16 | """Bind a function to an option value.
17 |
18 | In F# computation expressions, this corresponds to let! and enables
19 | sequencing of computations.
20 |
21 | Args:
22 | xs: The option value to bind
23 | fn: The function to apply to the value if Some
24 |
25 | Returns:
26 | The result of applying fn to the value if Some, otherwise Nothing
27 | """
28 | return option.bind(fn)(xs)
29 |
30 | def return_(self, x: _TSource) -> Option[_TSource]:
31 | """Wrap a value in an option.
32 |
33 | In F# computation expressions, this corresponds to return and lifts
34 | a value into the option context.
35 |
36 | Args:
37 | x: The value to wrap
38 |
39 | Returns:
40 | Some containing the value
41 | """
42 | return Some(x)
43 |
44 | def return_from(self, xs: Option[_TSource]) -> Option[_TSource]:
45 | """Return an option value directly.
46 |
47 | In F# computation expressions, this corresponds to return! and allows
48 | returning an already wrapped value.
49 |
50 | Args:
51 | xs: The option value to return
52 |
53 | Returns:
54 | The option value unchanged
55 | """
56 | return xs
57 |
58 | def combine(self, xs: Option[_TSource], ys: Option[_TSource]) -> Option[_TSource]:
59 | """Combine two option computations.
60 |
61 | In F# computation expressions, this enables sequencing multiple
62 | expressions where we only care about the final result.
63 |
64 | Args:
65 | xs: First option computation
66 | ys: Second option computation
67 |
68 | Returns:
69 | The second computation if first is Some, otherwise Nothing
70 | """
71 | return xs.bind(lambda _: ys)
72 |
73 | def zero(self) -> Option[_TSource]:
74 | """Return the zero value for options.
75 |
76 | In F# computation expressions, this is used when no value is returned,
77 | corresponding to None in F#.
78 |
79 | Returns:
80 | Nothing
81 | """
82 | return Nothing
83 |
84 | def delay(self, fn: Callable[[], Option[_TSource]]) -> Option[_TSource]:
85 | """Delay an option computation.
86 |
87 | In F# computation expressions, delay ensures proper sequencing of effects
88 | by controlling when computations are evaluated.
89 |
90 | Args:
91 | fn: The computation to delay
92 |
93 | Returns:
94 | The result of evaluating the computation
95 | """
96 | return fn()
97 |
98 | def __call__(
99 | self, # Ignored self parameter
100 | fn: Callable[
101 | _P,
102 | Generator[_TSource | None, _TSource, _TSource | None] | Generator[_TSource | None, None, _TSource | None],
103 | ],
104 | ) -> Callable[_P, Option[_TSource]]:
105 | return super().__call__(fn)
106 |
107 |
108 | __all__ = ["OptionBuilder"]
109 |
--------------------------------------------------------------------------------
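A sketch of the option builder in use. The decorator form `effect.option[int]()` assumes the top-level `expression.effect` re-export shown earlier in this package; inside the generator, `yield from` binds option values and short-circuits on `Nothing`.

```python
from expression import Nothing, Option, Some, effect


@effect.option[int]()
def workflow(x: Option[int], y: Option[int]):
    a: int = yield from x   # binds the value, or exits with Nothing
    b: int = yield from y
    return a + b


assert workflow(Some(42), Some(43)) == Some(85)
assert workflow(Some(42), Nothing) == Nothing
```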
/expression/effect/result.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable, Generator
2 | from typing import Any, TypeVar
3 |
4 | from typing_extensions import ParamSpec
5 |
6 | from expression.core import Builder, Ok, Result, pipe, result
7 |
8 |
9 | _TSource = TypeVar("_TSource")
10 | _TResult = TypeVar("_TResult")
11 | _TError = TypeVar("_TError")
12 | _P = ParamSpec("_P")
13 |
14 |
15 | class ResultBuilder(Builder[_TSource, Result[Any, _TError]]):
16 | def bind(
17 | self,
18 | xs: Result[_TSource, _TError],
19 | fn: Callable[[Any], Result[_TResult, _TError]],
20 | ) -> Result[_TResult, _TError]:
21 | """Bind a function to a result value.
22 |
23 | In F# computation expressions, this corresponds to ``let!`` and enables
24 | sequencing of computations.
25 |
26 | Args:
27 | xs: The result value to bind
28 | fn: The function to apply to the value if Ok
29 |
30 | Returns:
31 | The result of applying fn to the value if Ok, otherwise Error
32 | """
33 | return pipe(xs, result.bind(fn))
34 |
35 | def return_(self, x: _TSource) -> Result[_TSource, _TError]: # Use Any for return_ type
36 | """Wrap a value in a result.
37 |
38 | In F# computation expressions, this corresponds to ``return`` and lifts
39 | a value into the result context.
40 |
41 | Args:
42 | x: The value to wrap
43 |
44 | Returns:
45 | Ok containing the value
46 | """
47 | return Ok(x)
48 |
49 | def return_from(self, xs: Result[_TSource, _TError]) -> Result[_TSource, _TError]:
50 | """Return a result value directly.
51 |
52 | In F# computation expressions, this corresponds to ``return!`` and allows
53 | returning an already wrapped value.
54 |
55 | Args:
56 | xs: The result value to return
57 |
58 | Returns:
59 | The result value unchanged
60 | """
61 | return xs
62 |
63 | def combine(self, xs: Result[_TSource, _TError], ys: Result[_TSource, _TError]) -> Result[_TSource, _TError]:
64 | """Combine two result computations.
65 |
66 | In F# computation expressions, this enables sequencing multiple
67 | expressions where we only care about the final result.
68 |
69 | Args:
70 | xs: First result computation
71 | ys: Second result computation
72 |
73 | Returns:
74 | The second computation if first is Ok, otherwise Error
75 | """
76 | return xs.bind(lambda _: ys)
77 |
78 | def zero(self) -> Result[Any, _TError]: # Use Any for zero return type
79 | """Return the zero value for results.
80 |
81 | In F# computation expressions, this is used when no value is returned,
82 | corresponding to Ok(()) in F#.
83 |
84 | Returns:
85 | Ok(None)
86 | """
87 | return Ok(None)
88 |
89 | def __call__(
90 | self, # Ignored self parameter
91 | fn: Callable[
92 | _P,
93 | Generator[_TSource | None, _TSource, _TSource | None] | Generator[_TSource | None, None, _TSource | None],
94 | ],
95 | ) -> Callable[_P, Result[_TSource, _TError]]:
96 | return super().__call__(fn)
97 |
98 |
99 | class TryBuilder(ResultBuilder[_TSource, Exception]): ...
100 |
101 |
102 | __all__ = ["ResultBuilder", "TryBuilder"]
103 |
--------------------------------------------------------------------------------
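The result builder follows the same shape; a sketch assuming the top-level `expression` re-exports. `yield from Error(...)` short-circuits the rest of the generator.

```python
from expression import Error, Ok, effect


@effect.result[int, str]()
def divide(a: int, b: int):
    divisor: int = yield from (Ok(b) if b != 0 else Error("division by zero"))
    return a // divisor


assert divide(10, 2) == Ok(5)
assert divide(10, 0) == Error("division by zero")
```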
/expression/effect/seq.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable, Generator, Iterable
2 | from typing import Any, TypeVar
3 |
4 | from expression.collections import seq
5 | from expression.core import Builder
6 |
7 |
8 | _TSource = TypeVar("_TSource")
9 | _TResult = TypeVar("_TResult")
10 |
11 |
12 | class SeqBuilder(Builder[_TSource, Iterable[Any]]):
13 | def bind(self, xs: Iterable[_TSource], fn: Callable[[_TSource], Iterable[_TResult]]) -> Iterable[_TResult]:
14 | """Bind a function to a sequence value.
15 |
16 | In F# computation expressions, this corresponds to let! and enables
17 | sequencing of computations.
18 |
19 | Args:
20 | xs: The sequence value to bind
21 | fn: The function to apply to each value in the sequence
22 |
23 | Returns:
24 | The concatenated results of applying fn to each value in the sequence
25 | """
26 | # We do this eagerly to have progress in the sequence
27 | return [y for x in xs for y in fn(x)]
28 |
29 | def return_(self, x: _TSource) -> Iterable[_TSource]:
30 | """Wrap a value in a sequence.
31 |
32 | Args:
33 | x: The value to wrap
34 |
35 | Returns:
36 | A singleton sequence containing the value
37 | """
38 | return seq.singleton(x)
39 |
40 | def return_from(self, xs: Iterable[_TSource]) -> Iterable[_TSource]:
41 | """Return a sequence value directly.
42 |
43 | Args:
44 | xs: The sequence value to return
45 |
46 | Returns:
47 | The sequence value unchanged
48 | """
49 | return xs
50 |
51 | def combine(self, xs: Iterable[_TSource], ys: Iterable[_TSource]) -> Iterable[_TSource]:
52 | """Combine two sequence computations.
53 |
54 | Args:
55 | xs: First sequence computation
56 | ys: Second sequence computation
57 |
58 | Returns:
59 | The concatenated sequences
60 | """
61 | return seq.concat(xs, ys)
62 |
63 | def zero(self) -> Iterable[_TSource]:
64 | """Return the zero value for sequences.
65 |
66 | Returns:
67 | An empty sequence
68 | """
69 | return seq.empty
70 |
71 | def delay(self, fn: Callable[[], Iterable[_TSource]]) -> Iterable[_TSource]:
72 | """Delay the computation.
73 |
74 | Returns a sequence that is built from the given delayed specification of a
75 | sequence. The input function is evaluated each time an iterator for the sequence
76 | is requested.
77 |
78 | Args:
79 | fn: The generating function for the sequence.
80 |
81 | Returns:
82 | A sequence that will evaluate the function when iterated.
83 | """
84 | return fn()
85 |
86 | def run(self, computation: Iterable[_TSource]) -> Iterable[_TSource]:
87 | """Run a computation.
88 |
89 | In Python, the return value should be included in the sequence
90 | as it's essentially a "final yield".
91 |
92 | Args:
93 | computation: The computation to run
94 |
95 | Returns:
96 | The result of the computation including the return value
97 | """
98 | # Simply return the computation as is, including the return value
99 | return computation
100 |
101 | def __call__(
102 | self,
103 | fn: Callable[
104 | ...,
105 | Generator[_TSource | None, _TSource, _TSource | None] | Generator[_TSource | None, None, _TSource | None],
106 | ],
107 | ) -> Callable[..., Iterable[_TSource]]:
108 | return super().__call__(fn)
109 |
110 |
111 | __all__ = ["SeqBuilder"]
112 |
--------------------------------------------------------------------------------
/expression/extra/__init__.py:
--------------------------------------------------------------------------------
1 | """Contains extra functionality for core modules.
2 |
3 | Pipelining, i.e. Kleisli composition, of result- or option-returning
4 | functions.
5 | """
6 |
7 | from . import option, result
8 |
9 |
10 | __all__ = ["option", "result"]
11 |
--------------------------------------------------------------------------------
/expression/extra/option/__init__.py:
--------------------------------------------------------------------------------
1 | """Extra option functions."""
2 |
3 | from .pipeline import pipeline
4 |
5 |
6 | __all__ = ["pipeline"]
7 |
--------------------------------------------------------------------------------
/expression/extra/option/pipeline.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from functools import reduce
3 | from typing import Any, TypeVar, overload
4 |
5 | from expression.core import Option, Some
6 |
7 |
8 | _A = TypeVar("_A")
9 | _B = TypeVar("_B")
10 | _C = TypeVar("_C")
11 | _D = TypeVar("_D")
12 | _E = TypeVar("_E")
13 | _F = TypeVar("_F")
14 | _G = TypeVar("_G")
15 | _TError = TypeVar("_TError")
16 |
17 |
18 | @overload
19 | def pipeline() -> Callable[[_A], Option[_A]]: ...
20 |
21 |
22 | @overload
23 | def pipeline(__fn: Callable[[_A], Option[_B]]) -> Callable[[_A], Option[_B]]: ...
24 |
25 |
26 | @overload
27 | def pipeline(__fn1: Callable[[_A], Option[_B]], __fn2: Callable[[_B], Option[_C]]) -> Callable[[_A], Option[_C]]: ...
28 |
29 |
30 | @overload
31 | def pipeline(
32 | __fn1: Callable[[_A], Option[_B]],
33 | __fn2: Callable[[_B], Option[_C]],
34 | __fn3: Callable[[_C], Option[_D]],
35 | ) -> Callable[[_A], Option[_D]]: ...
36 |
37 |
38 | @overload
39 | def pipeline(
40 | __fn1: Callable[[_A], Option[_B]],
41 | __fn2: Callable[[_B], Option[_C]],
42 | __fn3: Callable[[_C], Option[_D]],
43 | __fn4: Callable[[_D], Option[_E]],
44 | ) -> Callable[[_A], Option[_E]]: ...
45 |
46 |
47 | @overload
48 | def pipeline(
49 | __fn1: Callable[[_A], Option[_B]],
50 | __fn2: Callable[[_B], Option[_C]],
51 | __fn3: Callable[[_C], Option[_D]],
52 | __fn4: Callable[[_D], Option[_E]],
53 | __fn5: Callable[[_E], Option[_F]],
54 | ) -> Callable[[_A], Option[_F]]: ...
55 |
56 |
57 | @overload
58 | def pipeline(
59 | __fn1: Callable[[_A], Option[_B]],
60 | __fn2: Callable[[_B], Option[_C]],
61 | __fn3: Callable[[_C], Option[_D]],
62 | __fn4: Callable[[_D], Option[_E]],
63 | __fn5: Callable[[_E], Option[_F]],
64 | __fn6: Callable[[_F], Option[_G]],
65 | ) -> Callable[[_A], Option[_G]]: ...
66 |
67 |
68 | def pipeline(*fns: Callable[[Any], Option[Any]]) -> Callable[[Any], Option[Any]]:
69 | """Pipeline multiple option returning functions left to right.
70 |
71 | A Kleisli pipeline (>=>) composes zero or more functions into a
72 | functional composition. The functions are composed left to right. A
73 | composition of zero functions gives back the identity function.
74 |
75 | >>> pipeline()(x) == Some(x)
76 | >>> pipeline(f)(x) == f(x)
77 | >>> pipeline(f, g)(x) == f(x).bind(g)
78 | >>> pipeline(f, g, h)(x) == f(x).bind(g).bind(h)
79 | ...
80 |
81 | Returns:
82 | The composed functions.
83 | """
84 |
85 | def reducer(acc: Callable[[Any], Option[Any]], fn: Callable[[Any], Option[Any]]) -> Callable[[Any], Option[Any]]:
86 | def gn(x: Any) -> Option[Any]:
87 | return acc(x).bind(fn)
88 |
89 | return gn
90 |
91 | return reduce(reducer, fns, Some)
92 |
93 |
94 | __all__ = ["pipeline"]
95 |
--------------------------------------------------------------------------------
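A concrete sketch of the doctest above; `parse` and `half` are made-up option-returning steps.

```python
from expression import Nothing, Option, Some
from expression.extra.option import pipeline


def parse(text: str) -> Option[int]:
    return Some(int(text)) if text.isdigit() else Nothing


def half(n: int) -> Option[int]:
    return Some(n // 2) if n % 2 == 0 else Nothing


workflow = pipeline(parse, half)

assert workflow("42") == Some(21)
assert workflow("43") == Nothing      # half fails on odd numbers
assert workflow("nope") == Nothing    # parse fails, half is skipped
assert pipeline()(42) == Some(42)     # zero functions lift into Some
```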
/expression/extra/result/__init__.py:
--------------------------------------------------------------------------------
1 | """Extra result functions."""
2 |
3 | from .catch import catch
4 | from .pipeline import pipeline
5 | from .traversable import sequence, traverse
6 |
7 |
8 | __all__ = ["catch", "pipeline", "sequence", "traverse"]
9 |
--------------------------------------------------------------------------------
/expression/extra/result/catch.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from collections.abc import Callable
4 | from functools import wraps
5 | from typing import Any, TypeVar, cast, overload
6 |
7 | from expression.core import Error, Ok, Result
8 |
9 |
10 | _TSource = TypeVar("_TSource")
11 | _TError = TypeVar("_TError", bound=Exception)
12 | _TError_ = TypeVar("_TError_", bound=Exception)
13 |
14 |
15 | @overload
16 | def catch(
17 | exception: type[_TError_],
18 | ) -> Callable[
19 | [Callable[..., _TSource | Result[_TSource, _TError]]],
20 | Callable[..., Result[_TSource, _TError | _TError_]],
21 | ]: ...
22 |
23 |
24 | @overload
25 | def catch(f: Callable[..., _TSource], *, exception: type[_TError]) -> Callable[..., Result[_TSource, _TError]]: ...
26 |
27 |
28 | def catch( # type: ignore
29 | f: Callable[..., _TSource] | None = None, *, exception: type[_TError]
30 | ) -> Callable[
31 | [Callable[..., _TSource]],
32 | Callable[..., Result[_TSource, _TError]] | Result[_TSource, _TError],
33 | ]:
34 | def decorator(fn: Callable[..., _TSource]) -> Callable[..., Result[_TSource, _TError]]:
35 | @wraps(fn)
36 | def wrapper(*args: Any, **kwargs: Any) -> Result[_TSource, _TError]:
37 | try:
38 | out = fn(*args, **kwargs)
39 | except exception as exn:
40 | return Error(cast(_TError, exn))
41 | else:
42 | if isinstance(out, Result):
43 | return cast(Result[_TSource, _TError], out)
44 |
45 | return Ok(out)
46 |
47 | return wrapper
48 |
49 | if f is not None:
50 | return decorator(f)
51 |
52 | return decorator
53 |
54 |
55 | __all__ = ["catch"]
56 |
--------------------------------------------------------------------------------
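A sketch of the `catch` decorator; note that the exception type is passed by keyword, matching the implementation signature above. `parse` is a made-up example function.

```python
from expression import Ok
from expression.extra.result import catch


@catch(exception=ValueError)
def parse(text: str) -> int:
    return int(text)


assert parse("42") == Ok(42)

failed = parse("oops")
assert failed.is_error()  # the raised ValueError is wrapped in Error(...)
```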
/expression/extra/result/pipeline.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from functools import reduce
3 | from typing import Any, TypeVar, overload
4 |
5 | from expression.core.result import Ok, Result
6 |
7 |
8 | _A = TypeVar("_A")
9 | _B = TypeVar("_B")
10 | _C = TypeVar("_C")
11 | _D = TypeVar("_D")
12 | _E = TypeVar("_E")
13 | _F = TypeVar("_F")
14 | _G = TypeVar("_G")
15 | _TError = TypeVar("_TError")
16 |
17 |
18 | @overload
19 | def pipeline() -> Callable[[Any], Result[Any, Any]]: ...
20 |
21 |
22 | @overload
23 | def pipeline(__fn: Callable[[_A], Result[_B, _TError]]) -> Callable[[_A], Result[_B, _TError]]: ...
24 |
25 |
26 | @overload
27 | def pipeline(
28 | __fn1: Callable[[_A], Result[_B, _TError]],
29 | __fn2: Callable[[_B], Result[_C, _TError]],
30 | ) -> Callable[[_A], Result[_C, _TError]]: ...
31 |
32 |
33 | @overload
34 | def pipeline(
35 | __fn1: Callable[[_A], Result[_B, _TError]],
36 | __fn2: Callable[[_B], Result[_C, _TError]],
37 | __fn3: Callable[[_C], Result[_D, _TError]],
38 | ) -> Callable[[_A], Result[_D, _TError]]: ...
39 |
40 |
41 | @overload
42 | def pipeline(
43 | __fn1: Callable[[_A], Result[_B, _TError]],
44 | __fn2: Callable[[_B], Result[_C, _TError]],
45 | __fn3: Callable[[_C], Result[_D, _TError]],
46 | __fn4: Callable[[_D], Result[_E, _TError]],
47 | ) -> Callable[[_A], Result[_E, _TError]]: ...
48 |
49 |
50 | @overload
51 | def pipeline(
52 | __fn1: Callable[[_A], Result[_B, _TError]],
53 | __fn2: Callable[[_B], Result[_C, _TError]],
54 | __fn3: Callable[[_C], Result[_D, _TError]],
55 | __fn4: Callable[[_D], Result[_E, _TError]],
56 | __fn5: Callable[[_E], Result[_F, _TError]],
57 | ) -> Callable[[_A], Result[_F, _TError]]: ...
58 |
59 |
60 | @overload
61 | def pipeline(
62 | __fn1: Callable[[_A], Result[_B, _TError]],
63 | __fn2: Callable[[_B], Result[_C, _TError]],
64 | __fn3: Callable[[_C], Result[_D, _TError]],
65 | __fn4: Callable[[_D], Result[_E, _TError]],
66 | __fn5: Callable[[_E], Result[_F, _TError]],
67 | __fn6: Callable[[_F], Result[_G, _TError]],
68 | ) -> Callable[[_A], Result[_G, _TError]]: ...
69 |
70 |
71 | def pipeline(*fns: Callable[[Any], Result[Any, Any]]) -> Callable[[Any], Result[Any, Any]]:
72 | """Pipeline multiple result returning functions left to right.
73 |
74 | A Kleisli pipeline (>=>) composes zero or more functions into a
75 | functional composition. The functions are composed left to right. A
76 | composition of zero functions gives back the identity function.
77 |
78 | >>> pipeline()(x) == Ok(x)
79 | >>> pipeline(f)(x) == f(x)
80 | >>> pipeline(f, g)(x) == f(x).bind(g)
81 | >>> pipeline(f, g, h)(x) == f(x).bind(g).bind(h)
82 | ...
83 |
84 | Returns:
85 | The composed functions.
86 | """
87 |
88 | def reducer(
89 | acc: Callable[[Any], Result[Any, Any]], fn: Callable[[Any], Result[Any, Any]]
90 | ) -> Callable[[Any], Result[Any, Any]]:
91 | def gn(x: Any) -> Result[Any, Any]:
92 | return acc(x).bind(fn)
93 |
94 | return gn
95 |
96 | return reduce(reducer, fns, Ok)
97 |
98 |
99 | __all__ = ["pipeline"]
100 |
--------------------------------------------------------------------------------
/expression/extra/result/traversable.py:
--------------------------------------------------------------------------------
1 | """Data structures that can be traversed from left to right, performing an action on each element."""
2 |
3 | from collections.abc import Callable
4 | from typing import Any, TypeVar
5 |
6 | from expression import effect
7 | from expression.collections import Block, block, seq
8 | from expression.core import Ok, Result, identity, pipe
9 |
10 |
11 | _TSource = TypeVar("_TSource")
12 | _TResult = TypeVar("_TResult")
13 | _TError = TypeVar("_TError")
14 |
15 |
16 | def traverse(
17 | fn: Callable[[_TSource], Result[_TResult, _TError]], lst: Block[_TSource]
18 | ) -> Result[Block[_TResult], _TError]:
19 | """Traverses a list of items.
20 |
21 | Threads an applicative computation through a list of items.
22 | """
23 |
24 | @effect.result[Block[_TResult], _TError]()
25 | def folder(head: _TSource, tail: Result[Block[_TResult], _TError]) -> Any:
26 | """Fold back function.
27 |
28 | Same as:
29 | >>> fn(head).bind(lambda head: tail.bind(lambda tail: Ok([head] + tail))).
30 | """
31 | h: _TResult = yield from fn(head)
32 | t: Block[_TResult] = yield from tail
33 |
34 | return Block([h]) + t
35 |
36 | state: Result[Block[_TResult], _TError] = Ok(block.empty)
37 | ret = pipe(
38 | state,
39 | seq.fold_back(folder, lst),
40 | )
41 | return ret
42 |
43 |
44 | def sequence(lst: Block[Result[_TSource, _TError]]) -> Result[Block[_TSource], _TError]:
45 | """Sequence block.
46 |
47 | Execute a sequence of result returning commands and collect the
48 | sequence of their response.
49 | """
50 | return traverse(identity, lst)
51 |
--------------------------------------------------------------------------------
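A sketch of `traverse` and `sequence` over a `Block`; `parse` is a made-up result-returning function and `Block.of_seq` is assumed from the collections package.

```python
from expression import Error, Ok, Result
from expression.collections import Block
from expression.extra.result import sequence, traverse


def parse(text: str) -> Result[int, str]:
    return Ok(int(text)) if text.isdigit() else Error(f"not a number: {text}")


ok = traverse(parse, Block.of_seq(["1", "2", "3"]))
assert ok == Ok(Block.of_seq([1, 2, 3]))

# The first failing element turns the whole traversal into an Error.
failed = traverse(parse, Block.of_seq(["1", "oops", "3"]))
assert failed == Error("not a number: oops")

assert sequence(Block.of_seq([Ok(1), Ok(2)])) == Ok(Block.of_seq([1, 2]))
```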
/expression/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbrattli/Expression/3981fd3a3ad60a13d00a4a3c170bdac76c1943fe/expression/py.typed
--------------------------------------------------------------------------------
/expression/system/__init__.py:
--------------------------------------------------------------------------------
1 | """The system module.
2 |
3 | Contains tools and utilities for dealing with (async) disposables and
4 | cancellation tokens.
5 | """
6 |
7 | from .cancellation import CancellationToken, CancellationTokenSource
8 | from .disposable import (
9 | AnonymousDisposable,
10 | AsyncAnonymousDisposable,
11 | AsyncCompositeDisposable,
12 | AsyncDisposable,
13 | Disposable,
14 | )
15 | from .error import ObjectDisposedException, OperationCanceledError
16 |
17 |
18 | __all__ = [
19 | "AnonymousDisposable",
20 | "AsyncAnonymousDisposable",
21 | "AsyncCompositeDisposable",
22 | "AsyncDisposable",
23 | "CancellationToken",
24 | "CancellationTokenSource",
25 | "Disposable",
26 | "ObjectDisposedException",
27 | "OperationCanceledError",
28 | ]
29 |
--------------------------------------------------------------------------------
/expression/system/cancellation.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from collections.abc import Callable
4 | from threading import RLock
5 |
6 | from .disposable import Disposable
7 | from .error import ObjectDisposedException
8 |
9 |
10 | class CancellationToken:
11 | """CancellationToken class.
12 |
13 | A `CancellationToken` enables cooperative cancellation between
14 | threads, thread pool work items, or Task objects. You create a
15 | cancellation token by instantiating a `CancellationTokenSource`
16 | object, which manages cancellation tokens retrieved from its
17 | CancellationTokenSource.Token property. You then pass the
18 | cancellation token to any number of threads, tasks, or operations
19 | that should receive notice of cancellation. The token cannot be used
20 | to initiate cancellation. When the owning object calls
21 | `CancellationTokenSource.cancel`, the `is_cancellation_requested`
22 | property on every copy of the cancellation token is set to true. The
23 | objects that receive the notification can respond in whatever manner
24 | is appropriate.
25 | """
26 |
27 | def __init__(self, cancelled: bool = True, source: CancellationTokenSource | None = None) -> None:
28 | """The init function.
29 |
30 | Should not be used directly. Create cancellation tokens using
31 | the `CancellationTokenSource` instead.
32 | """
33 | self._cancelled = cancelled
34 | self._source = CancellationTokenSource.cancelled_source() if source is None else source
35 |
36 | @property
37 | def is_cancellation_requested(self) -> bool:
38 | return not self._cancelled and self._source.is_cancellation_requested
39 |
40 | @property
41 | def can_be_canceled(self) -> bool:
42 | return not self._cancelled
43 |
44 | def throw_if_cancellation_requested(self):
45 | if self.is_cancellation_requested:
46 | raise ObjectDisposedException()
47 |
48 | def register(self, callback: Callable[[], None]) -> Disposable:
49 | return self._source.register_internal(callback)
50 |
51 | @staticmethod
52 | def none():
53 | return CancellationToken(True, None)
54 |
55 |
56 | class CancellationTokenSource(Disposable):
57 | def __init__(self):
58 | self._is_disposed = False
59 | self._lock = RLock()
60 | self._listeners: dict[int, Callable[[], None]] = dict()
61 | self._id = 0
62 |
63 | @property
64 | def token(self) -> CancellationToken:
65 | return CancellationToken(False, self)
66 |
67 | @property
68 | def is_cancellation_requested(self) -> bool:
69 | return self._is_disposed
70 |
71 | def cancel(self) -> None:
72 | self.dispose()
73 |
74 | def dispose(self) -> None:
75 | """Performs the task of cleaning up resources."""
76 | listeners: list[Callable[[], None]] = []
77 | with self._lock:
78 | if not self._is_disposed:
79 | self._is_disposed = True
80 | listeners.extend(self._listeners.values())
81 |
82 | for listener in listeners:
83 | try:
84 | listener()
85 | except Exception:
86 | pass
87 |
88 | def register_internal(self, callback: Callable[[], None]) -> Disposable:
89 | if self._is_disposed:
90 | raise ObjectDisposedException()
91 |
92 | with self._lock:
93 | current = self._id
94 | self._listeners[current] = callback
95 | self._id += 1
96 |
97 | def dispose():
98 | with self._lock:
99 | del self._listeners[current]
100 |
101 | return Disposable.create(dispose)
102 |
103 | def __enter__(self) -> Disposable:
104 | if self._is_disposed:
105 | raise ObjectDisposedException()
106 | return self
107 |
108 | @staticmethod
109 | def cancelled_source() -> CancellationTokenSource:
110 | source = CancellationTokenSource()
111 | source.cancel()
112 | return source
113 |
--------------------------------------------------------------------------------
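
A minimal usage sketch of the cancellation primitives above, mirroring what tests/test_cancellation.py exercises further down: the `CancellationTokenSource` owns cancellation, while its tokens only observe it.

from expression.system import CancellationTokenSource, ObjectDisposedException

source = CancellationTokenSource()
token = source.token

# Callbacks registered on the token run when the source is cancelled;
# disposing the returned registration unregisters the callback again.
registration = token.register(lambda: print("cancelled"))

assert token.can_be_canceled
assert not token.is_cancellation_requested

source.cancel()  # equivalent to leaving a `with source:` block
assert token.is_cancellation_requested

try:
    token.throw_if_cancellation_requested()  # cooperative cancellation point
except ObjectDisposedException:
    pass
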
/expression/system/disposable.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from abc import ABC, abstractmethod
4 | from asyncio import iscoroutinefunction
5 | from collections.abc import Awaitable, Callable
6 | from threading import RLock
7 | from types import TracebackType
8 |
9 | from .error import ObjectDisposedException
10 |
11 |
12 | class Disposable(ABC):
13 | """The disposable class.
14 |
15 |     A disposable resource that doubles as a context manager. Subclasses
16 |     must implement `dispose`, which is called automatically on exit.
17 | """
18 |
19 | @abstractmethod
20 | def dispose(self) -> None:
21 | raise NotImplementedError
22 |
23 | def __enter__(self) -> Disposable:
24 | """Enter context management."""
25 | return self
26 |
27 | def __exit__(
28 | self,
29 | exctype: type[BaseException] | None,
30 | excinst: BaseException | None,
31 | exctb: TracebackType | None,
32 | ):
33 | """Exit context management."""
34 | self.dispose()
35 | return False
36 |
37 | @staticmethod
38 | def create(action: Callable[[], None]):
39 | """Create disposable.
40 |
41 | Create disposable from action. Will call action when disposed.
42 | """
43 | return AnonymousDisposable(action)
44 |
45 |
46 | class AnonymousDisposable(Disposable):
47 | def __init__(self, action: Callable[[], None]):
48 | self._is_disposed = False
49 | self._action = action
50 | self._lock = RLock()
51 |
52 | def dispose(self) -> None:
53 | """Performs the task of cleaning up resources."""
54 | dispose = False
55 | with self._lock:
56 | if not self._is_disposed:
57 | dispose = True
58 | self._is_disposed = True
59 |
60 | if dispose:
61 | self._action()
62 |
63 | def __enter__(self) -> Disposable:
64 | if self._is_disposed:
65 | raise ObjectDisposedException()
66 | return self
67 |
68 |
69 | class AsyncDisposable(ABC):
70 | """The async disposable.
71 |
72 |     An async disposable resource that doubles as an async context manager.
73 |     Subclasses must implement `dispose_async`, which is awaited on exit.
74 | """
75 |
76 | @abstractmethod
77 | async def dispose_async(self) -> None:
78 | raise NotImplementedError
79 |
80 | async def __aenter__(self) -> AsyncDisposable:
81 | """Enter context management."""
82 | return self
83 |
84 | async def __aexit__(
85 | self,
86 | exctype: type[BaseException] | None,
87 | excinst: BaseException | None,
88 | exctb: TracebackType | None,
89 | ) -> None:
90 | """Exit context management."""
91 | await self.dispose_async()
92 |
93 | @staticmethod
94 | def create(action: Callable[[], Awaitable[None]]) -> AsyncDisposable:
95 | return AsyncAnonymousDisposable(action)
96 |
97 | @staticmethod
98 | def composite(*disposables: AsyncDisposable) -> AsyncDisposable:
99 | return AsyncCompositeDisposable(*disposables)
100 |
101 | @staticmethod
102 | def empty() -> AsyncDisposable:
103 | async def anoop() -> None:
104 | pass
105 |
106 | return AsyncAnonymousDisposable(anoop)
107 |
108 |
109 | class AsyncAnonymousDisposable(AsyncDisposable):
110 | def __init__(self, action: Callable[[], Awaitable[None]]) -> None:
111 | assert iscoroutinefunction(action)
112 | self._is_disposed = False
113 | self._action = action
114 |
115 | async def dispose_async(self) -> None:
116 | if self._is_disposed:
117 | return
118 |
119 | self._is_disposed = True
120 | await self._action()
121 |
122 | async def __aenter__(self) -> AsyncDisposable:
123 | if self._is_disposed:
124 | raise ObjectDisposedException()
125 | return self
126 |
127 |
128 | class AsyncCompositeDisposable(AsyncDisposable):
129 | def __init__(self, *disposables: AsyncDisposable) -> None:
130 | self._disposables = disposables
131 |
132 | async def dispose_async(self) -> None:
133 | for disposable in self._disposables:
134 | await disposable.dispose_async()
135 |
136 |
137 | __all__ = [
138 | "AnonymousDisposable",
139 | "AsyncAnonymousDisposable",
140 | "AsyncCompositeDisposable",
141 | "AsyncDisposable",
142 | "Disposable",
143 | ]
144 |
--------------------------------------------------------------------------------
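
A short sketch of how the disposables above are used: both variants are single-shot (the wrapped action runs at most once), both double as context managers, and `AsyncDisposable.composite` disposes its children in order.

import asyncio

from expression.system import AsyncDisposable, Disposable

calls: list[str] = []

with Disposable.create(lambda: calls.append("sync")):
    pass  # the action runs on __exit__
assert calls == ["sync"]


async def main() -> None:
    async def clean_up() -> None:
        calls.append("async")

    composite = AsyncDisposable.composite(AsyncDisposable.create(clean_up), AsyncDisposable.empty())
    async with composite:
        pass  # dispose_async is awaited on __aexit__


asyncio.run(main())
assert calls == ["sync", "async"]
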
/expression/system/error.py:
--------------------------------------------------------------------------------
1 | class ObjectDisposedException(Exception):
2 | def __init__(self):
3 | super().__init__("Cannot access a disposed object")
4 |
5 |
6 | class OperationCanceledError(Exception):
7 | pass
8 |
--------------------------------------------------------------------------------
/make_readme.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | jupytext --to markdown --pipe-fmt markdown --pipe "sed '//,//d'" README.py -o README.md
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "Expression"
3 | version = "0.0.0" # NOTE: will be updated by publish script
4 | description = "Practical functional programming for Python 3.10+"
5 | readme = "README.md"
6 | authors = ["Dag Brattli "]
7 | license = "MIT License"
8 | homepage = "https://github.com/cognitedata/Expression"
9 | classifiers = [
10 | "Development Status :: 5 - Production/Stable",
11 | "Environment :: Other Environment",
12 | "Intended Audience :: Developers",
13 | "License :: OSI Approved :: MIT License",
14 | "Operating System :: OS Independent",
15 | "Programming Language :: Python :: 3",
16 | "Programming Language :: Python :: 3.10",
17 | "Programming Language :: Python :: 3.11",
18 | "Programming Language :: Python :: 3.12",
19 | "Programming Language :: Python :: Implementation :: CPython",
20 | "Topic :: Software Development :: Libraries :: Python Modules",
21 | ]
22 |
23 | [tool.poetry.dependencies]
24 | python = ">= 3.10, < 4"
25 | typing-extensions = ">=4.6.0"
26 |
27 | pydantic = {version = "^2.6.2", optional = true}
28 |
29 | [tool.poetry.group.dev.dependencies]
30 | pytest-asyncio = "^0.25.0"
31 | pytest = "^8.3.3"
32 | coverage = "^6.4.3"
33 | coveralls = "^3.3.1"
34 | pre-commit = "^4.0.1"
35 | dunamai = "^1.12.0"
36 | hypothesis = "^6.54.2"
37 | ruff = "^0.9.0"
38 | jupytext = "^1.16.1"
39 |
40 |
41 | [tool.poetry.extras]
42 | pydantic = ["pydantic"]
43 | all = ["pydantic"]
44 |
45 |
46 | [tool.ruff]
47 | # Keep in sync with .pre-commit-config.yaml
48 | line-length = 120
49 | target-version = "py310"
50 | # D100: Missing docstring in public module
51 | # D104: Missing docstring in public package
52 | # D105: Missing docstring in magic method
53 | lint.ignore = ["D100", "D101", "D102", "D103", "D105", "D107"]
54 | lint.select = ["D", "E", "W", "F", "I", "T", "RUF", "TID", "UP"]
55 | exclude = ["tests", "docs"]
56 |
57 | [tool.ruff.lint.pydocstyle]
58 | convention = "google"
59 |
60 | [tool.ruff.lint.isort]
61 | lines-after-imports = 2
62 | known-third-party = ["pytest"]
63 |
64 | [tool.pytest.ini_options]
65 | testpaths = ["tests"]
66 | asyncio_mode = "strict"
67 |
68 | [build-system]
69 | requires = ["poetry-core>=1.0.0"]
70 | build-backend = "poetry.core.masonry.api"
71 |
--------------------------------------------------------------------------------
/pyrightconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": [
3 | "expression",
4 | "tests",
5 | "README.py"
6 | ],
7 | "reportImportCycles": false,
8 | "reportMissingImports": false,
9 | "pythonVersion": "3.10",
10 | "typeCheckingMode": "strict",
11 | "reportShadowedImports": "none",
12 | "venv": ".venv",
13 | "venvPath": "."
14 | }
15 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:base"
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbrattli/Expression/3981fd3a3ad60a13d00a4a3c170bdac76c1943fe/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_asyncseq.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import pytest
4 | from hypothesis import given # type: ignore
5 | from hypothesis import strategies as st
6 |
7 | from expression.collections.asyncseq import AsyncSeq
8 |
9 |
10 | @pytest.mark.asyncio
11 | async def test_asyncseq_empty():
12 | xs = AsyncSeq.empty()
13 | async for _ in xs:
14 | assert False
15 |
16 |
17 | @given(st.integers(min_value=0, max_value=100))
18 | def test_asyncseq_range(count: int):
19 | acc = 0
20 |
21 | async def runner():
22 | nonlocal acc
23 |
24 | xs = AsyncSeq.range(count)
25 | async for x in xs:
26 | acc += x
27 |
28 | asyncio.run(runner())
29 | assert acc == sum(range(count))
30 |
31 |
32 | @given(st.integers(min_value=0, max_value=100))
33 | def test_asyncseq_map(count: int):
34 | acc = 0
35 |
36 | async def runner():
37 | nonlocal acc
38 | xs = AsyncSeq.range(count)
39 | async for x in xs:
40 | acc += x
41 |
42 | asyncio.run(runner())
43 | assert acc == sum(range(count))
44 |
--------------------------------------------------------------------------------
/tests/test_cancellation.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from expression.system import (
4 | CancellationToken,
5 | CancellationTokenSource,
6 | ObjectDisposedException,
7 | )
8 | from expression.system.disposable import Disposable
9 |
10 |
11 | def test_token_none_works():
12 | token = CancellationToken.none()
13 | assert isinstance(token, CancellationToken)
14 | assert not token.can_be_canceled
15 | assert not token.is_cancellation_requested
16 | token.throw_if_cancellation_requested()
17 |
18 |
19 | def test_token_source_works():
20 | source = CancellationTokenSource()
21 | assert not source.is_cancellation_requested
22 |
23 | with source as disp:
24 | assert isinstance(disp, Disposable)
25 |
26 |
27 | def test_token_cancelled_source_works():
28 | source = CancellationTokenSource.cancelled_source()
29 | assert isinstance(source, CancellationTokenSource)
30 | assert source.is_cancellation_requested
31 |
32 | with pytest.raises(ObjectDisposedException):
33 | with source as disposable:
34 | assert not disposable
35 |
36 |
37 | def test_token_cancellation_works():
38 | source = CancellationTokenSource()
39 | with source:
40 | token = source.token
41 | token.throw_if_cancellation_requested()
42 |
43 | assert token.can_be_canceled
44 | assert not token.is_cancellation_requested
45 |
46 | assert token.is_cancellation_requested
47 | with pytest.raises(ObjectDisposedException):
48 | token.throw_if_cancellation_requested()
49 |
50 |
51 | def test_token_disposing_works():
52 | source = CancellationTokenSource()
53 | with source as disposable:
54 | token = source.token
55 | disposable.dispose()
56 |
57 | assert token.is_cancellation_requested
58 |
59 | with pytest.raises(ObjectDisposedException):
60 | token.throw_if_cancellation_requested()
61 |
62 |
63 | def test_token_cancellation_register_works():
64 | called: list[bool] = []
65 | source = CancellationTokenSource()
66 | with source:
67 | token = source.token
68 | token.register(lambda: called.append(True))
69 | assert not called
70 |
71 | assert called
72 |
73 |
74 | def test_token_cancellation_register_unregister_works():
75 | called: list[bool] = []
76 | source = CancellationTokenSource()
77 | with source as _:
78 | token = source.token
79 | registration = token.register(lambda: called.append(True))
80 | assert not called
81 | registration.dispose()
82 |
83 | assert not called
84 |
85 |
86 | def test_token_cancelled_register_throws():
87 | called: list[bool] = []
88 | source = CancellationTokenSource.cancelled_source()
89 |
90 | with pytest.raises(ObjectDisposedException):
91 | with source:
92 | token = source.token
93 | token.register(lambda: called.append(True))
94 |
95 | assert not called
96 |
--------------------------------------------------------------------------------
/tests/test_catch.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Generator
2 | from typing import Any
3 |
4 | import pytest
5 |
6 | from expression import Error, Ok, Result, effect
7 | from expression.extra.result import catch
8 |
9 |
10 | def test_catch_wraps_ok():
11 | @catch(exception=ValueError)
12 | def add(a: int, b: int) -> Any:
13 | return a + b
14 |
15 | assert add(3, 4) == Ok(7)
16 |
17 |
18 | def test_catch_wraps_error():
19 | @catch(exception=ValueError)
20 | def fn() -> Any:
21 | raise ValueError("error")
22 |
23 | result = fn()
24 | match result:
25 | case Result(tag="error", error=ex):
26 | assert isinstance(ex, ValueError)
27 | assert str(ex) == "error"
28 |
29 | case _:
30 | assert False
31 |
32 |
33 | def test_catch_ignores_other_exceptions():
34 | @catch(exception=KeyError)
35 | def fn(ex: Exception) -> Result[int, Exception]:
36 | raise ex
37 |
38 | with pytest.raises(ValueError):
39 | fn(ValueError("error"))
40 |
41 | with pytest.raises(RuntimeError):
42 | fn(RuntimeError("error"))
43 |
44 |
45 | def test_catch_chained():
46 | @catch(exception=KeyError)
47 | @catch(exception=ValueError)
48 | def fn(ex: Exception) -> Any:
49 | raise ex
50 |
51 | result = fn(ValueError("error"))
52 | match result:
53 | case Result(tag="error", error=ex):
54 | assert isinstance(ex, ValueError)
55 | assert str(ex) == "error"
56 | case _:
57 | assert False
58 |
59 | result = fn(KeyError("error"))
60 | match result:
61 | case Result(tag="error", error=ex):
62 | assert isinstance(ex, KeyError)
63 | assert str(ex) == "'error'"
64 | case _:
65 | assert False
66 |
67 |
68 | def test_catch_with_effect_ok():
69 | @catch(exception=TypeError)
70 | @effect.try_[int]()
71 | def fn(a: int) -> Generator[int, int, int]:
72 | b = yield from Ok(42)
73 | return a + b
74 |
75 | result = fn(1)
76 | assert result == Ok(43)
77 |
78 |
79 | def test_catch_with_effect_error():
80 | @catch(exception=TypeError)
81 | @effect.try_[int]()
82 | def fn(a: int) -> Generator[int, int, int]:
83 | b = yield from Error(ValueError("failure"))
84 | return a + b
85 |
86 | result = fn(1)
87 | match result:
88 | case Result(tag="error", error=ex):
89 | assert isinstance(ex, ValueError)
90 | assert str(ex) == "failure"
91 | case _:
92 | assert False
93 |
94 |
95 | def test_catch_with_effect_exception():
96 | @catch(exception=TypeError)
97 | @effect.result[str, Exception]()
98 | def fn(a: int) -> Generator[str, str, str]:
99 | b = yield from Ok("hello")
100 | return a + b # type: ignore (by design)
101 |
102 | result = fn(1)
103 | match result:
104 | case Result(tag="error", error=ex):
105 | assert isinstance(ex, TypeError)
106 | case _:
107 | assert False
108 |
--------------------------------------------------------------------------------
/tests/test_compose.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 |
3 | from hypothesis import given # type: ignore
4 | from hypothesis import strategies as st
5 |
6 | from expression import compose, identity
7 |
8 |
9 | Func = Callable[[int], int]
10 |
11 |
12 | @given(st.integers()) # type: ignore
13 | def test_compose_identity_implicit(x: int):
14 | fn = compose()
15 |
16 | assert fn(x) == x
17 |
18 |
19 | @given(st.integers()) # type: ignore
20 | def test_compose_identity(x: int):
21 | fn: Func = compose(identity)
22 |
23 | assert fn(x) == x
24 |
25 |
26 | @given(st.integers()) # type: ignore
27 | def test_compose_1(x: int):
28 | fn: Callable[[int], int] = lambda x: x + 42
29 | gn = compose(fn)
30 |
31 | assert gn(x) == fn(x) == x + 42
32 |
33 |
34 | @given(st.integers()) # type: ignore
35 | def test_compose_2(x: int):
36 | fn: Func = lambda x: x + 42
37 | gn: Func = lambda x: x - 3
38 | hn = compose(fn, gn)
39 |
40 | assert hn(x) == gn(fn(x))
41 |
42 |
43 | @given(st.integers()) # type: ignore
44 | def test_compose_3(x: int):
45 | fn: Func = lambda x: x + 42
46 | gn: Func = lambda x: x - 3
47 | hn: Func = lambda x: x * 2
48 |
49 | cn = compose(fn, gn, hn)
50 |
51 | assert cn(x) == hn(gn(fn(x)))
52 |
53 |
54 | @given(st.integers()) # type: ignore
55 | def test_compose_many(x: int):
56 | fn: Func = lambda x: x + 42
57 | gn: Func = lambda x: x - 3
58 | hn: Func = lambda x: x * 2
59 |
60 | cn = compose(fn, gn, hn, fn, hn, gn, fn)
61 |
62 | assert cn(x) == fn(gn(hn(fn(hn(gn(fn(x)))))))
63 |
64 |
65 | @given(st.integers()) # type: ignore
66 | def test_compose_right_identity(x: int):
67 | fn: Func = lambda x: x + 42
68 |
69 | cn = compose(fn, identity)
70 |
71 | assert cn(x) == fn(x)
72 |
73 |
74 | @given(st.integers()) # type: ignore
75 | def test_compose_left_identity(x: int):
76 | fn: Func = lambda x: x + 42
77 |
78 | cn: Func = compose(identity, fn)
79 |
80 | assert cn(x) == fn(x)
81 |
82 |
83 | @given(st.integers(), st.integers(), st.integers()) # type: ignore
84 | def test_compose_associative(x: int, y: int, z: int):
85 | """Rearranging the parentheses in an expression will not change the result."""
86 | fn: Func = lambda a: a + x # noqa
87 | gn: Func = lambda b: b - y # noqa
88 | hn: Func = lambda c: c * z # noqa
89 |
90 | cn = compose(fn, gn, hn)
91 | cn_: Func = lambda x: hn(gn(fn(x))) # noqa
92 |
93 | rn = compose(fn, compose(gn, hn))
94 | # right associative
95 | rn_: Func = lambda x: (lambda b: hn(gn(b)))(fn(x)) # type: ignore # noqa
96 | ln: Func = compose(compose(fn, gn), hn)
97 | # left associative
98 | ln_: Func = lambda x: hn((lambda b: gn(fn(b)))(x)) # type: ignore # noqa
99 |
100 | assert cn(x) == cn_(x) == rn(x) == rn_(x) == ln(x) == ln_(x)
101 |
--------------------------------------------------------------------------------
/tests/test_curried.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 |
3 | import pytest
4 |
5 | from expression import curry, curry_flip, pipe
6 |
7 |
8 | def test_curry_identity():
9 | @curry(0)
10 | def add(a: int, b: int) -> int:
11 | """Add a + b"""
12 | return a + b
13 |
14 | assert add(3, 4) == 7
15 |
16 |
17 | def test_curry_simple():
18 | @curry(1)
19 | def add(a: int, b: int) -> int:
20 | """Add a + b"""
21 | return a + b
22 |
23 | assert add(3)(4) == 7
24 |
25 |
26 | def test_curry_simple_curry_2_of_2():
27 | @curry(2)
28 | def add(a: int, b: int) -> int:
29 | """Add a + b"""
30 | return a + b
31 |
32 | assert add(3)(4)() == 7
33 |
34 |
35 | def test_curry_named():
36 | @curry(1)
37 | def add(a: int, b: int) -> int:
38 | """Add a + b"""
39 | return a + b
40 |
41 | assert add(3)(b=4) == 7
42 |
43 |
44 | def test_curry_none():
45 | @curry(0)
46 | def magic() -> int:
47 | return 42
48 |
49 | assert magic() == 42
50 | with pytest.raises(TypeError):
51 | magic(42) # type:ignore
52 |
53 |
54 | def test_curry_three():
55 | @curry(2)
56 | def add(a: int, b: int, c: int) -> int:
57 | """Add a + b + c"""
58 | return a + b + c
59 |
60 | assert add(3)(4)(2) == 9
61 |
62 |
63 | def test_curry1of3_with_optional():
64 | @curry(1)
65 | def add(a: int, b: int, c: int = 10) -> int:
66 | """Add a + b + c"""
67 | return a + b + c
68 |
69 | assert add(3)(4) == 17
70 |
71 |
72 | def test_curry2of3_with_optional():
73 | @curry(2)
74 | def add(a: int, b: int, c: int = 10) -> int:
75 | """Add a + b + c"""
76 | return a + b + c
77 |
78 | assert add(3)(4)() == 17
79 |
80 |
81 | def test_curry1of3_with_optional2():
82 | @curry(1)
83 | def add(a: int, b: int, c: int = 10) -> int:
84 | """Add a + b + c"""
85 | return a + b + c
86 |
87 | assert add(3)(4, c=9) == 16
88 |
89 |
90 | def test_curry_flip_identity():
91 | @curry_flip(0)
92 | def add(a: int, b: int) -> int:
93 | """Add a + b"""
94 | return a + b
95 |
96 | assert add(3, 4) == 7
97 |
98 |
99 | def test_curry_flip_1():
100 | xs = [1, 2, 3]
101 |
102 | @curry_flip(1)
103 | def map(source: list[int], mapper: Callable[[int], int]):
104 | return [mapper(x) for x in source]
105 |
106 | ys = pipe(
107 | xs,
108 | map(mapper=lambda x: x * 10),
109 | )
110 |
111 | assert ys == [10, 20, 30]
112 |
113 |
114 | def test_curry_flip_2():
115 | xs = [1, 2, 3]
116 |
117 | @curry_flip(2)
118 | def map(a: int, source: list[int], mapper: Callable[[int], int]):
119 | return [mapper(x) + a for x in source]
120 |
121 | ys = pipe(xs, map(lambda x: x * 10)(10))
122 |
123 | assert ys == [20, 30, 40]
124 |
--------------------------------------------------------------------------------
/tests/test_disposable.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from expression.system import AsyncDisposable, Disposable, ObjectDisposedException
4 |
5 |
6 | def test_disposable_works():
7 | called: list[bool] = []
8 | disp = Disposable.create(lambda: called.append(True))
9 |
10 | with disp:
11 | assert not called
12 |
13 | assert called
14 |
15 |
16 | def test_disposable_disposed():
17 | called: list[bool] = []
18 | disp = Disposable.create(lambda: called.append(True))
19 | disp.dispose()
20 | assert called
21 |
22 | with pytest.raises(ObjectDisposedException):
23 | with disp:
24 | assert not called
25 |
26 | assert called
27 |
28 |
29 | def test_disposable_disposed_twice_calls_once():
30 | called: list[bool] = []
31 | disp = Disposable.create(lambda: called.append(True))
32 | disp.dispose()
33 | disp.dispose()
34 |
35 | assert len(called) == 1
36 |
37 |
38 | @pytest.mark.asyncio
39 | async def test_async_disposable_works():
40 | called: list[bool] = []
41 |
42 | async def action():
43 | called.append(True)
44 |
45 | disp = AsyncDisposable.create(action)
46 |
47 | async with disp:
48 | assert not called
49 |
50 | assert called
51 |
52 |
53 | @pytest.mark.asyncio
54 | async def test_async_disposable_disposed():
55 | called: list[bool] = []
56 |
57 | async def action():
58 | called.append(True)
59 |
60 | disp = AsyncDisposable.create(action)
61 | await disp.dispose_async()
62 | assert called
63 |
64 | with pytest.raises(ObjectDisposedException):
65 | async with disp:
66 | assert not called
67 |
68 | assert called
69 |
70 |
71 | @pytest.mark.asyncio
72 | async def test_async_disposable_disposed_twice_calls_once():
73 | called: list[bool] = []
74 |
75 | async def action():
76 | called.append(True)
77 |
78 | disp = AsyncDisposable.create(action)
79 | await disp.dispose_async()
80 | await disp.dispose_async()
81 |
82 | assert len(called) == 1
83 |
--------------------------------------------------------------------------------
/tests/test_fn.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa
2 | from typing import Any
3 |
4 | from expression import TailCall, tailrec
5 |
6 |
7 | def test_factorial():
8 | @tailrec
9 | def factorial(n: int, acc: int = 1) -> Any: # Python 3.10: TailCallResult[int, [int, int]]:
10 | if n == 0:
11 | return acc
12 |
13 | return TailCall(n - 1, acc * n) # type: ignore , use [int, int] for Python 3.10
14 |
15 | result = factorial(1000)
16 | assert (
17 | result
18 | == 402387260077093773543702433923003985719374864210714632543799910429938512398629020592044208486969404800479988610197196058631666872994808558901323829669944590997424504087073759918823627727188732519779505950995276120874975462497043601418278094646496291056393887437886487337119181045825783647849977012476632889835955735432513185323958463075557409114262417474349347553428646576611667797396668820291207379143853719588249808126867838374559731746136085379534524221586593201928090878297308431392844403281231558611036976801357304216168747609675871348312025478589320767169132448426236131412508780208000261683151027341827977704784635868170164365024153691398281264810213092761244896359928705114964975419909342221566832572080821333186116811553615836546984046708975602900950537616475847728421889679646244945160765353408198901385442487984959953319101723355556602139450399736280750137837615307127761926849034352625200015888535147331611702103968175921510907788019393178114194545257223865541461062892187960223838971476088506276862967146674697562911234082439208160153780889893964518263243671616762179168909779911903754031274622289988005195444414282012187361745992642956581746628302955570299024324153181617210465832036786906117260158783520751516284225540265170483304226143974286933061690897968482590125458327168226458066526769958652682272807075781391858178889652208164348344825993266043367660176999612831860788386150279465955131156552036093988180612138558600301435694527224206344631797460594682573103790084024432438465657245014402821885252470935190620929023136493273497565513958720559654228749774011413346962715422845862377387538230483865688976461927383814900140767310446640259899490222221765904339901886018566526485061799702356193897017860040811889729918311021171229845901641921068884387121855646124960798722908519296819372388642614839657382291123125024186649353143970137428531926649875337218940694281434118520158014123344828015051399694290153483077644569099073152433278288269864602789864321139083506217095002597389863554277196742822248757586765752344220207573630569498825087968928162753848863396909959826280956121450994871701244516461260379029309120889086942028510640182154399457156805941872748998094254742173582401063677404595741785160829230135358081840096996372524230560855903700624271243416909004153690105933983835777939410970027753472000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
19 | )
20 |
--------------------------------------------------------------------------------
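
For contrast with the trampolined factorial above, a small sketch of why `@tailrec` matters: a plain recursive version exhausts the call stack (assuming CPython's default recursion limit of 1000), while returning `TailCall` keeps the stack flat.

import pytest

from expression import TailCall, tailrec


def plain_factorial(n: int, acc: int = 1) -> int:
    if n == 0:
        return acc
    return plain_factorial(n - 1, acc * n)  # each step adds a stack frame


@tailrec
def factorial(n: int, acc: int = 1):
    if n == 0:
        return acc
    return TailCall(n - 1, acc * n)  # trampolined: constant stack depth


def test_plain_recursion_overflows():
    with pytest.raises(RecursionError):
        plain_factorial(10_000)


def test_tailrec_runs_in_constant_stack():
    assert factorial(10_000) > 0
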
/tests/test_gen.py:
--------------------------------------------------------------------------------
1 | """This file is just to explore how generators works
2 | """
3 | from collections.abc import Generator
4 |
5 | import pytest
6 |
7 |
8 | def test_generator_with_single_empty_yield():
9 | def fn():
10 | yield
11 |
12 | gen = fn()
13 | value = next(gen)
14 | assert value is None
15 |
16 |
17 | def test_generator_with_single_empty_yield_double_next():
18 | def fn():
19 | yield
20 |
21 | gen = fn()
22 | value = next(gen)
23 | assert value is None
24 | with pytest.raises(StopIteration) as ex:
25 | next(gen)
26 | assert ex.value.value is None
27 |
28 |
29 | def test_generator_with_single_yield_value():
30 | def fn():
31 | yield 42
32 |
33 | gen = fn()
34 | value = next(gen)
35 | assert value == 42
36 |
37 |
38 | def test_generator_with_multiple_yield_value():
39 | def fn():
40 | yield 2
41 | yield 4
42 |
43 | gen = fn()
44 | value = next(gen)
45 | assert value == 2
46 | value = next(gen)
47 | assert value == 4
48 |
49 |
50 | def test_generator_with_single_return_value():
51 | def fn():
52 | return 42
53 | yield
54 |
55 | gen = fn()
56 |
57 | # Return in a generator is just syntactic sugar for raise StopIteration
58 | with pytest.raises(StopIteration) as ex:
59 | next(gen) # type: ignore
60 | assert ex.value.value == 42
61 |
62 |
63 | def test_generator_with_multiple_return_value():
64 | def fn():
65 | return 2
66 | return 4
67 | yield
68 |
69 | gen = fn()
70 |
71 | # Return in a generator is just syntactic sugar for raise StopIteration
72 | with pytest.raises(StopIteration) as ex:
73 | next(gen) # type: ignore
74 | assert ex.value.value == 2
75 |
76 | with pytest.raises(StopIteration) as ex:
77 | next(gen) # type: ignore
78 |
79 | # Cannot get value from second return
80 | assert ex.value.value is None
81 |
82 |
83 | def test_generator_with_yield_assignment_and_yield():
84 | def fn() -> Generator[int, int, None]:
85 | x = yield 42
86 | yield x
87 |
88 | gen = fn()
89 | value = next(gen)
90 | assert value == 42
91 | value = gen.send(10)
92 | assert value == 10
93 |
94 |
95 | def test_generator_with_yield_assignment_and_return():
96 | def fn() -> Generator[int, int, int]:
97 | x = yield 42
98 | return x
99 |
100 | gen = fn()
101 | value = next(gen)
102 | assert value == 42
103 | with pytest.raises(StopIteration) as ex:
104 | gen.send(10) # type: ignore
105 | assert ex.value.value == 10
106 |
107 |
108 | def test_generator_with_yield_from():
109 | def fn() -> Generator[int, None, None]:
110 | yield from [42]
111 |
112 | gen = fn()
113 | value = next(gen)
114 | assert value == 42
115 |
116 |
117 | def test_generator_with_yield_from_subgen():
118 | def gn():
119 | yield 42
120 |
121 | def fn() -> Generator[int, None, None]:
122 | yield from gn()
123 |
124 | gen = fn()
125 | value = next(gen)
126 | assert value == 42
127 |
128 |
129 | def test_generator_with_yield_from_gen_empty():
130 | def gn() -> Generator[int, None, None]:
131 | yield from []
132 |
133 | def fn() -> Generator[int, None, None]:
134 | yield from gn()
135 | yield 42
136 |
137 | gen = fn()
138 | value = next(gen)
139 | assert value == 42
140 |
141 |
142 | def test_generator_with_yield_from_send_to_subgen():
143 | def gn() -> Generator[int, int | None, int]:
144 | ret = yield 42
145 | assert ret is not None
146 | return ret + 1
147 |
148 | def fn() -> Generator[int, int | None, int]:
149 | ret = yield from gn()
150 | print("generator: ret", ret)
151 | return ret + 1
152 |
153 | gen = fn()
154 | value = gen.send(None)
155 | assert value == 42
156 | try:
157 | value = gen.send(10)
158 | except StopIteration as ex:
159 | assert ex.value == 12
160 |
--------------------------------------------------------------------------------
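
The mechanics explored above (`send()` feeding values back in, `StopIteration.value` carrying the return value) are what Expression's effect builders rely on, e.g. the `yield from Ok(...)` style in tests/test_catch.py. A deliberately simplified driver, not the library's actual implementation:

from collections.abc import Callable, Generator


def run(gen_fn: Callable[[], Generator[int, int, int]]) -> int:
    """Drive a generator, feeding each yielded value straight back into it."""
    gen = gen_fn()
    value = gen.send(None)  # prime the generator; same as next(gen)
    while True:
        try:
            value = gen.send(value)  # resume with the "unwrapped" value
        except StopIteration as ex:
            return ex.value  # the generator's `return` value carries the result


def program() -> Generator[int, int, int]:
    x = yield 42  # an effect builder would unwrap Ok(42) here
    y = yield 1
    return x + y


assert run(program) == 43
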
/tests/test_mailbox.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from collections.abc import Callable
3 |
4 | from hypothesis import given # type: ignore
5 | from hypothesis import strategies as st
6 |
7 | from expression import AsyncReplyChannel, MailboxProcessor
8 |
9 |
10 | @given(st.lists(st.integers())) # type: ignore
11 | def test_mailbox(xs: list[int]) -> None:
12 | result: list[int] = []
13 |
14 | async def runner():
15 | async def process(inbox: MailboxProcessor[int]) -> None:
16 | """The message processing function."""
17 |
18 | async def message_loop() -> None:
19 | msg: int = await inbox.receive()
20 | result.append(msg)
21 |
22 | return await message_loop()
23 |
24 | return await message_loop() # start the loop
25 |
26 | agent = MailboxProcessor.start(process)
27 | for x in xs:
28 | agent.post(x)
29 | await asyncio.sleep(0)
30 |
31 | asyncio.run(runner())
32 |
33 | assert result == xs
34 |
35 |
36 | @given(st.integers()) # type: ignore
37 | def test_mailbox_post_and_async_reply(x: int):
38 | async def runner():
39 | async def process(inbox: MailboxProcessor[tuple[int, AsyncReplyChannel[str]]]) -> None:
40 | """The message processing function."""
41 |
42 | async def message_loop() -> None:
43 | msg, rc = await inbox.receive()
44 | rc.reply(f"Got {msg}")
45 |
46 | return await message_loop()
47 |
48 | # start the loop
49 | return await message_loop()
50 |
51 | agent: MailboxProcessor[tuple[int, AsyncReplyChannel[str]]] = MailboxProcessor.start(process)
52 | build_message: Callable[[AsyncReplyChannel[str]], tuple[int, AsyncReplyChannel[str]]] = lambda r: (x, r)
53 | reply = await agent.post_and_async_reply(build_message)
54 |
55 | assert reply == f"Got {x}"
56 |
57 | asyncio.run(runner())
58 |
--------------------------------------------------------------------------------
/tests/test_map.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable, ItemsView, Iterable
2 |
3 | from hypothesis import given # type: ignore
4 | from hypothesis import strategies as st
5 |
6 | from expression import Some, pipe
7 | from expression.collections import Block, Map, map
8 |
9 |
10 | def test_map_empty():
11 | m: Map[str, int] = map.empty
12 | assert map.is_empty(m)
13 | assert len(m) == 0
14 | assert not m
15 |
16 |
17 | def test_map_non_empty():
18 | m: Map[str, int] = map.empty.add("test", 42)
19 | assert not map.is_empty(m)
20 | assert len(m) == 1
21 | assert m
22 |
23 |
24 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
25 | def test_map_create(xs: dict[str, int]):
26 | items: Iterable[tuple[str, int]] = xs.items()
27 | m = map.create(items)
28 | assert len(m) == len(xs)
29 |
30 |
31 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
32 | def test_map_of_seq(xs: dict[str, int]):
33 | items: ItemsView[str, int] = xs.items()
34 | m = map.of_seq(items)
35 | assert len(m) == len(xs)
36 |
37 |
38 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
39 | def test_map_to_list_fluent(xs: dict[str, int]):
40 | items: ItemsView[str, int] = xs.items()
41 | ys = map.of_seq(items).to_list()
42 | assert sorted(xs.items()) == sorted(ys)
43 |
44 |
45 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
46 | def test_map_to_seq(xs: dict[str, int]):
47 | items: list[tuple[str, int]] = list(xs.items())
48 | ys = map.of_list(items)
49 | zs = pipe(ys, map.to_seq)
50 | assert sorted(list(items)) == list(zs)
51 |
52 |
53 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
54 | def test_map_remove_fluent(xs: dict[str, int]):
55 | items: ItemsView[str, int] = xs.items()
56 | m = Map.of_seq(items)
57 |
58 | keys = xs.keys()
59 | count = len(m)
60 | for key in keys:
61 | m = m.remove(key)
62 | count -= 1
63 | assert len(m) == count == 0
64 |
65 |
66 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
67 | def test_map_remove(xs: dict[str, int]):
68 | items: ItemsView[str, int] = xs.items()
69 | m = Map.of_seq(items)
70 |
71 | keys = xs.keys()
72 | count = len(m)
73 | for key in keys:
74 | m = pipe(m, map.remove(key))
75 | count -= 1
76 | assert len(m) == count == 0
77 |
78 |
79 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
80 | def test_map_to_seq_fluent(xs: dict[str, int]):
81 | items: ItemsView[str, int] = xs.items()
82 | ys = map.of_seq(items).to_seq()
83 |
84 | assert sorted(list(items)) == list(ys)
85 |
86 |
87 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
88 | def test_map_to_list(xs: dict[str, int]):
89 | items = Block(xs.items())
90 | ys = map.of_block(items).to_seq()
91 |
92 | assert sorted(list(items)) == list(ys)
93 |
94 |
95 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
96 | def test_map_map(xs: dict[str, int]):
97 | items = Block(xs.items())
98 |
99 | mapper: Callable[[str, int], int] = lambda k, v: v * 20
100 | ys = map.of_block(items).map(mapper)
101 |
102 | expected = [(k, mapper(k, v)) for k, v in sorted(list(items))]
103 | assert expected == list(ys.to_list())
104 |
105 |
106 | def test_map_pipe_fluent():
107 | xs = map.of(a=1, b=2)
108 | mapper: Callable[[str, int], int] = lambda k, v: v * 10
109 | ys = xs.pipe(map.map(mapper))
110 |
111 | assert ys == map.of(a=10, b=20)
112 |
113 |
114 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
115 | def test_map_count(xs: dict[str, int]):
116 | ys: Map[str, int] = map.of(**xs)
117 |
118 | assert len(ys) == len(xs) == map.count(ys)
119 |
120 |
121 | @given(st.dictionaries(keys=st.text(), values=st.integers()))
122 | def test_map_iterate(xs: dict[str, int]):
123 | ys = [k for k in map.of(**xs)]
124 |
125 | assert sorted(ys) == sorted(list(xs.keys()))
126 |
127 |
128 | def test_map_change():
129 | xs = ( # type: ignore
130 | Map.empty()
131 | .change(1, lambda _: Some(1)) # type: ignore
132 | .change(2, lambda _: Some(2)) # type: ignore
133 | .change(3, lambda _: Some(3)) # type: ignore
134 | ) # type: ignore
135 |
136 | assert xs == Map.of_seq([(1, 1), (2, 2), (3, 3)])
137 |
138 |
139 | def test_map_try_get_value():
140 | values: list[int] = []
141 | xs = Map.of(a=1, b=2)
142 | assert xs.try_get_value("a", values) is True
143 | assert xs.try_get_value("b", values) is True
144 | assert xs.try_get_value("c", values) is False
145 | assert values == [1, 2]
146 |
147 |
148 | def test_expression_issue_105():
149 | m = Map[str, int].empty()
150 | m = m.add("1", 1).add("2", 2).add("3", 3).add("4", 4)
151 | m = m.change("2", lambda x: x)
152 | m = m.change("3", lambda x: x)
153 |
--------------------------------------------------------------------------------
/tests/test_parser.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import string
4 | from dataclasses import dataclass
5 | from typing import Any, Literal
6 |
7 | from expression import Option, Result, case, pipe, tag, tagged_union
8 | from expression.collections import Block
9 | from expression.extra.parser import (
10 | Parser,
11 | and_then,
12 | any_of,
13 | choice,
14 | many,
15 | opt,
16 | pchar,
17 | pfloat,
18 | pint,
19 | pstring,
20 | )
21 |
22 |
23 | def test_parse_pchar():
24 | input = "ABC"
25 | parseA: Parser[str] = pchar("A")
26 |
27 | result = parseA(input)
28 |
29 | assert result.is_ok()
30 | match result:
31 | case Result(tag="ok", ok=a):
32 | assert a == "A"
33 | case _:
34 | assert False
35 |
36 |
37 | def test_parse_pchar_fluent():
38 | input = "ABC"
39 | parseA: Parser[str] = Parser.pchar("A")
40 |
41 | result = parseA(input)
42 |
43 | assert result.is_ok()
44 | match result:
45 | case Result(tag="ok", ok=a):
46 | assert a == "A"
47 | case _:
48 | assert False
49 |
50 |
51 | def test_parse_a_then_b():
52 | input = "ABC"
53 | parse_a: Parser[str] = pchar("A")
54 | parse_b: Parser[str] = pchar("B")
55 |
56 | parseAB = pipe(
57 | parse_a,
58 | and_then(parse_b),
59 | )
60 |
61 | result = parseAB(input)
62 | assert result.is_ok()
63 | match result:
64 | case Result(tag="ok", ok=(a, b)):
65 | assert (a, b) == ("A", "B")
66 | case _:
67 | assert False
68 |
69 |
70 | def test_parse_a_then_b_fluent():
71 | input = "ABC"
72 | parseAB = pchar("A").and_then(pchar("B"))
73 |
74 | result = parseAB(input)
75 | assert result.is_ok()
76 | match result:
77 | case Result(tag="ok", ok=(a, b)):
78 | assert (a, b) == ("A", "B")
79 | case _:
80 | assert False
81 |
82 |
83 | def test_pstring():
84 | parse_abc = pstring("ABC")
85 |
86 | ret = parse_abc("ABCDE") # Success ("ABC", "DE")
87 | assert ret.is_ok()
88 | match ret:
89 | case Result(tag="ok", ok=success):
90 | assert success == "ABC"
91 | case _:
92 | assert False
93 |
94 | ret = parse_abc("A|CDE") # Failure "Expecting 'B'. Got '|'"
95 | assert ret.is_error()
96 | match ret:
97 | case Result(tag="error", error=error):
98 | assert error == "Expecting 'B'. Got '|'"
99 | case _:
100 | assert False
101 |
102 | ret = parse_abc("AB|DE") # Failure "Expecting 'C'. Got '|'"
103 | assert ret.is_error()
104 | match ret:
105 | case Result(tag="error", error=error):
106 | assert error == "Expecting 'C'. Got '|'"
107 | case _:
108 | assert False
109 |
110 |
111 | def test_int():
112 | ret = pint("123C")
113 |
114 | match ret:
115 | case Result(tag="ok", ok=success):
116 | assert success == 123
117 | case _:
118 | assert False
119 |
120 |
121 | def test_int_negative():
122 | ret = pint("-123C")
123 |
124 | match ret:
125 | case Result(tag="ok", ok=success):
126 | assert success == -123
127 | case _:
128 | assert False
129 |
130 |
131 | def test_float():
132 | ret = pfloat("123C")
133 |
134 | match ret:
135 | case Result(tag="ok", ok=success):
136 | assert success == 123
137 | case _:
138 | assert False
139 |
140 |
141 | def test_float_with_decimal():
142 | ret = pfloat("123.45C")
143 |
144 | match ret:
145 | case Result(tag="ok", ok=success):
146 | assert success == 123.45
147 | case _:
148 | assert False
149 |
150 |
151 | def test_negative_float_with_decimal():
152 | ret = pfloat("-123.45C")
153 |
154 | match ret:
155 | case Result(tag="ok", ok=success):
156 | assert success == -123.45
157 | case _:
158 | assert False
159 |
160 |
161 | @tagged_union
162 | class ComparisonOperator:
163 | tag: Literal["EQ", "NOT_EQ", "LT", "LT_E", "GT", "GT_E", "IS", "IS_NOT", "IN", "NOT_IN"] = tag()
164 |
165 | EQ: bool = case()
166 | NOT_EQ: bool = case()
167 | LT: bool = case()
168 | LT_E: bool = case()
169 | GT: bool = case()
170 | GT_E: bool = case()
171 | IS: bool = case()
172 | IS_NOT: bool = case()
173 | IN: bool = case()
174 | NOT_IN: None = case()
175 |
176 | @staticmethod
177 | def eq() -> ComparisonOperator:
178 | return ComparisonOperator(EQ=True)
179 |
180 | @staticmethod
181 | def not_eq() -> ComparisonOperator:
182 | return ComparisonOperator(NOT_EQ=True)
183 |
184 |
185 | @dataclass
186 | class Compare:
187 | left: Expression
188 | comparators: Block[Expression]
189 | ops: Block[ComparisonOperator]
190 |
191 |
192 | @tagged_union
193 | class BoolOp:
194 | AND: None = case()
195 | OR: None = case()
196 |
197 | @staticmethod
198 | def and_() -> BoolOp:
199 | return BoolOp(AND=None)
200 |
201 | @staticmethod
202 | def or_() -> BoolOp:
203 | return BoolOp(OR=None)
204 |
205 |
206 | @tagged_union
207 | class Expression:
208 | tag: Literal["NAME", "CONSTANT", "BOOL_OP", "COMPARE"] = tag()
209 |
210 | CONSTANT: bool = case()
211 | NAME: str = case()
212 | BOOL_OP: BoolOp = case()
213 | COMPARE: Compare = case()
214 |
215 | @staticmethod
216 | def name(name: str) -> Expression:
217 | return Expression(NAME=name)
218 |
219 | @staticmethod
220 | def compare(compare: Compare) -> Expression:
221 | return Expression(COMPARE=compare)
222 |
223 | @staticmethod
224 | def constant(value: Any) -> Expression:
225 | return Expression(CONSTANT=value)
226 |
227 |
228 | def pname() -> Parser[Expression]:
229 | first = any_of(string.ascii_letters + "_")
230 | rest = pipe(
231 | any_of(string.ascii_letters + string.digits + "_"),
232 | many,
233 | opt,
234 | )
235 |
236 | def mapper(first: str, rest: Option[Block[str]]) -> str:
237 | match rest:
238 | case Option(tag="some", some=letters):
239 | return first + "".join(letters)
240 | case _:
241 | return first
242 |
243 | return first.and_then(rest).starmap(mapper).map(Expression.name)
244 |
245 |
246 | def pexpr() -> Parser[Expression]:
247 | parsers = [
248 | pname(),
249 | ]
250 | return pipe(
251 | parsers,
252 | Block[Parser[Expression]].of_seq,
253 | choice,
254 | )
255 |
256 |
257 | def test_parse_name_expr():
258 | result = pipe(
259 | "test",
260 | pexpr(),
261 | )
262 |
263 | assert result.is_ok()
264 | match result:
265 | case Result(tag="ok", ok=Expression(NAME=name)):
266 | assert name == "test"
267 |
268 | case _:
269 | assert False
270 |
--------------------------------------------------------------------------------
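
Building on the combinators above, a hedged sketch (assuming `pstring` and `pint` compose with `.and_then` the same way the `pchar` parsers do) that parses a tiny "key=value" setting; `and_then` pairs results left-nested, so the match unpacks `((key, _), value)`.

from expression import Result
from expression.extra.parser import pchar, pint, pstring

parse_setting = pstring("port").and_then(pchar("=")).and_then(pint)

match parse_setting("port=8080"):
    case Result(tag="ok", ok=((key, _), value)):
        assert (key, value) == ("port", 8080)
    case Result(tag="error", error=error):
        raise AssertionError(error)
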
/tests/test_pipe.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from typing import TypeVar
3 |
4 | from hypothesis import given
5 | from hypothesis import strategies as st
6 |
7 | from expression import pipe, pipe2
8 | from expression.core.pipe import starid, starpipe
9 |
10 | _A = TypeVar("_A")
11 | _B = TypeVar("_B")
12 |
13 |
14 | @given(st.integers())
15 | def test_pipe_id(x: int):
16 | value = pipe(x)
17 | assert value == x
18 |
19 |
20 | @given(st.integers())
21 | def test_pipe_fn(x: int):
22 | value = pipe(x, lambda x: x + 1)
23 | assert value == x + 1
24 |
25 |
26 | @given(st.integers(), st.integers(), st.integers())
27 | def test_pipe_fn_gn(x: int, y: int, z: int):
28 | gn: Callable[[int], int] = lambda g: g * y
29 | fn: Callable[[int], int] = lambda x: x + z
30 | value = pipe(
31 | x,
32 | fn,
33 | gn
34 | )
35 |
36 | assert value == gn(fn(x))
37 |
38 |
39 | @given(st.integers(), st.integers())
40 | def test_pipe2_id(x: int, y: int):
41 | value = pipe2((x, y))
42 | assert value == (x, y)
43 |
44 |
45 | @given(st.integers(), st.integers())
46 | def test_pipe2_fn(x: int, y: int):
47 | value = pipe2((x, y), lambda x: lambda y: x + y)
48 | assert value == x + y
49 |
50 |
51 | @given(st.integers(), st.integers())
52 | def test_pipe2_fn_gn(x: int, y: int):
53 | gn: Callable[[int], int] = lambda g: g * y
54 | fn: Callable[[int], Callable[[int], int]] = lambda x: lambda y: x + y
55 | value = pipe2((x, y), fn, gn)
56 |
57 | assert value == gn(fn(x)(y))
58 |
59 |
60 | def test_starid_simple():
61 | assert starid(1) == (1,)
62 | assert starid(1, 2) == (1, 2)
63 | assert starid(1, 2, 3) == (1, 2, 3)
64 | assert starid(1, 2, 3, 4) == (1, 2, 3, 4)
65 |
66 |
67 | def fn(a: _A, b: _B) -> tuple[_A, _B]:
68 | return a, b
69 |
70 |
71 | def gn(a: _A, b: _B) -> tuple[_B, _A]:
72 | return b, a
73 |
74 |
75 | def yn(a: _A, b: _B) -> tuple[_A, _B, int]:
76 | return a, b, 3
77 |
78 |
79 | def test_starpipe_simple():
80 | assert starpipe((1, 2), fn) == fn(1, 2)
81 |
82 |
83 | def test_starpipe_id():
84 | assert starpipe((1, 2), starid) == (1, 2)
85 |
86 |
87 | def test_starpipe_fn_gn():
88 | assert starpipe((1, 2), fn, gn) == gn(*fn(1, 2))
89 |
90 |
91 | def test_starpipe_fn_gn_yn():
92 | assert starpipe((1, 2), fn, gn, yn) == yn(*gn(*fn(1, 2)))
93 |
--------------------------------------------------------------------------------
/tests/test_try.py:
--------------------------------------------------------------------------------
1 | from expression import Failure, Success, Try
2 |
3 |
4 | def test_can_create_success():
5 | Success(10)
6 |
7 |
8 | def test_can_create_failure():
9 | Failure(ValueError())
10 |
11 |
12 | def test_try_success():
13 | xs: Try[int] = Success(10)
14 |
15 | match xs:
16 | case Try(tag="ok", ok=x):
17 | assert x == 10
18 | case _:
19 | assert False
20 |
21 |
22 | def test_try_match_failure():
23 | error = Exception("err")
24 | xs: Try[int] = Failure(error)
25 |
26 | match xs:
27 | case Try(tag="error", error=err):
28 | assert err == error
29 | case _:
30 | assert False
31 |
32 |
33 | def test_try_to_string_success():
34 | xs: Try[int] = Success(10)
35 | assert str(xs) == "Success 10"
36 |
37 |
38 | def test_try_to_string_failure():
39 | error = Exception("err")
40 | xs: Try[int] = Failure(error)
41 | assert str(xs) == "Failure err"
42 |
--------------------------------------------------------------------------------
/tests/test_typing.py:
--------------------------------------------------------------------------------
1 | from typing import Any, cast
2 |
3 | import pytest
4 |
5 | from expression import downcast, try_downcast, upcast
6 |
7 |
8 | class Base:
9 | pass
10 |
11 |
12 | class Derived(Base):
13 | pass
14 |
15 |
16 | class Other:
17 | pass
18 |
19 |
20 | def test_upcast():
21 | # Arrange
22 | d = Derived()
23 |
24 | # Act
25 | b = upcast(Base, d)
26 |
27 | # Assert
28 | assert isinstance(b, Base)
29 | assert isinstance(b, Derived)
30 |
31 |
32 | def test_upcast_negative():
33 | # Arrange
34 | d = Derived()
35 |
36 | # Act / Assert
37 | x = upcast(Other, d)
38 |
39 | with pytest.raises(AssertionError):
40 | assert isinstance(x, Other)
41 |
42 |
43 | def test_downcast():
44 | # Arrange
45 | b = cast(Base, Derived())
46 |
47 | # Act
48 | d = downcast(Derived, b)
49 |
50 | # Assert
51 | assert isinstance(d, Base)
52 | assert isinstance(d, Derived)
53 |
54 |
55 | def test_downcast_negative():
56 | # Arrange
57 | b = cast(Base, Derived())
58 |
59 | # Act / Assert
60 |     # downcast asserts that the value is an instance of the target type,
61 |     # so casting to an unrelated class must raise.
62 |     with pytest.raises(AssertionError):
63 |         downcast(Other, b)
64 |
65 |
66 | def test_try_downcast():
67 | # Arrange
68 | b = cast(Base, Derived())
69 |
70 | # Act
71 | c = try_downcast(Derived, b)
72 |
73 | # Assert
74 | assert isinstance(c, Derived)
75 |
76 |
77 | def test_try_downcast_negative():
78 | # Arrange
79 | b = cast(Base, Derived())
80 |
81 | # Act
82 | c = try_downcast(Other, b)
83 |
84 | # Assert
85 | assert c is None
86 |
87 |
88 | def test_try_cast_generic():
89 | # Arrange
90 | d: list[Derived] = [Derived()]
91 |
92 | # Act
93 | b = try_downcast(list[Any], d)
94 |
95 | # Assert
96 | assert isinstance(b, list)
97 |
98 |
99 | def test_try_cast_generic_negative():
100 | # Arrange
101 | d: list[Derived] = [Derived()]
102 |
103 | # Act
104 | b = try_downcast(str, d)
105 |
106 | # Assert
107 | assert b is None
108 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | class CustomException(Exception):
2 | def __init__(self, message: str):
3 | self.message = message
4 |
5 |
6 | def throw(err: Exception):
7 | raise err
8 |
--------------------------------------------------------------------------------