├── .github └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── pyproject.toml ├── src └── isolate │ ├── __init__.py │ ├── _version.py │ ├── backends │ ├── __init__.py │ ├── _base.py │ ├── common.py │ ├── conda.py │ ├── container.py │ ├── local.py │ ├── pyenv.py │ ├── remote.py │ ├── settings.py │ └── virtualenv.py │ ├── common │ ├── __init__.py │ └── timestamp.py │ ├── connections │ ├── __init__.py │ ├── _local │ │ ├── __init__.py │ │ ├── _base.py │ │ └── agent_startup.py │ ├── common.py │ ├── grpc │ │ ├── __init__.py │ │ ├── _base.py │ │ ├── agent.py │ │ ├── configuration.py │ │ ├── definitions │ │ │ ├── __init__.py │ │ │ ├── agent.proto │ │ │ ├── agent_pb2.py │ │ │ ├── agent_pb2.pyi │ │ │ ├── agent_pb2_grpc.py │ │ │ ├── common.proto │ │ │ ├── common_pb2.py │ │ │ ├── common_pb2.pyi │ │ │ └── common_pb2_grpc.py │ │ └── interface.py │ └── ipc │ │ ├── __init__.py │ │ ├── _base.py │ │ └── agent.py │ ├── logger.py │ ├── logs.py │ ├── py.typed │ ├── registry.py │ └── server │ ├── __init__.py │ ├── definitions │ ├── __init__.py │ ├── server.proto │ ├── server_pb2.py │ ├── server_pb2.pyi │ └── server_pb2_grpc.py │ ├── health │ ├── __init__.py │ ├── health.proto │ ├── health_pb2.py │ ├── health_pb2.pyi │ └── health_pb2_grpc.py │ ├── health_server.py │ ├── interface.py │ └── server.py ├── tests ├── __init__.py ├── conftest.py ├── test_backends.py ├── test_concurrency.py ├── test_connections.py ├── test_isolate.py ├── test_log.py ├── test_logger.py ├── test_serialization.py └── test_server.py └── tools ├── Dockerfile ├── agent_requirements.txt ├── protobuf-requirements.txt ├── regen_grpc.py ├── requirements.txt └── test_agent_requirements.txt /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: PyPI release 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | with: 14 | fetch-depth: 0 15 | 16 | - uses: actions/setup-python@v2 17 | with: 18 | python-version: "3.9" 19 | 20 | - name: Install deps 21 | run: python -m pip install --upgrade pip setuptools_scm build 22 | 23 | - name: Build 24 | run: python -m build 25 | 26 | - name: Upload dist 27 | uses: actions/upload-artifact@v4 28 | with: 29 | name: dist 30 | path: dist 31 | 32 | pypi-publish: 33 | name: Upload release to PyPI 34 | runs-on: ubuntu-latest 35 | needs: build 36 | environment: 37 | name: pypi 38 | url: https://pypi.org/p/isolate 39 | permissions: 40 | id-token: write 41 | steps: 42 | - name: Download dist 43 | uses: actions/download-artifact@v4 44 | with: 45 | name: dist 46 | path: dist 47 | 48 | - name: Publish package distributions to PyPI 49 | uses: pypa/gh-action-pypi-publish@release/v1 50 | 51 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | schedule: 10 | - cron: '30 5 * * *' # every day at 5:30 UTC 11 | 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.ref }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | test: 18 | runs-on: ${{ matrix.os }} 19 | strategy: 20 | fail-fast: false 21 | 22 | matrix: 23 | os: [ubuntu-latest] 24 | python: ["3.8", "3.9", "3.10", "3.11"] 25 | include: 26 | - os: macos-latest 27 | python: "3.8" 28 | - os: 
macos-latest 29 | python: "3.11" 30 | steps: 31 | - uses: actions/checkout@v3 32 | 33 | - uses: actions/setup-python@v3 34 | with: 35 | python-version: ${{ matrix.python }} 36 | 37 | - name: Install mamba 38 | uses: mamba-org/setup-micromamba@v1 39 | with: 40 | environment-name: ci 41 | create-args: >- 42 | python=${{ matrix.python }} 43 | condarc: | 44 | channels: 45 | - anaconda 46 | - conda-forge 47 | - pytorch 48 | 49 | - uses: actions/checkout@v3 50 | with: 51 | repository: pyenv/pyenv 52 | ref: v2.3.6 53 | path: pyenv 54 | 55 | - name: Install dependencies 56 | run: | 57 | python -m pip install -e ".[test]" 58 | 59 | - name: Install uv 60 | run: | 61 | python -m pip install uv 62 | 63 | - uses: pre-commit/action@v3.0.1 64 | 65 | - name: Test 66 | run: | 67 | export ISOLATE_PYENV_EXECUTABLE=pyenv/bin/pyenv 68 | python -m pytest -vvv 69 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # anti lolenv 87 | Pipfile* 88 | 89 | # celery beat schedule file 90 | celerybeat-schedule 91 | 92 | # SageMath parsed files 93 | *.sage.py 94 | 95 | # Environments 96 | .env 97 | .venv 98 | env/ 99 | venv/ 100 | ENV/ 101 | env.bak/ 102 | venv.bak/ 103 | 104 | # Spyder project settings 105 | .spyderproject 106 | .spyproject 107 | 108 | # Rope project settings 109 | .ropeproject 110 | 111 | # mkdocs documentation 112 | /site 113 | 114 | # mypy 115 | .mypy_cache/ 116 | .dmypy.json 117 | dmypy.json 118 | 119 | # Pyre type checker 120 | .pyre/ 121 | 122 | # vim 123 | *.swp 124 | 125 | /src/isolate/_isolate_version.py 126 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: 'v0.3.4' 4 | hooks: 5 | - id: ruff 6 | args: 7 | - --fix 8 | - --exit-non-zero-on-fix 9 | - --exclude=UP007 10 | exclude: ".*(_pb2.py|_pb2.pyi|_pb2_grpc.py)$" 11 | - id: ruff-format 12 | exclude: 
".*(_pb2.py|_pb2.pyi|_pb2_grpc.py)$" 13 | - repo: https://github.com/asottile/setup-cfg-fmt 14 | rev: v1.20.1 15 | hooks: 16 | - id: setup-cfg-fmt 17 | - repo: https://github.com/pre-commit/mirrors-mypy 18 | rev: v1.2.0 19 | hooks: 20 | - id: mypy 21 | args: 22 | - --disallow-incomplete-defs 23 | - --ignore-missing-imports 24 | - --no-warn-no-return 25 | exclude: ".*(_pb2.py|_pb2.pyi|_pb2_grpc.py)$" 26 | additional_dependencies: [types-protobuf] 27 | - repo: https://github.com/codespell-project/codespell 28 | rev: v2.1.0 29 | hooks: 30 | - id: codespell 31 | exclude: ".*(_pb2.py|_pb2.pyi|_pb2_grpc.py)$" 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2024 fal - Features & Labels, Inc. 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Isolate 2 | 3 | Run any Python function, with any dependencies, in any machine you want. Isolate offers a 4 | pluggable end-to-end solution for building, managing, and using isolated environments (virtualenv, 5 | conda, remote, and more). 
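As a quick, illustrative sketch (not an excerpt from the official docs): assuming `prepare_environment` accepts the backend name followed by that backend's configuration as keyword arguments, and that the callable you pass can be serialized with the configured serialization method (`pickle` by default; `cloudpickle` is usually more forgiving for functions defined in a script), running a function inside a freshly built virtualenv looks roughly like this:

```python
from isolate import prepare_environment


def tell_joke() -> str:
    # The body runs inside the isolated environment, so `pyjokes` only needs
    # to be installed there (it is listed in `requirements` below).
    import pyjokes

    return pyjokes.get_joke()


# Hypothetical configuration for the `virtualenv` backend.
environment = prepare_environment("virtualenv", requirements=["pyjokes==0.6.0"])

# `connect()` builds the environment if needed and opens a connection to it;
# `run()` executes the callable there and returns its result to this process.
with environment.connect() as connection:
    print(connection.run(tell_joke))
```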
6 | 7 | 8 | ## Motivation 9 | 10 | ![XKCD 1987](https://imgs.xkcd.com/comics/python_environment.png) 11 | 12 | The fact that nearly every piece of software uses some other libraries or some 13 | other programs is undeniable. Each of these comes with its own set of dependencies, 14 | and the chain keeps growing. Once there are enough 'nodes' in the chain, 15 | the ["dependency mess"](https://en.wikipedia.org/wiki/Dependency_hell) starts 16 | to surface and our lives become much harder. 17 | 18 | Python tried to solve this by recommending the "virtual environment" concept. In 19 | theory it was designed to isolate environments of different projects, so my project 20 | A can depend on `pandas==1.0.0` while B depends on `pandas==2.0.0`, and whichever 21 | project I choose to work with, I just activate its own environment. 22 | 23 | Overall this was a very nice solution that did work, and still continues to work 24 | for this use case. But as with every other scoped fix, in time other problems started 25 | to appear that demand a much narrower scope (like defining module-level dependencies, 26 | or even function-level ones for cloud runtimes that allow seamless integration with the 27 | rest of your code running in a different machine). 28 | 29 | However, unlike the "virtual environment" concept, each of the projects that tried to tackle 30 | this problem lacked a universal interface through which one can simply define a set of requirements 31 | (these might be dependencies, the size of the machine needed to run it, or something completely 32 | different) and change them without any loss. Isolate is working towards a future where this 33 | transition is as seamless as the transition from your local environment to the remote 34 | environment. 35 | 36 | ## Contributing 37 | 38 | ### Installing in editable mode with dev dependencies 39 | 40 | ``` 41 | pip install -e '.[dev]' 42 | ``` 43 | 44 | ### Running tests 45 | 46 | ``` 47 | pytest 48 | ``` 49 | 50 | ### Pre-commit 51 | 52 | ``` 53 | pre-commit install 54 | ``` 55 | 56 | ### Commit format 57 | 58 | Please follow the [conventional commits specification](https://www.conventionalcommits.org/) for descriptions/messages. 59 | 60 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel", "setuptools_scm[toml]>=7"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.setuptools_scm] 6 | version_file = "src/isolate/_isolate_version.py" 7 | 8 | [tool.setuptools.packages.find] 9 | where = ["src"] 10 | include = ["isolate"] 11 | namespaces = false 12 | 13 | [project] 14 | name = "isolate" 15 | dynamic = ["version"] 16 | readme = "README.md" 17 | description = "Managed isolated environments for Python" 18 | authors = [{ name = "Features & Labels", email = "hello@fal.ai"}] 19 | requires-python = ">=3.8" 20 | dependencies = [ 21 | # NOTE: make sure you re-generate python bindings (see tools/regen_grpc.py) 22 | # when updating grpcio version. 23 | "grpcio==1.64.0", 24 | "protobuf", 25 | # These are non-intrusive packages with no transitive dependencies. 26 | # They are also used in the agents themselves. 27 | "tblib>=1.7.0", 28 | "platformdirs", 29 | # For 3.10 and later, importlib-metadata's newer versions are included in the 30 | # standard library.
31 | 'importlib-metadata>=4.4; python_version < "3.10"', 32 | ] 33 | 34 | [project.urls] 35 | Issues = "https://github.com/fal-ai/isolate/issues" 36 | Source = "https://github.com/fal-ai/isolate" 37 | 38 | [project.optional-dependencies] 39 | build = [ 40 | # The following are build-related dependencies, and their usage sites are heavily 41 | # guarded with optional_import() calls. 42 | "virtualenv>=20.4", 43 | "PyYAML>=6.0", 44 | ] 45 | test = [ 46 | "isolate[build]", 47 | "pytest", 48 | "cloudpickle>=2.2.0", 49 | "dill>=0.3.5.1", 50 | "flaky", 51 | ] 52 | dev = [ 53 | "isolate[test]", 54 | "grpcio-tools==1.64.0", 55 | ] 56 | 57 | 58 | [project.entry-points."isolate.backends"] 59 | "virtualenv" = "isolate.backends.virtualenv:VirtualPythonEnvironment" 60 | "conda" = "isolate.backends.conda:CondaEnvironment" 61 | "local" = "isolate.backends.local:LocalPythonEnvironment" 62 | "container" = "isolate.backends.container:ContainerizedPythonEnvironment" 63 | "isolate-server" = "isolate.backends.remote:IsolateServer" 64 | "pyenv" = "isolate.backends.pyenv:PyenvEnvironment" 65 | 66 | [tool.ruff] 67 | target-version = "py38" 68 | exclude = ["*_pb2.py", "*_pb2.pyi", "*_pb2_grpc.py"] 69 | 70 | [tool.ruff.lint.pyupgrade] 71 | # Preserve types, even if a file imports `from __future__ import annotations`. 72 | keep-runtime-typing = true 73 | 74 | [tool.ruff.lint] 75 | select = ["E", "F", "W", "PLC", "PLE", "PLW", "I", "UP"] 76 | -------------------------------------------------------------------------------- /src/isolate/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.registry import prepare_environment # noqa: F401 2 | 3 | from ._version import __version__, version_tuple # noqa: F401 4 | -------------------------------------------------------------------------------- /src/isolate/_version.py: -------------------------------------------------------------------------------- 1 | try: 2 | from ._isolate_version import version as __version__ # type: ignore[import] 3 | from ._isolate_version import version_tuple # type: ignore[import] 4 | except ImportError: 5 | __version__ = "UNKNOWN" 6 | version_tuple = (0, 0, __version__) # type: ignore[assignment] 7 | -------------------------------------------------------------------------------- /src/isolate/backends/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.backends._base import * # noqa: F403 2 | from isolate.backends.settings import IsolateSettings # noqa: F401 3 | -------------------------------------------------------------------------------- /src/isolate/backends/_base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from contextlib import contextmanager 4 | from dataclasses import dataclass 5 | from typing import ( 6 | Any, 7 | Callable, 8 | ClassVar, 9 | Generic, 10 | Iterator, 11 | TypeVar, 12 | ) 13 | 14 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 15 | from isolate.logs import Log, LogLevel, LogSource 16 | 17 | __all__ = [ 18 | "BasicCallable", 19 | "CallResultType", 20 | "EnvironmentConnection", 21 | "BaseEnvironment", 22 | "EnvironmentCreationError", 23 | ] 24 | 25 | ConnectionKeyType = TypeVar("ConnectionKeyType") 26 | CallResultType = TypeVar("CallResultType") 27 | BasicCallable = Callable[[], CallResultType] 28 | 29 | 30 | class EnvironmentCreationError(Exception): 31 | """Raised when the environment cannot be 
created.""" 32 | 33 | 34 | class BaseEnvironment(Generic[ConnectionKeyType]): 35 | """Represents a managed environment definition for an isolatation backend 36 | that can be used to run Python code with different set of dependencies.""" 37 | 38 | BACKEND_NAME: ClassVar[str | None] = None 39 | 40 | settings: IsolateSettings = DEFAULT_SETTINGS 41 | 42 | @classmethod 43 | def from_config( 44 | cls, 45 | config: dict[str, Any], 46 | settings: IsolateSettings = DEFAULT_SETTINGS, 47 | ) -> BaseEnvironment: 48 | """Create a new environment from the given configuration.""" 49 | raise NotImplementedError 50 | 51 | @property 52 | def key(self) -> str: 53 | """A unique identifier for this environment (combination of requirements, 54 | python version and other relevant information) that can be used for caching 55 | and identification purposes.""" 56 | raise NotImplementedError 57 | 58 | def create(self, *, force: bool = False) -> ConnectionKeyType: 59 | """Setup the given environment, and return all the information needed 60 | for establishing a connection to it. If `force` flag is set, then even 61 | if the environment is cached; it will be tried to be re-built.""" 62 | raise NotImplementedError 63 | 64 | def destroy(self, connection_key: ConnectionKeyType) -> None: 65 | """Dismantle this environment. Might raise an exception if the environment 66 | does not exist.""" 67 | raise NotImplementedError 68 | 69 | def exists(self) -> bool: 70 | """Return True if the environment already exists.""" 71 | raise NotImplementedError 72 | 73 | def open_connection( 74 | self, connection_key: ConnectionKeyType 75 | ) -> EnvironmentConnection: 76 | """Return a new connection to the environment with using the 77 | `connection_key`.""" 78 | raise NotImplementedError 79 | 80 | @contextmanager 81 | def connect(self) -> Iterator[EnvironmentConnection]: 82 | """Create the given environment (if it already doesn't exist) and establish a 83 | connection to it.""" 84 | connection_key = self.create() 85 | with self.open_connection(connection_key) as connection: 86 | yield connection 87 | 88 | def apply_settings(self, settings: IsolateSettings) -> None: 89 | """Apply the new settings to this environment.""" 90 | self.settings = settings 91 | 92 | def log( 93 | self, 94 | message: str, 95 | *, 96 | level: LogLevel = LogLevel.DEBUG, 97 | source: LogSource = LogSource.BUILDER, 98 | ) -> None: 99 | """Log a message.""" 100 | log_msg = Log(message, level=level, source=source, bound_env=self) 101 | self.settings.log(log_msg) 102 | 103 | 104 | @dataclass 105 | class EnvironmentConnection: 106 | environment: BaseEnvironment 107 | 108 | def __enter__(self) -> EnvironmentConnection: 109 | return self 110 | 111 | def __exit__(self, *exc_info): 112 | return None 113 | 114 | def run( 115 | self, 116 | executable: BasicCallable, 117 | *args: Any, 118 | **kwargs: Any, 119 | ) -> CallResultType: # type: ignore[type-var] 120 | """Run the given executable inside the environment, and return the result. 
121 | If the executable raises an exception, then it will be raised directly.""" 122 | raise NotImplementedError 123 | 124 | def log( 125 | self, 126 | message: str, 127 | *, 128 | level: LogLevel = LogLevel.TRACE, 129 | source: LogSource = LogSource.BRIDGE, 130 | ) -> None: 131 | """Log a message through the bound environment.""" 132 | self.environment.log(message, level=level, source=source) 133 | -------------------------------------------------------------------------------- /src/isolate/backends/common.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import errno 4 | import hashlib 5 | import os 6 | import select 7 | import shutil 8 | import sysconfig 9 | import threading 10 | import time 11 | from contextlib import contextmanager, suppress 12 | from functools import lru_cache 13 | from pathlib import Path 14 | from types import ModuleType 15 | from typing import Callable, Iterator 16 | 17 | # For ensuring that the lock is created and not forgotten 18 | # (e.g. the process which acquires it crashes, so it is never 19 | # released), we are going to check the lock file's mtime every 20 | # _REVOKE_LOCK_DELAY seconds. If the mtime is older than that 21 | # value, we are going to assume the lock is stale and revoke it. 22 | _REVOKE_LOCK_DELAY = 30 23 | 24 | 25 | @contextmanager 26 | def lock_build_path(path: Path, lock_dir: Path) -> Iterator[None]: 27 | """Try to acquire a lock for all operations on the given 'path'. This guarantees 28 | that the path will not be modified by any other process while the lock is held.""" 29 | lock_file = (lock_dir / path.name).with_suffix(".lock") 30 | while not _try_acquire(lock_file): 31 | time.sleep(0.05) 32 | continue 33 | 34 | with _keep_lock_alive(lock_file): 35 | yield 36 | 37 | 38 | @contextmanager 39 | def _keep_lock_alive(lock_file: Path) -> Iterator[None]: 40 | """Keep the lock file alive by updating its mtime as long 41 | as we are doing something in the cache.""" 42 | event = threading.Event() 43 | 44 | def _keep_alive(per_beat_delay: float = 1) -> None: 45 | while not event.wait(per_beat_delay): 46 | lock_file.touch() 47 | lock_file.unlink() 48 | 49 | thread = threading.Thread(target=_keep_alive) 50 | try: 51 | thread.start() 52 | yield 53 | finally: 54 | event.set() 55 | thread.join() 56 | 57 | 58 | def _try_acquire(lock_file: Path) -> bool: 59 | with suppress(FileNotFoundError): 60 | mtime = lock_file.stat().st_mtime 61 | if time.time() - mtime > _REVOKE_LOCK_DELAY: 62 | # The lock file exists, but it may be stale. Check the 63 | # mtime and if it is too old, revoke it. 64 | lock_file.unlink() 65 | 66 | try: 67 | lock_file.touch(exist_ok=False) 68 | except FileExistsError: 69 | return False 70 | else: 71 | return True 72 | 73 | 74 | def get_executable_path(search_path: Path, executable_name: str) -> Path: 75 | """Return the path for the executable named 'executable_name' under 76 | the '/bin' directory of 'search_path'.""" 77 | 78 | bin_dir = (search_path / "bin").as_posix() 79 | executable_path = shutil.which(executable_name, path=bin_dir) 80 | if executable_path is None: 81 | raise FileNotFoundError( 82 | f"Could not find '{executable_name}' in '{search_path}'. " 83 | f"Is the virtual environment corrupted?" 
84 | ) 85 | 86 | return Path(executable_path) 87 | 88 | 89 | _CHECK_FOR_TERMINATION_DELAY = 0.05 90 | HookT = Callable[[str], None] 91 | 92 | 93 | def _io_observer( 94 | hooks: dict[int, HookT], 95 | termination_event: threading.Event, 96 | ) -> threading.Thread: 97 | """Starts a new thread that reads from the specified file descriptors 98 | and calls the bound hook function for each line until the EOF is reached 99 | or the termination event is set. 100 | 101 | Caller is responsible for joining the thread. 102 | """ 103 | 104 | followed_fds = list(hooks.keys()) 105 | for fd in followed_fds: 106 | if os.get_blocking(fd): 107 | raise NotImplementedError( 108 | "All the hooked file descriptors must be non-blocking." 109 | ) 110 | 111 | def forward_lines(fd: int) -> None: 112 | hook = hooks[fd] 113 | with open(fd, closefd=False, errors="backslashreplace") as stream: 114 | # TODO: we probably should pass the real line endings 115 | raw_data = stream.read() 116 | if not raw_data: 117 | return # Nothing to read 118 | 119 | for line in raw_data.splitlines(): 120 | # TODO: parse the lines to include `extra={...}` added by the logger? 121 | hook(line) 122 | 123 | def _reader(): 124 | while not termination_event.is_set(): 125 | # The observed file descriptors may be closed by the 126 | # underlying process at any given time. So before we 127 | # make a select call, we need to check if the file 128 | # descriptors are still valid and remove the ones 129 | # that are not. 130 | for fd in followed_fds.copy(): 131 | try: 132 | os.fstat(fd) 133 | except OSError as exc: 134 | if exc.errno == errno.EBADF: 135 | followed_fds.remove(fd) 136 | 137 | if not followed_fds: 138 | # All the file descriptors are closed, so we can 139 | # stop the thread. 140 | return 141 | 142 | ready, _, _ = select.select( 143 | # rlist= 144 | followed_fds, 145 | # wlist= 146 | [], 147 | # xlist= 148 | [], 149 | # timeout= 150 | _CHECK_FOR_TERMINATION_DELAY, 151 | ) 152 | for fd in ready: 153 | forward_lines(fd) 154 | 155 | observer_thread = threading.Thread(target=_reader) 156 | observer_thread.start() 157 | return observer_thread 158 | 159 | 160 | def _unblocked_pipe() -> tuple[int, int]: 161 | """Create a pair of unblocked pipes. 
This is actually 162 | the same as os.pipe2(os.O_NONBLOCK), but that is not 163 | available in MacOS so we have to do it manually.""" 164 | 165 | read_fd, write_fd = os.pipe() 166 | os.set_blocking(read_fd, False) 167 | os.set_blocking(write_fd, False) 168 | return read_fd, write_fd 169 | 170 | 171 | @contextmanager 172 | def logged_io( 173 | stdout_hook: HookT, 174 | stderr_hook: HookT | None = None, 175 | log_hook: HookT | None = None, 176 | ) -> Iterator[tuple[int, int, int]]: 177 | """Open two new streams (for stdout and stderr, respectively) and start relaying all 178 | the output from them to the given hooks.""" 179 | 180 | stdout_reader_fd, stdout_writer_fd = _unblocked_pipe() 181 | stderr_reader_fd, stderr_writer_fd = _unblocked_pipe() 182 | log_reader_fd, log_writer_fd = _unblocked_pipe() 183 | 184 | termination_event = threading.Event() 185 | io_observer = _io_observer( 186 | hooks={ 187 | stdout_reader_fd: stdout_hook, 188 | stderr_reader_fd: stderr_hook or stdout_hook, 189 | log_reader_fd: log_hook or stdout_hook, 190 | }, 191 | termination_event=termination_event, 192 | ) 193 | try: 194 | yield stdout_writer_fd, stderr_writer_fd, log_writer_fd 195 | finally: 196 | termination_event.set() 197 | try: 198 | # The observer thread checks the termination event in every 199 | # _CHECK_FOR_TERMINATION_DELAY seconds. We need to wait at least 200 | # more than that to make sure that it has a chance to terminate 201 | # properly. 202 | io_observer.join(timeout=_CHECK_FOR_TERMINATION_DELAY * 3) 203 | except TimeoutError: 204 | raise RuntimeError("Log observers did not terminate in time.") 205 | 206 | 207 | @lru_cache(maxsize=None) 208 | def sha256_digest_of(*unique_fields: str | bytes) -> str: 209 | """Return the SHA256 digest that corresponds to the combined version 210 | of 'unique_fields. The order is preserved.""" 211 | 212 | def _normalize(text: str | bytes) -> bytes: 213 | if isinstance(text, str): 214 | return text.encode() 215 | else: 216 | return text 217 | 218 | join_char = b"\n" 219 | inner_text = join_char.join(map(_normalize, unique_fields)) 220 | return hashlib.sha256(inner_text).hexdigest() 221 | 222 | 223 | def active_python() -> str: 224 | """Return the active Python version that can be used for caching 225 | and re-creating this environment. Currently only covers major and 226 | minor versions (like 3.9); patch versions are ignored (like 3.9.4).""" 227 | return sysconfig.get_python_version() 228 | 229 | 230 | def optional_import(module_name: str) -> ModuleType: 231 | """Try to import the given module, and fail if it is not available 232 | with an informative error message that includes the installations 233 | instructions.""" 234 | 235 | import importlib 236 | 237 | try: 238 | return importlib.import_module(module_name) 239 | except ImportError as exc: 240 | raise ImportError( 241 | "isolate must be installed with the 'build' extras for " 242 | f"accessing {module_name!r} import functionality. Please try: " 243 | f"'$ pip install \"isolate[build]\"' to install it." 244 | ) from exc 245 | 246 | 247 | @lru_cache(4) 248 | def get_executable(command: str, home: str | None = None) -> Path: 249 | for path in [home, None]: 250 | binary_path = shutil.which(command, path=path) 251 | if binary_path is not None: 252 | return Path(binary_path) 253 | # TODO: we should probably show some instructions on how you 254 | # can install conda here. 255 | raise FileNotFoundError( 256 | f"Could not find the {command} executable. 
" 257 | f"If the {command} executable is not available by default, please point " 258 | f"isolate to the path where the {command} binary is available '{home}'." 259 | ) 260 | -------------------------------------------------------------------------------- /src/isolate/backends/conda.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import copy 4 | import os 5 | import subprocess 6 | import tempfile 7 | from dataclasses import dataclass, field 8 | from functools import partial 9 | from pathlib import Path 10 | from typing import Any, ClassVar 11 | 12 | from isolate.backends import BaseEnvironment, EnvironmentCreationError 13 | from isolate.backends.common import ( 14 | active_python, 15 | get_executable, 16 | logged_io, 17 | optional_import, 18 | sha256_digest_of, 19 | ) 20 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 21 | from isolate.connections import PythonIPC 22 | from isolate.logs import LogLevel 23 | 24 | # Specify paths where conda and mamba binaries might reside 25 | _CONDA_COMMAND = os.environ.get("CONDA_EXE", "conda") 26 | _MAMBA_COMMAND = os.environ.get("MAMBA_EXE", "micromamba") 27 | _ISOLATE_CONDA_HOME = os.getenv("ISOLATE_CONDA_HOME") 28 | _ISOLATE_MAMBA_HOME = os.getenv("ISOLATE_MAMBA_HOME") 29 | _ISOLATE_DEFAULT_RESOLVER = os.getenv("ISOLATE_DEFAULT_RESOLVER", "mamba") 30 | 31 | # Conda accepts the following version specifiers: =, ==, >=, <=, >, <, != 32 | _POSSIBLE_CONDA_VERSION_IDENTIFIERS = ( 33 | "=", 34 | "<", 35 | ">", 36 | "!", 37 | ) 38 | 39 | 40 | @dataclass 41 | class CondaEnvironment(BaseEnvironment[Path]): 42 | BACKEND_NAME: ClassVar[str] = "conda" 43 | 44 | environment_definition: dict[str, Any] = field(default_factory=dict) 45 | python_version: str | None = None 46 | tags: list[str] = field(default_factory=list) 47 | _exec_home: str | None = _ISOLATE_MAMBA_HOME 48 | _exec_command: str | None = _MAMBA_COMMAND 49 | 50 | @classmethod 51 | def from_config( 52 | cls, 53 | config: dict[str, Any], 54 | settings: IsolateSettings = DEFAULT_SETTINGS, 55 | ) -> BaseEnvironment: 56 | processing_config = copy.deepcopy(config) 57 | processing_config.setdefault("python_version", active_python()) 58 | resolver = processing_config.pop("resolver", _ISOLATE_DEFAULT_RESOLVER) 59 | if resolver == "conda": 60 | _exec_home = _ISOLATE_CONDA_HOME 61 | _exec_command = _CONDA_COMMAND 62 | elif resolver == "mamba": 63 | _exec_home = _ISOLATE_MAMBA_HOME 64 | _exec_command = _MAMBA_COMMAND 65 | else: 66 | raise Exception(f"Conda resolver of type {resolver} is not supported") 67 | if "env_dict" in processing_config: 68 | definition = processing_config.pop("env_dict") 69 | elif "env_yml_str" in processing_config: 70 | yaml = optional_import("yaml") 71 | 72 | definition = yaml.safe_load(processing_config.pop("env_yml_str")) 73 | elif "packages" in processing_config: 74 | definition = { 75 | "dependencies": processing_config.pop("packages"), 76 | } 77 | else: 78 | raise ValueError( 79 | "Either 'env_dict', 'env_yml_str' or 'packages' must be specified" 80 | ) 81 | 82 | dependencies = definition.setdefault("dependencies", []) 83 | if _depends_on(dependencies, "python"): 84 | raise ValueError( 85 | "Python version can not be specified by the environment but rather ", 86 | " it needs to be passed as `python_version` option to the environment.", 87 | ) 88 | 89 | dependencies.append(f"python={processing_config['python_version']}") 90 | 91 | # Extend pip dependencies and channels if they 
are specified. 92 | if "pip" in processing_config: 93 | if not _depends_on(dependencies, "pip"): 94 | dependencies.append("pip") 95 | 96 | try: 97 | dependency_group = next( 98 | dependency 99 | for dependency in dependencies 100 | if isinstance(dependency, dict) and "pip" in dependency 101 | ) 102 | except StopIteration: 103 | dependency_group = {"pip": []} 104 | dependencies.append(dependency_group) 105 | 106 | dependency_group["pip"].extend(processing_config.pop("pip")) 107 | 108 | if "channels" in processing_config: 109 | definition.setdefault("channels", []) 110 | definition["channels"].extend(processing_config.pop("channels")) 111 | 112 | environment = cls( 113 | environment_definition=definition, 114 | _exec_home=_exec_home, 115 | _exec_command=_exec_command, 116 | **processing_config, 117 | ) 118 | environment.apply_settings(settings) 119 | return environment 120 | 121 | @property 122 | def key(self) -> str: 123 | return sha256_digest_of( 124 | repr(self.environment_definition), 125 | self.python_version, 126 | self._exec_command, 127 | *sorted(self.tags), 128 | ) 129 | 130 | def create(self, *, force: bool = False) -> Path: 131 | env_path = self.settings.cache_dir_for(self) 132 | with self.settings.cache_lock_for(env_path): 133 | if env_path.exists() and not force: 134 | return env_path 135 | 136 | self.log(f"Creating the environment at '{env_path}'") 137 | with tempfile.NamedTemporaryFile(mode="w", suffix=".yml") as tf: 138 | yaml = optional_import("yaml") 139 | yaml.dump(self.environment_definition, tf) 140 | tf.flush() 141 | 142 | try: 143 | self._run_create(str(env_path), tf.name) 144 | except subprocess.SubprocessError as exc: 145 | raise EnvironmentCreationError( 146 | f"Failure during 'conda create': {exc}" 147 | ) 148 | 149 | self.log(f"New environment cached at '{env_path}'") 150 | return env_path 151 | 152 | def destroy(self, connection_key: Path) -> None: 153 | with self.settings.cache_lock_for(connection_key): 154 | # It might be destroyed already (when we are awaiting 155 | # for the lock to be released). 156 | if not connection_key.exists(): 157 | return 158 | 159 | self._run_destroy(str(connection_key)) 160 | 161 | def _run_create(self, env_path: str, env_name: str) -> None: 162 | if self._exec_command == "conda": 163 | self._run_conda( 164 | "env", "create", "--yes", "--prefix", env_path, "-f", env_name 165 | ) 166 | else: 167 | self._run_conda("env", "create", "--prefix", env_path, "-f", env_name) 168 | 169 | def _run_destroy(self, connection_key: str) -> None: 170 | self._run_conda("remove", "--yes", "--all", "--prefix", connection_key) 171 | 172 | def _run_conda(self, *args: Any) -> None: 173 | conda_executable = get_executable(self._exec_command, self._exec_home) 174 | with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _): 175 | subprocess.check_call( 176 | [conda_executable, *args], 177 | stdout=stdout, 178 | stderr=stderr, 179 | ) 180 | 181 | def exists(self) -> bool: 182 | path = self.settings.cache_dir_for(self) 183 | return path.exists() 184 | 185 | def open_connection(self, connection_key: Path) -> PythonIPC: 186 | return PythonIPC(self, connection_key) 187 | 188 | 189 | def _depends_on( 190 | dependencies: list[str | dict[str, list[str]]], 191 | package_name: str, 192 | ) -> bool: 193 | for dependency in dependencies: 194 | if isinstance(dependency, dict): 195 | # It is a dependency group like pip: [...] 
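# e.g. a hypothetical entry shaped like {"pip": ["torch==2.0.0"]}; only plain string specs are compared against the package name below.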
196 | continue 197 | 198 | # Get rid of all whitespace characters (python = 3.8 becomes python=3.8) 199 | package = dependency.replace(" ", "") 200 | if not package.startswith(package_name): 201 | continue 202 | 203 | # Ensure that the package name matches perfectly and not only 204 | # at the prefix level. Examples: 205 | # - python # OK 206 | # - python=3.8 # OK 207 | # - python>=3.8 # OK 208 | # - python-user-toolkit # NOT OK 209 | # - pythonhelp!=1.0 # NOT OK 210 | suffix = package[len(package_name) :] 211 | if suffix and suffix[0] not in _POSSIBLE_CONDA_VERSION_IDENTIFIERS: 212 | continue 213 | 214 | return True 215 | return False 216 | -------------------------------------------------------------------------------- /src/isolate/backends/container.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from dataclasses import dataclass, field 5 | from pathlib import Path 6 | from typing import Any, ClassVar 7 | 8 | from isolate.backends import BaseEnvironment 9 | from isolate.backends.common import sha256_digest_of 10 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 11 | from isolate.connections import PythonIPC 12 | 13 | 14 | @dataclass 15 | class ContainerizedPythonEnvironment(BaseEnvironment[Path]): 16 | BACKEND_NAME: ClassVar[str] = "container" 17 | 18 | image: dict[str, Any] = field(default_factory=dict) 19 | python_version: str | None = None 20 | requirements: list[str] = field(default_factory=list) 21 | tags: list[str] = field(default_factory=list) 22 | 23 | @classmethod 24 | def from_config( 25 | cls, 26 | config: dict[str, Any], 27 | settings: IsolateSettings = DEFAULT_SETTINGS, 28 | ) -> BaseEnvironment: 29 | environment = cls(**config) 30 | environment.apply_settings(settings) 31 | return environment 32 | 33 | @property 34 | def key(self) -> str: 35 | # dockerfile_str is always there, but the validation is handled by the 36 | # controller. 
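# A hypothetical example: image = {"dockerfile_str": "FROM python:3.11-slim"}; within this property it only contributes to the cache key.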
37 | dockerfile_str = self.image.get("dockerfile_str", "") 38 | return sha256_digest_of(dockerfile_str, *self.requirements, *sorted(self.tags)) 39 | 40 | def create(self, *, force: bool = False) -> Path: 41 | return Path(sys.exec_prefix) 42 | 43 | def destroy(self, connection_key: Path) -> None: 44 | raise NotImplementedError("ContainerizedPythonEnvironment cannot be destroyed") 45 | 46 | def exists(self) -> bool: 47 | return True 48 | 49 | def open_connection(self, connection_key: Path) -> PythonIPC: 50 | return PythonIPC(self, connection_key) 51 | -------------------------------------------------------------------------------- /src/isolate/backends/local.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from dataclasses import dataclass 5 | from pathlib import Path 6 | from typing import Any, ClassVar 7 | 8 | from isolate.backends import BaseEnvironment 9 | from isolate.backends.common import sha256_digest_of 10 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 11 | from isolate.connections import PythonIPC 12 | 13 | 14 | @dataclass 15 | class LocalPythonEnvironment(BaseEnvironment[Path]): 16 | BACKEND_NAME: ClassVar[str] = "local" 17 | 18 | @classmethod 19 | def from_config( 20 | cls, 21 | config: dict[str, Any], 22 | settings: IsolateSettings = DEFAULT_SETTINGS, 23 | ) -> BaseEnvironment: 24 | environment = cls(**config) 25 | environment.apply_settings(settings) 26 | return environment 27 | 28 | @property 29 | def key(self) -> str: 30 | return sha256_digest_of(sys.exec_prefix) 31 | 32 | def create(self, *, force: bool = False) -> Path: 33 | if force is True: 34 | raise NotImplementedError( 35 | "LocalPythonEnvironment cannot be forcibly created" 36 | ) 37 | return Path(sys.exec_prefix) 38 | 39 | def destroy(self, connection_key: Path) -> None: 40 | raise NotImplementedError("LocalPythonEnvironment cannot be destroyed") 41 | 42 | def exists(self) -> bool: 43 | return True 44 | 45 | def open_connection(self, connection_key: Path) -> PythonIPC: 46 | return PythonIPC(self, connection_key) 47 | -------------------------------------------------------------------------------- /src/isolate/backends/pyenv.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | import os 5 | import shutil 6 | import subprocess 7 | from dataclasses import dataclass 8 | from functools import partial 9 | from pathlib import Path 10 | from typing import Any, ClassVar 11 | 12 | from isolate.backends import BaseEnvironment, EnvironmentCreationError 13 | from isolate.backends.common import logged_io 14 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 15 | from isolate.connections import PythonIPC 16 | from isolate.logs import LogLevel 17 | 18 | _PYENV_EXECUTABLE_NAME = "pyenv" 19 | _PYENV_EXECUTABLE_PATH = os.environ.get("ISOLATE_PYENV_EXECUTABLE") 20 | 21 | 22 | @dataclass 23 | class PyenvEnvironment(BaseEnvironment[Path]): 24 | BACKEND_NAME: ClassVar[str] = "pyenv" 25 | 26 | python_version: str 27 | 28 | @classmethod 29 | def from_config( 30 | cls, 31 | config: dict[str, Any], 32 | settings: IsolateSettings = DEFAULT_SETTINGS, 33 | ) -> BaseEnvironment: 34 | environment = cls(**config) 35 | environment.apply_settings(settings) 36 | return environment 37 | 38 | @property 39 | def key(self) -> str: 40 | return os.path.join("versions", self.python_version) 41 | 42 | def 
create(self, *, force: bool = False) -> Path: 43 | pyenv = _get_pyenv_executable() 44 | env_path = self.settings.cache_dir_for(self) 45 | with self.settings.cache_lock_for(env_path): 46 | # PyEnv installs* the Python versions under $root/versions/$version, where 47 | # we use versions/$version as the key and $root as the base directory 48 | # (for pyenv). 49 | # 50 | # [0]: https://github.com/pyenv/pyenv#locating-pyenv-provided-python-installations 51 | pyenv_root = env_path.parent.parent 52 | prefix = self._try_get_prefix(pyenv, pyenv_root) 53 | if prefix is None or force: 54 | self._install_python(pyenv, pyenv_root) 55 | prefix = self._try_get_prefix(pyenv, pyenv_root) 56 | if not prefix: 57 | raise EnvironmentCreationError( 58 | f"Python {self.python_version} must have been installed by now." 59 | ) 60 | 61 | assert prefix is not None 62 | return prefix 63 | 64 | def _try_get_prefix(self, pyenv: Path, root_path: Path) -> Path | None: 65 | try: 66 | prefix = subprocess.check_output( 67 | [pyenv, "prefix", self.python_version], 68 | env={**os.environ, "PYENV_ROOT": str(root_path)}, 69 | text=True, 70 | stderr=subprocess.PIPE, 71 | ) 72 | except subprocess.CalledProcessError as exc: 73 | if "not installed" in exc.stderr: 74 | return None 75 | raise EnvironmentCreationError( 76 | f"Failed to get the prefix for Python {self.python_version}.\n" 77 | f"{exc.stdout}\n{exc.stderr}" 78 | ) 79 | 80 | return Path(prefix.strip()) 81 | 82 | def _install_python(self, pyenv: Path, root_path: Path) -> None: 83 | with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _): 84 | try: 85 | subprocess.check_call( 86 | [pyenv, "install", "--skip-existing", self.python_version], 87 | env={**os.environ, "PYENV_ROOT": str(root_path)}, 88 | stdout=stdout, 89 | stderr=stderr, 90 | ) 91 | except subprocess.CalledProcessError: 92 | raise EnvironmentCreationError( 93 | f"Failed to install Python {self.python_version} via pyenv.\n" 94 | ) 95 | 96 | def destroy(self, connection_key: Path) -> None: 97 | pyenv = _get_pyenv_executable() 98 | with self.settings.cache_lock_for(connection_key): 99 | # It might be destroyed already (when we are awaiting 100 | # for the lock to be released). 101 | if not connection_key.exists(): 102 | return None 103 | 104 | pyenv_root = connection_key.parent.parent 105 | with logged_io(self.log) as (stdout, stderr, _): 106 | subprocess.check_call( 107 | [pyenv, "uninstall", "-f", connection_key.name], 108 | env={**os.environ, "PYENV_ROOT": str(pyenv_root)}, 109 | stdout=stdout, 110 | stderr=stderr, 111 | ) 112 | 113 | def exists(self) -> bool: 114 | pyenv = _get_pyenv_executable() 115 | cache_dir = self.settings.cache_dir_for(self) 116 | with self.settings.cache_lock_for(cache_dir): 117 | pyenv_root = cache_dir.parent.parent 118 | prefix = self._try_get_prefix(pyenv, pyenv_root) 119 | return prefix is not None 120 | 121 | def open_connection(self, connection_key: Path) -> PythonIPC: 122 | return PythonIPC(self, connection_key) 123 | 124 | 125 | @functools.lru_cache(1) 126 | def _get_pyenv_executable() -> Path: 127 | if _PYENV_EXECUTABLE_PATH: 128 | if not os.path.exists(_PYENV_EXECUTABLE_PATH): 129 | raise EnvironmentCreationError( 130 | "Path to pyenv executable not found! ISOLATE_PYENV_EXECUTABLE " 131 | f"variable: {_PYENV_EXECUTABLE_PATH!r}" 132 | ) 133 | return Path(_PYENV_EXECUTABLE_PATH) 134 | 135 | pyenv_path = shutil.which(_PYENV_EXECUTABLE_NAME) 136 | if pyenv_path is None: 137 | raise FileNotFoundError( 138 | "Could not find the pyenv executable. 
If pyenv is not already installed " 139 | "in your system, please install it first. If it is not in your PATH, " 140 | "then point ISOLATE_PYENV_EXECUTABLE to the absolute path of the " 141 | "pyenv executable." 142 | ) 143 | return Path(pyenv_path) 144 | -------------------------------------------------------------------------------- /src/isolate/backends/remote.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import copy 4 | import json 5 | from dataclasses import dataclass 6 | from typing import Any, ClassVar, List 7 | 8 | import grpc 9 | 10 | from isolate.backends import ( 11 | BaseEnvironment, 12 | BasicCallable, 13 | CallResultType, 14 | EnvironmentConnection, 15 | ) 16 | from isolate.backends.common import sha256_digest_of 17 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 18 | from isolate.server import interface 19 | from isolate.server.definitions import ( 20 | BoundFunction, 21 | EnvironmentDefinition, 22 | IsolateStub, 23 | ) 24 | 25 | 26 | @dataclass 27 | class IsolateServer(BaseEnvironment[List[EnvironmentDefinition]]): 28 | BACKEND_NAME: ClassVar[str] = "isolate-server" 29 | 30 | host: str 31 | target_environments: list[dict[str, Any]] 32 | 33 | @classmethod 34 | def from_config( 35 | cls, 36 | config: dict[str, Any], 37 | settings: IsolateSettings = DEFAULT_SETTINGS, 38 | ) -> BaseEnvironment: 39 | environment = cls(**config) 40 | environment.apply_settings(settings) 41 | 42 | return environment 43 | 44 | @property 45 | def key(self) -> str: 46 | return sha256_digest_of( 47 | self.host, 48 | json.dumps(self.target_environments), 49 | ) 50 | 51 | def create(self, *, force: bool = False) -> list[EnvironmentDefinition]: 52 | if force is True: 53 | raise NotImplementedError( 54 | "Only individual environments can be forcibly created, please set " 55 | "them up manually by using the 'force_create' flag on the " 56 | "environment definition."
57 | ) 58 | 59 | envs = [] 60 | for env in self.target_environments: 61 | if not env.get("kind") or not env.get("configuration"): 62 | raise RuntimeError(f"`kind` or `configuration` key missing in: {env}") 63 | configuration = copy.deepcopy(env["configuration"]) 64 | force_create = configuration.pop("force_create", False) 65 | envs.append( 66 | EnvironmentDefinition( 67 | kind=env["kind"], 68 | configuration=interface.to_struct(env["configuration"]), 69 | force=force_create, 70 | ) 71 | ) 72 | return envs 73 | 74 | def exists(self) -> bool: 75 | return False 76 | 77 | def open_connection( 78 | self, 79 | connection_key: list[EnvironmentDefinition], 80 | ) -> IsolateServerConnection: 81 | return IsolateServerConnection(self, self.host, connection_key) 82 | 83 | 84 | @dataclass 85 | class IsolateServerConnection(EnvironmentConnection): 86 | host: str 87 | definitions: list[EnvironmentDefinition] 88 | _channel: grpc.Channel | None = None 89 | 90 | def _acquire_channel(self) -> None: 91 | self._channel = grpc.insecure_channel(self.host) 92 | 93 | def _release_channel(self) -> None: 94 | if self._channel: 95 | self._channel.close() 96 | self._channel = None 97 | 98 | def __exit__(self, *args: Any) -> None: 99 | self._release_channel() 100 | 101 | def run( 102 | self, 103 | executable: BasicCallable, 104 | *args: Any, 105 | **kwargs: Any, 106 | ) -> CallResultType: # type: ignore[type-var] 107 | if self._channel is None: 108 | self._acquire_channel() 109 | 110 | stub = IsolateStub(self._channel) 111 | request = BoundFunction( 112 | function=interface.to_serialized_object( 113 | executable, 114 | method=self.environment.settings.serialization_method, 115 | was_it_raised=False, 116 | ), 117 | environments=self.definitions, 118 | stream_logs=True, # Default to streaming logs 119 | ) 120 | 121 | return_value = [] 122 | for result in stub.Run(request): 123 | for raw_log in result.logs: 124 | log = interface.from_grpc(raw_log) 125 | self.log(log.message, level=log.level, source=log.source) 126 | 127 | if result.is_complete: 128 | return_value.append(interface.from_grpc(result.result)) 129 | 130 | if len(return_value) == 0: 131 | raise RuntimeError( 132 | "No result object was received from the server" 133 | " (it never set is_complete to True)." 134 | ) 135 | elif len(return_value) > 1: 136 | raise RuntimeError( 137 | "Multiple result objects were received from the server" 138 | " (it set is_complete to True multiple times)." 
139 | ) 140 | else: 141 | return return_value[0] 142 | -------------------------------------------------------------------------------- /src/isolate/backends/settings.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import shutil 5 | import tempfile 6 | from contextlib import contextmanager 7 | from dataclasses import dataclass, replace 8 | from pathlib import Path 9 | from typing import TYPE_CHECKING, Callable, Iterator 10 | 11 | from platformdirs import user_cache_dir 12 | 13 | from isolate.backends.common import lock_build_path 14 | from isolate.logs import Log, LogLevel, LogSource 15 | 16 | if TYPE_CHECKING: 17 | from isolate.backends import BaseEnvironment 18 | 19 | _SYSTEM_TEMP_DIR = Path(tempfile.gettempdir()) 20 | _STRICT_CACHE = os.getenv("ISOLATE_STRICT_CACHE", "0") == "1" 21 | 22 | 23 | @dataclass(frozen=True) 24 | class IsolateSettings: 25 | cache_dir: Path = Path(user_cache_dir("isolate", "isolate")) 26 | serialization_method: str = "pickle" 27 | log_hook: Callable[[Log], None] = print 28 | strict_cache: bool = _STRICT_CACHE 29 | 30 | def log(self, log: Log) -> None: 31 | self.log_hook(self._infer_log_level(log)) 32 | 33 | def _infer_log_level(self, log: Log) -> Log: 34 | """Infer the log level if it's correctly set.""" 35 | if log.level not in (LogLevel.STDOUT, LogLevel.STDERR): 36 | # We should only infer the log level for stdout/stderr logs. 37 | return log 38 | 39 | if log.source in (LogSource.BUILDER, LogSource.BRIDGE): 40 | return replace(log, level=LogLevel.TRACE) 41 | 42 | line = log.message.lower() 43 | 44 | if "[error]" in line: 45 | return replace(log, level=LogLevel.ERROR) 46 | if "[warning]" in line: 47 | return replace(log, level=LogLevel.WARNING) 48 | if "[warn]" in line: 49 | return replace(log, level=LogLevel.WARNING) 50 | if "[info]" in line: 51 | return replace(log, level=LogLevel.INFO) 52 | if "[debug]" in line: 53 | return replace(log, level=LogLevel.DEBUG) 54 | if "[trace]" in line: 55 | return replace(log, level=LogLevel.TRACE) 56 | 57 | if log.level == LogLevel.STDERR: 58 | return replace(log, level=LogLevel.ERROR) 59 | 60 | # Default to INFO level 61 | return replace(log, level=LogLevel.INFO) 62 | 63 | def _get_temp_base(self) -> Path: 64 | """Return the base path for creating temporary files/directories. 65 | 66 | If the isolate cache directory is in a different device than the 67 | system temp base (e.g. /tmp), then it will return a new directory 68 | under the cache directory.""" 69 | 70 | cache_stat = self.cache_dir.stat() 71 | system_stat = _SYSTEM_TEMP_DIR.stat() 72 | if cache_stat.st_dev == system_stat.st_dev: 73 | return _SYSTEM_TEMP_DIR 74 | 75 | if _SYSTEM_TEMP_DIR.samefile(self.cache_dir): 76 | path = _SYSTEM_TEMP_DIR / "isolate" 77 | else: 78 | # This is quite important since if we have a shared cache 79 | # disk, then /tmp is going to be in a different disk than 80 | # the cache directory, which would make it impossible to 81 | # rename() atomically. 82 | path = self.cache_dir / "tmp" 83 | 84 | path.mkdir(exist_ok=True, parents=True) 85 | return path 86 | 87 | def _get_lock_dir(self) -> Path: 88 | """Return a directory which can be used for storing file-based locks.""" 89 | lock_dir = self._get_temp_base() / "locks" 90 | lock_dir.mkdir(exist_ok=True, parents=True) 91 | return lock_dir 92 | 93 | @contextmanager 94 | def cache_lock_for(self, path: Path) -> Iterator[Path]: 95 | """Create a lock for accessing (and operating on) the given path. 
This 96 | means whenever the context manager is entered, the path can be freely 97 | modified and accessed without any other process interfering.""" 98 | 99 | with lock_build_path(path, self._get_lock_dir()): 100 | try: 101 | yield path 102 | except BaseException: 103 | # If anything goes wrong, we have to clean up the 104 | # directory (we can't leave it as a corrupted build). 105 | shutil.rmtree(path, ignore_errors=True) 106 | raise 107 | 108 | def cache_dir_for(self, backend: BaseEnvironment) -> Path: 109 | """Return a directory which can be used for caching the given 110 | environment's artifacts.""" 111 | backend_name = backend.BACKEND_NAME 112 | assert backend_name is not None 113 | 114 | environment_base_path = self.cache_dir / backend_name 115 | environment_base_path.mkdir(exist_ok=True, parents=True) 116 | return environment_base_path / backend.key 117 | 118 | def completion_marker_for(self, path: Path) -> Path: 119 | return path / ".isolate.completed" 120 | 121 | replace = replace 122 | 123 | 124 | DEFAULT_SETTINGS = IsolateSettings() 125 | -------------------------------------------------------------------------------- /src/isolate/backends/virtualenv.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import shutil 5 | import subprocess 6 | from dataclasses import dataclass, field 7 | from functools import partial 8 | from pathlib import Path 9 | from typing import Any, ClassVar 10 | 11 | from isolate.backends import BaseEnvironment, EnvironmentCreationError 12 | from isolate.backends.common import ( 13 | active_python, 14 | get_executable, 15 | get_executable_path, 16 | logged_io, 17 | optional_import, 18 | sha256_digest_of, 19 | ) 20 | from isolate.backends.settings import DEFAULT_SETTINGS, IsolateSettings 21 | from isolate.connections import PythonIPC 22 | from isolate.logs import LogLevel 23 | 24 | _UV_RESOLVER_EXECUTABLE = os.environ.get("ISOLATE_UV_EXE", "uv") 25 | _UV_RESOLVER_HOME = os.getenv("ISOLATE_UV_HOME") 26 | 27 | 28 | @dataclass 29 | class VirtualPythonEnvironment(BaseEnvironment[Path]): 30 | BACKEND_NAME: ClassVar[str] = "virtualenv" 31 | 32 | requirements: list[str] = field(default_factory=list) 33 | constraints_file: os.PathLike | None = None 34 | python_version: str | None = None 35 | extra_index_urls: list[str] = field(default_factory=list) 36 | tags: list[str] = field(default_factory=list) 37 | resolver: str | None = None 38 | 39 | @classmethod 40 | def from_config( 41 | cls, 42 | config: dict[str, Any], 43 | settings: IsolateSettings = DEFAULT_SETTINGS, 44 | ) -> BaseEnvironment: 45 | environment = cls(**config) 46 | environment.apply_settings(settings) 47 | if environment.resolver not in ("uv", None): 48 | raise ValueError( 49 | "Only 'uv' is supported as a resolver for virtualenv environments." 50 | ) 51 | return environment 52 | 53 | @property 54 | def key(self) -> str: 55 | if self.constraints_file is not None: 56 | with open(self.constraints_file) as stream: 57 | constraints = stream.read().splitlines() 58 | else: 59 | constraints = [] 60 | 61 | extras = [] 62 | if self.resolver is not None: 63 | extras.append(f"resolver={self.resolver}") 64 | 65 | active_python_version = self.python_version or active_python() 66 | return sha256_digest_of( 67 | active_python_version, 68 | *self.requirements, 69 | *constraints, 70 | *self.extra_index_urls, 71 | *sorted(self.tags), 72 | # This is backwards compatible with environments not using 73 | # the 'resolver' field. 
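        # (Note: when 'resolver' is left unset, 'extras' stays empty, so the
        #  resulting digest is identical to the one produced before the
        #  'resolver' field existed, given the same inputs above.)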
74 | *extras, 75 | ) 76 | 77 | def install_requirements(self, path: Path) -> None: 78 | """Install the requirements of this environment using 'pip' to the 79 | given virtualenv path. 80 | 81 | If there are any constraint files specified, they will be also passed to 82 | the package resolver. 83 | """ 84 | if not self.requirements: 85 | return None 86 | 87 | self.log(f"Installing requirements: {', '.join(self.requirements)}") 88 | environ = os.environ.copy() 89 | 90 | if self.resolver == "uv": 91 | # Set VIRTUAL_ENV to the actual path of the environment since that is 92 | # how uv discovers the environment. This is necessary when using uv 93 | # as the resolver. 94 | environ["VIRTUAL_ENV"] = str(path) 95 | base_pip_cmd = [ 96 | get_executable(_UV_RESOLVER_EXECUTABLE, _UV_RESOLVER_HOME), 97 | "pip", 98 | ] 99 | else: 100 | base_pip_cmd = [get_executable_path(path, "pip")] 101 | 102 | pip_cmd: list[str | os.PathLike] = [ 103 | *base_pip_cmd, # type: ignore 104 | "install", 105 | *self.requirements, 106 | ] 107 | if self.constraints_file: 108 | pip_cmd.extend(["-c", self.constraints_file]) 109 | 110 | for extra_index_url in self.extra_index_urls: 111 | pip_cmd.extend(["--extra-index-url", extra_index_url]) 112 | 113 | with logged_io(partial(self.log, level=LogLevel.INFO)) as (stdout, stderr, _): 114 | try: 115 | subprocess.check_call( 116 | pip_cmd, 117 | stdout=stdout, 118 | stderr=stderr, 119 | env=environ, 120 | ) 121 | except subprocess.SubprocessError as exc: 122 | raise EnvironmentCreationError(f"Failure during 'pip install': {exc}") 123 | 124 | def _install_python_through_pyenv(self) -> str: 125 | from isolate.backends.pyenv import PyenvEnvironment 126 | 127 | self.log( 128 | f"Requested Python version of {self.python_version} is not available " 129 | "in the system, attempting to install it through pyenv." 130 | ) 131 | 132 | pyenv = PyenvEnvironment.from_config( 133 | {"python_version": self.python_version}, 134 | settings=self.settings, 135 | ) 136 | return str(get_executable_path(pyenv.create(), "python")) 137 | 138 | def _decide_python(self) -> str: 139 | from isolate.backends.pyenv import _get_pyenv_executable 140 | 141 | builtin_discovery = optional_import("virtualenv.discovery.builtin") 142 | interpreter = builtin_discovery.get_interpreter(self.python_version, ()) 143 | if interpreter is not None: 144 | return interpreter.executable 145 | 146 | try: 147 | _get_pyenv_executable() 148 | except Exception: 149 | raise EnvironmentCreationError( 150 | f"Python {self.python_version} is not available in your " 151 | "system. Please install it first." 
152 | ) from None 153 | else: 154 | return self._install_python_through_pyenv() 155 | 156 | def create(self, *, force: bool = False) -> Path: 157 | virtualenv = optional_import("virtualenv") 158 | 159 | venv_path = self.settings.cache_dir_for(self) 160 | completion_marker = self.settings.completion_marker_for(venv_path) 161 | with self.settings.cache_lock_for(venv_path): 162 | if not force: 163 | is_cached = venv_path.exists() 164 | if self.settings.strict_cache: 165 | is_cached &= completion_marker.exists() 166 | 167 | if is_cached: 168 | return venv_path 169 | 170 | self.log(f"Creating the environment at '{venv_path}'") 171 | 172 | args = [str(venv_path)] 173 | if self.python_version: 174 | args.append(f"--python={self._decide_python()}") 175 | 176 | try: 177 | virtualenv.cli_run(args) 178 | except (RuntimeError, OSError) as exc: 179 | raise EnvironmentCreationError( 180 | f"Failed to create the environment at '{venv_path}': {exc}" 181 | ) 182 | 183 | self.install_requirements(venv_path) 184 | completion_marker.touch() 185 | 186 | self.log(f"New environment cached at '{venv_path}'") 187 | return venv_path 188 | 189 | def destroy(self, connection_key: Path) -> None: 190 | with self.settings.cache_lock_for(connection_key): 191 | # It might be destroyed already (when we are awaiting 192 | # for the lock to be released). 193 | if not connection_key.exists(): 194 | return 195 | 196 | shutil.rmtree(connection_key) 197 | 198 | def exists(self) -> bool: 199 | path = self.settings.cache_dir_for(self) 200 | return path.exists() 201 | 202 | def open_connection(self, connection_key: Path) -> PythonIPC: 203 | return PythonIPC(self, connection_key) 204 | -------------------------------------------------------------------------------- /src/isolate/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fal-ai/isolate/43cbe0d852a75229e8372c4305fa37da4aa5ec78/src/isolate/common/__init__.py -------------------------------------------------------------------------------- /src/isolate/common/timestamp.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from datetime import datetime, timezone 4 | 5 | from google.protobuf.timestamp_pb2 import Timestamp 6 | 7 | 8 | def from_datetime(time: datetime) -> Timestamp: 9 | timestamp = Timestamp() 10 | timestamp.FromDatetime(time) 11 | return timestamp 12 | 13 | 14 | def to_datetime(timestamp: Timestamp) -> datetime: 15 | return timestamp.ToDatetime(tzinfo=timezone.utc) 16 | -------------------------------------------------------------------------------- /src/isolate/connections/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | 3 | from isolate.connections.ipc import IsolatedProcessConnection, PythonIPC # noqa: F401 4 | 5 | 6 | def __getattr__(name): 7 | if name == "LocalPythonGRPC": 8 | extra = "grpc" 9 | module_name = "isolate.connections.grpc" 10 | else: 11 | raise AttributeError(f"module {__name__!r} has no attribute {name!r}") 12 | 13 | try: 14 | module = importlib.import_module(module_name) 15 | except ImportError: 16 | raise AttributeError( 17 | f"For using {name!r} you need to install isolate with {extra!r} support." 
18 | f'\n $ pip install "isolate[{extra}]"' 19 | ) 20 | 21 | return getattr(module, name) 22 | -------------------------------------------------------------------------------- /src/isolate/connections/_local/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.connections._local import agent_startup # noqa: F401 2 | from isolate.connections._local._base import PythonExecutionBase # noqa: F401 3 | -------------------------------------------------------------------------------- /src/isolate/connections/_local/_base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import subprocess 5 | import sysconfig 6 | from contextlib import contextmanager 7 | from dataclasses import dataclass, field 8 | from functools import partial 9 | from pathlib import Path 10 | from typing import ( 11 | TYPE_CHECKING, 12 | Any, 13 | Generic, 14 | Iterator, 15 | TypeVar, 16 | ) 17 | 18 | from isolate import __version__ as isolate_version 19 | from isolate.backends.common import get_executable_path, logged_io 20 | from isolate.connections.common import AGENT_SIGNATURE 21 | from isolate.logs import LogLevel, LogSource 22 | 23 | if TYPE_CHECKING: 24 | from isolate.backends import BaseEnvironment 25 | 26 | ConnectionType = TypeVar("ConnectionType") 27 | 28 | 29 | def binary_path_for(*search_paths: Path) -> str: 30 | """Return the binary search path for the given 'search_paths'. 31 | It will be a combination of the 'bin/' folders in them and 32 | the existing PATH environment variable.""" 33 | 34 | paths = [] 35 | for search_path in search_paths: 36 | path = sysconfig.get_path("scripts", vars={"base": search_path}) 37 | paths.append(path) 38 | # Some distributions (conda) might include both a 'bin' and 39 | # a 'scripts' folder. 40 | 41 | auxilary_binary_path = search_path / "bin" 42 | if path != auxilary_binary_path and auxilary_binary_path.exists(): 43 | paths.append(str(auxilary_binary_path)) 44 | 45 | if "PATH" in os.environ: 46 | paths.append(os.environ["PATH"]) 47 | 48 | return os.pathsep.join(paths) 49 | 50 | 51 | def python_path_for(*search_paths: Path) -> str: 52 | """Return the PYTHONPATH for the library paths residing 53 | in the given 'search_paths'. The order of the paths is 54 | preserved.""" 55 | assert len(search_paths) >= 1 56 | lib_paths = [] 57 | for search_path in search_paths: 58 | # sysconfig defines the schema of the directories under 59 | # any comforming Python installation (like venv, conda, etc.). 60 | # 61 | # Be aware that Debian's system installation does not 62 | # comform sysconfig. 63 | raw_glob_expr = sysconfig.get_path( 64 | "purelib", 65 | vars={ 66 | "base": search_path, 67 | "python_version": "*", 68 | "py_version_short": "*", 69 | "py_version_nodot": "*", 70 | }, 71 | ) 72 | relative_glob_expr = Path(raw_glob_expr).relative_to(search_path).as_posix() 73 | 74 | # Try to find expand the Python version in the path. This is 75 | # necessary for supporting multiple Python versions in the same 76 | # environment. 77 | for file in search_path.glob(relative_glob_expr): 78 | lib_paths.append(str(file)) 79 | 80 | return os.pathsep.join(lib_paths) 81 | 82 | 83 | @dataclass 84 | class PythonExecutionBase(Generic[ConnectionType]): 85 | """A generic Python execution implementation that can trigger a new process 86 | and start watching stdout/stderr for the logs. 
The environment_path must be 87 | the base directory of a new Python environment (structure that complies with 88 | sysconfig). Python binary inside that environment will be used to run the 89 | agent process. 90 | 91 | If set, extra_inheritance_paths allows extending the custom package search 92 | system with additional environments. As an example, the current environment_path 93 | might point to an environment with numpy and the extra_inheritance_paths might 94 | point to an environment with pandas. In this case, the agent process will have 95 | access to both numpy and pandas. The order is important here, as the first 96 | path in the list will be the first one to be looked up (so if there is multiple 97 | versions of the same package in different environments, the one in the first 98 | path will take precedence). Dependency resolution and compatibility must be 99 | handled by the user.""" 100 | 101 | environment: BaseEnvironment 102 | environment_path: Path 103 | extra_inheritance_paths: list[Path] = field(default_factory=list) 104 | 105 | @contextmanager 106 | def start_process( 107 | self, 108 | connection: ConnectionType, 109 | *args: Any, 110 | **kwargs: Any, 111 | ) -> Iterator[subprocess.Popen]: 112 | """Start the agent process with the Python binary available inside the 113 | bound environment.""" 114 | 115 | python_executable = get_executable_path(self.environment_path, "python") 116 | with logged_io( 117 | partial( 118 | self.handle_agent_log, source=LogSource.USER, level=LogLevel.STDOUT 119 | ), 120 | partial( 121 | self.handle_agent_log, source=LogSource.USER, level=LogLevel.STDERR 122 | ), 123 | partial( 124 | self.handle_agent_log, source=LogSource.BRIDGE, level=LogLevel.TRACE 125 | ), 126 | ) as (stdout, stderr, log_fd): 127 | yield subprocess.Popen( 128 | self.get_python_cmd(python_executable, connection, log_fd), 129 | env=self.get_env_vars(), 130 | stdout=stdout, 131 | stderr=stderr, 132 | pass_fds=(log_fd,), 133 | text=True, 134 | ) 135 | 136 | def get_env_vars(self) -> dict[str, str]: 137 | """Return the environment variables to run the agent process with. By default 138 | PYTHONUNBUFFERED is set to 1 to ensure the prints to stdout/stderr are reflect 139 | immediately (so that we can seamlessly transfer logs).""" 140 | 141 | custom_vars = {} 142 | custom_vars["ISOLATE_SERVER_VERSION"] = isolate_version 143 | custom_vars[AGENT_SIGNATURE] = "1" 144 | custom_vars["PYTHONUNBUFFERED"] = "1" 145 | 146 | # NOTE: we don't have to manually set PYTHONPATH here if we are 147 | # using a single environment since python will automatically 148 | # use the proper path. 149 | if self.extra_inheritance_paths: 150 | # The order here should reflect the order of the inheritance 151 | # where the actual environment already takes precedence. 
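            # Illustrative example (hypothetical paths): with
            # environment_path=/cache/env-a and extra_inheritance_paths=[/cache/env-b],
            # PYTHONPATH becomes roughly
            # "/cache/env-a/lib/python3.X/site-packages:/cache/env-b/lib/python3.X/site-packages",
            # so a package present in both environments is imported from env-a.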
152 | custom_vars["PYTHONPATH"] = python_path_for( 153 | self.environment_path, *self.extra_inheritance_paths 154 | ) 155 | 156 | # But the PATH must be always set since it will be not be 157 | # automatically set by Python (think of this as ./venv/bin/activate) 158 | custom_vars["PATH"] = binary_path_for( 159 | self.environment_path, *self.extra_inheritance_paths 160 | ) 161 | 162 | return { 163 | **os.environ, 164 | **custom_vars, 165 | } 166 | 167 | def get_python_cmd( 168 | self, 169 | executable: Path, 170 | connection: ConnectionType, 171 | log_fd: int, 172 | ) -> list[str | Path]: 173 | """Return the command to run the agent process with.""" 174 | raise NotImplementedError 175 | 176 | def handle_agent_log( 177 | self, line: str, *, level: LogLevel, source: LogSource 178 | ) -> None: 179 | """Handle a log line emitted by the agent process. The level will be either 180 | STDOUT or STDERR.""" 181 | raise NotImplementedError 182 | -------------------------------------------------------------------------------- /src/isolate/connections/_local/agent_startup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Agent process execution wrapper for handling extended PYTHONPATH. 3 | """ 4 | 5 | import os 6 | import runpy 7 | import site 8 | import sys 9 | import traceback 10 | 11 | 12 | def load_pth_files() -> None: 13 | """Each site dir in Python can contain some .pth files, which are 14 | basically instructions that tell Python to load other stuff. This is 15 | generally used for editable installations, and just setting PYTHONPATH 16 | won't make them expand so we need manually process them. Luckily, site 17 | module can simply take the list of new paths and recognize them. 18 | 19 | https://docs.python.org/3/tutorial/modules.html#the-module-search-path 20 | """ 21 | python_path = os.getenv("PYTHONPATH") 22 | if python_path is None: 23 | return None 24 | 25 | # TODO: The order here is the same as the one that is used for generating the 26 | # PYTHONPATH. The only problem that might occur is that, on a chain with 27 | # 3 ore more nodes (A, B, C), if X is installed as an editable package to 28 | # B and a normal package to C, then C might actually take precedence. This 29 | # will need to be fixed once we are dealing with more than 2 nodes and editable 30 | # packages. 31 | for site_dir in python_path.split(os.pathsep): 32 | try: 33 | site.addsitedir(site_dir) 34 | except Exception: 35 | # NOTE: there could be .pth files that are model weights and not 36 | # python path configuration files. 37 | traceback.print_exc() 38 | print(f"Error adding site directory {site_dir}, skipping...") 39 | 40 | 41 | def main(): 42 | real_agent, *real_arguments = sys.argv[1:] 43 | 44 | load_pth_files() 45 | # TODO(feat): implement a check to parse "agent-requires" line and see if 46 | # all the dependencies are installed. 
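    # Illustrative example (hypothetical values): sys.argv[1:] is expected to
    # look like ["/.../isolate/connections/grpc/agent.py", "127.0.0.1:50001",
    # "--log-fd", "5"]; everything after the agent path is forwarded untouched
    # to the real agent below.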
47 | sys.argv = [real_agent] + real_arguments 48 | runpy.run_path(real_agent, run_name="__main__") 49 | 50 | 51 | if __name__ == "__main__": 52 | load_pth_files() 53 | main() 54 | -------------------------------------------------------------------------------- /src/isolate/connections/common.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import importlib 4 | import os 5 | from contextlib import contextmanager 6 | from dataclasses import dataclass 7 | from typing import TYPE_CHECKING, Any, Iterator, cast 8 | 9 | from tblib import Traceback, TracebackParseError 10 | 11 | if TYPE_CHECKING: 12 | from typing import Protocol 13 | 14 | class SerializationBackend(Protocol): 15 | def loads(self, data: bytes) -> Any: ... 16 | 17 | def dumps(self, obj: Any) -> bytes: ... 18 | 19 | 20 | AGENT_SIGNATURE = "IS_ISOLATE_AGENT" 21 | 22 | 23 | @dataclass 24 | class SerializationError(Exception): 25 | """An error that happened during the serialization process.""" 26 | 27 | message: str 28 | 29 | 30 | @contextmanager 31 | def _step(message: str) -> Iterator[None]: 32 | """A context manager to capture every expression 33 | underneath it and if any of them fails for any reason 34 | then it will raise a SerializationError with the 35 | given message.""" 36 | 37 | try: 38 | yield 39 | except BaseException as exception: 40 | raise SerializationError("Error while " + message) from exception 41 | 42 | 43 | def as_serialization_method(backend: Any) -> SerializationBackend: 44 | """Ensures that the given backend has loads/dumps methods, and returns 45 | it as is (also convinces type checkers that the given object satisfies 46 | the serialization protocol).""" 47 | 48 | if not hasattr(backend, "loads") or not hasattr(backend, "dumps"): 49 | raise TypeError( 50 | f"The given serialization backend ({backend.__name__}) does " 51 | "not have one of the required methods (loads/dumps)." 52 | ) 53 | 54 | return cast("SerializationBackend", backend) 55 | 56 | 57 | def load_serialized_object( 58 | serialization_method: str, 59 | raw_object: bytes, 60 | *, 61 | was_it_raised: bool = False, 62 | stringized_traceback: str | None = None, 63 | ) -> Any: 64 | """Load the given serialized object using the given serialization method. If 65 | anything fails, then a SerializationError will be raised. If the was_it_raised 66 | flag is set to true, then the given object will be raised as an exception (instead 67 | of being returned).""" 68 | 69 | with _step(f"preparing the serialization backend ({serialization_method})"): 70 | serialization_backend = as_serialization_method( 71 | importlib.import_module(serialization_method) 72 | ) 73 | 74 | with _step("deserializing the given object"): 75 | result = serialization_backend.loads(raw_object) 76 | 77 | if was_it_raised: 78 | raise prepare_exc(result, stringized_traceback=stringized_traceback) 79 | else: 80 | return result 81 | 82 | 83 | def serialize_object(serialization_method: str, object: Any) -> bytes: 84 | """Serialize the given object using the given serialization method. 
If 85 | anything fails, then a SerializationError will be raised.""" 86 | 87 | with _step(f"preparing the serialization backend ({serialization_method})"): 88 | serialization_backend = as_serialization_method( 89 | importlib.import_module(serialization_method) 90 | ) 91 | 92 | with _step("serializing the given object"): 93 | return serialization_backend.dumps(object) 94 | 95 | 96 | def is_agent() -> bool: 97 | """Returns true if the current process is an isolate agent.""" 98 | return os.environ.get(AGENT_SIGNATURE) == "1" 99 | 100 | 101 | def prepare_exc( 102 | exc: BaseException, 103 | *, 104 | stringized_traceback: str | None = None, 105 | ) -> BaseException: 106 | if stringized_traceback: 107 | try: 108 | traceback = Traceback.from_string(stringized_traceback).as_traceback() 109 | except TracebackParseError: 110 | traceback = None 111 | else: 112 | traceback = None 113 | 114 | exc.__traceback__ = traceback 115 | return exc 116 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.connections.grpc._base import AgentError, LocalPythonGRPC # noqa: F401 2 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/_base.py: -------------------------------------------------------------------------------- 1 | import socket 2 | from contextlib import contextmanager 3 | from dataclasses import dataclass 4 | from pathlib import Path 5 | from typing import Any, ContextManager, Iterator, List, Tuple, Union, cast 6 | 7 | import grpc 8 | 9 | from isolate.backends import ( 10 | BasicCallable, 11 | CallResultType, 12 | EnvironmentConnection, 13 | ) 14 | from isolate.connections._local import PythonExecutionBase, agent_startup 15 | from isolate.connections.common import serialize_object 16 | from isolate.connections.grpc import agent, definitions 17 | from isolate.connections.grpc.configuration import get_default_options 18 | from isolate.connections.grpc.interface import from_grpc 19 | from isolate.logs import LogLevel, LogSource 20 | 21 | 22 | class AgentError(Exception): 23 | """An internal problem caused by (most probably) the agent.""" 24 | 25 | 26 | @dataclass 27 | class GRPCExecutionBase(EnvironmentConnection): 28 | """A customizable gRPC-based execution backend.""" 29 | 30 | def start_agent(self) -> ContextManager[Tuple[str, grpc.ChannelCredentials]]: 31 | """Starts the gRPC agent and returns the address it is listening on and 32 | the required credentials to connect to it.""" 33 | raise NotImplementedError 34 | 35 | @contextmanager 36 | def _establish_bridge( 37 | self, 38 | *, 39 | max_wait_timeout: float = 20.0, 40 | ) -> Iterator[definitions.AgentStub]: 41 | with self.start_agent() as (address, credentials): 42 | with grpc.secure_channel( 43 | address, 44 | credentials, 45 | options=get_default_options(), 46 | ) as channel: 47 | channel_status = grpc.channel_ready_future(channel) 48 | try: 49 | channel_status.result(timeout=max_wait_timeout) 50 | except grpc.FutureTimeoutError: 51 | raise AgentError( 52 | "Couldn't connect to the gRPC server in the agent " 53 | f"(listening at {address}) in time." 
54 | ) 55 | stub = definitions.AgentStub(channel) 56 | stub._channel = channel # type: ignore 57 | yield stub 58 | 59 | def run( 60 | self, 61 | executable: BasicCallable, 62 | *args: Any, 63 | **kwargs: Any, 64 | ) -> CallResultType: # type: ignore[type-var] 65 | # Implementation details 66 | # ====================== 67 | # 68 | # RPC Flow: 69 | # --------- 70 | # 1. [controller]: Spawn the agent. 71 | # 2. [agent]: Start listening at the given address. 72 | # 3. [controller]: Await *at most* max_wait_timeout seconds for the agent to 73 | # be available if it doesn't do it until then, 74 | # raise an AgentError. 75 | # 4. [controller]: If the server is available, then establish the bridge and 76 | # pass the 'function' as the input. 77 | # 5. [agent]: Receive the function, deserialize it, start the execution. 78 | # 6. [controller]: Watch agent for logs (stdout/stderr), and as soon as they 79 | # appear call the log handler. 80 | # 7. [agent]: Once the execution of the function is finished, send the 81 | # result using the same serialization method. 82 | # 8. [controller]: Receive the result back and return it. 83 | 84 | method = self.environment.settings.serialization_method 85 | function = definitions.SerializedObject( 86 | method=method, 87 | definition=serialize_object(method, executable), 88 | was_it_raised=False, 89 | stringized_traceback=None, 90 | ) 91 | function_call = definitions.FunctionCall( 92 | function=function, 93 | ) 94 | 95 | with self._establish_bridge() as bridge: 96 | for partial_result in bridge.Run(function_call): 97 | for raw_log in partial_result.logs: 98 | log = from_grpc(raw_log) 99 | self.log(log.message, level=log.level, source=log.source) 100 | 101 | if partial_result.is_complete: 102 | if not partial_result.result: 103 | raise AgentError( 104 | "The agent didn't return a result, but it should have." 105 | ) 106 | 107 | return cast(CallResultType, from_grpc(partial_result.result)) 108 | 109 | raise AgentError( 110 | "No result object was received from the agent " 111 | "(it never set is_complete to True)." 112 | ) 113 | 114 | 115 | class LocalPythonGRPC(PythonExecutionBase[str], GRPCExecutionBase): 116 | @contextmanager 117 | def start_agent(self) -> Iterator[Tuple[str, grpc.ChannelCredentials]]: 118 | def find_free_port() -> Tuple[str, int]: 119 | """Find a free port in the system.""" 120 | with socket.socket() as _temp_socket: 121 | _temp_socket.bind(("", 0)) 122 | return _temp_socket.getsockname() 123 | 124 | host, port = find_free_port() 125 | address = f"{host}:{port}" 126 | process = None 127 | try: 128 | with self.start_process(address) as process: 129 | yield address, grpc.local_channel_credentials() 130 | finally: 131 | if process is not None: 132 | # TODO: should we check the status code here? 
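                # A possible (untested) refinement for the TODO above: check
                # process.poll() / process.returncode and report a non-zero
                # exit code through the log handler before terminating.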
133 | process.terminate() 134 | 135 | def get_python_cmd( 136 | self, 137 | executable: Path, 138 | connection: str, 139 | log_fd: int, 140 | ) -> List[Union[str, Path]]: 141 | return [ 142 | executable, 143 | agent_startup.__file__, 144 | agent.__file__, 145 | connection, 146 | "--log-fd", 147 | str(log_fd), 148 | ] 149 | 150 | def handle_agent_log( 151 | self, line: str, *, level: LogLevel, source: LogSource 152 | ) -> None: 153 | self.log(line, level=level, source=source) 154 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/agent.py: -------------------------------------------------------------------------------- 1 | # agent-requires: isolate[server] 2 | """ 3 | This file contains the implementation of the gRPC agent. The agent is a 4 | separate process that is responsible for running the user code in a 5 | sandboxed environment. 6 | 7 | This file is referenced by the latest version of the `isolate` package 8 | but then runs it in the context of the frozen agent built environment. 9 | """ 10 | 11 | from __future__ import annotations 12 | 13 | import os 14 | import sys 15 | import traceback 16 | from argparse import ArgumentParser 17 | from concurrent import futures 18 | from dataclasses import dataclass 19 | from typing import ( 20 | Any, 21 | Iterable, 22 | Iterator, 23 | ) 24 | 25 | import grpc 26 | from grpc import ServicerContext, StatusCode 27 | 28 | try: 29 | from isolate import __version__ as agent_version 30 | except ImportError: 31 | agent_version = "UNKNOWN" 32 | 33 | from isolate.backends.common import sha256_digest_of 34 | from isolate.connections.common import SerializationError, serialize_object 35 | from isolate.connections.grpc import definitions 36 | from isolate.connections.grpc.configuration import get_default_options 37 | from isolate.connections.grpc.interface import from_grpc 38 | 39 | 40 | @dataclass 41 | class AbortException(Exception): 42 | message: str 43 | 44 | 45 | class AgentServicer(definitions.AgentServicer): 46 | def __init__(self, log_fd: int | None = None): 47 | super().__init__() 48 | 49 | self._run_cache: dict[str, Any] = {} 50 | self._log = sys.stdout if log_fd is None else os.fdopen(log_fd, "w") 51 | 52 | def Run( 53 | self, 54 | request: definitions.FunctionCall, 55 | context: ServicerContext, 56 | ) -> Iterator[definitions.PartialRunResult]: 57 | self.log(f"A connection has been established: {context.peer()}!") 58 | server_version = os.getenv("ISOLATE_SERVER_VERSION") or "unknown" 59 | self.log(f"Isolate info: server {server_version}, agent {agent_version}") 60 | 61 | extra_args = [] 62 | if request.HasField("setup_func"): 63 | cache_key = sha256_digest_of( 64 | request.setup_func.definition, 65 | request.setup_func.method, 66 | ) 67 | if cache_key not in self._run_cache: 68 | try: 69 | ( 70 | result, 71 | was_it_raised, 72 | stringized_tb, 73 | ) = self.execute_function( 74 | request.setup_func, 75 | "setup", 76 | ) 77 | 78 | if was_it_raised: 79 | self.log( 80 | "The setup function has thrown an error. Aborting the run." 
81 | ) 82 | yield self.send_object( 83 | request.setup_func.method, 84 | result, 85 | was_it_raised, 86 | stringized_tb, 87 | ) 88 | raise AbortException("The setup function has thrown an error.") 89 | except AbortException as exc: 90 | return self.abort_with_msg(context, exc.message) 91 | else: 92 | assert not was_it_raised 93 | self._run_cache[cache_key] = result 94 | 95 | extra_args.append(self._run_cache[cache_key]) 96 | 97 | try: 98 | result, was_it_raised, stringized_tb = self.execute_function( 99 | request.function, 100 | "function", 101 | extra_args=extra_args, 102 | ) 103 | yield self.send_object( 104 | request.function.method, 105 | result, 106 | was_it_raised, 107 | stringized_tb, 108 | ) 109 | except AbortException as exc: 110 | return self.abort_with_msg(context, exc.message) 111 | 112 | def execute_function( 113 | self, 114 | function: definitions.SerializedObject, 115 | function_kind: str, 116 | *, 117 | extra_args: Iterable[Any] = (), 118 | ) -> tuple[Any, bool, str | None]: 119 | if function.was_it_raised: 120 | raise AbortException( 121 | f"The {function_kind} function must be callable, " 122 | "not a raised exception." 123 | ) 124 | 125 | try: 126 | # TODO: technically any sort of exception could be raised here, since 127 | # depickling is basically involves code execution from the *user*. 128 | function = from_grpc(function) 129 | except SerializationError: 130 | traceback.print_exc() 131 | raise AbortException( 132 | f"The {function_kind} function could not be deserialized." 133 | ) 134 | 135 | if not callable(function): 136 | raise AbortException( 137 | f"The {function_kind} function must be callable, " 138 | f"not {type(function).__name__}." 139 | ) 140 | 141 | self.log(f"Starting the execution of the {function_kind} function.") 142 | 143 | was_it_raised = False 144 | stringized_tb = None 145 | try: 146 | result = function(*extra_args) 147 | except BaseException as exc: 148 | result = exc 149 | was_it_raised = True 150 | num_frames = len(traceback.extract_stack()[:-5]) 151 | stringized_tb = "".join(traceback.format_exc(limit=-num_frames)) 152 | 153 | self.log(f"Completed the execution of the {function_kind} function.") 154 | return result, was_it_raised, stringized_tb 155 | 156 | def send_object( 157 | self, 158 | serialization_method: str, 159 | result: object, 160 | was_it_raised: bool, 161 | stringized_tb: str | None, 162 | ) -> definitions.PartialRunResult: 163 | try: 164 | definition = serialize_object(serialization_method, result) 165 | except SerializationError: 166 | if stringized_tb: 167 | print(stringized_tb, file=sys.stderr) 168 | self.log(traceback.format_exc()) 169 | raise AbortException( 170 | "Error while serializing the execution result " 171 | f"(object of type {type(result)})." 172 | ) 173 | except BaseException: 174 | self.log(traceback.format_exc()) 175 | raise AbortException( 176 | "An unexpected error occurred while serializing the result." 
177 | ) 178 | 179 | self.log("Sending the result.") 180 | serialized_obj = definitions.SerializedObject( 181 | method=serialization_method, 182 | definition=definition, 183 | was_it_raised=was_it_raised, 184 | stringized_traceback=stringized_tb, 185 | ) 186 | return definitions.PartialRunResult( 187 | result=serialized_obj, 188 | is_complete=True, 189 | logs=[], 190 | ) 191 | 192 | def log(self, message: str) -> None: 193 | self._log.write(message + "\n") 194 | self._log.flush() 195 | 196 | def abort_with_msg( 197 | self, 198 | context: ServicerContext, 199 | message: str, 200 | *, 201 | code: StatusCode = StatusCode.INVALID_ARGUMENT, 202 | ) -> None: 203 | context.set_code(code) 204 | context.set_details(message) 205 | return None 206 | 207 | 208 | def create_server(address: str) -> grpc.Server: 209 | """Create a new (temporary) gRPC server listening on the given 210 | address.""" 211 | server = grpc.server( 212 | futures.ThreadPoolExecutor(max_workers=1), 213 | maximum_concurrent_rpcs=1, 214 | options=get_default_options(), 215 | ) 216 | 217 | # Local server credentials allow us to ensure that the 218 | # connection is established by a local process. 219 | server_credentials = grpc.local_server_credentials() 220 | server.add_secure_port(address, server_credentials) 221 | return server 222 | 223 | 224 | def run_agent(address: str, log_fd: int | None = None) -> int: 225 | """Run the agent servicer on the given address.""" 226 | server = create_server(address) 227 | servicer = AgentServicer(log_fd=log_fd) 228 | 229 | # This function just calls some methods on the server 230 | # and register a generic handler for the bridge. It does 231 | # not have any global side effects. 232 | definitions.register_agent(servicer, server) 233 | 234 | server.start() 235 | server.wait_for_termination() 236 | return 0 237 | 238 | 239 | def main() -> int: 240 | parser = ArgumentParser() 241 | parser.add_argument("address", type=str) 242 | parser.add_argument("--log-fd", type=int) 243 | 244 | options = parser.parse_args() 245 | return run_agent(options.address, log_fd=options.log_fd) 246 | 247 | 248 | if __name__ == "__main__": 249 | main() 250 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/configuration.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import os 3 | 4 | _GRPC_OPTION_PREFIX = "ISOLATE_GRPC_CALL_" 5 | 6 | 7 | def get_default_options(): 8 | """Return the default list of GRPC call options (both for 9 | server and client) which are set via environment variables. 10 | 11 | Each environment variable starting with `ISOLATE_GRPC_CALL_` 12 | will be converted to a GRPC option. The name of the option 13 | will be the name of the environment variable, with the 14 | `ISOLATE_GRPC_CALL_` prefix removed and converted to lowercase. 
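    For example (illustrative values), ISOLATE_GRPC_CALL_MAX_RECEIVE_MESSAGE_LENGTH=1048576
    becomes the option ("grpc.max_receive_message_length", 1048576); the raw
    value is parsed with ast.literal_eval, so it keeps its Python type.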
15 | """ 16 | 17 | options = [] 18 | for raw_key, raw_value in os.environ.items(): 19 | if raw_key.startswith(_GRPC_OPTION_PREFIX): 20 | field = raw_key[len(_GRPC_OPTION_PREFIX) :].lower() 21 | value = ast.literal_eval(raw_value) 22 | options.append((f"grpc.{field}", value)) 23 | return options 24 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/__init__.py: -------------------------------------------------------------------------------- 1 | from google.protobuf.message import Message # noqa: F401 2 | 3 | from isolate.connections.grpc.definitions.agent_pb2 import * # noqa: F403 4 | from isolate.connections.grpc.definitions.agent_pb2_grpc import ( # noqa: F401 5 | AgentServicer, 6 | AgentStub, 7 | ) 8 | from isolate.connections.grpc.definitions.agent_pb2_grpc import ( # noqa: F401 9 | add_AgentServicer_to_server as register_agent, 10 | ) 11 | from isolate.connections.grpc.definitions.common_pb2 import * # noqa: F403 12 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/agent.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "common.proto"; 4 | 5 | service Agent { 6 | // Start running the given function, and stream results back. 7 | rpc Run (FunctionCall) returns (stream PartialRunResult) {} 8 | } 9 | 10 | message FunctionCall { 11 | // The function to execute and return the results to. 12 | SerializedObject function = 1; 13 | // Optionally the setup function which will be passed 14 | // as the first argument to the given function. This 15 | // has to be an idempotent step since the result for 16 | // this executable will be cached. 17 | optional SerializedObject setup_func = 2; 18 | } 19 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/agent_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: agent.proto 4 | # Protobuf Python Version: 4.25.1 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from isolate.connections.grpc.definitions import common_pb2 as common__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x61gent.proto\x1a\x0c\x63ommon.proto\"n\n\x0c\x46unctionCall\x12#\n\x08\x66unction\x18\x01 \x01(\x0b\x32\x11.SerializedObject\x12*\n\nsetup_func\x18\x02 \x01(\x0b\x32\x11.SerializedObjectH\x00\x88\x01\x01\x42\r\n\x0b_setup_func24\n\x05\x41gent\x12+\n\x03Run\x12\r.FunctionCall\x1a\x11.PartialRunResult\"\x00\x30\x01\x62\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'agent_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | DESCRIPTOR._options = None 25 | _globals['_FUNCTIONCALL']._serialized_start=29 26 | _globals['_FUNCTIONCALL']._serialized_end=139 27 | _globals['_AGENT']._serialized_start=141 28 | _globals['_AGENT']._serialized_end=193 29 | # @@protoc_insertion_point(module_scope) 30 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/agent_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 3 | isort:skip_file 4 | """ 5 | import builtins 6 | from isolate.connections.grpc.definitions import common_pb2 7 | import google.protobuf.descriptor 8 | import google.protobuf.message 9 | import sys 10 | 11 | if sys.version_info >= (3, 8): 12 | import typing as typing_extensions 13 | else: 14 | import typing_extensions 15 | 16 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 17 | 18 | @typing_extensions.final 19 | class FunctionCall(google.protobuf.message.Message): 20 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 21 | 22 | FUNCTION_FIELD_NUMBER: builtins.int 23 | SETUP_FUNC_FIELD_NUMBER: builtins.int 24 | @property 25 | def function(self) -> common_pb2.SerializedObject: 26 | """The function to execute and return the results to.""" 27 | @property 28 | def setup_func(self) -> common_pb2.SerializedObject: 29 | """Optionally the setup function which will be passed 30 | as the first argument to the given function. This 31 | has to be an idempotent step since the result for 32 | this executable will be cached. 33 | """ 34 | def __init__( 35 | self, 36 | *, 37 | function: common_pb2.SerializedObject | None = ..., 38 | setup_func: common_pb2.SerializedObject | None = ..., 39 | ) -> None: ... 40 | def HasField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "function", b"function", "setup_func", b"setup_func"]) -> builtins.bool: ... 41 | def ClearField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "function", b"function", "setup_func", b"setup_func"]) -> None: ... 42 | def WhichOneof(self, oneof_group: typing_extensions.Literal["_setup_func", b"_setup_func"]) -> typing_extensions.Literal["setup_func"] | None: ... 
43 | 44 | global___FunctionCall = FunctionCall 45 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/agent_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from isolate.connections.grpc.definitions import agent_pb2 as agent__pb2 6 | from isolate.connections.grpc.definitions import common_pb2 as common__pb2 7 | 8 | 9 | class AgentStub(object): 10 | """Missing associated documentation comment in .proto file.""" 11 | 12 | def __init__(self, channel): 13 | """Constructor. 14 | 15 | Args: 16 | channel: A grpc.Channel. 17 | """ 18 | self.Run = channel.unary_stream( 19 | '/Agent/Run', 20 | request_serializer=agent__pb2.FunctionCall.SerializeToString, 21 | response_deserializer=common__pb2.PartialRunResult.FromString, 22 | ) 23 | 24 | 25 | class AgentServicer(object): 26 | """Missing associated documentation comment in .proto file.""" 27 | 28 | def Run(self, request, context): 29 | """Start running the given function, and stream results back. 30 | """ 31 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 32 | context.set_details('Method not implemented!') 33 | raise NotImplementedError('Method not implemented!') 34 | 35 | 36 | def add_AgentServicer_to_server(servicer, server): 37 | rpc_method_handlers = { 38 | 'Run': grpc.unary_stream_rpc_method_handler( 39 | servicer.Run, 40 | request_deserializer=agent__pb2.FunctionCall.FromString, 41 | response_serializer=common__pb2.PartialRunResult.SerializeToString, 42 | ), 43 | } 44 | generic_handler = grpc.method_handlers_generic_handler( 45 | 'Agent', rpc_method_handlers) 46 | server.add_generic_rpc_handlers((generic_handler,)) 47 | 48 | 49 | # This class is part of an EXPERIMENTAL API. 50 | class Agent(object): 51 | """Missing associated documentation comment in .proto file.""" 52 | 53 | @staticmethod 54 | def Run(request, 55 | target, 56 | options=(), 57 | channel_credentials=None, 58 | call_credentials=None, 59 | insecure=False, 60 | compression=None, 61 | wait_for_ready=None, 62 | timeout=None, 63 | metadata=None): 64 | return grpc.experimental.unary_stream(request, target, '/Agent/Run', 65 | agent__pb2.FunctionCall.SerializeToString, 66 | common__pb2.PartialRunResult.FromString, 67 | options, channel_credentials, 68 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 69 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/common.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "google/protobuf/timestamp.proto"; 4 | 5 | message SerializedObject { 6 | // The serialization method used to serialize the the raw_object. Must be 7 | // present in the environment that is running the agent itself. 8 | string method = 1; 9 | // The Python object serialized with the method above. 10 | bytes definition = 2; 11 | // A flag indicating whether the given object was raised (e.g. an exception 12 | // that was captured) or not. 13 | bool was_it_raised = 3; 14 | // The stringized version of the traceback, if it was raised. 15 | optional string stringized_traceback = 4; 16 | } 17 | 18 | message PartialRunResult { 19 | // A flag indicating whether the run has completed. 
20 | bool is_complete = 1; 21 | // A list of logs collected during this partial execution. It does 22 | // not include old logs. 23 | repeated Log logs = 2; 24 | // The result of the run, if it is complete. 25 | optional SerializedObject result = 3; 26 | } 27 | 28 | message Log { 29 | string message = 1; 30 | LogSource source = 2; 31 | LogLevel level = 3; 32 | google.protobuf.Timestamp timestamp = 4; 33 | } 34 | 35 | enum LogSource { 36 | BUILDER = 0; 37 | BRIDGE = 1; 38 | USER = 2; 39 | } 40 | 41 | enum LogLevel { 42 | TRACE = 0; 43 | DEBUG = 1; 44 | INFO = 2; 45 | WARNING = 3; 46 | ERROR = 4; 47 | STDOUT = 5; 48 | STDERR = 6; 49 | } 50 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/common_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: common.proto 4 | # Protobuf Python Version: 4.25.1 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x63ommon.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x89\x01\n\x10SerializedObject\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x12\n\ndefinition\x18\x02 \x01(\x0c\x12\x15\n\rwas_it_raised\x18\x03 \x01(\x08\x12!\n\x14stringized_traceback\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x17\n\x15_stringized_traceback\"n\n\x10PartialRunResult\x12\x13\n\x0bis_complete\x18\x01 \x01(\x08\x12\x12\n\x04logs\x18\x02 \x03(\x0b\x32\x04.Log\x12&\n\x06result\x18\x03 \x01(\x0b\x32\x11.SerializedObjectH\x00\x88\x01\x01\x42\t\n\x07_result\"{\n\x03Log\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x1a\n\x06source\x18\x02 \x01(\x0e\x32\n.LogSource\x12\x18\n\x05level\x18\x03 \x01(\x0e\x32\t.LogLevel\x12-\n\ttimestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp*.\n\tLogSource\x12\x0b\n\x07\x42UILDER\x10\x00\x12\n\n\x06\x42RIDGE\x10\x01\x12\x08\n\x04USER\x10\x02*Z\n\x08LogLevel\x12\t\n\x05TRACE\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x08\n\x04INFO\x10\x02\x12\x0b\n\x07WARNING\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x12\n\n\x06STDOUT\x10\x05\x12\n\n\x06STDERR\x10\x06\x62\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'common_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | DESCRIPTOR._options = None 25 | _globals['_LOGSOURCE']._serialized_start=426 26 | _globals['_LOGSOURCE']._serialized_end=472 27 | _globals['_LOGLEVEL']._serialized_start=474 28 | _globals['_LOGLEVEL']._serialized_end=564 29 | _globals['_SERIALIZEDOBJECT']._serialized_start=50 30 | _globals['_SERIALIZEDOBJECT']._serialized_end=187 31 | _globals['_PARTIALRUNRESULT']._serialized_start=189 32 | _globals['_PARTIALRUNRESULT']._serialized_end=299 33 | _globals['_LOG']._serialized_start=301 34 | _globals['_LOG']._serialized_end=424 35 | # @@protoc_insertion_point(module_scope) 36 | -------------------------------------------------------------------------------- 
/src/isolate/connections/grpc/definitions/common_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 3 | isort:skip_file 4 | """ 5 | import builtins 6 | import collections.abc 7 | import google.protobuf.descriptor 8 | import google.protobuf.internal.containers 9 | import google.protobuf.internal.enum_type_wrapper 10 | import google.protobuf.message 11 | import google.protobuf.timestamp_pb2 12 | import sys 13 | import typing 14 | 15 | if sys.version_info >= (3, 10): 16 | import typing as typing_extensions 17 | else: 18 | import typing_extensions 19 | 20 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 21 | 22 | class _LogSource: 23 | ValueType = typing.NewType("ValueType", builtins.int) 24 | V: typing_extensions.TypeAlias = ValueType 25 | 26 | class _LogSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogSource.ValueType], builtins.type): 27 | DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor 28 | BUILDER: _LogSource.ValueType # 0 29 | BRIDGE: _LogSource.ValueType # 1 30 | USER: _LogSource.ValueType # 2 31 | 32 | class LogSource(_LogSource, metaclass=_LogSourceEnumTypeWrapper): ... 33 | 34 | BUILDER: LogSource.ValueType # 0 35 | BRIDGE: LogSource.ValueType # 1 36 | USER: LogSource.ValueType # 2 37 | global___LogSource = LogSource 38 | 39 | class _LogLevel: 40 | ValueType = typing.NewType("ValueType", builtins.int) 41 | V: typing_extensions.TypeAlias = ValueType 42 | 43 | class _LogLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogLevel.ValueType], builtins.type): 44 | DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor 45 | TRACE: _LogLevel.ValueType # 0 46 | DEBUG: _LogLevel.ValueType # 1 47 | INFO: _LogLevel.ValueType # 2 48 | WARNING: _LogLevel.ValueType # 3 49 | ERROR: _LogLevel.ValueType # 4 50 | STDOUT: _LogLevel.ValueType # 5 51 | STDERR: _LogLevel.ValueType # 6 52 | 53 | class LogLevel(_LogLevel, metaclass=_LogLevelEnumTypeWrapper): ... 54 | 55 | TRACE: LogLevel.ValueType # 0 56 | DEBUG: LogLevel.ValueType # 1 57 | INFO: LogLevel.ValueType # 2 58 | WARNING: LogLevel.ValueType # 3 59 | ERROR: LogLevel.ValueType # 4 60 | STDOUT: LogLevel.ValueType # 5 61 | STDERR: LogLevel.ValueType # 6 62 | global___LogLevel = LogLevel 63 | 64 | @typing_extensions.final 65 | class SerializedObject(google.protobuf.message.Message): 66 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 67 | 68 | METHOD_FIELD_NUMBER: builtins.int 69 | DEFINITION_FIELD_NUMBER: builtins.int 70 | WAS_IT_RAISED_FIELD_NUMBER: builtins.int 71 | STRINGIZED_TRACEBACK_FIELD_NUMBER: builtins.int 72 | method: builtins.str 73 | """The serialization method used to serialize the the raw_object. Must be 74 | present in the environment that is running the agent itself. 75 | """ 76 | definition: builtins.bytes 77 | """The Python object serialized with the method above.""" 78 | was_it_raised: builtins.bool 79 | """A flag indicating whether the given object was raised (e.g. an exception 80 | that was captured) or not. 81 | """ 82 | stringized_traceback: builtins.str 83 | """The stringized version of the traceback, if it was raised.""" 84 | def __init__( 85 | self, 86 | *, 87 | method: builtins.str = ..., 88 | definition: builtins.bytes = ..., 89 | was_it_raised: builtins.bool = ..., 90 | stringized_traceback: builtins.str | None = ..., 91 | ) -> None: ... 
92 | def HasField(self, field_name: typing_extensions.Literal["_stringized_traceback", b"_stringized_traceback", "stringized_traceback", b"stringized_traceback"]) -> builtins.bool: ... 93 | def ClearField(self, field_name: typing_extensions.Literal["_stringized_traceback", b"_stringized_traceback", "definition", b"definition", "method", b"method", "stringized_traceback", b"stringized_traceback", "was_it_raised", b"was_it_raised"]) -> None: ... 94 | def WhichOneof(self, oneof_group: typing_extensions.Literal["_stringized_traceback", b"_stringized_traceback"]) -> typing_extensions.Literal["stringized_traceback"] | None: ... 95 | 96 | global___SerializedObject = SerializedObject 97 | 98 | @typing_extensions.final 99 | class PartialRunResult(google.protobuf.message.Message): 100 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 101 | 102 | IS_COMPLETE_FIELD_NUMBER: builtins.int 103 | LOGS_FIELD_NUMBER: builtins.int 104 | RESULT_FIELD_NUMBER: builtins.int 105 | is_complete: builtins.bool 106 | """A flag indicating whether the run has completed.""" 107 | @property 108 | def logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Log]: 109 | """A list of logs collected during this partial execution. It does 110 | not include old logs. 111 | """ 112 | @property 113 | def result(self) -> global___SerializedObject: 114 | """The result of the run, if it is complete.""" 115 | def __init__( 116 | self, 117 | *, 118 | is_complete: builtins.bool = ..., 119 | logs: collections.abc.Iterable[global___Log] | None = ..., 120 | result: global___SerializedObject | None = ..., 121 | ) -> None: ... 122 | def HasField(self, field_name: typing_extensions.Literal["_result", b"_result", "result", b"result"]) -> builtins.bool: ... 123 | def ClearField(self, field_name: typing_extensions.Literal["_result", b"_result", "is_complete", b"is_complete", "logs", b"logs", "result", b"result"]) -> None: ... 124 | def WhichOneof(self, oneof_group: typing_extensions.Literal["_result", b"_result"]) -> typing_extensions.Literal["result"] | None: ... 125 | 126 | global___PartialRunResult = PartialRunResult 127 | 128 | @typing_extensions.final 129 | class Log(google.protobuf.message.Message): 130 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 131 | 132 | MESSAGE_FIELD_NUMBER: builtins.int 133 | SOURCE_FIELD_NUMBER: builtins.int 134 | LEVEL_FIELD_NUMBER: builtins.int 135 | TIMESTAMP_FIELD_NUMBER: builtins.int 136 | message: builtins.str 137 | source: global___LogSource.ValueType 138 | level: global___LogLevel.ValueType 139 | @property 140 | def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... 141 | def __init__( 142 | self, 143 | *, 144 | message: builtins.str = ..., 145 | source: global___LogSource.ValueType = ..., 146 | level: global___LogLevel.ValueType = ..., 147 | timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., 148 | ) -> None: ... 149 | def HasField(self, field_name: typing_extensions.Literal["timestamp", b"timestamp"]) -> builtins.bool: ... 150 | def ClearField(self, field_name: typing_extensions.Literal["level", b"level", "message", b"message", "source", b"source", "timestamp", b"timestamp"]) -> None: ... 151 | 152 | global___Log = Log 153 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/definitions/common_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | -------------------------------------------------------------------------------- /src/isolate/connections/grpc/interface.py: -------------------------------------------------------------------------------- 1 | """A common gRPC interface for both the gRPC connection implementation 2 | and the Isolate Server to share.""" 3 | 4 | import functools 5 | from typing import Any, Optional 6 | 7 | from isolate.common import timestamp 8 | from isolate.connections.common import load_serialized_object, serialize_object 9 | from isolate.connections.grpc import definitions 10 | from isolate.logs import Log, LogLevel, LogSource 11 | 12 | 13 | @functools.singledispatch 14 | def from_grpc(message: definitions.Message) -> Any: 15 | """Materialize a gRPC message into a Python object.""" 16 | wrong_type = type(message).__name__ 17 | raise NotImplementedError(f"Can't convert {wrong_type} to a Python object.") 18 | 19 | 20 | @functools.singledispatch 21 | def to_grpc(obj: Any) -> definitions.Message: 22 | """Convert a Python object into a gRPC message.""" 23 | wrong_type = type(obj).__name__ 24 | raise NotImplementedError(f"Cannot convert {wrong_type} to a gRPC message.") 25 | 26 | 27 | @from_grpc.register 28 | def _(message: definitions.SerializedObject) -> Any: 29 | return load_serialized_object( 30 | message.method, 31 | message.definition, 32 | was_it_raised=message.was_it_raised, 33 | stringized_traceback=message.stringized_traceback, 34 | ) 35 | 36 | 37 | @from_grpc.register 38 | def _(message: definitions.Log) -> Log: 39 | source = LogSource(definitions.LogSource.Name(message.source).lower()) 40 | level = LogLevel[definitions.LogLevel.Name(message.level).upper()] 41 | return Log( 42 | message=message.message, 43 | source=source, 44 | level=level, 45 | timestamp=timestamp.to_datetime(message.timestamp), 46 | ) 47 | 48 | 49 | @to_grpc.register 50 | def _(obj: Log) -> definitions.Log: 51 | return definitions.Log( 52 | message=obj.message, 53 | source=definitions.LogSource.Value(obj.source.name.upper()), 54 | level=definitions.LogLevel.Value(obj.level.name.upper()), 55 | timestamp=timestamp.from_datetime(obj.timestamp), 56 | ) 57 | 58 | 59 | def to_serialized_object( 60 | obj: Any, 61 | method: str, 62 | was_it_raised: bool = False, 63 | stringized_traceback: Optional[str] = None, 64 | ) -> definitions.SerializedObject: 65 | """Convert a Python object into a gRPC message.""" 66 | return definitions.SerializedObject( 67 | method=method, 68 | definition=serialize_object(method, obj), 69 | was_it_raised=was_it_raised, 70 | stringized_traceback=stringized_traceback, 71 | ) 72 | -------------------------------------------------------------------------------- /src/isolate/connections/ipc/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.connections.ipc._base import ( # noqa: F401 2 | IsolatedProcessConnection, 3 | PythonExecutionBase, 4 | PythonIPC, 5 | ) 6 | -------------------------------------------------------------------------------- /src/isolate/connections/ipc/_base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import base64 4 | import importlib 5 | import subprocess 6 | import time 7 | from contextlib import ExitStack, closing 8 | from dataclasses import dataclass 9 | from multiprocessing.connection import Connection, Listener 10 | from pathlib import Path 11 | 
from typing import ( 12 | TYPE_CHECKING, 13 | Any, 14 | Callable, 15 | ContextManager, 16 | ) 17 | 18 | from isolate.backends import ( 19 | BasicCallable, 20 | CallResultType, 21 | EnvironmentConnection, 22 | ) 23 | from isolate.connections._local import PythonExecutionBase, agent_startup 24 | from isolate.connections.common import prepare_exc 25 | from isolate.connections.ipc import agent 26 | from isolate.logs import LogLevel, LogSource 27 | 28 | if TYPE_CHECKING: 29 | # Somhow mypy can't figure out that `ConnectionWrapper` 30 | # really exists. 31 | class ConnectionWrapper(Connection): 32 | def __init__( 33 | self, 34 | connection: Any, 35 | loads: Callable[[bytes], Any], 36 | dumps: Callable[[Any], bytes], 37 | ) -> None: ... 38 | 39 | def recv(self) -> Any: ... 40 | 41 | def send(self, value: Any) -> None: ... 42 | 43 | def close(self) -> None: ... 44 | 45 | else: 46 | from multiprocessing.connection import ConnectionWrapper 47 | 48 | 49 | class AgentListener(Listener): 50 | """A custom listener that can use any available serialization method 51 | to communicate with the child process.""" 52 | 53 | def __init__(self, backend_name: str, *args: Any, **kwargs: Any) -> None: 54 | self.serialization_backend = loadserialization_method(backend_name) 55 | super().__init__(*args, **kwargs) 56 | 57 | def accept(self) -> Connection: 58 | return ConnectionWrapper( 59 | super().accept(), 60 | dumps=self.serialization_backend.dumps, 61 | loads=self.serialization_backend.loads, 62 | ) 63 | 64 | 65 | def loadserialization_method(backend_name: str) -> Any: 66 | # TODO(feat): This should probably throw a better error if the 67 | # given backend does not exist. 68 | return importlib.import_module(backend_name) 69 | 70 | 71 | def encode_service_address(address: tuple[str, int]) -> str: 72 | host, port = address 73 | return base64.b64encode(f"{host}:{port}".encode()).decode("utf-8") 74 | 75 | 76 | @dataclass 77 | class IsolatedProcessConnection(EnvironmentConnection): 78 | """A generic IPC implementation for running the isolate backend 79 | in a separated process. 80 | 81 | Each implementation needs to define a start_process method to 82 | spawn the agent.""" 83 | 84 | # The amount of seconds to wait before checking whether the 85 | # isolated process has exited or not. 86 | _DEFER_THRESHOLD = 0.25 87 | 88 | def start_process( 89 | self, 90 | connection: AgentListener, 91 | *args: Any, 92 | **kwargs: Any, 93 | ) -> ContextManager[subprocess.Popen]: 94 | """Start the agent process.""" 95 | raise NotImplementedError 96 | 97 | def run( # type: ignore[return-value] 98 | self, 99 | executable: BasicCallable, 100 | *args: Any, 101 | **kwargs: Any, 102 | ) -> CallResultType: # type: ignore[type-var] 103 | """Spawn an agent process using the given environment, run the given 104 | `executable` in that process, and return the result object back.""" 105 | 106 | with ExitStack() as stack: 107 | # IPC flow is the following: 108 | # 1. [controller]: Create the socket server 109 | # 2. [controller]: Spawn the call agent with the socket address 110 | # 3. [agent]: Connect to the socket server 111 | # 4. [controller]: Accept the incoming connection request 112 | # 5. [controller]: Send the executable over the established bridge 113 | # 6. [agent]: Receive the executable from the bridge 114 | # 7. [agent]: Execute the executable and once done send the result 115 | # back 116 | # 8. 
[controller]: Loop until either the isolated process exits or sends 117 | # any data (will be interpreted as a tuple of two 118 | # mutually exclusive objects, either a result object or 119 | # an exception to be raised). 120 | # 121 | 122 | self.log("Starting the controller bridge.") 123 | controller_service = stack.enter_context( 124 | AgentListener( 125 | self.environment.settings.serialization_method, 126 | family="AF_INET", 127 | ) 128 | ) 129 | 130 | self.log( 131 | f"Controller server is listening at {controller_service.address}." 132 | " Attempting to start the agent process." 133 | ) 134 | assert not (args or kwargs), "run() should not receive any arguments." 135 | isolated_process = stack.enter_context( 136 | self.start_process(controller_service, *args, **kwargs) 137 | ) 138 | 139 | # TODO(fix): this might hang if the agent process crashes before it can 140 | # connect to the controller bridge. 141 | self.log( 142 | f"Awaiting agent process of {isolated_process.pid}" 143 | " to establish a connection." 144 | ) 145 | established_connection = stack.enter_context( 146 | closing(controller_service.accept()) 147 | ) 148 | 149 | self.log("Bridge between controller and the agent has been established.") 150 | established_connection.send(executable) 151 | 152 | self.log("Executable has been sent, awaiting execution result.") 153 | return self.poll_until_result( 154 | isolated_process, 155 | established_connection, 156 | ) 157 | 158 | def poll_until_result( 159 | self, 160 | process: subprocess.Popen, 161 | connection: Connection, 162 | ) -> CallResultType: # type: ignore[type-var] 163 | """Take the given process, and poll until either it exits or returns 164 | a result object.""" 165 | 166 | while not connection.poll(): 167 | # Normally, if we do connection.read() without having this loop 168 | # it is going to block us indefinitely (even if the underlying 169 | # process has crashed). We can use a combination of process.poll 170 | # and connection.poll to check if the process is alive and has data 171 | # to move forward. 172 | if process.poll(): 173 | break 174 | 175 | # For preventing busy waiting, we can sleep for a bit 176 | # and let other threads run. 177 | time.sleep(self._DEFER_THRESHOLD) 178 | continue 179 | 180 | if not connection.poll(): 181 | # If the process has exited but there is still no data, we 182 | # can assume something terrible has happened. 183 | raise OSError( 184 | "The isolated process has exited unexpectedly with code " 185 | f"'{process.poll()}' without sending any data back." 186 | ) 187 | 188 | # TODO(fix): handle EOFError that might happen here (e.g. problematic 189 | # serialization might cause it). 
190 | result, did_it_raise, stringized_traceback = connection.recv() 191 | 192 | if did_it_raise: 193 | raise prepare_exc(result, stringized_traceback=stringized_traceback) 194 | else: 195 | assert stringized_traceback is None 196 | return result 197 | 198 | 199 | @dataclass 200 | class PythonIPC(PythonExecutionBase[AgentListener], IsolatedProcessConnection): 201 | def get_python_cmd( 202 | self, 203 | executable: Path, 204 | connection: AgentListener, 205 | log_fd: int, 206 | ) -> list[str | Path]: 207 | assert isinstance(connection.address, tuple) 208 | return [ 209 | executable, 210 | agent_startup.__file__, 211 | agent.__file__, 212 | encode_service_address(connection.address), 213 | # TODO(feat): we probably should check if the given backend is installed 214 | # on the remote interpreter, otherwise it will fail without establishing 215 | # the connection with the bridge. 216 | "--serialization-backend", 217 | self.environment.settings.serialization_method, 218 | "--log-fd", 219 | str(log_fd), 220 | ] 221 | 222 | def handle_agent_log( 223 | self, line: str, *, level: LogLevel, source: LogSource 224 | ) -> None: 225 | self.log(line, level=level, source=source) 226 | -------------------------------------------------------------------------------- /src/isolate/connections/ipc/agent.py: -------------------------------------------------------------------------------- 1 | # This file defines an "isolate" agent for inter-process communication over 2 | # sockets. It is spawned by the controller process with a single argument (a 3 | # base64 encoded server address) and expected to go through the following procedures: 4 | # 1. Decode the given address 5 | # 2. Create a connection to the transmission bridge using the address 6 | # 3. Receive a callable object from the bridge 7 | # 4. Execute the callable object 8 | # 5. Send the result back to the bridge 9 | # 6. Exit 10 | # 11 | # Up until point 4, the agent process has no way of transmitting information 12 | # to the controller so it should use the stderr/stdout channels appropriately. After 13 | # the executable is received, the controller process will switch to the listening mode 14 | # and wait for the agent to return something. The expected object is a tuple of three items: 15 | # the actual result of the given callable, a boolean flag indicating whether the callable 16 | # raised an exception, and the stringized traceback if it did. 17 | 18 | from __future__ import annotations 19 | 20 | import base64 21 | import importlib 22 | import os 23 | import sys 24 | import time 25 | import traceback 26 | from argparse import ArgumentParser 27 | from contextlib import closing 28 | from multiprocessing.connection import Client 29 | from typing import TYPE_CHECKING, Any, Callable, ContextManager 30 | 31 | if TYPE_CHECKING: 32 | # Somehow mypy can't figure out that `ConnectionWrapper` 33 | # really exists. 34 | class ConnectionWrapper: 35 | def __init__( 36 | self, 37 | connection: Any, 38 | loads: Callable[[bytes], Any], 39 | dumps: Callable[[Any], bytes], 40 | ) -> None: ... 41 | 42 | def recv(self) -> Any: ... 43 | 44 | def send(self, value: Any) -> None: ... 45 | 46 | def close(self) -> None: ...
47 | 48 | else: 49 | from multiprocessing.connection import ConnectionWrapper 50 | 51 | 52 | def decode_service_address(address: str) -> tuple[str, int]: 53 | host, port = base64.b64decode(address).decode("utf-8").rsplit(":", 1) 54 | return host, int(port) 55 | 56 | 57 | def child_connection( 58 | serialization_method: str, address: tuple[str, int] 59 | ) -> ContextManager[ConnectionWrapper]: 60 | serialization_backend = importlib.import_module(serialization_method) 61 | return closing( 62 | ConnectionWrapper( 63 | Client(address), 64 | loads=serialization_backend.loads, 65 | dumps=serialization_backend.dumps, 66 | ) 67 | ) 68 | 69 | 70 | IS_DEBUG_MODE = os.getenv("ISOLATE_ENABLE_DEBUGGING") == "1" 71 | DEBUG_TIMEOUT = 60 * 15 72 | 73 | 74 | def run_client( 75 | serialization_method: str, 76 | address: tuple[str, int], 77 | *, 78 | with_pdb: bool = False, 79 | log_fd: int | None = None, 80 | ) -> None: 81 | # Debug Mode 82 | # ========== 83 | # 84 | # Isolated processes are really tricky to debug properly 85 | # so we want to have a smooth way into the process and see 86 | # what is really going on in the case of errors. 87 | # 88 | # To use the debug mode, you first need to set the ISOLATE_ENABLE_DEBUGGING 89 | # environment variable to "1" from your controller process. This will 90 | # make the isolated process hang at initialization, and make it print 91 | # the instructions to connect to the controller process. 92 | # 93 | # On a separate shell (while letting the controller process hang), you can 94 | # execute the given command to drop into the PDB (Python Debugger). With that 95 | # you can observe each step of the connection and run process. 96 | 97 | if with_pdb: 98 | # This condition will only be activated if we want to 99 | # debug the isolated process by passing the --with-pdb 100 | # flag when executing the binary. 101 | import pdb 102 | 103 | pdb.set_trace() 104 | 105 | if log_fd is None: 106 | _log = sys.stdout 107 | else: 108 | _log = os.fdopen(log_fd, "w") 109 | 110 | def log(_msg): 111 | _log.write(_msg) 112 | _log.flush() 113 | 114 | log(f"Trying to create a connection to {address}") 115 | # TODO(feat): this should probably run in a loop instead of 116 | # receiving a single function and then exiting immediately. 117 | with child_connection(serialization_method, address) as connection: 118 | log(f"Created child connection to {address}") 119 | callable = connection.recv() 120 | log(f"Received the callable at {address}") 121 | 122 | result = None 123 | did_it_raise = False 124 | stringized_tb = None 125 | try: 126 | result = callable() 127 | except BaseException as exc: 128 | result = exc 129 | did_it_raise = True 130 | num_frames = len(traceback.extract_stack()[:-4]) 131 | stringized_tb = "".join(traceback.format_exc(limit=-num_frames)) 132 | finally: 133 | try: 134 | connection.send((result, did_it_raise, stringized_tb)) 135 | except BaseException: 136 | if did_it_raise: 137 | # If we can't even send it through the connection, 138 | # still try to dump it to stderr as a last 139 | # resort. 140 | assert isinstance(result, BaseException) 141 | traceback.print_exception( 142 | type(result), 143 | result, 144 | result.__traceback__, 145 | ) 146 | raise 147 | 148 | 149 | def _get_shell_bootstrap() -> str: 150 | # Return a string that contains environment variables that 151 | # might be used during the isolated hook's execution.
152 | return " ".join( 153 | f"{session_variable}={os.getenv(session_variable)}" 154 | for session_variable in [ 155 | # PYTHONPATH is customized by the Extended Environment IPC 156 | # system to make sure that the isolated process can 157 | # import stuff from the primary environment. Without this 158 | # the isolated process will not be able to run properly 159 | # on the newly created debug session. 160 | "PYTHONPATH", 161 | ] 162 | if session_variable in os.environ 163 | ) 164 | 165 | 166 | def main() -> int: 167 | parser = ArgumentParser() 168 | parser.add_argument("listen_at") 169 | parser.add_argument("--with-pdb", action="store_true", default=False) 170 | parser.add_argument("--serialization-backend", default="pickle") 171 | parser.add_argument("--log-fd", type=int) 172 | 173 | options = parser.parse_args() 174 | if IS_DEBUG_MODE: 175 | assert not options.with_pdb, "--with-pdb can't be used in the debug mode" 176 | message = "=" * 60 177 | message += "\n" * 3 178 | message += ( 179 | "Debug mode successfully activated. " 180 | "You can start your debugging session with the following command:\n" 181 | ) 182 | message += ( 183 | f" $ {_get_shell_bootstrap()}\\\n " 184 | f"{sys.executable} {os.path.abspath(__file__)} " 185 | f"--serialization-backend {options.serialization_backend} " 186 | f"--with-pdb {options.listen_at}" 187 | ) 188 | message += "\n" * 3 189 | message += "=" * 60 190 | print(message) 191 | time.sleep(DEBUG_TIMEOUT) 192 | 193 | serialization_method = options.serialization_backend 194 | address = decode_service_address(options.listen_at) 195 | run_client( 196 | serialization_method, 197 | address, 198 | with_pdb=options.with_pdb, 199 | log_fd=options.log_fd, 200 | ) 201 | return 0 202 | 203 | 204 | if __name__ == "__main__": 205 | sys.exit(main()) 206 | -------------------------------------------------------------------------------- /src/isolate/logger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from datetime import datetime, timezone 4 | from typing import Dict 5 | 6 | from isolate.logs import LogLevel, LogSource 7 | 8 | 9 | # NOTE: we probably should've created a proper `logging.getLogger` here, 10 | # but having it handle `source` would not be trivial, so we are better off 11 | # just keeping it simple for now. 12 | class IsolateLogger: 13 | extra_labels: Dict[str, str] = {} 14 | 15 | def __init__(self, log_labels: Dict[str, str]): 16 | self.log_labels = log_labels 17 | 18 | def log(self, level: LogLevel, message: str, source: LogSource) -> None: 19 | record = { 20 | # Set the timestamp at the source so we can be sure no buffering or 21 | # latency is affecting the timestamp.
22 | "logged_at": datetime.now(tz=timezone.utc).isoformat(), 23 | "isolate_source": source.name, 24 | "level": level.name, 25 | "message": message, 26 | **self.log_labels, 27 | **self.extra_labels, 28 | } 29 | print(json.dumps(record)) 30 | 31 | @classmethod 32 | def with_env_expanded(cls, labels: Dict[str, str]) -> "IsolateLogger": 33 | for key, value in labels.items(): 34 | if value.startswith("$"): 35 | expanded = os.getenv(value[1:]) 36 | else: 37 | expanded = value 38 | if expanded is not None: 39 | labels[key] = expanded 40 | 41 | return cls(labels) 42 | 43 | @classmethod 44 | def from_env(cls) -> "IsolateLogger": 45 | _labels: Dict[str, str] = {} 46 | raw = os.getenv("ISOLATE_LOG_LABELS") 47 | if raw: 48 | try: 49 | _labels = json.loads(raw) 50 | except json.JSONDecodeError: 51 | print("Failed to parse ISOLATE_LOG_LABELS") 52 | 53 | return cls.with_env_expanded(labels=_labels) 54 | -------------------------------------------------------------------------------- /src/isolate/logs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import tempfile 4 | from dataclasses import dataclass, field 5 | from datetime import datetime, timezone 6 | from enum import Enum 7 | from functools import total_ordering 8 | from pathlib import Path 9 | from typing import TYPE_CHECKING 10 | 11 | if TYPE_CHECKING: 12 | from isolate.backends import BaseEnvironment 13 | 14 | _SYSTEM_TEMP_DIR = Path(tempfile.gettempdir()) 15 | 16 | 17 | class LogSource(str, Enum): 18 | """Represents where the log originates from.""" 19 | 20 | # During the environment creation process (e.g. if the environment 21 | # is already created/cached, then no logs from this source will be 22 | # emitted). 23 | BUILDER = "builder" 24 | 25 | # During the environment execution process (from the server<->agent 26 | # communication, mostly for debugging purposes). 27 | BRIDGE = "bridge" 28 | 29 | # From the user script itself (e.g. a print() call in the given 30 | # function).
The stream will be attached as the level (stdout or stderr). 31 | USER = "user" 32 | 33 | 34 | @total_ordering 35 | class LogLevel(Enum): 36 | """Represents the log level.""" 37 | 38 | TRACE = 0 39 | DEBUG = 10 40 | INFO = 20 41 | WARNING = 30 42 | ERROR = 40 43 | 44 | # For user scripts 45 | STDOUT = 100 46 | STDERR = 110 47 | 48 | def __lt__(self, other: LogLevel) -> bool: 49 | if self.__class__ is other.__class__: 50 | return self.value < other.value 51 | return NotImplemented 52 | 53 | def __str__(self) -> str: 54 | return self.name.lower() 55 | 56 | 57 | @dataclass 58 | class Log: 59 | """A structured log message with an optional source and level.""" 60 | 61 | message: str 62 | source: LogSource 63 | level: LogLevel = LogLevel.INFO 64 | bound_env: BaseEnvironment | None = field(default=None, repr=False) 65 | timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc)) 66 | 67 | def __str__(self) -> str: 68 | parts = [self.timestamp.strftime("%m/%d/%Y %H:%M:%S")] 69 | if self.bound_env: 70 | parts.append(f"[{self.bound_env.key[:6]}]") 71 | else: 72 | parts.append("[global]") 73 | 74 | parts.append(f"[{self.source}]".ljust(10)) 75 | parts.append(f"[{self.level}]".ljust(10)) 76 | return " ".join(parts) + self.message 77 | -------------------------------------------------------------------------------- /src/isolate/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fal-ai/isolate/43cbe0d852a75229e8372c4305fa37da4aa5ec78/src/isolate/py.typed -------------------------------------------------------------------------------- /src/isolate/registry.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from typing import TYPE_CHECKING, Any 5 | 6 | if sys.version_info >= (3, 10): 7 | import importlib.metadata as importlib_metadata 8 | else: 9 | import importlib_metadata 10 | 11 | if TYPE_CHECKING: 12 | from isolate.backends import BaseEnvironment 13 | 14 | # Any new environments can register themselves during package installation 15 | # time by simply adding an entry point to the `isolate.backends` group. 16 | _ENTRY_POINT = "isolate.backends" 17 | 18 | _ENTRY_POINTS: dict[str, importlib_metadata.EntryPoint] = {} 19 | _ENVIRONMENTS: dict[str, type[BaseEnvironment]] = {} 20 | 21 | 22 | def _reload_registry() -> None: 23 | entry_points = importlib_metadata.entry_points() 24 | _ENTRY_POINTS.update( 25 | { 26 | # We are not immediately loading the backend class here 27 | # since it might cause importing modules that we won't be 28 | # using at all.
29 | entry_point.name: entry_point 30 | for entry_point in entry_points.select(group=_ENTRY_POINT) 31 | } 32 | ) 33 | 34 | 35 | _reload_registry() 36 | 37 | 38 | def prepare_environment( 39 | kind: str, 40 | **kwargs: Any, 41 | ) -> BaseEnvironment: 42 | """Get the environment for the given `kind` with the given `config`.""" 43 | from isolate.backends.settings import DEFAULT_SETTINGS 44 | 45 | if kind not in _ENVIRONMENTS: 46 | entry_point = _ENTRY_POINTS.get(kind) 47 | if entry_point is None: 48 | raise ValueError(f"Unknown environment: '{kind}'") 49 | 50 | _ENVIRONMENTS[kind] = entry_point.load() 51 | 52 | settings = kwargs.pop("context", DEFAULT_SETTINGS) 53 | return _ENVIRONMENTS[kind].from_config(config=kwargs, settings=settings) 54 | -------------------------------------------------------------------------------- /src/isolate/server/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.server.server import BridgeManager, IsolateServicer # noqa: F401 2 | -------------------------------------------------------------------------------- /src/isolate/server/definitions/__init__.py: -------------------------------------------------------------------------------- 1 | from google.protobuf.json_format import MessageToDict as struct_to_dict # noqa: F401 2 | from google.protobuf.struct_pb2 import Struct # noqa: F401 3 | 4 | # Inherit everything from the gRPC connection handler. 5 | from isolate.connections.grpc.definitions import * # noqa: F403 6 | from isolate.server.definitions.server_pb2 import * # noqa: F403 7 | from isolate.server.definitions.server_pb2_grpc import ( # noqa: F401 8 | IsolateServicer, 9 | IsolateStub, 10 | ) 11 | from isolate.server.definitions.server_pb2_grpc import ( # noqa: F401 12 | add_IsolateServicer_to_server as register_isolate, 13 | ) 14 | -------------------------------------------------------------------------------- /src/isolate/server/definitions/server.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "common.proto"; 4 | import "google/protobuf/struct.proto"; 5 | 6 | service Isolate { 7 | // Run the given function on the specified environment. Streams logs 8 | // and the result originating from that function. 9 | rpc Run (BoundFunction) returns (stream PartialRunResult) {} 10 | 11 | // Submit a function to be run without waiting for results. 12 | rpc Submit (SubmitRequest) returns (SubmitResponse) {} 13 | 14 | // Set the metadata for a task. 15 | rpc SetMetadata (SetMetadataRequest) returns (SetMetadataResponse) {} 16 | 17 | // List running tasks 18 | rpc List (ListRequest) returns (ListResponse) {} 19 | 20 | // Cancel a running task 21 | rpc Cancel (CancelRequest) returns (CancelResponse) {} 22 | } 23 | 24 | message BoundFunction { 25 | repeated EnvironmentDefinition environments = 1; 26 | SerializedObject function = 2; 27 | optional SerializedObject setup_func = 3; 28 | bool stream_logs = 4; 29 | } 30 | 31 | message EnvironmentDefinition { 32 | // Kind of the isolate environment. 33 | string kind = 1; 34 | // A free-form definition of environment properties. 35 | google.protobuf.Struct configuration = 2; 36 | // Whether to force-create this environment or not. 37 | bool force = 3; 38 | } 39 | 40 | message SubmitRequest { 41 | // The function to run. 42 | BoundFunction function = 1; 43 | // Task metadata. 44 | TaskMetadata metadata = 2; 45 | } 46 | 47 | message TaskMetadata { 48 | // Labels to attach to the logs. 
49 | map logger_labels = 1; 50 | } 51 | 52 | message SubmitResponse { 53 | string task_id = 1; 54 | } 55 | 56 | message SetMetadataRequest{ 57 | string task_id = 1; 58 | TaskMetadata metadata = 2; 59 | } 60 | 61 | message SetMetadataResponse { 62 | } 63 | 64 | message ListRequest { 65 | } 66 | 67 | message TaskInfo { 68 | string task_id = 1; 69 | } 70 | 71 | message ListResponse { 72 | repeated TaskInfo tasks = 1; 73 | } 74 | 75 | message CancelRequest { 76 | string task_id = 1; 77 | } 78 | 79 | message CancelResponse { 80 | } 81 | -------------------------------------------------------------------------------- /src/isolate/server/definitions/server_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: server.proto 4 | # Protobuf Python Version: 4.25.1 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from isolate.connections.grpc.definitions import common_pb2 as common__pb2 16 | from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cserver.proto\x1a\x0c\x63ommon.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xb2\x01\n\rBoundFunction\x12,\n\x0c\x65nvironments\x18\x01 \x03(\x0b\x32\x16.EnvironmentDefinition\x12#\n\x08\x66unction\x18\x02 \x01(\x0b\x32\x11.SerializedObject\x12*\n\nsetup_func\x18\x03 \x01(\x0b\x32\x11.SerializedObjectH\x00\x88\x01\x01\x12\x13\n\x0bstream_logs\x18\x04 \x01(\x08\x42\r\n\x0b_setup_func\"d\n\x15\x45nvironmentDefinition\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12.\n\rconfiguration\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\r\n\x05\x66orce\x18\x03 \x01(\x08\"R\n\rSubmitRequest\x12 \n\x08\x66unction\x18\x01 \x01(\x0b\x32\x0e.BoundFunction\x12\x1f\n\x08metadata\x18\x02 \x01(\x0b\x32\r.TaskMetadata\"{\n\x0cTaskMetadata\x12\x36\n\rlogger_labels\x18\x01 \x03(\x0b\x32\x1f.TaskMetadata.LoggerLabelsEntry\x1a\x33\n\x11LoggerLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"!\n\x0eSubmitResponse\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"F\n\x12SetMetadataRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x1f\n\x08metadata\x18\x02 \x01(\x0b\x32\r.TaskMetadata\"\x15\n\x13SetMetadataResponse\"\r\n\x0bListRequest\"\x1b\n\x08TaskInfo\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"(\n\x0cListResponse\x12\x18\n\x05tasks\x18\x01 \x03(\x0b\x32\t.TaskInfo\" \n\rCancelRequest\x12\x0f\n\x07task_id\x18\x01 \x01(\t\"\x10\n\x0e\x43\x61ncelResponse2\xf4\x01\n\x07Isolate\x12,\n\x03Run\x12\x0e.BoundFunction\x1a\x11.PartialRunResult\"\x00\x30\x01\x12+\n\x06Submit\x12\x0e.SubmitRequest\x1a\x0f.SubmitResponse\"\x00\x12:\n\x0bSetMetadata\x12\x13.SetMetadataRequest\x1a\x14.SetMetadataResponse\"\x00\x12%\n\x04List\x12\x0c.ListRequest\x1a\r.ListResponse\"\x00\x12+\n\x06\x43\x61ncel\x12\x0e.CancelRequest\x1a\x0f.CancelResponse\"\x00\x62\x06proto3') 20 | 21 | _globals = globals() 22 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 23 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'server_pb2', _globals) 24 | if _descriptor._USE_C_DESCRIPTORS == False: 25 | 
DESCRIPTOR._options = None 26 | _globals['_TASKMETADATA_LOGGERLABELSENTRY']._options = None 27 | _globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_options = b'8\001' 28 | _globals['_BOUNDFUNCTION']._serialized_start=61 29 | _globals['_BOUNDFUNCTION']._serialized_end=239 30 | _globals['_ENVIRONMENTDEFINITION']._serialized_start=241 31 | _globals['_ENVIRONMENTDEFINITION']._serialized_end=341 32 | _globals['_SUBMITREQUEST']._serialized_start=343 33 | _globals['_SUBMITREQUEST']._serialized_end=425 34 | _globals['_TASKMETADATA']._serialized_start=427 35 | _globals['_TASKMETADATA']._serialized_end=550 36 | _globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_start=499 37 | _globals['_TASKMETADATA_LOGGERLABELSENTRY']._serialized_end=550 38 | _globals['_SUBMITRESPONSE']._serialized_start=552 39 | _globals['_SUBMITRESPONSE']._serialized_end=585 40 | _globals['_SETMETADATAREQUEST']._serialized_start=587 41 | _globals['_SETMETADATAREQUEST']._serialized_end=657 42 | _globals['_SETMETADATARESPONSE']._serialized_start=659 43 | _globals['_SETMETADATARESPONSE']._serialized_end=680 44 | _globals['_LISTREQUEST']._serialized_start=682 45 | _globals['_LISTREQUEST']._serialized_end=695 46 | _globals['_TASKINFO']._serialized_start=697 47 | _globals['_TASKINFO']._serialized_end=724 48 | _globals['_LISTRESPONSE']._serialized_start=726 49 | _globals['_LISTRESPONSE']._serialized_end=766 50 | _globals['_CANCELREQUEST']._serialized_start=768 51 | _globals['_CANCELREQUEST']._serialized_end=800 52 | _globals['_CANCELRESPONSE']._serialized_start=802 53 | _globals['_CANCELRESPONSE']._serialized_end=818 54 | _globals['_ISOLATE']._serialized_start=821 55 | _globals['_ISOLATE']._serialized_end=1065 56 | # @@protoc_insertion_point(module_scope) 57 | -------------------------------------------------------------------------------- /src/isolate/server/definitions/server_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 3 | isort:skip_file 4 | """ 5 | import builtins 6 | import collections.abc 7 | from isolate.connections.grpc.definitions import common_pb2 8 | import google.protobuf.descriptor 9 | import google.protobuf.internal.containers 10 | import google.protobuf.message 11 | import google.protobuf.struct_pb2 12 | import sys 13 | 14 | if sys.version_info >= (3, 8): 15 | import typing as typing_extensions 16 | else: 17 | import typing_extensions 18 | 19 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 20 | 21 | @typing_extensions.final 22 | class BoundFunction(google.protobuf.message.Message): 23 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 24 | 25 | ENVIRONMENTS_FIELD_NUMBER: builtins.int 26 | FUNCTION_FIELD_NUMBER: builtins.int 27 | SETUP_FUNC_FIELD_NUMBER: builtins.int 28 | STREAM_LOGS_FIELD_NUMBER: builtins.int 29 | @property 30 | def environments(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnvironmentDefinition]: ... 31 | @property 32 | def function(self) -> common_pb2.SerializedObject: ... 33 | @property 34 | def setup_func(self) -> common_pb2.SerializedObject: ... 35 | stream_logs: builtins.bool 36 | def __init__( 37 | self, 38 | *, 39 | environments: collections.abc.Iterable[global___EnvironmentDefinition] | None = ..., 40 | function: common_pb2.SerializedObject | None = ..., 41 | setup_func: common_pb2.SerializedObject | None = ..., 42 | stream_logs: builtins.bool = ..., 43 | ) -> None: ... 
44 | def HasField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "function", b"function", "setup_func", b"setup_func"]) -> builtins.bool: ... 45 | def ClearField(self, field_name: typing_extensions.Literal["_setup_func", b"_setup_func", "environments", b"environments", "function", b"function", "setup_func", b"setup_func", "stream_logs", b"stream_logs"]) -> None: ... 46 | def WhichOneof(self, oneof_group: typing_extensions.Literal["_setup_func", b"_setup_func"]) -> typing_extensions.Literal["setup_func"] | None: ... 47 | 48 | global___BoundFunction = BoundFunction 49 | 50 | @typing_extensions.final 51 | class EnvironmentDefinition(google.protobuf.message.Message): 52 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 53 | 54 | KIND_FIELD_NUMBER: builtins.int 55 | CONFIGURATION_FIELD_NUMBER: builtins.int 56 | FORCE_FIELD_NUMBER: builtins.int 57 | kind: builtins.str 58 | """Kind of the isolate environment.""" 59 | @property 60 | def configuration(self) -> google.protobuf.struct_pb2.Struct: 61 | """A free-form definition of environment properties.""" 62 | force: builtins.bool 63 | """Whether to force-create this environment or not.""" 64 | def __init__( 65 | self, 66 | *, 67 | kind: builtins.str = ..., 68 | configuration: google.protobuf.struct_pb2.Struct | None = ..., 69 | force: builtins.bool = ..., 70 | ) -> None: ... 71 | def HasField(self, field_name: typing_extensions.Literal["configuration", b"configuration"]) -> builtins.bool: ... 72 | def ClearField(self, field_name: typing_extensions.Literal["configuration", b"configuration", "force", b"force", "kind", b"kind"]) -> None: ... 73 | 74 | global___EnvironmentDefinition = EnvironmentDefinition 75 | 76 | @typing_extensions.final 77 | class SubmitRequest(google.protobuf.message.Message): 78 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 79 | 80 | FUNCTION_FIELD_NUMBER: builtins.int 81 | METADATA_FIELD_NUMBER: builtins.int 82 | @property 83 | def function(self) -> global___BoundFunction: 84 | """The function to run.""" 85 | @property 86 | def metadata(self) -> global___TaskMetadata: 87 | """Task metadata.""" 88 | def __init__( 89 | self, 90 | *, 91 | function: global___BoundFunction | None = ..., 92 | metadata: global___TaskMetadata | None = ..., 93 | ) -> None: ... 94 | def HasField(self, field_name: typing_extensions.Literal["function", b"function", "metadata", b"metadata"]) -> builtins.bool: ... 95 | def ClearField(self, field_name: typing_extensions.Literal["function", b"function", "metadata", b"metadata"]) -> None: ... 96 | 97 | global___SubmitRequest = SubmitRequest 98 | 99 | @typing_extensions.final 100 | class TaskMetadata(google.protobuf.message.Message): 101 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 102 | 103 | @typing_extensions.final 104 | class LoggerLabelsEntry(google.protobuf.message.Message): 105 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 106 | 107 | KEY_FIELD_NUMBER: builtins.int 108 | VALUE_FIELD_NUMBER: builtins.int 109 | key: builtins.str 110 | value: builtins.str 111 | def __init__( 112 | self, 113 | *, 114 | key: builtins.str = ..., 115 | value: builtins.str = ..., 116 | ) -> None: ... 117 | def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
118 | 119 | LOGGER_LABELS_FIELD_NUMBER: builtins.int 120 | @property 121 | def logger_labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: 122 | """Labels to attach to the logs.""" 123 | def __init__( 124 | self, 125 | *, 126 | logger_labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., 127 | ) -> None: ... 128 | def ClearField(self, field_name: typing_extensions.Literal["logger_labels", b"logger_labels"]) -> None: ... 129 | 130 | global___TaskMetadata = TaskMetadata 131 | 132 | @typing_extensions.final 133 | class SubmitResponse(google.protobuf.message.Message): 134 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 135 | 136 | TASK_ID_FIELD_NUMBER: builtins.int 137 | task_id: builtins.str 138 | def __init__( 139 | self, 140 | *, 141 | task_id: builtins.str = ..., 142 | ) -> None: ... 143 | def ClearField(self, field_name: typing_extensions.Literal["task_id", b"task_id"]) -> None: ... 144 | 145 | global___SubmitResponse = SubmitResponse 146 | 147 | @typing_extensions.final 148 | class SetMetadataRequest(google.protobuf.message.Message): 149 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 150 | 151 | TASK_ID_FIELD_NUMBER: builtins.int 152 | METADATA_FIELD_NUMBER: builtins.int 153 | task_id: builtins.str 154 | @property 155 | def metadata(self) -> global___TaskMetadata: ... 156 | def __init__( 157 | self, 158 | *, 159 | task_id: builtins.str = ..., 160 | metadata: global___TaskMetadata | None = ..., 161 | ) -> None: ... 162 | def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... 163 | def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "task_id", b"task_id"]) -> None: ... 164 | 165 | global___SetMetadataRequest = SetMetadataRequest 166 | 167 | @typing_extensions.final 168 | class SetMetadataResponse(google.protobuf.message.Message): 169 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 170 | 171 | def __init__( 172 | self, 173 | ) -> None: ... 174 | 175 | global___SetMetadataResponse = SetMetadataResponse 176 | 177 | @typing_extensions.final 178 | class ListRequest(google.protobuf.message.Message): 179 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 180 | 181 | def __init__( 182 | self, 183 | ) -> None: ... 184 | 185 | global___ListRequest = ListRequest 186 | 187 | @typing_extensions.final 188 | class TaskInfo(google.protobuf.message.Message): 189 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 190 | 191 | TASK_ID_FIELD_NUMBER: builtins.int 192 | task_id: builtins.str 193 | def __init__( 194 | self, 195 | *, 196 | task_id: builtins.str = ..., 197 | ) -> None: ... 198 | def ClearField(self, field_name: typing_extensions.Literal["task_id", b"task_id"]) -> None: ... 199 | 200 | global___TaskInfo = TaskInfo 201 | 202 | @typing_extensions.final 203 | class ListResponse(google.protobuf.message.Message): 204 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 205 | 206 | TASKS_FIELD_NUMBER: builtins.int 207 | @property 208 | def tasks(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TaskInfo]: ... 209 | def __init__( 210 | self, 211 | *, 212 | tasks: collections.abc.Iterable[global___TaskInfo] | None = ..., 213 | ) -> None: ... 214 | def ClearField(self, field_name: typing_extensions.Literal["tasks", b"tasks"]) -> None: ... 
215 | 216 | global___ListResponse = ListResponse 217 | 218 | @typing_extensions.final 219 | class CancelRequest(google.protobuf.message.Message): 220 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 221 | 222 | TASK_ID_FIELD_NUMBER: builtins.int 223 | task_id: builtins.str 224 | def __init__( 225 | self, 226 | *, 227 | task_id: builtins.str = ..., 228 | ) -> None: ... 229 | def ClearField(self, field_name: typing_extensions.Literal["task_id", b"task_id"]) -> None: ... 230 | 231 | global___CancelRequest = CancelRequest 232 | 233 | @typing_extensions.final 234 | class CancelResponse(google.protobuf.message.Message): 235 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 236 | 237 | def __init__( 238 | self, 239 | ) -> None: ... 240 | 241 | global___CancelResponse = CancelResponse 242 | -------------------------------------------------------------------------------- /src/isolate/server/definitions/server_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from isolate.connections.grpc.definitions import common_pb2 as common__pb2 6 | from isolate.server.definitions import server_pb2 as server__pb2 7 | 8 | 9 | class IsolateStub(object): 10 | """Missing associated documentation comment in .proto file.""" 11 | 12 | def __init__(self, channel): 13 | """Constructor. 14 | 15 | Args: 16 | channel: A grpc.Channel. 17 | """ 18 | self.Run = channel.unary_stream( 19 | '/Isolate/Run', 20 | request_serializer=server__pb2.BoundFunction.SerializeToString, 21 | response_deserializer=common__pb2.PartialRunResult.FromString, 22 | ) 23 | self.Submit = channel.unary_unary( 24 | '/Isolate/Submit', 25 | request_serializer=server__pb2.SubmitRequest.SerializeToString, 26 | response_deserializer=server__pb2.SubmitResponse.FromString, 27 | ) 28 | self.SetMetadata = channel.unary_unary( 29 | '/Isolate/SetMetadata', 30 | request_serializer=server__pb2.SetMetadataRequest.SerializeToString, 31 | response_deserializer=server__pb2.SetMetadataResponse.FromString, 32 | ) 33 | self.List = channel.unary_unary( 34 | '/Isolate/List', 35 | request_serializer=server__pb2.ListRequest.SerializeToString, 36 | response_deserializer=server__pb2.ListResponse.FromString, 37 | ) 38 | self.Cancel = channel.unary_unary( 39 | '/Isolate/Cancel', 40 | request_serializer=server__pb2.CancelRequest.SerializeToString, 41 | response_deserializer=server__pb2.CancelResponse.FromString, 42 | ) 43 | 44 | 45 | class IsolateServicer(object): 46 | """Missing associated documentation comment in .proto file.""" 47 | 48 | def Run(self, request, context): 49 | """Run the given function on the specified environment. Streams logs 50 | and the result originating from that function. 51 | """ 52 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 53 | context.set_details('Method not implemented!') 54 | raise NotImplementedError('Method not implemented!') 55 | 56 | def Submit(self, request, context): 57 | """Submit a function to be run without waiting for results. 58 | """ 59 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 60 | context.set_details('Method not implemented!') 61 | raise NotImplementedError('Method not implemented!') 62 | 63 | def SetMetadata(self, request, context): 64 | """Set the metadata for a task. 
65 | """ 66 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 67 | context.set_details('Method not implemented!') 68 | raise NotImplementedError('Method not implemented!') 69 | 70 | def List(self, request, context): 71 | """List running tasks 72 | """ 73 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 74 | context.set_details('Method not implemented!') 75 | raise NotImplementedError('Method not implemented!') 76 | 77 | def Cancel(self, request, context): 78 | """Cancel a running task 79 | """ 80 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 81 | context.set_details('Method not implemented!') 82 | raise NotImplementedError('Method not implemented!') 83 | 84 | 85 | def add_IsolateServicer_to_server(servicer, server): 86 | rpc_method_handlers = { 87 | 'Run': grpc.unary_stream_rpc_method_handler( 88 | servicer.Run, 89 | request_deserializer=server__pb2.BoundFunction.FromString, 90 | response_serializer=common__pb2.PartialRunResult.SerializeToString, 91 | ), 92 | 'Submit': grpc.unary_unary_rpc_method_handler( 93 | servicer.Submit, 94 | request_deserializer=server__pb2.SubmitRequest.FromString, 95 | response_serializer=server__pb2.SubmitResponse.SerializeToString, 96 | ), 97 | 'SetMetadata': grpc.unary_unary_rpc_method_handler( 98 | servicer.SetMetadata, 99 | request_deserializer=server__pb2.SetMetadataRequest.FromString, 100 | response_serializer=server__pb2.SetMetadataResponse.SerializeToString, 101 | ), 102 | 'List': grpc.unary_unary_rpc_method_handler( 103 | servicer.List, 104 | request_deserializer=server__pb2.ListRequest.FromString, 105 | response_serializer=server__pb2.ListResponse.SerializeToString, 106 | ), 107 | 'Cancel': grpc.unary_unary_rpc_method_handler( 108 | servicer.Cancel, 109 | request_deserializer=server__pb2.CancelRequest.FromString, 110 | response_serializer=server__pb2.CancelResponse.SerializeToString, 111 | ), 112 | } 113 | generic_handler = grpc.method_handlers_generic_handler( 114 | 'Isolate', rpc_method_handlers) 115 | server.add_generic_rpc_handlers((generic_handler,)) 116 | 117 | 118 | # This class is part of an EXPERIMENTAL API. 
119 | class Isolate(object): 120 | """Missing associated documentation comment in .proto file.""" 121 | 122 | @staticmethod 123 | def Run(request, 124 | target, 125 | options=(), 126 | channel_credentials=None, 127 | call_credentials=None, 128 | insecure=False, 129 | compression=None, 130 | wait_for_ready=None, 131 | timeout=None, 132 | metadata=None): 133 | return grpc.experimental.unary_stream(request, target, '/Isolate/Run', 134 | server__pb2.BoundFunction.SerializeToString, 135 | common__pb2.PartialRunResult.FromString, 136 | options, channel_credentials, 137 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 138 | 139 | @staticmethod 140 | def Submit(request, 141 | target, 142 | options=(), 143 | channel_credentials=None, 144 | call_credentials=None, 145 | insecure=False, 146 | compression=None, 147 | wait_for_ready=None, 148 | timeout=None, 149 | metadata=None): 150 | return grpc.experimental.unary_unary(request, target, '/Isolate/Submit', 151 | server__pb2.SubmitRequest.SerializeToString, 152 | server__pb2.SubmitResponse.FromString, 153 | options, channel_credentials, 154 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 155 | 156 | @staticmethod 157 | def SetMetadata(request, 158 | target, 159 | options=(), 160 | channel_credentials=None, 161 | call_credentials=None, 162 | insecure=False, 163 | compression=None, 164 | wait_for_ready=None, 165 | timeout=None, 166 | metadata=None): 167 | return grpc.experimental.unary_unary(request, target, '/Isolate/SetMetadata', 168 | server__pb2.SetMetadataRequest.SerializeToString, 169 | server__pb2.SetMetadataResponse.FromString, 170 | options, channel_credentials, 171 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 172 | 173 | @staticmethod 174 | def List(request, 175 | target, 176 | options=(), 177 | channel_credentials=None, 178 | call_credentials=None, 179 | insecure=False, 180 | compression=None, 181 | wait_for_ready=None, 182 | timeout=None, 183 | metadata=None): 184 | return grpc.experimental.unary_unary(request, target, '/Isolate/List', 185 | server__pb2.ListRequest.SerializeToString, 186 | server__pb2.ListResponse.FromString, 187 | options, channel_credentials, 188 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 189 | 190 | @staticmethod 191 | def Cancel(request, 192 | target, 193 | options=(), 194 | channel_credentials=None, 195 | call_credentials=None, 196 | insecure=False, 197 | compression=None, 198 | wait_for_ready=None, 199 | timeout=None, 200 | metadata=None): 201 | return grpc.experimental.unary_unary(request, target, '/Isolate/Cancel', 202 | server__pb2.CancelRequest.SerializeToString, 203 | server__pb2.CancelResponse.FromString, 204 | options, channel_credentials, 205 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 206 | -------------------------------------------------------------------------------- /src/isolate/server/health/__init__.py: -------------------------------------------------------------------------------- 1 | from isolate.server.health.health_pb2 import ( # noqa: F401 2 | HealthCheckRequest, 3 | HealthCheckResponse, 4 | ) 5 | from isolate.server.health.health_pb2_grpc import ( # noqa: F401 6 | HealthServicer, 7 | HealthStub, 8 | ) 9 | from isolate.server.health.health_pb2_grpc import ( # noqa: F401 10 | add_HealthServicer_to_server as register_health, 11 | ) 12 | -------------------------------------------------------------------------------- 
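The `isolate.server.health` package above re-exports the generated health-check messages and stubs together with `register_health`, which binds a servicer to a gRPC server; the concrete `HealthServicer` lives in `health_server.py` further below. As a rough sketch of how these pieces could fit together (the port, the insecure channel, and running server and client in one process are illustrative assumptions, not something this repository prescribes):

import asyncio

import grpc

from isolate.server.health import HealthCheckRequest, HealthStub, register_health
from isolate.server.health_server import HealthServicer


async def main() -> None:
    # Serve the health endpoints on a local port (50051 is an assumption
    # made for this sketch only).
    server = grpc.aio.server()
    register_health(HealthServicer(), server)
    server.add_insecure_port("[::]:50051")
    await server.start()

    # Query it over an insecure channel; a real deployment would point this
    # at wherever the isolate server is actually running.
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        stub = HealthStub(channel)
        response = await stub.Check(HealthCheckRequest(service="isolate"))
        # Both "" (the whole server) and "isolate" report SERVING by default.
        print(response.status)

    await server.stop(grace=None)


if __name__ == "__main__":
    asyncio.run(main())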
/src/isolate/server/health/health.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package grpc.health.v1; 4 | 5 | message HealthCheckRequest { 6 | string service = 1; 7 | } 8 | 9 | message HealthCheckResponse { 10 | enum ServingStatus { 11 | UNKNOWN = 0; 12 | SERVING = 1; 13 | NOT_SERVING = 2; 14 | SERVICE_UNKNOWN = 3; // Used only by the Watch method. 15 | } 16 | ServingStatus status = 1; 17 | } 18 | 19 | service Health { 20 | rpc Check(HealthCheckRequest) returns (HealthCheckResponse); 21 | 22 | rpc Watch(HealthCheckRequest) returns (stream HealthCheckResponse); 23 | } 24 | -------------------------------------------------------------------------------- /src/isolate/server/health/health_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: health.proto 4 | # Protobuf Python Version: 4.25.1 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0chealth.proto\x12\x0egrpc.health.v1\"%\n\x12HealthCheckRequest\x12\x0f\n\x07service\x18\x01 \x01(\t\"\xa9\x01\n\x13HealthCheckResponse\x12\x41\n\x06status\x18\x01 \x01(\x0e\x32\x31.grpc.health.v1.HealthCheckResponse.ServingStatus\"O\n\rServingStatus\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07SERVING\x10\x01\x12\x0f\n\x0bNOT_SERVING\x10\x02\x12\x13\n\x0fSERVICE_UNKNOWN\x10\x03\x32\xae\x01\n\x06Health\x12P\n\x05\x43heck\x12\".grpc.health.v1.HealthCheckRequest\x1a#.grpc.health.v1.HealthCheckResponse\x12R\n\x05Watch\x12\".grpc.health.v1.HealthCheckRequest\x1a#.grpc.health.v1.HealthCheckResponse0\x01\x62\x06proto3') 18 | 19 | _globals = globals() 20 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 21 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'health_pb2', _globals) 22 | if _descriptor._USE_C_DESCRIPTORS == False: 23 | DESCRIPTOR._options = None 24 | _globals['_HEALTHCHECKREQUEST']._serialized_start=32 25 | _globals['_HEALTHCHECKREQUEST']._serialized_end=69 26 | _globals['_HEALTHCHECKRESPONSE']._serialized_start=72 27 | _globals['_HEALTHCHECKRESPONSE']._serialized_end=241 28 | _globals['_HEALTHCHECKRESPONSE_SERVINGSTATUS']._serialized_start=162 29 | _globals['_HEALTHCHECKRESPONSE_SERVINGSTATUS']._serialized_end=241 30 | _globals['_HEALTH']._serialized_start=244 31 | _globals['_HEALTH']._serialized_end=418 32 | # @@protoc_insertion_point(module_scope) 33 | -------------------------------------------------------------------------------- /src/isolate/server/health/health_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 
3 | isort:skip_file 4 | """ 5 | import builtins 6 | import google.protobuf.descriptor 7 | import google.protobuf.internal.enum_type_wrapper 8 | import google.protobuf.message 9 | import sys 10 | import typing 11 | 12 | if sys.version_info >= (3, 10): 13 | import typing as typing_extensions 14 | else: 15 | import typing_extensions 16 | 17 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 18 | 19 | @typing_extensions.final 20 | class HealthCheckRequest(google.protobuf.message.Message): 21 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 22 | 23 | SERVICE_FIELD_NUMBER: builtins.int 24 | service: builtins.str 25 | def __init__( 26 | self, 27 | *, 28 | service: builtins.str = ..., 29 | ) -> None: ... 30 | def ClearField(self, field_name: typing_extensions.Literal["service", b"service"]) -> None: ... 31 | 32 | global___HealthCheckRequest = HealthCheckRequest 33 | 34 | @typing_extensions.final 35 | class HealthCheckResponse(google.protobuf.message.Message): 36 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 37 | 38 | class _ServingStatus: 39 | ValueType = typing.NewType("ValueType", builtins.int) 40 | V: typing_extensions.TypeAlias = ValueType 41 | 42 | class _ServingStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[HealthCheckResponse._ServingStatus.ValueType], builtins.type): 43 | DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor 44 | UNKNOWN: HealthCheckResponse._ServingStatus.ValueType # 0 45 | SERVING: HealthCheckResponse._ServingStatus.ValueType # 1 46 | NOT_SERVING: HealthCheckResponse._ServingStatus.ValueType # 2 47 | SERVICE_UNKNOWN: HealthCheckResponse._ServingStatus.ValueType # 3 48 | """Used only by the Watch method.""" 49 | 50 | class ServingStatus(_ServingStatus, metaclass=_ServingStatusEnumTypeWrapper): ... 51 | UNKNOWN: HealthCheckResponse.ServingStatus.ValueType # 0 52 | SERVING: HealthCheckResponse.ServingStatus.ValueType # 1 53 | NOT_SERVING: HealthCheckResponse.ServingStatus.ValueType # 2 54 | SERVICE_UNKNOWN: HealthCheckResponse.ServingStatus.ValueType # 3 55 | """Used only by the Watch method.""" 56 | 57 | STATUS_FIELD_NUMBER: builtins.int 58 | status: global___HealthCheckResponse.ServingStatus.ValueType 59 | def __init__( 60 | self, 61 | *, 62 | status: global___HealthCheckResponse.ServingStatus.ValueType = ..., 63 | ) -> None: ... 64 | def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... 65 | 66 | global___HealthCheckResponse = HealthCheckResponse 67 | -------------------------------------------------------------------------------- /src/isolate/server/health/health_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from isolate.server.health import health_pb2 as health__pb2 6 | 7 | 8 | class HealthStub(object): 9 | """Missing associated documentation comment in .proto file.""" 10 | 11 | def __init__(self, channel): 12 | """Constructor. 13 | 14 | Args: 15 | channel: A grpc.Channel. 
16 | """ 17 | self.Check = channel.unary_unary( 18 | '/grpc.health.v1.Health/Check', 19 | request_serializer=health__pb2.HealthCheckRequest.SerializeToString, 20 | response_deserializer=health__pb2.HealthCheckResponse.FromString, 21 | ) 22 | self.Watch = channel.unary_stream( 23 | '/grpc.health.v1.Health/Watch', 24 | request_serializer=health__pb2.HealthCheckRequest.SerializeToString, 25 | response_deserializer=health__pb2.HealthCheckResponse.FromString, 26 | ) 27 | 28 | 29 | class HealthServicer(object): 30 | """Missing associated documentation comment in .proto file.""" 31 | 32 | def Check(self, request, context): 33 | """Missing associated documentation comment in .proto file.""" 34 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 35 | context.set_details('Method not implemented!') 36 | raise NotImplementedError('Method not implemented!') 37 | 38 | def Watch(self, request, context): 39 | """Missing associated documentation comment in .proto file.""" 40 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 41 | context.set_details('Method not implemented!') 42 | raise NotImplementedError('Method not implemented!') 43 | 44 | 45 | def add_HealthServicer_to_server(servicer, server): 46 | rpc_method_handlers = { 47 | 'Check': grpc.unary_unary_rpc_method_handler( 48 | servicer.Check, 49 | request_deserializer=health__pb2.HealthCheckRequest.FromString, 50 | response_serializer=health__pb2.HealthCheckResponse.SerializeToString, 51 | ), 52 | 'Watch': grpc.unary_stream_rpc_method_handler( 53 | servicer.Watch, 54 | request_deserializer=health__pb2.HealthCheckRequest.FromString, 55 | response_serializer=health__pb2.HealthCheckResponse.SerializeToString, 56 | ), 57 | } 58 | generic_handler = grpc.method_handlers_generic_handler( 59 | 'grpc.health.v1.Health', rpc_method_handlers) 60 | server.add_generic_rpc_handlers((generic_handler,)) 61 | 62 | 63 | # This class is part of an EXPERIMENTAL API. 
64 | class Health(object): 65 | """Missing associated documentation comment in .proto file.""" 66 | 67 | @staticmethod 68 | def Check(request, 69 | target, 70 | options=(), 71 | channel_credentials=None, 72 | call_credentials=None, 73 | insecure=False, 74 | compression=None, 75 | wait_for_ready=None, 76 | timeout=None, 77 | metadata=None): 78 | return grpc.experimental.unary_unary(request, target, '/grpc.health.v1.Health/Check', 79 | health__pb2.HealthCheckRequest.SerializeToString, 80 | health__pb2.HealthCheckResponse.FromString, 81 | options, channel_credentials, 82 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 83 | 84 | @staticmethod 85 | def Watch(request, 86 | target, 87 | options=(), 88 | channel_credentials=None, 89 | call_credentials=None, 90 | insecure=False, 91 | compression=None, 92 | wait_for_ready=None, 93 | timeout=None, 94 | metadata=None): 95 | return grpc.experimental.unary_stream(request, target, '/grpc.health.v1.Health/Watch', 96 | health__pb2.HealthCheckRequest.SerializeToString, 97 | health__pb2.HealthCheckResponse.FromString, 98 | options, channel_credentials, 99 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 100 | -------------------------------------------------------------------------------- /src/isolate/server/health_server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from dataclasses import dataclass 3 | from typing import AsyncIterator 4 | 5 | from grpc.aio import ServicerContext 6 | 7 | from isolate.server import health 8 | 9 | 10 | @dataclass 11 | class HealthServicer(health.HealthServicer): 12 | def __post_init__(self): 13 | self._state = { 14 | # Empty refers to the whole server 15 | "": health.HealthCheckResponse.ServingStatus.SERVING, 16 | "isolate": health.HealthCheckResponse.ServingStatus.SERVING, 17 | } 18 | 19 | def _get_status( 20 | self, service: str 21 | ) -> health.HealthCheckResponse.ServingStatus.ValueType: 22 | status = self._state.get( 23 | service, 24 | health.HealthCheckResponse.ServingStatus.SERVICE_UNKNOWN, 25 | ) 26 | return status 27 | 28 | def Check( 29 | self, request: health.HealthCheckRequest, context: ServicerContext 30 | ) -> health.HealthCheckResponse: 31 | return health.HealthCheckResponse(status=self._get_status(request.service)) 32 | 33 | async def Watch( 34 | self, 35 | request: health.HealthCheckRequest, 36 | context: ServicerContext, 37 | ) -> AsyncIterator[health.HealthCheckResponse]: 38 | while True: 39 | yield health.HealthCheckResponse(status=self._get_status(request.service)) 40 | await asyncio.sleep(2) 41 | -------------------------------------------------------------------------------- /src/isolate/server/interface.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict 2 | 3 | from isolate.backends import BaseEnvironment 4 | from isolate.connections.grpc.interface import ( 5 | from_grpc, 6 | to_grpc, 7 | to_serialized_object, 8 | ) 9 | from isolate.server import definitions 10 | 11 | __all__ = ["from_grpc", "to_grpc", "to_serialized_object", "to_struct"] 12 | 13 | 14 | @from_grpc.register 15 | def _(message: definitions.EnvironmentDefinition) -> BaseEnvironment: 16 | from isolate import prepare_environment 17 | 18 | return prepare_environment( 19 | message.kind, 20 | **definitions.struct_to_dict(message.configuration), 21 | ) 22 | 23 | 24 | def to_struct(data: Dict[str, Any]) -> definitions.Struct: 25 | struct = definitions.Struct() 
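    # Struct.update accepts a plain dict and converts its values to protobuf
    # Values, so a caller building a request might do something like the sketch
    # below (the configuration keys shown are assumptions for illustration):
    #
    #     config = to_struct({"requirements": ["pyjokes==0.6.0"]})
    #     definitions.EnvironmentDefinition(kind="virtualenv", configuration=config)
    #
    # i.e. the inverse of the definitions.struct_to_dict call used above.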
26 | struct.update(data) 27 | return struct 28 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fal-ai/isolate/43cbe0d852a75229e8372c4305fa37da4aa5ec78/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from isolate.backends.settings import IsolateSettings 3 | 4 | 5 | @pytest.fixture 6 | def isolate_server(monkeypatch, tmp_path): 7 | from concurrent import futures 8 | 9 | import grpc 10 | from isolate.server import BridgeManager, IsolateServicer, definitions 11 | 12 | monkeypatch.setattr("isolate.server.server.INHERIT_FROM_LOCAL", True) 13 | server = grpc.server(futures.ThreadPoolExecutor(max_workers=1)) 14 | test_settings = IsolateSettings(cache_dir=tmp_path / "cache") 15 | 16 | with BridgeManager() as bridge_manager: 17 | definitions.register_isolate( 18 | IsolateServicer(bridge_manager, test_settings), server 19 | ) 20 | host, port = "localhost", server.add_insecure_port("[::]:0") 21 | server.start() 22 | try: 23 | yield f"{host}:{port}" 24 | finally: 25 | server.stop(None) 26 | -------------------------------------------------------------------------------- /tests/test_concurrency.py: -------------------------------------------------------------------------------- 1 | import os 2 | from concurrent import futures 3 | from functools import partial 4 | 5 | import isolate 6 | 7 | MAX_ENV_CREATE_TIME = 120 8 | 9 | 10 | def test_concurrent_creation_only(): 11 | environment_1 = isolate.prepare_environment( 12 | "virtualenv", requirements=["pyjokes==0.6.0"] 13 | ) 14 | environment_2 = isolate.prepare_environment("conda", packages=["pyjokes=0.5.0"]) 15 | 16 | # Ensure that we can create these environments in sync 17 | # first. 18 | sync_create_1 = environment_1.create() 19 | sync_create_2 = environment_2.create() 20 | 21 | # Cleanup the existing environments. 22 | environment_1.destroy(sync_create_1) 23 | environment_2.destroy(sync_create_2) 24 | 25 | with futures.ProcessPoolExecutor(os.cpu_count() or 8) as executor: 26 | # Do some create and destroy operations in parallel and ensure 27 | # everything works as expected. 28 | fs = [ 29 | executor.submit(environment.create) 30 | for environment in [environment_1, environment_2] 31 | for _ in range(24) 32 | ] 33 | 34 | done_fs, not_done_fs = futures.wait(fs, timeout=MAX_ENV_CREATE_TIME) 35 | assert not not_done_fs 36 | 37 | for future in done_fs: 38 | assert future.exception() is None 39 | 40 | 41 | def test_concurrency_on_delete(): 42 | environment = isolate.prepare_environment( 43 | "virtualenv", requirements=["pyjokes==0.6.0"] 44 | ) 45 | 46 | key = environment.create() 47 | with futures.ProcessPoolExecutor(max_workers=os.cpu_count() or 8) as executor: 48 | # Do some create and destroy operations in parallel and ensure 49 | # everything works as expected. 
50 | 51 | destroy = partial(environment.destroy, key) 52 | done_fs, not_done_fs = futures.wait( 53 | [ 54 | executor.submit( 55 | destroy, 56 | ) 57 | for _ in range(24) 58 | ], 59 | timeout=MAX_ENV_CREATE_TIME, 60 | ) 61 | assert not not_done_fs 62 | 63 | for future in done_fs: 64 | assert future.exception() is None 65 | -------------------------------------------------------------------------------- /tests/test_connections.py: -------------------------------------------------------------------------------- 1 | import operator 2 | import traceback 3 | from dataclasses import replace 4 | from functools import partial 5 | from pathlib import Path 6 | from typing import Any, List 7 | 8 | import pytest 9 | from isolate.backends import BaseEnvironment, EnvironmentConnection 10 | from isolate.backends.local import LocalPythonEnvironment 11 | from isolate.backends.settings import IsolateSettings 12 | from isolate.backends.virtualenv import VirtualPythonEnvironment 13 | from isolate.connections import LocalPythonGRPC, PythonIPC 14 | from isolate.connections.common import is_agent 15 | 16 | REPO_DIR = Path(__file__).parent.parent 17 | assert ( 18 | REPO_DIR.exists() and REPO_DIR.name == "isolate" 19 | ), "This test should have access to isolate as an installable package." 20 | 21 | 22 | # Enable dill to only serialize globals that are accessed by the function 23 | import dill # noqa: E402 24 | 25 | dill.settings["recurse"] = True 26 | 27 | 28 | class GenericPythonConnectionTests: 29 | """Generic tests for local Python connection implementations.""" 30 | 31 | def open_connection( 32 | self, 33 | environment: BaseEnvironment, 34 | environment_path: Path, 35 | **kwargs: Any, 36 | ) -> EnvironmentConnection: 37 | """Open a new connection (to be implemented by various connection 38 | types for testing all of them).""" 39 | raise NotImplementedError 40 | 41 | def make_venv( 42 | self, tmp_path: Any, requirements: List[str] 43 | ) -> VirtualPythonEnvironment: 44 | """Create a new virtual env with the specified requirements.""" 45 | env = VirtualPythonEnvironment(requirements) 46 | env.apply_settings(IsolateSettings(Path(tmp_path))) 47 | return env 48 | 49 | def check_version(self, connection: EnvironmentConnection, module: str) -> str: 50 | """Return the version for the given module.""" 51 | src = f"__import__('{module}').__version__" 52 | return connection.run(partial(eval, src)) 53 | 54 | def test_basic_connection(self): 55 | local_env = LocalPythonEnvironment() 56 | 57 | with self.open_connection(local_env, local_env.create()) as conn: 58 | result = conn.run(partial(operator.add, 1, 2)) 59 | assert result == 3 60 | 61 | @pytest.mark.parametrize("serialization_method", ["dill"]) 62 | def test_customized_serialization(self, serialization_method: str) -> None: 63 | local_env = LocalPythonEnvironment() 64 | assert local_env.settings.serialization_method == "pickle" 65 | 66 | # By default the serialization uses pickle, which can't serialize 67 | # a lambda function. 68 | with self.open_connection(local_env, local_env.create()) as conn: 69 | with pytest.raises(Exception): 70 | result: int = conn.run(lambda: 1 + 2) 71 | 72 | # But we can switch serialization backends, and use cloudpickle or dill 73 | # since both of them can serialize a lambda function. 
74 | cloudpickle_ettings = replace( 75 | local_env.settings, 76 | serialization_method=serialization_method, 77 | ) 78 | local_env.apply_settings(cloudpickle_ettings) 79 | 80 | with self.open_connection(local_env, local_env.create()) as conn: 81 | result = conn.run(lambda: 1 + 2) 82 | assert result == 3 83 | 84 | def test_extra_inheritance_paths(self, tmp_path: Any) -> None: 85 | first_env = self.make_venv(tmp_path, ["pyjokes==0.5.0"]) 86 | second_env = self.make_venv(tmp_path, ["emoji==0.5.4"]) 87 | 88 | with self.open_connection( 89 | first_env, 90 | first_env.create(), 91 | extra_inheritance_paths=[second_env.create()], 92 | ) as conn: 93 | assert self.check_version(conn, "pyjokes") == "0.5.0" 94 | assert self.check_version(conn, "emoji") == "0.5.4" 95 | 96 | third_env = self.make_venv(tmp_path, ["pyjokes==0.6.0", "emoji==2.0.0"]) 97 | with self.open_connection( 98 | second_env, 99 | second_env.create(), 100 | extra_inheritance_paths=[third_env.create()], 101 | ) as conn: 102 | assert self.check_version(conn, "pyjokes") == "0.6.0" 103 | # Even if the third environment has a newer version of emoji, it won't be 104 | # used because since the second environment already has emoji installed and 105 | # it takes the precedence. 106 | assert self.check_version(conn, "emoji") == "0.5.4" 107 | 108 | # Order matters, so if the first_env (with 0.5.0) is specified first then it 109 | # is going to take precedence. 110 | with self.open_connection( 111 | first_env, 112 | first_env.create(), 113 | extra_inheritance_paths=[third_env.create()], 114 | ) as conn: 115 | assert self.check_version(conn, "pyjokes") == "0.5.0" 116 | 117 | # Or if it is specified last, then it will be overridden. 118 | with self.open_connection( 119 | third_env, 120 | third_env.create(), 121 | extra_inheritance_paths=[first_env.create()], 122 | ) as conn: 123 | assert self.check_version(conn, "pyjokes") == "0.6.0" 124 | 125 | fourth_env = self.make_venv(tmp_path, ["pyjokes==0.4.1", "emoji==2.1.0"]) 126 | 127 | with self.open_connection( 128 | first_env, 129 | first_env.create(), 130 | extra_inheritance_paths=[third_env.create(), fourth_env.create()], 131 | ) as conn: 132 | # This comes from the first_env 133 | assert self.check_version(conn, "pyjokes") == "0.5.0" 134 | # This comes from the third_env 135 | assert self.check_version(conn, "emoji") == "2.0.0" 136 | 137 | def test_is_agent(self): 138 | local_env = LocalPythonEnvironment() 139 | 140 | assert not is_agent() 141 | with self.open_connection(local_env, local_env.create()) as conn: 142 | assert not is_agent() 143 | assert conn.run(is_agent) 144 | assert not is_agent() 145 | assert not is_agent() 146 | 147 | def test_tracebacks(self): 148 | local_env = LocalPythonEnvironment() 149 | local_env.apply_settings( 150 | local_env.settings.replace(serialization_method="dill") 151 | ) 152 | 153 | def long_function_chain(): 154 | def foo(): 155 | a = 1 156 | b = 0 157 | c = a / b 158 | return c 159 | 160 | def bar(): 161 | a = "" + "" # noqa: F841 162 | return 0 + foo() + 1 163 | 164 | def baz(): 165 | return bar() + 1 166 | 167 | return baz() 168 | 169 | with self.open_connection(local_env, local_env.create()) as conn: 170 | with pytest.raises(ZeroDivisionError) as exc: 171 | conn.run(long_function_chain) 172 | 173 | exception = "".join( 174 | traceback.format_exception( 175 | type(exc.value), exc.value, exc.value.__traceback__ 176 | ) 177 | ) 178 | assert "c = a / b" in exception 179 | assert "return 0 + foo() + 1" in exception 180 | assert "return bar() + 1" in exception 181 | 
assert "return baz()" in exception 182 | assert "conn.run(long_function_chain)" in exception 183 | 184 | 185 | class TestPythonIPC(GenericPythonConnectionTests): 186 | def open_connection( 187 | self, 188 | environment: BaseEnvironment, 189 | environment_path: Path, 190 | **kwargs: Any, 191 | ) -> EnvironmentConnection: 192 | return PythonIPC(environment, environment_path, **kwargs) 193 | 194 | 195 | class TestPythonGRPC(GenericPythonConnectionTests): 196 | def open_connection( 197 | self, 198 | environment: BaseEnvironment, 199 | environment_path: Path, 200 | **kwargs: Any, 201 | ) -> EnvironmentConnection: 202 | return LocalPythonGRPC(environment, environment_path, **kwargs) 203 | 204 | def make_venv( 205 | self, tmp_path: Any, requirements: List[str] 206 | ) -> VirtualPythonEnvironment: 207 | # Since gRPC agent requires isolate to be installed, we 208 | # have to add it to the requirements. 209 | env = VirtualPythonEnvironment(requirements + [f"{REPO_DIR}[grpc]"]) 210 | env.apply_settings(IsolateSettings(Path(tmp_path))) 211 | return env 212 | -------------------------------------------------------------------------------- /tests/test_isolate.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import create_autospec, patch 2 | 3 | import pytest 4 | from isolate import prepare_environment 5 | 6 | 7 | @pytest.fixture 8 | def fresh_registry(monkeypatch): 9 | """Temporarily clear the environment registry for this test. Also restores 10 | back to the initial state once the test is executed.""" 11 | monkeypatch.setattr("isolate.registry._ENTRY_POINTS", {}) 12 | monkeypatch.setattr("isolate.registry._ENVIRONMENTS", {}) 13 | 14 | 15 | def test_unknown_environment(fresh_registry): 16 | with pytest.raises(ValueError): 17 | prepare_environment("$unknown_env") 18 | 19 | 20 | def test_environment_discovery(fresh_registry): 21 | # This test currently depends on too-much internals, but 22 | # can be improved later on. 
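    # For orientation, what the mock below simulates: a third-party backend
    # advertises itself under the "isolate.backends" entry-point group, roughly
    # like this in its pyproject.toml (sketch -- the TOML framing is
    # illustrative; the name/value mirror the fake EntryPoint constructed below):
    #
    #     [project.entry-points."isolate.backends"]
    #     fake = "isolate.backends._base.BaseEnvironment"
    #
    # after which prepare_environment("fake") resolves that environment class,
    # which is what this test asserts at the end.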
23 | 24 | from isolate.registry import ( 25 | _reload_registry, 26 | importlib_metadata, 27 | ) 28 | 29 | fake_ep = create_autospec( 30 | importlib_metadata.EntryPoint, 31 | ) 32 | fake_ep.name = "fake" 33 | fake_ep.value = "isolate.backends._base.BaseEnvironment" 34 | fake_ep.group = "isolate.backends" 35 | 36 | with patch( 37 | "isolate.registry.importlib_metadata.entry_points", 38 | return_value=importlib_metadata.EntryPoints([fake_ep]), 39 | ): 40 | _reload_registry() 41 | 42 | prepare_environment("fake") 43 | -------------------------------------------------------------------------------- /tests/test_log.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | from isolate.common import timestamp 4 | from isolate.connections.grpc import definitions 5 | from isolate.logs import Log, LogLevel, LogSource 6 | 7 | 8 | def test_log_default_timestamp(): 9 | log = Log(message="message", source=LogSource.USER, level=LogLevel.DEBUG) 10 | assert log.timestamp is not None 11 | assert log.timestamp <= datetime.now(timezone.utc) 12 | 13 | 14 | def test_timestamp_conversion(): 15 | now = datetime.now(timezone.utc) 16 | now_timestamp = timestamp.from_datetime(now) 17 | assert now_timestamp.ToMilliseconds() == int(now.timestamp() * 1000.0) 18 | 19 | 20 | def test_level_gt_comparison(): 21 | assert LogLevel.INFO > LogLevel.DEBUG 22 | 23 | 24 | def test_level_lt_comparison(): 25 | assert LogLevel.WARNING < LogLevel.ERROR 26 | 27 | 28 | def test_level_str(): 29 | assert str(LogLevel.INFO) == "info" 30 | 31 | 32 | def test_log_definition_conversion(): 33 | message = definitions.Log(message="message", source=0, level=3) 34 | level_definition = definitions.LogLevel.Name(message.level) 35 | assert LogLevel[level_definition.upper()] == LogLevel.WARNING 36 | -------------------------------------------------------------------------------- /tests/test_logger.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | from isolate.logger import IsolateLogger 5 | 6 | 7 | @pytest.fixture 8 | def log_labels(): 9 | return { 10 | "foo": "$MYENVVAR1", 11 | "bar": "$MYENVVAR2", 12 | "baz": "baz", 13 | "qux": "$NOTTHERE", 14 | } 15 | 16 | 17 | def test_logger_with_env_expanded(log_labels, monkeypatch): 18 | monkeypatch.setenv("MYENVVAR1", "myenvvar1") 19 | monkeypatch.setenv("MYENVVAR2", "myenvvar2") 20 | logger = IsolateLogger.with_env_expanded(log_labels) 21 | assert logger.log_labels == { 22 | "foo": "myenvvar1", 23 | "bar": "myenvvar2", 24 | "baz": "baz", 25 | "qux": "$NOTTHERE", 26 | } 27 | 28 | 29 | def test_logger_from_env(log_labels, monkeypatch): 30 | monkeypatch.setenv("MYENVVAR1", "myenvvar1") 31 | monkeypatch.setenv("MYENVVAR2", "myenvvar2") 32 | monkeypatch.setenv("ISOLATE_LOG_LABELS", json.dumps(log_labels)) 33 | logger = IsolateLogger.from_env() 34 | assert logger.log_labels == { 35 | "foo": "myenvvar1", 36 | "bar": "myenvvar2", 37 | "baz": "baz", 38 | "qux": "$NOTTHERE", 39 | } 40 | 41 | 42 | def test_logger_direct(log_labels): 43 | logger = IsolateLogger(log_labels=log_labels) 44 | # should not do env expansion 45 | assert logger.log_labels == log_labels 46 | -------------------------------------------------------------------------------- /tests/test_serialization.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | import pytest 4 | from isolate.connections.common import ( 5 | 
SerializationError, 6 | load_serialized_object, 7 | serialize_object, 8 | ) 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "method", 13 | [ 14 | "pickle", 15 | "dill", 16 | "cloudpickle", 17 | ], 18 | ) 19 | def test_serialize_object(method): 20 | func = partial(eval, "2 + 2") 21 | serialized = serialize_object(method, func) 22 | deserialized = load_serialized_object(method, serialized) 23 | assert deserialized() == 4 24 | 25 | 26 | def test_deserialize_exception(): 27 | serialized = serialize_object("pickle", ValueError("some error")) 28 | regular_obj = load_serialized_object("pickle", serialized) 29 | assert isinstance(regular_obj, ValueError) 30 | assert regular_obj.args == ("some error",) 31 | 32 | 33 | def test_deserialize_raised_exception(): 34 | serialized = serialize_object("pickle", ValueError("some error")) 35 | with pytest.raises(ValueError) as exc_info: 36 | load_serialized_object("pickle", serialized, was_it_raised=True) 37 | assert exc_info.value.args == ("some error",) 38 | 39 | 40 | def error_while_serializing(): 41 | anon = lambda: 2 + 2 # anonymous functions are not # noqa: E731 42 | # serializable by pickle 43 | with pytest.raises(SerializationError) as exc_info: 44 | serialize_object("pickle", anon) 45 | 46 | assert exc_info.match("Error while serializing the given object") 47 | 48 | dill_serialized_lambda = serialize_object("dill", anon) 49 | 50 | with pytest.raises(SerializationError) as exc_info: 51 | load_serialized_object("pickle", dill_serialized_lambda) 52 | 53 | assert exc_info.match("Error while deserializing the given object") 54 | 55 | 56 | def error_while_loading_backend(): 57 | with pytest.raises(SerializationError) as exc_info: 58 | serialize_object("$$$", 1) 59 | 60 | assert exc_info.match("Error while preparing the serialization backend") 61 | 62 | with pytest.raises(SerializationError) as exc_info: 63 | load_serialized_object("$$$", b"1") 64 | 65 | assert exc_info.match("Error while preparing the serialization backend") 66 | -------------------------------------------------------------------------------- /tools/Dockerfile: -------------------------------------------------------------------------------- 1 | # For building this image, you need to be in the project 2 | # root and then pass -f tools/Dockerfile to docker build. 3 | # 4 | # $ docker build -f tools/Dockerfile -t isolate_server [--network=host] 5 | # 6 | # This is important since we want to be able to access src/ directory 7 | # for copying the source code into the container (so the dockerfile 8 | # will stay in a different directory than its context). 9 | FROM python:3.9 10 | 11 | RUN apt-get update && apt-get install -y git 12 | 13 | RUN mkdir -p /opt 14 | RUN git clone https://github.com/pyenv/pyenv --branch v2.3.6 --depth=1 /opt/pyenv 15 | # TODO: Investigate whether we can leverage the compiled pyenv extension. 
16 | ENV ISOLATE_PYENV_EXECUTABLE=/opt/pyenv/bin/pyenv 17 | 18 | #### CONDA #### 19 | ENV ISOLATE_CONDA_HOME=/opt/conda/bin 20 | ENV CONDA_DIR /opt/conda 21 | RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \ 22 | /bin/bash ~/miniconda.sh -b -p /opt/conda 23 | #### END CONDA #### 24 | 25 | #### MAMBA #### 26 | ENV ISOLATE_MAMBA_HOME=/opt/mamba/bin 27 | ENV CONDA_DIR /opt/mamba 28 | RUN mkdir -p /opt/mamba/bin 29 | RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj -C /opt/mamba/bin/ --strip-components=1 bin/micromamba 30 | RUN /opt/mamba/bin/micromamba config append channels conda-forge 31 | RUN /opt/mamba/bin/micromamba config append channels pytorch 32 | #### END MAMBA #### 33 | 34 | RUN pip install --upgrade pip virtualenv wheel poetry-core 35 | 36 | # Since system-level debian does not comply with 37 | # the sysconfig, and we need to install a bunch 38 | # of dependencies (like dbt-core), we are going to 39 | # use a virtualenv. 40 | ENV VIRTUAL_ENV=/opt/venv 41 | RUN python3 -m virtualenv $VIRTUAL_ENV 42 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 43 | 44 | COPY tools/requirements.txt /tmp/requirements.txt 45 | RUN pip install -r /tmp/requirements.txt 46 | 47 | COPY . /isolate 48 | RUN pip install /isolate[server,build] 49 | 50 | ENV ISOLATE_INHERIT_FROM_LOCAL=1 51 | ENV AGENT_REQUIREMENTS_TXT=/isolate/tools/agent_requirements.txt 52 | 53 | CMD ["python", "-m", "isolate.server.server"] 54 | -------------------------------------------------------------------------------- /tools/agent_requirements.txt: -------------------------------------------------------------------------------- 1 | /isolate[grpc] 2 | cloudpickle==3.0.0 3 | dill==0.3.5.1 4 | google-cloud-storage==2.6.0 5 | -------------------------------------------------------------------------------- /tools/protobuf-requirements.txt: -------------------------------------------------------------------------------- 1 | # Pre-commit 2 | pre-commit 3 | pyyaml 4 | 5 | # Protobuf 6 | refactor 7 | grpcio-tools 8 | mypy-protobuf 9 | -------------------------------------------------------------------------------- /tools/regen_grpc.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import glob 3 | import os 4 | import subprocess 5 | import sys 6 | from argparse import ArgumentParser 7 | from pathlib import Path 8 | 9 | from refactor import Rule, Session, actions 10 | 11 | PROJECT_ROOT = Path(__file__).parent.parent / "src" 12 | COMMON_DIR = PROJECT_ROOT / "isolate" / "connections" / "grpc" / "definitions" 13 | KNOWN_PATHS = {"common_pb2": "isolate.connections.grpc.definitions"} 14 | 15 | 16 | class FixGRPCImports(Rule): 17 | """Change all unqualified imports to qualified imports.""" 18 | 19 | def match(self, node: ast.AST) -> actions.Replace: 20 | # import *_pb2 21 | assert isinstance(node, ast.Import) 22 | assert len(node.names) == 1 23 | assert not node.names[0].name.startswith("google") 24 | assert node.names[0].name.endswith("_pb2") 25 | 26 | # If we know where the import is coming from, use that. 27 | qualified_name = KNOWN_PATHS.get(node.names[0].name) 28 | 29 | if not qualified_name: 30 | # Otherwise discover it from the current file path. 
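            # For example (illustrative, assuming a definition file that lives
            # under src/isolate/server/definitions): the generated line
            #
            #     import server_pb2 as server__pb2
            #
            # is rewritten to
            #
            #     from isolate.server.definitions import server_pb2 as server__pb2
            #
            # because the qualified package name is derived from the file path;
            # the alias survives since node.names is reused as-is.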
31 | parent_dir = self.context.file.resolve().relative_to(PROJECT_ROOT).parent 32 | qualified_name = ".".join(parent_dir.parts) 33 | 34 | # Change import *_pb2 to from import *_pb2 35 | return actions.Replace( 36 | node, 37 | ast.ImportFrom(module=qualified_name, names=node.names, level=0), 38 | ) 39 | 40 | 41 | def regen_grpc(file: Path) -> None: 42 | assert file.exists() 43 | 44 | parent_dir = file.parent 45 | common_dir = os.path.relpath(COMMON_DIR, parent_dir) 46 | subprocess.check_output( 47 | [ 48 | sys.executable, 49 | "-m", 50 | "grpc_tools.protoc", 51 | f"-I={common_dir}", 52 | "--proto_path=.", 53 | "--python_out=.", 54 | "--grpc_python_out=.", 55 | "--mypy_out=.", 56 | file.name, 57 | ], 58 | cwd=parent_dir, 59 | ) 60 | 61 | # Python gRPC compiler is bad at using the proper import 62 | # notation so it doesn't work with our package structure. 63 | # 64 | # See: https://github.com/protocolbuffers/protobuf/issues/1491 65 | 66 | # For fixing this we are going to manually correct the generated 67 | # source. 68 | for grpc_output_file in parent_dir.glob("*_pb2*.py*"): 69 | session = Session(rules=[FixGRPCImports]) 70 | changes = session.run_file(grpc_output_file) 71 | if changes: 72 | changes.apply_diff() 73 | 74 | 75 | def main() -> None: 76 | parser = ArgumentParser() 77 | parser.add_argument("definition_file", nargs="?") 78 | 79 | options = parser.parse_args() 80 | 81 | if options.definition_file: 82 | files = [options.definition_file] 83 | else: 84 | files = glob.glob("**/*.proto", recursive=True) 85 | if not files: 86 | raise Exception("No definition files specified or found.") 87 | 88 | for file in files: 89 | regen_grpc(Path(file)) 90 | 91 | 92 | if __name__ == "__main__": 93 | main() 94 | -------------------------------------------------------------------------------- /tools/requirements.txt: -------------------------------------------------------------------------------- 1 | virtualenv>=20.4 2 | importlib-metadata>=4.4 3 | grpcio>=1.49 4 | cloudpickle==3.0.0 5 | dill==0.3.5.1 6 | protobuf 7 | -------------------------------------------------------------------------------- /tools/test_agent_requirements.txt: -------------------------------------------------------------------------------- 1 | git+https://github.com/fal-ai/isolate.git@main 2 | cloudpickle==3.0.0 3 | dill==0.3.5.1 4 | google-cloud-storage==2.6.0 5 | --------------------------------------------------------------------------------
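A minimal end-to-end sketch of how the pieces above fit together, mirroring the
calls exercised in tests/test_connections.py and tests/test_concurrency.py (the
pinned pyjokes version comes from those tests; the printed value assumes that
exact pin and a successful environment build):

    from functools import partial

    import isolate
    from isolate.connections import PythonIPC

    # Prepare (or reuse from cache) a virtualenv with the requested packages.
    env = isolate.prepare_environment("virtualenv", requirements=["pyjokes==0.6.0"])
    env_path = env.create()

    # Run a callable inside the isolated environment and bring the result back.
    with PythonIPC(env, env_path) as conn:
        version = conn.run(partial(eval, "__import__('pyjokes').__version__"))
        print(version)  # expected: 0.6.0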