├── .gitignore ├── LICENSE ├── README.md ├── conftest.py ├── poetry.lock ├── pyproject.toml └── trellis_dag ├── __init__.py ├── dag.py ├── llm.py ├── node.py ├── tests ├── data.txt ├── test_dag.py ├── test_dag_execute.py ├── test_llm.py └── test_node.py └── utils ├── analyzer.py ├── constants.py ├── generate.py └── status.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | trellis/__pycache__/ 4 | trellis/utils/__pycache__/ 5 | trellis/tests/__pycache__/ 6 | .pytest_cache/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # poetry 102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 103 | # This is especially recommended for binary packages to ensure reproducibility, and is more 104 | # commonly ignored for libraries. 105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 106 | #poetry.lock 107 | 108 | # pdm 109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 110 | #pdm.lock 111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 112 | # in version control. 113 | # https://pdm.fming.dev/#use-with-ide 114 | .pdm.toml 115 | 116 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 117 | __pypackages__/ 118 | 119 | # Celery stuff 120 | celerybeat-schedule 121 | celerybeat.pid 122 | 123 | # SageMath parsed files 124 | *.sage.py 125 | 126 | # Environments 127 | .env 128 | .venv 129 | env/ 130 | venv/ 131 | ENV/ 132 | env.bak/ 133 | venv.bak/ 134 | 135 | # Spyder project settings 136 | .spyderproject 137 | .spyproject 138 | 139 | # Rope project settings 140 | .ropeproject 141 | 142 | # mkdocs documentation 143 | /site 144 | 145 | # mypy 146 | .mypy_cache/ 147 | .dmypy.json 148 | dmypy.json 149 | 150 | # Pyre type checker 151 | .pyre/ 152 | 153 | # pytype static type analyzer 154 | .pytype/ 155 | 156 | # Cython debug symbols 157 | cython_debug/ 158 | 159 | # PyCharm 160 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 161 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 162 | # and can be added to the global gitignore or merged into this file. For a more nuclear 163 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 164 | #.idea/ 165 | 166 | 167 | .DS_Store 168 | trellis/.DS_Store 169 | trellis/tests/.DS_Store 170 | trellis/utils/.DS_Store 171 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 interlock labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Trellis 2 | ## Intro 3 | **Trellis is an open-source framework for programmatically 4 | orchestrating LLM workflows as Directed Acyclic Graphs (DAGs) in Python.** We've intentionally designed it to give 5 | developers as much control as possible, and we've written documentation to make it incredibly easy to 6 | get started. Start building with our [docs](https://interlocklabsinc.mintlify.app/documentation/introduction). 7 | 8 | ## Structure 9 | Trellis is composed of only three abstractions: `Node`, `DAG`, and `LLM`. 10 | - Node: the atomic unit of Trellis. Nodes are chained together to form a DAG. 11 |   `Node` is an abstract class with only *one* method required to implement. 12 | - DAG: a directed acyclic graph of `Node`s. 
It is the primary abstraction for orchestrating LLM workflows. When you 13 |   add edges between `Node`s, you can specify a transformation function to reuse `Node`s and connect any two `Node`s. 14 |   Trellis validates the data flowing between `Node`s in a `DAG`. 15 | - LLM: a wrapper around a large language model with simple handling of common OpenAI errors. Currently, the only provider 16 |   that Trellis supports is OpenAI. 17 | 18 | ## Environment Variables 19 | - If you're going to use the `LLM` class, set: 20 | - `OPENAI_API_KEY=YOUR_OPENAI_KEY` 21 | - If you don't want us to send telemetry data (info about the nodes you create and the DAGs you run, collected in `Node.__init__()`, `LLM.execute()` (including prompts and responses from OpenAI), and `DAG.execute()`) to an external server (currently [PostHog](https://posthog.com/)) for analysis, set: 22 | - `DISABLE_TELEMETRY=1` 23 | - If you want to control how much information the logger emits, set one of: 24 | - [for everything] `LOG_LEVEL=DEBUG` 25 | - [for status updates] `LOG_LEVEL=INFO` 26 | - [for only warnings] `LOG_LEVEL=WARNING` 27 | - [for errors which stop runtime] `LOG_LEVEL=ERROR` 28 | - [for only critical errors] `LOG_LEVEL=CRITICAL` 29 | 30 | ## Install 31 | You can install Trellis with any of the following methods: 32 | 33 | ### Pip 34 | ``` 35 | pip install trellis-dag 36 | ``` 37 | 38 | ### Poetry 39 | ``` 40 | poetry add trellis-dag 41 | ``` 42 | 43 | ### Conda 44 | ``` 45 | conda install trellis-dag 46 | ``` 47 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import aiohttp 3 | from trellis_dag.node import Node 4 | from trellis_dag import DAG 5 | 6 | 7 | class DummyNode(Node): 8 | def __init__(self, name: str) -> None: 9 | super().__init__(name) 10 | 11 | async def execute(self) -> dict[str, type]: 12 | pass 13 | 14 | 15 | class ReadFromFileTool(Node): 16 | def __init__( 17 | self, 18 | name: str, 19 | input_s: dict[str, type] = dict, 20 | output_s: dict[str, type] = {"file_contents": str}, 21 | file_path: str = "data.txt", 22 | *args, 23 | **kwargs, 24 | ) -> None: 25 | super().__init__(name, input_s, output_s, *args, **kwargs) 26 | self.file_path = file_path 27 | 28 | async def execute(self) -> dict: 29 | with open( 30 | self.execute_args["kwargs"].get("file_path", "tests/data.txt"), "r" 31 | ) as f: 32 | self.output = {"file_contents": f.read()} 33 | return self.output 34 | 35 | 36 | class CatFactsAPITool(Node): 37 | def __init__( 38 | self, 39 | name: str, 40 | input_s: dict[str, type] = dict, 41 | output_s: dict[str, type] = {"cat_information": list[dict[str, str]]}, 42 | limit: int = 1, 43 | max_length: int = 140, 44 | *args, 45 | **kwargs, 46 | ) -> None: 47 | super().__init__(name, input_s, output_s, *args, **kwargs) 48 | self.limit = limit 49 | self.max_length = max_length 50 | 51 | def set_limit(self, limit: int) -> None: 52 | self.limit = limit 53 | 54 | def set_max_length(self, max_length: int) -> None: 55 | self.max_length = max_length 56 | 57 | async def execute(self) -> dict: 58 | async with aiohttp.ClientSession() as session: 59 | async with session.get( 60 | f"https://catfact.ninja/facts?limit={self.execute_args['kwargs'].get('limit', 1)}&max_length={self.execute_args['kwargs'].get('max_length', 140)}" 61 | ) as response: 62 | if response.status == 200: 63 | data = await response.json() 64 | self.output =
{"cat_information": data["data"]} 65 | return self.output 66 | else: 67 | return {"error": "Unable to fetch cat fact."} 68 | 69 | 70 | class UselessFactsAPITool(Node): 71 | def __init__( 72 | self, 73 | name: str, 74 | input_s: dict[str, type] = dict, 75 | output_s: dict[str, type] = {"useless_information": str}, 76 | *args, 77 | **kwargs, 78 | ) -> None: 79 | super().__init__(name, input_s, output_s, *args, **kwargs) 80 | 81 | async def execute(self) -> dict: 82 | async with aiohttp.ClientSession() as session: 83 | async with session.get( 84 | "https://uselessfacts.jsph.pl/random.json" 85 | ) as response: 86 | if response.status == 200: 87 | data = await response.json() 88 | self.output = {"useless_information": data["text"]} 89 | return self.output 90 | else: 91 | return {"error": "Unable to fetch useless fact."} 92 | 93 | 94 | class CorporateBSGeneratorAPITool(Node): 95 | def __init__( 96 | self, 97 | name: str, 98 | input_s: dict[str, type] = dict, 99 | output_s: dict[str, type] = {"corporate_bs": str}, 100 | *args, 101 | **kwargs, 102 | ) -> None: 103 | super().__init__(name, input_s, output_s, *args, **kwargs) 104 | 105 | async def execute(self) -> dict: 106 | async with aiohttp.ClientSession() as session: 107 | async with session.get( 108 | "https://corporatebs-generator.sameerkumar.website/" 109 | ) as response: 110 | if response.status == 200: 111 | data = await response.json() 112 | self.output = {"corporate_bs": data["phrase"]} 113 | return self.output 114 | else: 115 | return {"error": "Unable to fetch corporate bs."} 116 | 117 | 118 | class ImgFlipMemeNameAPITool(Node): 119 | def __init__( 120 | self, 121 | name: str, 122 | input_s: dict[str, type] = dict, 123 | output_s: dict[str, type] = {"meme_name": str}, 124 | *args, 125 | **kwargs, 126 | ) -> None: 127 | super().__init__(name, input_s, output_s, *args, **kwargs) 128 | 129 | async def execute(self) -> dict: 130 | async with aiohttp.ClientSession() as session: 131 | async with session.get("https://api.imgflip.com/get_memes") as response: 132 | if response.status == 200: 133 | data = await response.json() 134 | self.output = {"meme_name": data["data"]["memes"][0]["name"]} 135 | return self.output 136 | else: 137 | return {"error": "Unable to fetch meme name."} 138 | 139 | 140 | @pytest.fixture 141 | def dag() -> DAG: 142 | return DAG() 143 | 144 | 145 | @pytest.fixture 146 | def cat_facts_api_tool() -> Node: 147 | return CatFactsAPITool("cat_facts_api_tool") 148 | 149 | 150 | @pytest.fixture 151 | def useless_facts_api_tool() -> Node: 152 | return UselessFactsAPITool("useless_facts_api_tool") 153 | 154 | 155 | @pytest.fixture 156 | def corporate_bs_generator_api_tool() -> Node: 157 | return CorporateBSGeneratorAPITool("corporate_bs_generator_api_tool") 158 | 159 | 160 | @pytest.fixture 161 | def img_flip_meme_name_api_tool() -> Node: 162 | return ImgFlipMemeNameAPITool("img_flip_meme_name_api_tool") 163 | 164 | 165 | @pytest.fixture 166 | def read_from_file_tool() -> Node: 167 | return ReadFromFileTool("read_from_file_tool") 168 | 169 | 170 | @pytest.fixture 171 | def dummy_node() -> DummyNode: 172 | return DummyNode("test") 173 | 174 | 175 | @pytest.fixture() 176 | def dummy_node_2(): 177 | return DummyNode("test2") 178 | 179 | 180 | @pytest.fixture() 181 | def dummy_node_3(): 182 | return DummyNode("test3") 183 | 184 | 185 | @pytest.fixture 186 | def dummy_node_4(): 187 | return DummyNode("test4") 188 | 189 | 190 | @pytest.fixture 191 | def dummy_node_5(): 192 | return DummyNode("test5") 193 | 194 | 195 | @pytest.fixture 
196 | def dummy_node_6(): 197 | return DummyNode("test6") 198 | 199 | 200 | @pytest.fixture 201 | def dummy_node_7(): 202 | return DummyNode("test7") 203 | 204 | 205 | @pytest.fixture 206 | def dummy_node_8(): 207 | return DummyNode("test8") 208 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "aiohttp" 5 | version = "3.8.5" 6 | description = "Async http client/server framework (asyncio)" 7 | optional = false 8 | python-versions = ">=3.6" 9 | files = [ 10 | {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, 11 | {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, 12 | {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, 13 | {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, 14 | {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, 15 | {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, 16 | {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, 17 | {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, 18 | {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, 19 | {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, 20 | {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, 21 | {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, 22 | {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, 23 | {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, 24 | {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, 25 | {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, 26 | {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, 27 | {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, 28 | {file = 
"aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, 29 | {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, 30 | {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, 31 | {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, 32 | {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, 33 | {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, 34 | {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, 35 | {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, 36 | {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, 37 | {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, 38 | {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, 39 | {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, 40 | {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, 41 | {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, 42 | {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, 43 | {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, 44 | {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, 45 | {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, 46 | {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, 47 | {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, 48 | {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, 49 | {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, 50 | {file = 
"aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, 51 | {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, 52 | {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, 53 | {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, 54 | {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, 55 | {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, 56 | {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, 57 | {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, 58 | {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, 59 | {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, 60 | {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, 61 | {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, 62 | {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, 63 | {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, 64 | {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, 65 | {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, 66 | {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, 67 | {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, 68 | {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, 69 | {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, 70 | {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, 71 | {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, 72 | {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, 73 | {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, 74 | {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, 75 | {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, 76 | {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, 77 | {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, 78 | {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, 79 | {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, 80 | {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, 81 | {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, 82 | {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, 83 | {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, 84 | {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, 85 | {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, 86 | {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, 87 | {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, 88 | {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, 89 | {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, 90 | {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, 91 | {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, 92 | {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, 93 | {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, 94 | {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, 95 | {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, 96 | {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, 97 | ] 98 | 99 | [package.dependencies] 100 | aiosignal = ">=1.1.2" 101 | async-timeout = ">=4.0.0a3,<5.0" 102 | attrs = ">=17.3.0" 103 | charset-normalizer = ">=2.0,<4.0" 104 | frozenlist = ">=1.1.1" 105 | multidict = ">=4.5,<7.0" 106 | yarl = ">=1.0,<2.0" 107 | 108 | [package.extras] 109 | speedups = ["Brotli", "aiodns", "cchardet"] 110 | 111 | [[package]] 112 | name = "aiosignal" 113 | version = "1.3.1" 114 | description = "aiosignal: a list of registered asynchronous callbacks" 115 | optional = false 116 | python-versions = ">=3.7" 117 | files = [ 118 | {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, 119 | {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, 120 | ] 121 | 122 | [package.dependencies] 123 | frozenlist = ">=1.1.0" 124 | 125 | [[package]] 126 | name = "async-timeout" 127 | version = "4.0.3" 128 | description = "Timeout context manager for asyncio programs" 129 | optional = false 130 | python-versions = ">=3.7" 131 | files = [ 132 | {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, 133 | {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, 134 | ] 135 | 136 | [[package]] 137 | name = "attrs" 138 | version = "23.1.0" 139 | description = "Classes Without Boilerplate" 140 | optional = false 141 | python-versions = ">=3.7" 142 | files = [ 143 | {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, 144 | {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, 145 | ] 146 | 147 | [package.extras] 148 | cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] 149 | dev = ["attrs[docs,tests]", "pre-commit"] 150 | docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] 151 | tests = ["attrs[tests-no-zope]", "zope-interface"] 152 | tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] 153 | 154 | [[package]] 155 | name = "backoff" 156 | version = "2.2.1" 157 | description = "Function decoration for backoff and retry" 158 | optional = false 159 | python-versions = ">=3.7,<4.0" 160 | files = [ 161 | {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, 162 | {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, 163 | ] 164 | 165 | [[package]] 166 | name = "certifi" 167 | version = "2023.7.22" 168 | description = "Python package for providing Mozilla's CA Bundle." 
169 | optional = false 170 | python-versions = ">=3.6" 171 | files = [ 172 | {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, 173 | {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, 174 | ] 175 | 176 | [[package]] 177 | name = "charset-normalizer" 178 | version = "3.2.0" 179 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 180 | optional = false 181 | python-versions = ">=3.7.0" 182 | files = [ 183 | {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, 184 | {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, 185 | {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, 186 | {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, 187 | {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, 188 | {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, 189 | {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, 190 | {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, 191 | {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, 192 | {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, 193 | {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, 194 | {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, 195 | {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, 196 | {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, 197 | {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, 198 | {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, 199 | {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, 200 | {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, 201 | {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, 202 | {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, 203 | {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, 204 | {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, 205 | {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, 206 | {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, 207 | {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, 208 | {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, 209 | {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, 210 | {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, 211 | {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, 212 | {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, 213 | {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, 214 | {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, 215 | {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, 216 | {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, 217 | {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, 218 | {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, 219 | {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, 220 | {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, 221 | {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, 222 | {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, 223 | {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, 224 | {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, 225 | {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, 226 | {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, 227 | {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, 228 | {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, 229 | {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, 230 | {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, 231 | {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, 232 | {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, 233 | {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, 234 | {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, 235 | {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, 236 | {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, 237 | {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, 238 | {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, 239 | {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, 240 | {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, 241 | {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, 242 | {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, 243 | {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, 244 | {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, 245 | {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, 246 | {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, 247 | {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, 248 | {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, 249 | {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, 250 | {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, 251 | {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, 252 | {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, 253 | {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, 254 | {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, 255 | {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, 256 | {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, 257 | {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, 258 | ] 259 | 260 | [[package]] 261 | name = "colorama" 262 | version = "0.4.6" 263 | description = "Cross-platform colored terminal text." 
264 | optional = false 265 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 266 | files = [ 267 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 268 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 269 | ] 270 | 271 | [[package]] 272 | name = "exceptiongroup" 273 | version = "1.1.3" 274 | description = "Backport of PEP 654 (exception groups)" 275 | optional = false 276 | python-versions = ">=3.7" 277 | files = [ 278 | {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, 279 | {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, 280 | ] 281 | 282 | [package.extras] 283 | test = ["pytest (>=6)"] 284 | 285 | [[package]] 286 | name = "frozenlist" 287 | version = "1.4.0" 288 | description = "A list-like structure which implements collections.abc.MutableSequence" 289 | optional = false 290 | python-versions = ">=3.8" 291 | files = [ 292 | {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, 293 | {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, 294 | {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, 295 | {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, 296 | {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, 297 | {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, 298 | {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, 299 | {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, 300 | {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, 301 | {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, 302 | {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, 303 | {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, 304 | {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, 305 | {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, 306 | {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", 
hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, 307 | {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, 308 | {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, 309 | {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, 310 | {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, 311 | {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, 312 | {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, 313 | {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, 314 | {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, 315 | {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, 316 | {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, 317 | {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, 318 | {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, 319 | {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, 320 | {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, 321 | {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, 322 | {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, 323 | {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, 324 | {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, 325 | {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, 326 | {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, 327 | {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, 328 | {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, 329 | {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, 330 | {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, 331 | {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, 332 | {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, 333 | {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, 334 | {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, 335 | {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, 336 | {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, 337 | {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, 338 | {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, 339 | {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, 340 | {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, 341 | {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, 342 | {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, 343 | {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, 344 | {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, 345 | {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, 346 | {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, 347 | {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, 348 | {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, 349 | {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, 350 | {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, 351 | {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, 352 | {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, 353 | ] 354 | 355 | [[package]] 356 | name = "idna" 357 | version = "3.4" 358 | description = "Internationalized Domain Names in Applications (IDNA)" 359 | optional = false 360 | python-versions = ">=3.5" 361 | files = [ 362 | {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, 363 | {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, 364 | ] 365 | 366 | [[package]] 367 | name = "iniconfig" 368 | version = "2.0.0" 369 | description = "brain-dead simple config-ini parsing" 370 | optional = false 371 | python-versions = ">=3.7" 372 | files = [ 373 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 374 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 375 | ] 376 | 377 | [[package]] 378 | name = "monotonic" 379 | version = "1.6" 380 | description = "An implementation of time.monotonic() for Python 2 & < 3.3" 381 | optional = false 382 | python-versions = "*" 383 | files = [ 384 | {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, 385 | {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, 386 | ] 387 | 388 | [[package]] 389 | name = "multidict" 390 | version = "6.0.4" 391 | description = "multidict implementation" 392 | optional = false 393 | python-versions = ">=3.7" 394 | files = [ 395 | {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, 396 | {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, 397 | {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, 398 | {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, 399 | {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, 400 | {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, 401 | {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, 402 | {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, 403 | {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, 404 | {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, 405 | {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, 406 | {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, 407 | {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, 408 | {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, 409 | {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, 410 | {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, 411 | {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, 412 | {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, 413 | {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, 414 | {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, 415 | {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, 416 | {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, 417 | {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, 418 | {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, 419 | {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, 420 | {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, 421 | {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, 422 | {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, 423 | {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, 424 | {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, 425 | {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, 426 | {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, 427 | {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, 428 | {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, 429 | {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, 430 | {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, 431 | {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, 432 | {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, 433 | {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, 434 | {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, 435 | {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, 436 | {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, 437 | {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, 438 | {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, 439 | {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, 440 | {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, 441 | {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, 442 | {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, 443 | {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, 444 | {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, 445 | {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, 446 | {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, 447 | {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, 448 | {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, 449 | {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, 450 | {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, 451 | {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, 452 | {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, 453 | {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, 454 | {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, 455 | {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, 456 | {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, 457 | {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, 458 | {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, 459 | {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, 460 | {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, 461 | {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, 462 | {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, 463 | {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, 464 | {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, 465 | {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, 466 | {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, 467 | {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, 468 | {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, 469 | ] 470 | 471 | [[package]] 472 | name = "openai" 473 | version = "0.27.9" 474 | description = "Python client library for the OpenAI API" 475 | optional = false 476 | python-versions = ">=3.7.1" 477 | files = [ 478 | {file = 
"openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, 479 | {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, 480 | ] 481 | 482 | [package.dependencies] 483 | aiohttp = "*" 484 | requests = ">=2.20" 485 | tqdm = "*" 486 | 487 | [package.extras] 488 | datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] 489 | dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] 490 | embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] 491 | wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] 492 | 493 | [[package]] 494 | name = "packaging" 495 | version = "23.1" 496 | description = "Core utilities for Python packages" 497 | optional = false 498 | python-versions = ">=3.7" 499 | files = [ 500 | {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, 501 | {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, 502 | ] 503 | 504 | [[package]] 505 | name = "pluggy" 506 | version = "1.3.0" 507 | description = "plugin and hook calling mechanisms for python" 508 | optional = false 509 | python-versions = ">=3.8" 510 | files = [ 511 | {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, 512 | {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, 513 | ] 514 | 515 | [package.extras] 516 | dev = ["pre-commit", "tox"] 517 | testing = ["pytest", "pytest-benchmark"] 518 | 519 | [[package]] 520 | name = "posthog" 521 | version = "3.0.2" 522 | description = "Integrate PostHog into any python application." 
523 | optional = false 524 | python-versions = "*" 525 | files = [ 526 | {file = "posthog-3.0.2-py2.py3-none-any.whl", hash = "sha256:a8c0af6f2401fbe50f90e68c4143d0824b54e872de036b1c2f23b5abb39d88ce"}, 527 | {file = "posthog-3.0.2.tar.gz", hash = "sha256:701fba6e446a4de687c6e861b587e7b7741955ad624bf34fe013c06a0fec6fb3"}, 528 | ] 529 | 530 | [package.dependencies] 531 | backoff = ">=1.10.0" 532 | monotonic = ">=1.5" 533 | python-dateutil = ">2.1" 534 | requests = ">=2.7,<3.0" 535 | six = ">=1.5" 536 | 537 | [package.extras] 538 | dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] 539 | sentry = ["django", "sentry-sdk"] 540 | test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest"] 541 | 542 | [[package]] 543 | name = "pytest" 544 | version = "7.4.0" 545 | description = "pytest: simple powerful testing with Python" 546 | optional = false 547 | python-versions = ">=3.7" 548 | files = [ 549 | {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, 550 | {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, 551 | ] 552 | 553 | [package.dependencies] 554 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 555 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 556 | iniconfig = "*" 557 | packaging = "*" 558 | pluggy = ">=0.12,<2.0" 559 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} 560 | 561 | [package.extras] 562 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 563 | 564 | [[package]] 565 | name = "pytest-asyncio" 566 | version = "0.21.1" 567 | description = "Pytest support for asyncio" 568 | optional = false 569 | python-versions = ">=3.7" 570 | files = [ 571 | {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, 572 | {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, 573 | ] 574 | 575 | [package.dependencies] 576 | pytest = ">=7.0.0" 577 | 578 | [package.extras] 579 | docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] 580 | testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] 581 | 582 | [[package]] 583 | name = "pytest-mock" 584 | version = "3.11.1" 585 | description = "Thin-wrapper around the mock package for easier use with pytest" 586 | optional = false 587 | python-versions = ">=3.7" 588 | files = [ 589 | {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, 590 | {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, 591 | ] 592 | 593 | [package.dependencies] 594 | pytest = ">=5.0" 595 | 596 | [package.extras] 597 | dev = ["pre-commit", "pytest-asyncio", "tox"] 598 | 599 | [[package]] 600 | name = "python-dateutil" 601 | version = "2.8.2" 602 | description = "Extensions to the standard Python datetime module" 603 | optional = false 604 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 605 | files = [ 606 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 607 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash 
= "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 608 | ] 609 | 610 | [package.dependencies] 611 | six = ">=1.5" 612 | 613 | [[package]] 614 | name = "python-dotenv" 615 | version = "1.0.0" 616 | description = "Read key-value pairs from a .env file and set them as environment variables" 617 | optional = false 618 | python-versions = ">=3.8" 619 | files = [ 620 | {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, 621 | {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, 622 | ] 623 | 624 | [package.extras] 625 | cli = ["click (>=5.0)"] 626 | 627 | [[package]] 628 | name = "requests" 629 | version = "2.31.0" 630 | description = "Python HTTP for Humans." 631 | optional = false 632 | python-versions = ">=3.7" 633 | files = [ 634 | {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, 635 | {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, 636 | ] 637 | 638 | [package.dependencies] 639 | certifi = ">=2017.4.17" 640 | charset-normalizer = ">=2,<4" 641 | idna = ">=2.5,<4" 642 | urllib3 = ">=1.21.1,<3" 643 | 644 | [package.extras] 645 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 646 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] 647 | 648 | [[package]] 649 | name = "six" 650 | version = "1.16.0" 651 | description = "Python 2 and 3 compatibility utilities" 652 | optional = false 653 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 654 | files = [ 655 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 656 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 657 | ] 658 | 659 | [[package]] 660 | name = "tomli" 661 | version = "2.0.1" 662 | description = "A lil' TOML parser" 663 | optional = false 664 | python-versions = ">=3.7" 665 | files = [ 666 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 667 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 668 | ] 669 | 670 | [[package]] 671 | name = "tqdm" 672 | version = "4.66.1" 673 | description = "Fast, Extensible Progress Meter" 674 | optional = false 675 | python-versions = ">=3.7" 676 | files = [ 677 | {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, 678 | {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, 679 | ] 680 | 681 | [package.dependencies] 682 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 683 | 684 | [package.extras] 685 | dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] 686 | notebook = ["ipywidgets (>=6)"] 687 | slack = ["slack-sdk"] 688 | telegram = ["requests"] 689 | 690 | [[package]] 691 | name = "urllib3" 692 | version = "2.0.4" 693 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
694 | optional = false 695 | python-versions = ">=3.7" 696 | files = [ 697 | {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, 698 | {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, 699 | ] 700 | 701 | [package.extras] 702 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] 703 | secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] 704 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 705 | zstd = ["zstandard (>=0.18.0)"] 706 | 707 | [[package]] 708 | name = "voluptuous" 709 | version = "0.13.1" 710 | description = "" 711 | optional = false 712 | python-versions = "*" 713 | files = [ 714 | {file = "voluptuous-0.13.1-py3-none-any.whl", hash = "sha256:4b838b185f5951f2d6e8752b68fcf18bd7a9c26ded8f143f92d6d28f3921a3e6"}, 715 | {file = "voluptuous-0.13.1.tar.gz", hash = "sha256:e8d31c20601d6773cb14d4c0f42aee29c6821bbd1018039aac7ac5605b489723"}, 716 | ] 717 | 718 | [[package]] 719 | name = "yarl" 720 | version = "1.9.2" 721 | description = "Yet another URL library" 722 | optional = false 723 | python-versions = ">=3.7" 724 | files = [ 725 | {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, 726 | {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, 727 | {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, 728 | {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, 729 | {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, 730 | {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, 731 | {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, 732 | {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, 733 | {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, 734 | {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, 735 | {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, 736 | {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, 737 | {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, 738 | {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, 739 | {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, 740 | {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, 741 | {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, 742 | {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, 743 | {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, 744 | {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, 745 | {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, 746 | {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, 747 | {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, 748 | {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, 749 | {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, 750 | {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, 751 | {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, 752 | {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, 753 | {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, 754 | {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, 755 | {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, 756 | {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, 757 | {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, 758 | {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, 759 | {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, 760 | {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, 761 | {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, 762 | {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, 763 | {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, 764 | {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, 765 | {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, 766 | {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, 767 | {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, 768 | {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, 769 | {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, 770 | {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, 771 | {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, 772 | {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, 773 | {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, 774 | {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, 775 | {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, 776 | {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, 777 | {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, 778 | {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, 779 | {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, 780 | {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, 781 | {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, 782 | {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, 783 | {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, 784 | {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, 785 | {file = 
"yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, 786 | {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, 787 | {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, 788 | {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, 789 | {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, 790 | {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, 791 | {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, 792 | {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, 793 | {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, 794 | {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, 795 | {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, 796 | {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, 797 | {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, 798 | {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, 799 | ] 800 | 801 | [package.dependencies] 802 | idna = ">=2.0" 803 | multidict = ">=4.0" 804 | 805 | [metadata] 806 | lock-version = "2.0" 807 | python-versions = "^3.9" 808 | content-hash = "a96ff222410b9c02b1ed4c2e386c6de1bd89f81cc19453070c83a2b26cd3e65e" 809 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "trellis_dag" 3 | version = "0.1.5" 4 | description = "DAG-based LLM execution framework." 
5 | authors = ["Shivansh Rustagi ", "Kamil Kisielewicz "] 6 | license = "MIT" 7 | readme = "README.md" 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.9" 11 | pytest = "^7.4.0" 12 | openai = "^0.27.9" 13 | python-dotenv = "^1.0.0" 14 | voluptuous = "^0.13.1" 15 | pytest-asyncio = "^0.21.1" 16 | pytest-mock = "^3.11.1" 17 | aiohttp = "^3.8.5" 18 | posthog = "^3.0.2" 19 | 20 | 21 | [build-system] 22 | requires = ["poetry-core"] 23 | build-backend = "poetry.core.masonry.api" 24 | 25 | 26 | [tool.pytest.ini_options] 27 | pythonpath = [".", "trellis"] 28 | -------------------------------------------------------------------------------- /trellis_dag/__init__.py: -------------------------------------------------------------------------------- 1 | import dotenv 2 | 3 | import logging.config 4 | from os import getenv 5 | 6 | from .node import Node 7 | from .dag import DAG 8 | from .llm import LLM 9 | from .utils.status import Status 10 | 11 | dotenv.load_dotenv() 12 | 13 | 14 | 15 | logging_config = { 16 | "version": 1, 17 | "disable_existing_loggers": False, 18 | "formatters": { 19 | "simple": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"} 20 | }, 21 | "handlers": { 22 | "console_info": { 23 | "class": "logging.StreamHandler", 24 | "level": "DEBUG", 25 | "formatter": "simple", 26 | "stream": "ext://sys.stdout", 27 | }, 28 | "console_error": { 29 | "class": "logging.StreamHandler", 30 | "level": "WARNING", 31 | "formatter": "simple", 32 | "stream": "ext://sys.stderr", 33 | }, 34 | }, 35 | "root": {"level": "INFO", "handlers": ["console_info", "console_error"]}, 36 | } 37 | 38 | logging_config["root"]["level"] = "INFO" if not getenv("LOG_LEVEL") else getenv("LOG_LEVEL") 39 | logging.config.dictConfig(logging_config) 40 | -------------------------------------------------------------------------------- /trellis_dag/dag.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from asyncio import iscoroutinefunction 3 | from typing import Callable 4 | 5 | from .utils.analyzer import analyzer 6 | from .node import Node 7 | 8 | 9 | class DAG: 10 | def __init__(self) -> None: 11 | self.adj = {} 12 | self.deps = {} 13 | self.nodes = {} 14 | self.logger = logging.getLogger(self.__class__.__name__) 15 | 16 | def set_logger(self, logger: logging.Logger) -> None: 17 | if not isinstance(logger, logging.Logger): 18 | raise ValueError(f"Logger {logger} is not a valid logger") 19 | self.logger = logger 20 | 21 | def is_node(self, node: Node) -> bool: 22 | return node and isinstance(node, Node) 23 | 24 | def add_node(self, node: Node) -> None: 25 | if not self.is_node(node): 26 | self.logger.error(f"{node} is not a valid Node object") 27 | raise ValueError(f"{node} is not a valid Node object") 28 | node_id = node.get_id() 29 | if node_id in self.nodes: 30 | self.logger.error(f"Node with id {node_id} already exists") 31 | raise ValueError(f"Node with id {node_id} already exists") 32 | self.nodes[node_id] = node 33 | self.adj[node_id] = [] 34 | self.deps[node_id] = [] 35 | self.logger.debug(f"Added node {node} with id {node_id}") 36 | 37 | def remove_node(self, node: Node) -> None: 38 | if not self.is_node(node): 39 | self.logger.error(f"{node} is not a valid Node object") 40 | raise ValueError(f"{node} is not a valid Node object") 41 | node_id = node.get_id() 42 | if node_id not in self.nodes: 43 | self.logger.error(f"Node with id {node_id} does not exist") 44 | raise KeyError(f"Node with id {node_id} does not exist") 45 | for _id in 
self.adj: 46 | self.adj[_id] = [n for n in self.adj[_id] if n["id"] != node_id] 47 | for _id in self.deps: 48 | if node_id in self.deps[_id]: 49 | self.deps[_id].remove(node_id) 50 | del self.adj[node_id] 51 | del self.deps[node_id] 52 | del self.nodes[node_id] 53 | self.logger.debug(f"Removed node {node} with id {node_id}") 54 | 55 | def get_node(self, node_id: str) -> Node: 56 | if not node_id or not isinstance(node_id, str): 57 | self.logger.error(f"{node_id} is not a valid node id") 58 | raise ValueError("Please provide a valid node id") 59 | if node_id not in self.nodes: 60 | self.logger.error(f"Node with id {node_id} does not exist") 61 | raise ValueError(f"Node with id {node_id} does not exist") 62 | return self.nodes[node_id] 63 | 64 | def _is_reachable(self, start: Node, target: Node) -> bool: 65 | if not self.is_node(start) or not self.is_node(target): 66 | self.logger.error(f"{target} or {start} is not a valid Node object") 67 | raise ValueError(f"{target} or {start} is not a valid Node object") 68 | visited = set() 69 | stack = [start.get_id()] 70 | while stack: 71 | node_id = stack.pop() 72 | if node_id == target.get_id(): 73 | return True 74 | if node_id not in visited: 75 | visited.add(node_id) 76 | stack.extend([n["id"] for n in self.adj[node_id]]) 77 | return False 78 | 79 | def add_edge( 80 | self, 81 | from_node: Node, 82 | to_node: Node, 83 | fn: Callable[[dict[str:type]], dict[str:type]] = lambda x: x, 84 | ) -> None: 85 | if not self.is_node(from_node) or not self.is_node(to_node): 86 | self.logger.error(f"{from_node} or {to_node} is not a valid Node object") 87 | raise ValueError(f"{from_node} or {to_node} is not a valid Node object") 88 | fnode_id = from_node.get_id() 89 | tnode_id = to_node.get_id() 90 | if fnode_id not in self.nodes or tnode_id not in self.nodes: 91 | self.logger.error( 92 | f"Cannot add edge either {from_node.get_name()} to {to_node.get_name()} does not exist" 93 | ) 94 | raise ValueError( 95 | f"Cannot add edge either {from_node.get_name()} to {to_node.get_name()} does not exist" 96 | ) 97 | # if we add u -> v and u is reachable from v, then we have a cycle 98 | if tnode_id == fnode_id or self._is_reachable(to_node, from_node): 99 | self.logger.error( 100 | f"Cannot add edge from {from_node.get_name()} to {to_node.get_name()}; cycle detected" 101 | ) 102 | raise ValueError( 103 | f"Cannot add edge from {from_node.get_name()} to {to_node.get_name()}; cycle detected" 104 | ) 105 | self.adj[fnode_id].append({"fn": fn, "id": tnode_id}) 106 | self.deps[tnode_id].append(fnode_id) 107 | self.logger.debug( 108 | f"Added edge from {fnode_id} to {tnode_id} with function {fn.__name__}" 109 | ) 110 | 111 | def remove_edge(self, from_node: Node, to_node: Node) -> None: 112 | if not self.is_node(from_node) or not self.is_node(to_node): 113 | self.logger.error(f"{from_node} or {to_node} is not a valid Node object") 114 | raise ValueError(f"{from_node} or {to_node} is not a valid Node object") 115 | fnode_id = from_node.get_id() 116 | tnode_id = to_node.get_id() 117 | if fnode_id not in self.nodes or tnode_id not in self.nodes: 118 | self.logger.error( 119 | f"Cannot remove edge either {from_node.get_name()} to {to_node.get_name()} does not exist" 120 | ) 121 | raise ValueError( 122 | f"Cannot remove nonexistent edge from {from_node.get_name()} to {to_node.get_name()}" 123 | ) 124 | # if we add u -> v and u is reachable from v, then we have a cycle 125 | if tnode_id == fnode_id or tnode_id not in map( 126 | lambda x: x["id"], self.adj[fnode_id] 127 | ): 128 | 
self.logger.error( 129 | f"Cannot remove edge from {from_node.get_name()} to {to_node.get_name()}" 130 | ) 131 | raise ValueError( 132 | f"Cannot remove nonexistent edge from {from_node.get_name()} to {to_node.get_name()}" 133 | ) 134 | self.adj[fnode_id] = [n for n in self.adj[fnode_id] if tnode_id != n["id"]] 135 | self.deps[tnode_id].remove(fnode_id) 136 | self.logger.debug(f"Removed edge from {fnode_id} to {tnode_id}") 137 | 138 | def _topological_sort(self) -> list[str]: 139 | # Kahn's algorithm 140 | # https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm 141 | L = [] 142 | S = {n for n in self.nodes if not self.deps[n]} 143 | deps_copy = self.deps.copy() 144 | while S: 145 | n = S.pop() 146 | L.append(n) 147 | for m in self.adj[n]: 148 | deps_copy[m["id"]].remove(n) 149 | if not deps_copy[m["id"]]: 150 | S.add(m["id"]) 151 | if any(deps_copy.values()): 152 | self.logger.error("Cycle detected") 153 | raise ValueError("Cycle detected") 154 | self.logger.debug(f"Topological sort: {L}") 155 | return L 156 | 157 | def _is_valid_topological_order(self, order: list[str]) -> bool: 158 | # Check if all nodes are present in the order 159 | if set(order) != set(self.nodes.keys()): 160 | return False 161 | 162 | for node_id, edges in self.adj.items(): 163 | for edge in edges: 164 | # If u comes after v in the list, it's not a valid order 165 | if order.index(node_id) > order.index(edge["id"]): 166 | return False 167 | 168 | return True 169 | 170 | async def execute( 171 | self, 172 | init_source_nodes: dict[str:type], 173 | ) -> dict[str:type]: 174 | if not isinstance(init_source_nodes, dict): 175 | self.logger.error(f"{init_source_nodes} is not a valid dict") 176 | raise ValueError("Please provide a valid dict of source nodes") 177 | for k, args_kwargs in init_source_nodes.items(): 178 | if k not in self.nodes: 179 | self.logger.error(f"Node with id {k} does not exist") 180 | raise ValueError(f"Node with id {k} does not exist") 181 | if not isinstance(args_kwargs, dict): 182 | self.logger.error(f"Node {k} input {args_kwargs} is not a valid dict") 183 | raise ValueError(f"Node {k} input {args_kwargs} is not a valid dict") 184 | order = self._topological_sort() 185 | self.logger.info("Executing DAG") 186 | while order: 187 | node_id = order.pop(0) 188 | node = self.nodes[node_id] 189 | if node.get_status() == "EXECUTING": 190 | order.append(node_id) 191 | self.logger.info( 192 | f"Node {node_id} is already executing, moving to end of queue" 193 | ) 194 | else: 195 | try: 196 | a = ( 197 | init_source_nodes[node_id].get("args", []) 198 | if init_source_nodes.get(node_id, {}) 199 | else ( 200 | [] 201 | if not node.execute_args["args"] 202 | else node.execute_args["args"] 203 | ) 204 | ) 205 | k = ( 206 | init_source_nodes[node_id].get("kwargs", {}) 207 | if init_source_nodes.get(node_id, {}) 208 | else ( 209 | {} 210 | if not node.execute_args["kwargs"] 211 | else node.execute_args["kwargs"] 212 | ) 213 | ) 214 | node.set_execute_args(*a, **k) 215 | self.logger.info(f"Executing node {node_id}") 216 | if iscoroutinefunction(node._pre_hook): 217 | await node._pre_hook() 218 | else: 219 | node._pre_hook() 220 | flag = node.validate_input() 221 | if flag is False: 222 | self.logger.error( 223 | f"Node {node_id} input {node.input} is not valid for schema {node._input_s}" 224 | ) 225 | raise ValueError( 226 | f"Node {node_id} input {node.input} is not valid for schema {node._input_s}" 227 | ) 228 | if iscoroutinefunction(node.execute): 229 | await node.execute() 230 | else: 231 | 
node.execute() 232 | if iscoroutinefunction(node._post_hook): 233 | await node._post_hook() 234 | else: 235 | node._post_hook() 236 | flag = node.validate_output() 237 | if flag is False: 238 | self.logger.error( 239 | f"Node {node_id} output {node.output} is not valid for schema {node._output_s}" 240 | ) 241 | raise ValueError( 242 | f"Node {node_id} output {node.output} is not valid for schema {node._output_s}" 243 | ) 244 | self.logger.info(f"Node {node_id} executed successfully") 245 | for edge in self.adj[node_id]: 246 | self.nodes[edge["id"]].set_input( 247 | edge["fn"](node.get_output()), wipe=False 248 | ) 249 | self.logger.info( 250 | f"Node {node_id} output propagated to children successfully" 251 | ) 252 | except Exception as e: 253 | node.set_status("FAILED") 254 | self.logger.error(f"Node {node_id} failed: {e}") 255 | raise e 256 | 257 | analyzer( 258 | "dag/execute", 259 | { 260 | "nodes": [v.to_dict() for _, v in self.nodes.items()], 261 | "adj": str(self.adj), 262 | "deps": str(self.deps), 263 | "init_source_nodes": init_source_nodes, 264 | }, 265 | ) 266 | 267 | leaves = [self.get_node(n).get_output() for n in self.nodes if not self.adj[n]] 268 | return leaves 269 | -------------------------------------------------------------------------------- /trellis_dag/llm.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import os 3 | import asyncio 4 | from voluptuous import Schema, Invalid, Required, ALLOW_EXTRA 5 | from dotenv import load_dotenv 6 | 7 | from .utils.analyzer import analyzer 8 | from .node import Node 9 | from .utils.constants import ( 10 | DEFAULT_MAX_RETRIES, 11 | DEFAULT_RETRY_DELAY, 12 | DEFAULT_RATE_LIMIT_DELAY, 13 | OPENAI_MODELS, 14 | OPENAI_ARGS, 15 | OPENAI_RESPONSE_SCHEMA, 16 | ) 17 | 18 | load_dotenv() 19 | openai.api_key = os.getenv("OPENAI_API_KEY") 20 | 21 | 22 | class LLM(Node): 23 | def __init__( 24 | self, 25 | name: str, 26 | input_s: dict[str:type] = dict, 27 | output_s: dict[str:type] = OPENAI_RESPONSE_SCHEMA, 28 | messages: list[dict] = [ 29 | {"role": "system", "content": "You are a helpful assistant."}, 30 | {"role": "user", "content": "Hello!"}, 31 | ], 32 | model: str = "gpt-3.5-turbo", 33 | max_retries: int = DEFAULT_MAX_RETRIES, 34 | retry_delay: int = DEFAULT_RETRY_DELAY, 35 | rate_limit_delay: int = DEFAULT_RATE_LIMIT_DELAY, 36 | *args: list, 37 | **kwargs: dict, 38 | ) -> None: 39 | super().__init__(name, input_s, output_s, *args, **kwargs) 40 | # manually reset to allow extra keys, idk what the full OpenAI response schema can have 41 | self._output_s = Schema(output_s, extra=ALLOW_EXTRA) 42 | if model in OPENAI_MODELS: 43 | self.model = model 44 | self.max_retries = max_retries 45 | self.retry_delay = retry_delay 46 | self.rate_limit_delay = rate_limit_delay 47 | self.messages = messages 48 | 49 | def get_model(self) -> str: 50 | return self.model 51 | 52 | def get_messages(self) -> list[dict]: 53 | return self.messages 54 | 55 | def get_max_retries(self) -> int: 56 | return self.max_retries 57 | 58 | def get_retry_delay(self) -> int: 59 | return self.retry_delay 60 | 61 | def get_rate_limit_delay(self) -> int: 62 | return self.rate_limit_delay 63 | 64 | def set_model(self, model: str) -> None: 65 | if model in OPENAI_MODELS: 66 | self.model = model 67 | self.logger.debug(f"Set model to {model}") 68 | else: 69 | self.logger.error(f"Model {model} is not a valid OpenAI model") 70 | raise ValueError(f"Model {model} is not a valid OpenAI model") 71 | 72 | def set_messages(self, 
messages: list[dict]) -> None: 73 | if not isinstance(messages, list): 74 | self.logger.error(f"Messages {messages} is not a valid list") 75 | raise ValueError(f"Messages {messages} is not a valid list") 76 | for msg in messages: 77 | try: 78 | Schema({Required("role"): str, Required("content"): str})(msg) 79 | except Invalid as e: 80 | self.logger.error(f"Message {msg} is not a valid OpenAI message: {e}") 81 | raise ValueError(f"Message {msg} is not a valid OpenAI message: {e}") 82 | self.messages = messages 83 | self.logger.debug(f"Set messages to {messages}") 84 | 85 | def set_max_retries(self, max_retries: int) -> None: 86 | if not isinstance(max_retries, int) or max_retries < 0: 87 | self.logger.error(f"Max retries {max_retries} is not a valid int") 88 | raise ValueError(f"Max retries {max_retries} is not a valid int") 89 | self.max_retries = max_retries 90 | self.logger.debug(f"Set max retries to {max_retries}") 91 | 92 | def set_retry_delay(self, retry_delay: int) -> None: 93 | if not isinstance(retry_delay, int) or retry_delay < 0: 94 | self.logger.error(f"Retry delay {retry_delay} is not a valid int") 95 | raise ValueError(f"Retry delay {retry_delay} is not a valid int") 96 | self.retry_delay = retry_delay 97 | self.logger.debug(f"Set retry delay to {retry_delay}") 98 | 99 | def set_rate_limit_delay(self, rate_limit_delay: int) -> None: 100 | if not isinstance(rate_limit_delay, int) or rate_limit_delay < 0: 101 | self.logger.error(f"Rate limit delay {rate_limit_delay} is not a valid int") 102 | raise ValueError(f"Rate limit delay {rate_limit_delay} is not a valid int") 103 | self.rate_limit_delay = rate_limit_delay 104 | self.logger.debug(f"Set rate limit delay to {rate_limit_delay}") 105 | 106 | async def execute(self) -> dict: 107 | optional_params = { 108 | k: v 109 | for k, v in super().get_execute_args()["kwargs"].items() 110 | if k in OPENAI_ARGS and v != OPENAI_ARGS[k] 111 | } 112 | 113 | msgs_template = self.messages.copy() 114 | 115 | # handle filling in variables 116 | if self.input: 117 | for msg in msgs_template: 118 | try: 119 | msg["content"] = msg["content"].format(**self.input) 120 | except KeyError: 121 | pass 122 | 123 | retries = 0 124 | 125 | while retries < self.max_retries: 126 | try: 127 | response = openai.ChatCompletion.create( 128 | model=self.model, messages=self.messages, **optional_params 129 | ) 130 | self.set_output(response) 131 | analyzer( 132 | "llm/chat_completion", 133 | { 134 | "model": self.model, 135 | "messages": self.messages, 136 | "response": response, 137 | "input": self.input, 138 | "optional_params": optional_params, 139 | }, 140 | ) 141 | return response.to_dict() 142 | except Exception as e: 143 | if isinstance(e, openai.error.InvalidRequestError): 144 | self.logger.error("Invalid request to OpenAI API") 145 | raise e 146 | elif isinstance(e, openai.error.AuthenticationError): 147 | self.logger.error("Failed to authenticate with OpenAI API") 148 | self.logger.error( 149 | "Please make sure your key is set as an environment variable. Run 'export OPENAI_API_KEY=your_key_here' to set your key." 
150 | ) 151 | raise e 152 | elif isinstance(e, openai.error.APIConnectionError): 153 | self.logger.error(f"Failed to connect to OpenAI API: {e}") 154 | if retries + 1 < self.max_retries: 155 | retries += 1 156 | self.logger.error(f"Retrying in {self.retry_delay} seconds...") 157 | await asyncio.sleep(self.retry_delay) 158 | else: 159 | self.logger.error( 160 | f"Method failed after {self.max_retries} retries" 161 | ) 162 | raise e 163 | elif isinstance(e, openai.error.APIError): 164 | self.logger.error(f"OpenAI API returned an API Error: {e}") 165 | if retries + 1 < self.max_retries: 166 | retries += 1 167 | self.logger.error(f"Retrying in {self.retry_delay} seconds...") 168 | await asyncio.sleep(self.retry_delay) 169 | else: 170 | self.logger.error( 171 | f"Method failed after {self.max_retries} retries" 172 | ) 173 | raise e 174 | elif isinstance(e, openai.error.RateLimitError): 175 | self.logger.error(f"OpenAI API request exceeded rate limit: {e}") 176 | if retries + 1 < self.max_retries: 177 | retries += 1 178 | self.logger.error( 179 | f"Retrying in {self.rate_limit_delay} seconds..." 180 | ) 181 | await asyncio.sleep(self.rate_limit_delay) 182 | else: 183 | self.logger.error( 184 | f"Method failed after {self.max_retries} retries" 185 | ) 186 | raise e 187 | elif isinstance(e, openai.error.Timeout): 188 | self.logger.error(f"OpenAI API request timed out: {e}") 189 | if retries + 1 < self.max_retries: 190 | retries += 1 191 | self.logger.error(f"Retrying in {self.retry_delay} seconds...") 192 | await asyncio.sleep(self.retry_delay) 193 | else: 194 | self.logger.error( 195 | f"Method failed after {self.max_retries} retries" 196 | ) 197 | raise e 198 | elif isinstance(e, openai.error.ServiceUnavailableError): 199 | self.logger.error( 200 | f"OpenAI API returned a Service Unavailable Error: {e}" 201 | ) 202 | if retries + 1 < self.max_retries: 203 | retries += 1 204 | self.logger.error(f"Retrying in {self.retry_delay} seconds...") 205 | await asyncio.sleep(self.retry_delay) 206 | else: 207 | self.logger.error( 208 | f"Method failed after {self.max_retries} retries" 209 | ) 210 | raise e 211 | -------------------------------------------------------------------------------- /trellis_dag/node.py: -------------------------------------------------------------------------------- 1 | from voluptuous import Schema, Invalid 2 | from asyncio import iscoroutinefunction 3 | from typing import Callable 4 | from abc import ABC, abstractmethod 5 | from uuid import uuid4 6 | import logging 7 | 8 | from .utils.analyzer import analyzer 9 | from .utils.status import Status 10 | 11 | 12 | class Node(ABC): 13 | def __init__( 14 | self, 15 | name: str, 16 | input_s: dict[str:type] = dict, 17 | output_s: dict[str:type] = dict, 18 | *args, 19 | **kwargs, 20 | ) -> None: 21 | self.logger = logging.getLogger(self.__class__.__name__) 22 | self.name = name 23 | self._id = uuid4().hex 24 | self.set_status("PENDING") 25 | self.input = {} 26 | self.set_execute_args(*args, **kwargs) 27 | self.output = {} 28 | self.set_input_s(input_s) 29 | self.__execute_args_s = Schema({"args": list, "kwargs": dict}) 30 | self.set_output_s(output_s) 31 | self.pre_execute_hook = lambda _: self.input 32 | self.post_execute_hook = lambda _: self.output 33 | analyzer( 34 | "node/added", 35 | { 36 | "id": self._id, 37 | "name": self.name, 38 | }, 39 | ) 40 | 41 | def to_dict(self): 42 | return { 43 | "id": self._id, 44 | "name": self.name, 45 | "status": self._status.name, 46 | "input": self.input, 47 | "output": self.output, 48 | 
"execute_args": self.execute_args, 49 | } 50 | 51 | def __repr__(self) -> str: 52 | return f"Node(name={self.name}, id={self._id}, status={self._status})" 53 | 54 | def set_logger(self, logger: logging.Logger) -> None: 55 | if not isinstance(logger, logging.Logger): 56 | raise ValueError(f"Logger {logger} is not a valid logger") 57 | self.logger = logger 58 | 59 | # getters 60 | def get_status(self) -> str: 61 | return self._status.name 62 | 63 | def get_id(self) -> str: 64 | return self._id 65 | 66 | def get_name(self) -> str: 67 | return self.name 68 | 69 | def get_input(self) -> dict[str:type]: 70 | return self.input 71 | 72 | def get_output(self) -> dict[str:type]: 73 | return self.output 74 | 75 | def get_execute_args(self) -> dict[str:type]: 76 | return self.execute_args 77 | 78 | def safe_get_execute_arg(self, key: str, default: type = None) -> type: 79 | return ( 80 | self.input.get(key, default) 81 | if key in self.input 82 | else self.execute_args["kwargs"].get(key, default) 83 | ) 84 | 85 | # setters 86 | def set_status(self, status: str) -> None: 87 | if not hasattr(Status, status): 88 | self.logger.error(f"Status {status} is not a valid Status") 89 | raise ValueError(f"Status {status} is not a valid Status") 90 | self._status = Status[status] 91 | self.logger.debug(f"Node {self._id} status set to {self._status.name}") 92 | 93 | def set_input(self, input: dict[str:type], wipe=True) -> None: 94 | if not isinstance(input, dict): 95 | self.logger.error(f"Input {input} is not a valid dict") 96 | raise ValueError(f"Input {input} is not a valid dict") 97 | if wipe: 98 | self.input = input 99 | self.logger.debug(f"Node {self._id} input set to {input}") 100 | else: 101 | self.input.update(input) 102 | self.logger.debug(f"Node {self._id} input updated with {input}") 103 | 104 | def set_output(self, output: dict[str:type], wipe=True) -> None: 105 | if not isinstance(output, dict): 106 | self.logger.error(f"Output {output} is not a valid dict") 107 | raise ValueError(f"Output {output} is not a valid dict") 108 | if wipe: 109 | self.output = output 110 | self.logger.debug(f"Node {self._id} output set to {output}") 111 | else: 112 | self.output.update(output) 113 | self.logger.debug(f"Node {self._id} output updated with {output}") 114 | 115 | def set_execute_args(self, *args: list[type], **kwargs: dict[str:type]) -> None: 116 | res = {"args": [], "kwargs": {}} 117 | if args: 118 | res["args"] = list(args) 119 | self.logger.debug(f"Node {self._id} args set to {res}") 120 | if kwargs: 121 | res["kwargs"] = kwargs 122 | self.logger.debug(f"Node {self._id} kwargs set to {res}") 123 | self.execute_args = res 124 | 125 | def set_input_s(self, input_s: dict[str:type]) -> None: 126 | if isinstance(input_s, dict) or input_s is dict: 127 | self._input_s = Schema(input_s) 128 | self.logger.debug(f"Node {self._id} input schema set to {input_s}") 129 | else: 130 | self.logger.error(f"Input Schema {input_s} is not a valid dict") 131 | raise ValueError(f"Input Schema {input_s} is not a valid dict") 132 | 133 | def set_output_s(self, output_s: dict[str:type]) -> None: 134 | if isinstance(output_s, dict) or output_s is dict: 135 | self._output_s = Schema(output_s) 136 | self.logger.debug(f"Node {self._id} output schema set to {output_s}") 137 | else: 138 | self.logger.error(f"Output Schema {output_s} is not a valid dict") 139 | raise ValueError(f"Output Schema {output_s} is not a valid dict") 140 | 141 | def set_pre_execute_hook( 142 | self, hook: Callable[[dict[str:type]], dict[str:type]] 143 | ) -> None: 
144 | if not callable(hook): 145 | self.logger.error(f"Pre execute hook {hook} is not a callable function") 146 | raise ValueError(f"Pre execute hook {hook} is not a callable function") 147 | self.pre_execute_hook = hook 148 | self.logger.debug(f"Node {self._id} pre execute hook set to {hook.__name__}") 149 | 150 | def set_post_execute_hook( 151 | self, hook: Callable[[dict[str:type]], dict[str:type]] 152 | ) -> None: 153 | if not callable(hook): 154 | self.logger.error(f"Post execute hook {hook} is not a callable function") 155 | raise ValueError(f"Post execute hook {hook} is not a callable function") 156 | self.post_execute_hook = hook 157 | self.logger.debug(f"Node {self._id} post execute hook set to {hook.__name__}") 158 | 159 | # validators 160 | def validate_input(self) -> bool: 161 | try: 162 | self.logger.debug(f"Node {self._id} validating input {self.input}") 163 | return self._input_s(self.input) 164 | except Invalid: 165 | self.logger.error(f"Node {self._id} input {self.input} is not valid") 166 | return False 167 | 168 | def validate_output(self) -> bool: 169 | try: 170 | self.logger.debug(f"Node {self._id} validating output {self.output}") 171 | return self._output_s(self.output) 172 | except Invalid: 173 | self.logger.error(f"Node {self._id} output {self.output} is not valid") 174 | return False 175 | 176 | def validate_execute_args(self) -> bool: 177 | try: 178 | self.logger.debug( 179 | f"Node {self._id} validating execute args {self.execute_args}" 180 | ) 181 | return self.__execute_args_s(self.execute_args) 182 | except Invalid: 183 | self.logger.error( 184 | f"Node {self._id} execute args {self.execute_args} is not valid" 185 | ) 186 | return False 187 | 188 | # hooks 189 | async def _pre_hook(self) -> None: 190 | self.set_status("EXECUTING") 191 | if iscoroutinefunction(self.pre_execute_hook): 192 | self.input = await self.pre_execute_hook(self.input) 193 | self.logger.debug(f"Node {self._id} executing async pre execute hook") 194 | else: 195 | self.input = self.pre_execute_hook(self.input) 196 | self.logger.debug(f"Node {self._id} executing pre execute hook") 197 | 198 | async def _post_hook(self) -> None: 199 | self.set_status("SUCCESS") 200 | if iscoroutinefunction(self.post_execute_hook): 201 | self.output = await self.post_execute_hook(self.output) 202 | self.logger.debug(f"Node {self._id} executing async post execute hook") 203 | else: 204 | self.output = self.post_execute_hook(self.output) 205 | self.logger.debug(f"Node {self._id} executing post execute hook") 206 | 207 | @abstractmethod 208 | async def execute(self) -> None: 209 | pass 210 | -------------------------------------------------------------------------------- /trellis_dag/tests/data.txt: -------------------------------------------------------------------------------- 1 | Cairo 2 | Chicago -------------------------------------------------------------------------------- /trellis_dag/tests/test_dag.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from trellis_dag import Node 4 | from trellis_dag import DAG 5 | 6 | 7 | def test_init(dag: DAG) -> None: 8 | assert dag.adj == {} 9 | assert dag.deps == {} 10 | assert dag.nodes == {} 11 | 12 | 13 | def test_add_node(dag: DAG, dummy_node: Node) -> None: 14 | dag.add_node(dummy_node) 15 | assert dag.nodes[dummy_node.get_id()] == dummy_node 16 | assert dag.adj[dummy_node.get_id()] == [] 17 | assert dag.deps[dummy_node.get_id()] == [] 18 | 19 | 20 | def test_add_node_twice(dag: DAG, dummy_node: Node) 
-> None: 21 | dag.add_node(dummy_node) 22 | with pytest.raises(ValueError, match="already exists"): 23 | dag.add_node(dummy_node) 24 | 25 | 26 | def test_add_bad_node(dag: DAG) -> None: 27 | with pytest.raises(ValueError, match="not a valid Node object"): 28 | dag.add_node(None) 29 | with pytest.raises(ValueError, match="not a valid Node object"): 30 | dag.add_node("bad node") 31 | 32 | 33 | def test_remove_node(dag: DAG, dummy_node: Node) -> None: 34 | dag.add_node(dummy_node) 35 | dag.remove_node(dummy_node) 36 | assert dummy_node.get_id() not in dag.nodes 37 | assert dummy_node.get_id() not in dag.deps 38 | assert dummy_node.get_id() not in dag.adj 39 | 40 | 41 | def test_remove_node_with_edge(dag, dummy_node, dummy_node_2) -> None: 42 | dag.add_node(dummy_node) 43 | dag.add_node(dummy_node_2) 44 | dag.add_edge(dummy_node, dummy_node_2) 45 | dag.remove_node(dummy_node_2) 46 | assert dummy_node.get_id() in dag.nodes 47 | assert dummy_node.get_id() in dag.deps 48 | assert dummy_node.get_id() in dag.adj 49 | assert dummy_node_2.get_id() not in dag.nodes 50 | assert dummy_node_2.get_id() not in dag.deps 51 | assert dummy_node_2.get_id() not in dag.adj 52 | 53 | 54 | def test_remove_nonexistent_node(dag: DAG, dummy_node: Node) -> None: 55 | with pytest.raises(KeyError, match="does not exist"): 56 | dag.remove_node(dummy_node) 57 | 58 | 59 | def test_remove_bad_node(dag: DAG) -> None: 60 | with pytest.raises(ValueError, match="not a valid Node object"): 61 | dag.remove_node(None) 62 | with pytest.raises(ValueError, match="not a valid Node object"): 63 | dag.remove_node("bad node") 64 | 65 | 66 | def test_get_node(dag: DAG, dummy_node: Node) -> None: 67 | dag.add_node(dummy_node) 68 | assert dag.get_node(dummy_node.get_id()) == dummy_node 69 | 70 | 71 | def test_get_nonexistent_node(dag: DAG, dummy_node: Node) -> None: 72 | with pytest.raises(ValueError, match="does not exist"): 73 | dag.get_node(dummy_node.get_id()) 74 | 75 | 76 | def test_get_bad_node(dag: DAG) -> None: 77 | with pytest.raises(ValueError, match="Please provide a valid node id"): 78 | dag.get_node(None) 79 | with pytest.raises(ValueError, match="does not exist"): 80 | dag.get_node("bad node") 81 | 82 | 83 | def test_add_edge(dag, dummy_node, dummy_node_2) -> None: 84 | dag.add_node(dummy_node) 85 | dag.add_node(dummy_node_2) 86 | dag.add_edge(dummy_node, dummy_node_2) 87 | assert dummy_node_2.get_id() == dag.adj[dummy_node.get_id()][-1].get("id", None) 88 | assert dummy_node.get_id() in dag.deps[dummy_node_2.get_id()] 89 | 90 | 91 | def test_add_edge_bad_node(dag, dummy_node, dummy_node_2) -> None: 92 | dag.add_node(dummy_node) 93 | dag.add_node(dummy_node_2) 94 | with pytest.raises(ValueError, match="not a valid Node object"): 95 | dag.add_edge(dummy_node, None) 96 | with pytest.raises(ValueError, match="not a valid Node object"): 97 | dag.add_edge(None, dummy_node) 98 | with pytest.raises(ValueError, match="is not a valid Node object"): 99 | dag.add_edge(dummy_node, "cool node") 100 | with pytest.raises(ValueError, match="is not a valid Node object"): 101 | dag.add_edge("cool node", dummy_node_2) 102 | with pytest.raises(ValueError, match="not a valid Node object"): 103 | dag.add_edge(None, None) 104 | 105 | 106 | def test_add_edge_nonexistent_node(dag, dummy_node, dummy_node_2) -> None: 107 | with pytest.raises(ValueError, match="does not exist"): 108 | dag.add_edge(dummy_node, dummy_node_2) 109 | dag.add_node(dummy_node) 110 | with pytest.raises(ValueError, match="does not exist"): 111 | dag.add_edge(dummy_node, 
dummy_node_2) 112 | 113 | 114 | def test_add_edge_self_loop(dag: DAG, dummy_node: Node) -> None: 115 | dag.add_node(dummy_node) 116 | with pytest.raises(ValueError, match="cycle detected"): 117 | dag.add_edge(dummy_node, dummy_node) 118 | 119 | 120 | # todo add more cases for cycle tests? 121 | def test_add_edge_cycle( 122 | dag: DAG, dummy_node: Node, dummy_node_2: Node, dummy_node_3: Node 123 | ) -> None: 124 | dag.add_node(dummy_node) 125 | dag.add_node(dummy_node_2) 126 | dag.add_node(dummy_node_3) 127 | dag.add_edge(dummy_node, dummy_node_2) 128 | dag.add_edge(dummy_node_2, dummy_node_3) 129 | with pytest.raises(ValueError, match="cycle detected"): 130 | dag.add_edge(dummy_node_3, dummy_node) 131 | 132 | 133 | def test_remove_edge(dag, dummy_node, dummy_node_2) -> None: 134 | dag.add_node(dummy_node) 135 | dag.add_node(dummy_node_2) 136 | dag.add_edge(dummy_node, dummy_node_2) 137 | dag.remove_edge(dummy_node, dummy_node_2) 138 | assert dummy_node_2.get_id() not in dag.adj[dummy_node.get_id()] 139 | assert dummy_node.get_id() not in dag.deps[dummy_node_2.get_id()] 140 | 141 | 142 | def test_remove_edge_bad_node(dag, dummy_node, dummy_node_2) -> None: 143 | dag.add_node(dummy_node) 144 | dag.add_node(dummy_node_2) 145 | with pytest.raises(ValueError, match="not a valid Node object"): 146 | dag.remove_edge(dummy_node, None) 147 | with pytest.raises(ValueError, match="not a valid Node object"): 148 | dag.remove_edge(None, dummy_node) 149 | with pytest.raises(ValueError, match="is not a valid Node object"): 150 | dag.remove_edge(dummy_node, "cool node") 151 | with pytest.raises(ValueError, match="is not a valid Node object"): 152 | dag.remove_edge("cool node", dummy_node_2) 153 | with pytest.raises(ValueError, match="not a valid Node object"): 154 | dag.remove_edge(None, None) 155 | 156 | 157 | def test_remove_edge_nonexistent_node(dag, dummy_node, dummy_node_2) -> None: 158 | with pytest.raises(ValueError, match="Cannot remove nonexistent edge"): 159 | dag.remove_edge(dummy_node, dummy_node_2) 160 | dag.add_node(dummy_node) 161 | with pytest.raises(ValueError, match="Cannot remove nonexistent edge"): 162 | dag.remove_edge(dummy_node, dummy_node_2) 163 | 164 | 165 | def test_remove_edge_self_loop(dag: DAG, dummy_node: Node) -> None: 166 | dag.add_node(dummy_node) 167 | with pytest.raises(ValueError, match="Cannot remove nonexistent edge"): 168 | dag.remove_edge(dummy_node, dummy_node) 169 | 170 | 171 | def test_remove_edge_cycle( 172 | dag: DAG, dummy_node: Node, dummy_node_2: Node, dummy_node_3: Node 173 | ) -> None: 174 | dag.add_node(dummy_node) 175 | dag.add_node(dummy_node_2) 176 | dag.add_node(dummy_node_3) 177 | dag.add_edge(dummy_node, dummy_node_2) 178 | dag.add_edge(dummy_node_2, dummy_node_3) 179 | with pytest.raises(ValueError, match="Cannot remove nonexistent edge"): 180 | dag.remove_edge(dummy_node_3, dummy_node) 181 | 182 | 183 | # Simple test with clear order 184 | # A -> B -> C -> D 185 | def test_topological_sort_simple( 186 | dag: DAG, 187 | dummy_node: Node, 188 | dummy_node_2: Node, 189 | dummy_node_3: Node, 190 | dummy_node_4: Node, 191 | ) -> None: 192 | dag.add_node(dummy_node) 193 | dag.add_node(dummy_node_2) 194 | dag.add_node(dummy_node_3) 195 | dag.add_node(dummy_node_4) 196 | dag.add_edge(dummy_node, dummy_node_2) 197 | dag.add_edge(dummy_node_2, dummy_node_3) 198 | dag.add_edge(dummy_node_3, dummy_node_4) 199 | assert dag._is_valid_topological_order(dag._topological_sort()) 200 | 201 | 202 | # Small DAG with multiple orders 203 | # A -> B -> D -> F 204 | # | 
^ 205 | # v | 206 | # C ------> E 207 | def test_topological_sort_small_multi_order( 208 | dag: DAG, 209 | dummy_node: Node, 210 | dummy_node_2: Node, 211 | dummy_node_3: Node, 212 | dummy_node_4: Node, 213 | dummy_node_5: Node, 214 | dummy_node_6: Node, 215 | ) -> None: 216 | dag.add_node(dummy_node) 217 | dag.add_node(dummy_node_2) 218 | dag.add_node(dummy_node_3) 219 | dag.add_node(dummy_node_4) 220 | dag.add_node(dummy_node_5) 221 | dag.add_node(dummy_node_6) 222 | dag.add_edge(dummy_node, dummy_node_2) 223 | dag.add_edge(dummy_node, dummy_node_3) 224 | dag.add_edge(dummy_node_2, dummy_node_4) 225 | dag.add_edge(dummy_node_3, dummy_node_5) 226 | dag.add_edge(dummy_node_4, dummy_node_6) 227 | dag.add_edge(dummy_node_5, dummy_node_4) 228 | assert dag._is_valid_topological_order(dag._topological_sort()) 229 | 230 | 231 | # Empty DAG 232 | # 233 | def test_topological_sort_empty(dag: DAG) -> None: 234 | assert dag._is_valid_topological_order(dag._topological_sort()) 235 | 236 | 237 | # DAG with one Node, no edges 238 | # A 239 | def test_topological_sort_one_node(dag: DAG, dummy_node: Node) -> None: 240 | dag.add_node(dummy_node) 241 | assert dag._is_valid_topological_order(dag._topological_sort()) 242 | 243 | 244 | # DAG with multiple Nodes, no edges 245 | # A B 246 | 247 | 248 | # C D 249 | def test_topological_sort_four_nodes( 250 | dag: DAG, 251 | dummy_node: Node, 252 | dummy_node_2: Node, 253 | dummy_node_3: Node, 254 | dummy_node_4: Node, 255 | ) -> None: 256 | dag.add_node(dummy_node) 257 | dag.add_node(dummy_node_2) 258 | dag.add_node(dummy_node_3) 259 | dag.add_node(dummy_node_4) 260 | assert dag._is_valid_topological_order(dag._topological_sort()) 261 | 262 | 263 | # DAG with Nodes with multiple incoming edges 264 | # A 265 | # | 266 | # v 267 | # B <- C 268 | # ^ 269 | # | 270 | # D 271 | def test_topological_sort_multiple_incoming_edges( 272 | dag: DAG, 273 | dummy_node: Node, 274 | dummy_node_2: Node, 275 | dummy_node_3: Node, 276 | dummy_node_4: Node, 277 | ) -> None: 278 | dag.add_node(dummy_node) 279 | dag.add_node(dummy_node_2) 280 | dag.add_node(dummy_node_3) 281 | dag.add_node(dummy_node_4) 282 | dag.add_edge(dummy_node, dummy_node_2) 283 | dag.add_edge(dummy_node_3, dummy_node_2) 284 | dag.add_edge(dummy_node_4, dummy_node_2) 285 | assert dag._is_valid_topological_order(dag._topological_sort()) 286 | 287 | 288 | # DAG with Nodes with multiple outgoing edges 289 | # A 290 | # ^ 291 | # | 292 | # B -> C 293 | # | 294 | # v 295 | # D 296 | def test_topological_sort_multiple_outgoing_edges( 297 | dag: DAG, 298 | dummy_node: Node, 299 | dummy_node_2: Node, 300 | dummy_node_3: Node, 301 | dummy_node_4: Node, 302 | ) -> None: 303 | dag.add_node(dummy_node) 304 | dag.add_node(dummy_node_2) 305 | dag.add_node(dummy_node_3) 306 | dag.add_node(dummy_node_4) 307 | dag.add_edge(dummy_node_2, dummy_node) 308 | dag.add_edge(dummy_node_2, dummy_node_3) 309 | dag.add_edge(dummy_node_2, dummy_node_4) 310 | assert dag._is_valid_topological_order(dag._topological_sort()) 311 | 312 | 313 | # DAG with multiple Nodes with multiple incoming and outgoing edges 314 | # A 315 | # / \ 316 | # v v 317 | # B C 318 | # | | \ 319 | # v v v 320 | # D E F 321 | # \ / | 322 | # v v 323 | # G -> H 324 | def test_topological_sort_multiple_incoming_outgoing( 325 | dag: DAG, 326 | dummy_node: Node, 327 | dummy_node_2: Node, 328 | dummy_node_3: Node, 329 | dummy_node_4: Node, 330 | dummy_node_5: Node, 331 | dummy_node_6: Node, 332 | dummy_node_7: Node, 333 | dummy_node_8: Node, 334 | ) -> None: 335 | 
dag.add_node(dummy_node) 336 | dag.add_node(dummy_node_2) 337 | dag.add_node(dummy_node_3) 338 | dag.add_node(dummy_node_4) 339 | dag.add_node(dummy_node_5) 340 | dag.add_node(dummy_node_6) 341 | dag.add_node(dummy_node_7) 342 | dag.add_node(dummy_node_8) 343 | dag.add_edge(dummy_node, dummy_node_2) 344 | dag.add_edge(dummy_node, dummy_node_3) 345 | dag.add_edge(dummy_node_2, dummy_node_4) 346 | dag.add_edge(dummy_node_3, dummy_node_5) 347 | dag.add_edge(dummy_node_3, dummy_node_6) 348 | dag.add_edge(dummy_node_4, dummy_node_7) 349 | dag.add_edge(dummy_node_5, dummy_node_7) 350 | dag.add_edge(dummy_node_6, dummy_node_8) 351 | dag.add_edge(dummy_node_7, dummy_node_8) 352 | assert dag._is_valid_topological_order(dag._topological_sort()) 353 | -------------------------------------------------------------------------------- /trellis_dag/tests/test_dag_execute.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from trellis_dag import DAG 4 | from trellis_dag import Node 5 | from trellis_dag import LLM 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_CatFactsAPITool(cat_facts_api_tool) -> None: 10 | assert cat_facts_api_tool.get_name() == "cat_facts_api_tool" 11 | assert cat_facts_api_tool.get_status() == "PENDING" 12 | assert cat_facts_api_tool.get_id() is not None 13 | assert cat_facts_api_tool.get_input() == {} 14 | assert cat_facts_api_tool.get_output() == {} 15 | 16 | cat_facts_api_tool.set_execute_args(limit=1, max_length=140) 17 | await cat_facts_api_tool._pre_hook() 18 | cat_facts_api_tool.validate_input() 19 | await cat_facts_api_tool.execute() 20 | await cat_facts_api_tool._post_hook() 21 | 22 | assert cat_facts_api_tool.get_status() == "SUCCESS" 23 | assert cat_facts_api_tool.validate_output() 24 | 25 | 26 | @pytest.mark.asyncio 27 | async def test_ReadFromFileTool(read_from_file_tool) -> None: 28 | assert read_from_file_tool.get_name() == "read_from_file_tool" 29 | assert read_from_file_tool.get_status() == "PENDING" 30 | assert read_from_file_tool.get_id() is not None 31 | assert read_from_file_tool.get_input() == {} 32 | assert read_from_file_tool.get_output() == {} 33 | 34 | read_from_file_tool.set_execute_args(file_path="trellis_dag/tests/data.txt") 35 | await read_from_file_tool._pre_hook() 36 | read_from_file_tool.validate_input() 37 | await read_from_file_tool.execute() 38 | await read_from_file_tool._post_hook() 39 | read_from_file_tool.validate_output() 40 | 41 | assert read_from_file_tool.get_status() == "SUCCESS" 42 | assert read_from_file_tool.validate_output() 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_UselessFactsAPITool(useless_facts_api_tool) -> None: 47 | assert useless_facts_api_tool.get_name() == "useless_facts_api_tool" 48 | assert useless_facts_api_tool.get_status() == "PENDING" 49 | assert useless_facts_api_tool.get_id() is not None 50 | assert useless_facts_api_tool.get_input() == {} 51 | assert useless_facts_api_tool.get_output() == {} 52 | 53 | await useless_facts_api_tool._pre_hook() 54 | useless_facts_api_tool.validate_input() 55 | await useless_facts_api_tool.execute() 56 | await useless_facts_api_tool._post_hook() 57 | useless_facts_api_tool.validate_output() 58 | 59 | assert useless_facts_api_tool.get_status() == "SUCCESS" 60 | assert useless_facts_api_tool.validate_output() 61 | 62 | 63 | @pytest.mark.asyncio 64 | async def test_CorporateBSGeneratorAPITool(corporate_bs_generator_api_tool) -> None: 65 | assert ( 66 | corporate_bs_generator_api_tool.get_name() == 
"corporate_bs_generator_api_tool" 67 | ) 68 | assert corporate_bs_generator_api_tool.get_status() == "PENDING" 69 | assert corporate_bs_generator_api_tool.get_id() is not None 70 | assert corporate_bs_generator_api_tool.get_input() == {} 71 | assert corporate_bs_generator_api_tool.get_output() == {} 72 | 73 | await corporate_bs_generator_api_tool._pre_hook() 74 | corporate_bs_generator_api_tool.validate_input() 75 | await corporate_bs_generator_api_tool.execute() 76 | await corporate_bs_generator_api_tool._post_hook() 77 | corporate_bs_generator_api_tool.validate_output() 78 | 79 | assert corporate_bs_generator_api_tool.get_status() == "SUCCESS" 80 | assert corporate_bs_generator_api_tool.validate_output() 81 | 82 | 83 | @pytest.mark.asyncio 84 | async def test_ImgFlipMemeNameAPITool(img_flip_meme_name_api_tool) -> None: 85 | assert img_flip_meme_name_api_tool.get_name() == "img_flip_meme_name_api_tool" 86 | assert img_flip_meme_name_api_tool.get_status() == "PENDING" 87 | assert img_flip_meme_name_api_tool.get_id() is not None 88 | assert img_flip_meme_name_api_tool.get_input() == {} 89 | assert img_flip_meme_name_api_tool.get_output() == {} 90 | 91 | await img_flip_meme_name_api_tool._pre_hook() 92 | img_flip_meme_name_api_tool.validate_input() 93 | await img_flip_meme_name_api_tool.execute() 94 | await img_flip_meme_name_api_tool._post_hook() 95 | img_flip_meme_name_api_tool.validate_output() 96 | 97 | assert img_flip_meme_name_api_tool.get_status() == "SUCCESS" 98 | assert img_flip_meme_name_api_tool.validate_output() 99 | 100 | 101 | @pytest.mark.asyncio 102 | async def test_execute_bad_init_source( 103 | dag: DAG, dummy_node: Node, dummy_node_2: Node 104 | ) -> None: 105 | dag.add_node(dummy_node_2) 106 | id = dummy_node.get_id() 107 | init = {id: {"key": "value"}} 108 | with pytest.raises(ValueError, match=f"Node with id {id} does not exist"): 109 | await dag.execute(init) 110 | with pytest.raises(ValueError, match="is not a valid dict"): 111 | await dag.execute({dummy_node_2.get_id(): dummy_node}) 112 | 113 | 114 | # Simple test with clear order 115 | # A -> B -> C -> D 116 | # A; Read location data from file 117 | # B; Generate false weather for each location using LLM 118 | # C; Generate travel itinerary using weather and location data using LLM 119 | # D; Write invite to vacation based on itinerary using LLM 120 | @pytest.mark.asyncio 121 | async def test_execute_simple( 122 | dag: DAG, 123 | read_from_file_tool: Node, 124 | ) -> None: 125 | messages_1 = [ 126 | { 127 | "role": "user", 128 | "content": "Decide which of the following {locations} is better for a vacation. Return the best one and ONLY one. 
Do not offer any platitudes as to why they are both being returned, pick only a SINGLE ONE and return it.", 129 | } 130 | ] 131 | messages_2 = [ 132 | { 133 | "role": "user", 134 | "content": "Generate an itinerary for {location}.", 135 | } 136 | ] 137 | messages_3 = [ 138 | { 139 | "role": "user", 140 | "content": "Write an invite to a vacation based on the {itinerary}.", 141 | } 142 | ] 143 | node = read_from_file_tool 144 | LLM1 = LLM("LLM1") 145 | LLM2 = LLM("LLM2") 146 | LLM3 = LLM("LLM3") 147 | 148 | LLM1.set_messages(messages_1) 149 | LLM2.set_messages(messages_2) 150 | LLM3.set_messages(messages_3) 151 | init_dict = {node.get_id(): {"kwargs": {"file_path": "trellis_dag/tests/data.txt"}}} 152 | 153 | dag.add_node(node) 154 | dag.add_node(LLM1) 155 | dag.add_node(LLM2) 156 | dag.add_node(LLM3) 157 | 158 | dag.add_edge(node, LLM1, fn=lambda x: {"locations": x["file_contents"]}) 159 | dag.add_edge( 160 | LLM1, LLM2, fn=lambda x: {"location": x["choices"][0]["message"]["content"]} 161 | ) 162 | dag.add_edge( 163 | LLM2, LLM3, fn=lambda x: {"itinerary": x["choices"][0]["message"]["content"]} 164 | ) 165 | 166 | res = await dag.execute(init_source_nodes=init_dict) 167 | 168 | 169 | # Small DAG with multiple orders 170 | # A -> B -> E -> F 171 | # | ^ 172 | # v | 173 | # C ------> D 174 | # 175 | # A: using https://github.com/sameerkumar18/corporate-bs-generator-api get a random corporate bs phrase 176 | # B: call LLM to translate the phrase into plain english 177 | # C: call LLM to make it longer 178 | # D: call LLM to translate it into plain english 179 | # E: ask LLM to choose which one of those would make more sense to a 5 year old 180 | # F: ask LLM to translate the explanation into spanish 181 | @pytest.mark.asyncio 182 | async def test_execute_sort_small_multi_order( 183 | dag: DAG, 184 | corporate_bs_generator_api_tool: Node, 185 | ) -> None: 186 | messages_1 = [ 187 | { 188 | "role": "user", 189 | "content": "Take this corporate BS {phrase} and rewrite it simply, in plain english, and short (less than 10 sentences).", 190 | } 191 | ] 192 | messages_2 = [ 193 | { 194 | "role": "user", 195 | "content": "Take this corporate BS {phrase} and make it longer by 3 sentences.", 196 | } 197 | ] 198 | messages_3 = [ 199 | { 200 | "role": "user", 201 | "content": "Take this corporate BS {phrase} and rewrite it simply, in plain english, and short (less than 10 sentences).", 202 | } 203 | ] 204 | messages_4 = [ 205 | { 206 | "role": "user", 207 | "content": "Between \n\nPhrase 1:{phrase_1} and \n\nPhrase 2{phrase_2}, which one would make more sense to a 5 year old? Please return the full phrase.", 208 | } 209 | ] 210 | messages_5 = [ 211 | { 212 | "role": "user", 213 | "content": "Take this {phrase} and translate it to Spanish to the best of your ability. 
If you don't know a word, feel free to make it up completely.", 214 | } 215 | ] 216 | 217 | node = corporate_bs_generator_api_tool 218 | LLM1 = LLM("LLM1") 219 | LLM2 = LLM("LLM2") 220 | LLM3 = LLM("LLM3") 221 | LLM4 = LLM("LLM4") 222 | LLM5 = LLM("LLM5") 223 | 224 | LLM1.set_messages(messages_1) 225 | LLM2.set_messages(messages_2) 226 | LLM3.set_messages(messages_3) 227 | LLM4.set_messages(messages_4) 228 | LLM5.set_messages(messages_5) 229 | 230 | init_dict = {} 231 | 232 | dag.add_node(node) 233 | dag.add_node(LLM1) 234 | dag.add_node(LLM2) 235 | dag.add_node(LLM3) 236 | dag.add_node(LLM4) 237 | dag.add_node(LLM5) 238 | 239 | def transform_1(x): 240 | return {"phrase": x["corporate_bs"]} 241 | 242 | def transform_2(x): 243 | return {"phrase_1": x["choices"][0]["message"]["content"]} 244 | 245 | def transform_3(x): 246 | return {"phrase": x["choices"][0]["message"]["content"]} 247 | 248 | def transform_4(x): 249 | return {"phrase_2": x["choices"][0]["message"]["content"]} 250 | 251 | dag.add_edge(node, LLM1, fn=transform_1) 252 | dag.add_edge(node, LLM2, fn=transform_1) 253 | dag.add_edge(LLM2, LLM3, fn=transform_3) 254 | dag.add_edge(LLM1, LLM4, fn=transform_2) 255 | dag.add_edge(LLM3, LLM4, fn=transform_4) 256 | dag.add_edge(LLM4, LLM5, fn=transform_3) 257 | 258 | res = await dag.execute(init_source_nodes=init_dict) 259 | 260 | 261 | # DAG with one Node, no edges 262 | # A 263 | # 264 | # A: ask LLM question about how it's doing 265 | # A: call cat facts api 266 | @pytest.mark.asyncio 267 | async def test_execute_one_node(dag: DAG, cat_facts_api_tool: Node) -> None: 268 | cat_facts_api_tool.set_execute_args(limit=1, max_length=140) 269 | dag.add_node(cat_facts_api_tool) 270 | res = await dag.execute({}) 271 | 272 | 273 | # DAG with multiple Nodes, no edges 274 | # A B 275 | 276 | 277 | # C D 278 | # A: ask LLM question about how it's doing 279 | # B: call cat facts api 280 | # C: using https://github.com/sameerkumar18/corporate-bs-generator-api get a random corporate bs phrase 281 | # D: using https://uselessfacts.jsph.pl/ call uselessfacts and get a useless random fact 282 | @pytest.mark.asyncio 283 | async def test_execute_four_nodes( 284 | dag: DAG, 285 | cat_facts_api_tool: Node, 286 | corporate_bs_generator_api_tool: Node, 287 | useless_facts_api_tool: Node, 288 | ) -> None: 289 | LLM1 = LLM("LLM") 290 | messages1 = [ 291 | { 292 | "role": "user", 293 | "content": "How are you doing today?", 294 | } 295 | ] 296 | LLM1.set_messages(messages1) 297 | 298 | init_dict = { 299 | cat_facts_api_tool.get_id(): {"kwargs": {"limit": 1, "max_length": 140}}, 300 | LLM1.get_id(): { 301 | "kwargs": { 302 | "temperature": 0.5, 303 | "max_tokens": 25, 304 | } 305 | }, 306 | } 307 | 308 | dag.add_node(cat_facts_api_tool) 309 | dag.add_node(corporate_bs_generator_api_tool) 310 | dag.add_node(useless_facts_api_tool) 311 | dag.add_node(LLM1) 312 | 313 | await dag.execute(init_source_nodes=init_dict) 314 | 315 | 316 | # DAG with Nodes with multiple incoming edges 317 | # A 318 | # | 319 | # v 320 | # D <- B 321 | # ^ 322 | # | 323 | # C 324 | # 325 | # A: using https://github.com/sameerkumar18/corporate-bs-generator-api get a random corporate bs phrase 326 | # B: using https://uselessfacts.jsph.pl/ call uselessfacts and get a useless random fact 327 | # C: using https://imgflip.com/api call imgflip/get_memes, do result["data"]["memes"][0]["name"] 328 | # D: generate what all of these statements have in common using LLM 329 | @pytest.mark.asyncio 330 | async def test_execute_multiple_incoming_edges( 331 
| dag: DAG, 332 | corporate_bs_generator_api_tool: Node, 333 | useless_facts_api_tool: Node, 334 | img_flip_meme_name_api_tool: Node, 335 | ) -> None: 336 | LLM1 = LLM("LLM") 337 | messages1 = [ 338 | { 339 | "role": "user", 340 | "content": "Statement 1\n{statement_1}\n\nStatement 2\n{statement_2}\n\nStatement 3\n{statement_3}\n\nWhat do all of these statements have in common? Even if it's something as simple as each of them containing a certain letter, please return it.", 341 | } 342 | ] 343 | LLM1.set_messages(messages1) 344 | 345 | init_dict = { 346 | LLM1.get_id(): { 347 | "kwargs": { 348 | "temperature": 0.5, 349 | } 350 | }, 351 | } 352 | 353 | dag.add_node(img_flip_meme_name_api_tool) 354 | dag.add_node(corporate_bs_generator_api_tool) 355 | dag.add_node(useless_facts_api_tool) 356 | dag.add_node(LLM1) 357 | 358 | dag.add_edge( 359 | img_flip_meme_name_api_tool, LLM1, fn=lambda x: {"statement_1": x["meme_name"]} 360 | ) 361 | dag.add_edge( 362 | corporate_bs_generator_api_tool, 363 | LLM1, 364 | fn=lambda x: {"statement_2": x["corporate_bs"]}, 365 | ) 366 | dag.add_edge( 367 | useless_facts_api_tool, 368 | LLM1, 369 | fn=lambda x: {"statement_3": x["useless_information"]}, 370 | ) 371 | 372 | res = await dag.execute(init_source_nodes=init_dict) 373 | 374 | 375 | # DAG with Nodes with multiple outgoing edges 376 | # B 377 | # ^ 378 | # | 379 | # A -> C 380 | # | 381 | # v 382 | # D 383 | # 384 | # A: using https://uselessfacts.jsph.pl/ call uselessfacts and get a useless random fact 385 | # B: call LLM to say why the fact isn't useless 386 | # C: call LLM to say why the fact is useless 387 | # D: call LLM to give more context on the fact 388 | @pytest.mark.asyncio 389 | async def test_execute_multiple_outgoing_edges( 390 | dag: DAG, 391 | useless_facts_api_tool: Node, 392 | ) -> None: 393 | LLM1 = LLM("LLM") 394 | messages1 = [ 395 | { 396 | "role": "user", 397 | "content": "Why is this fact not useless?\n{useless_information}", 398 | } 399 | ] 400 | LLM1.set_messages(messages1) 401 | 402 | LLM2 = LLM("LLM") 403 | messages2 = [ 404 | { 405 | "role": "user", 406 | "content": "Why is this fact useless?\n{useless_information}", 407 | } 408 | ] 409 | LLM2.set_messages(messages2) 410 | 411 | LLM3 = LLM("LLM") 412 | messages3 = [ 413 | { 414 | "role": "user", 415 | "content": "Can you give more context on this fact?\n{useless_information}", 416 | } 417 | ] 418 | LLM3.set_messages(messages3) 419 | 420 | init_dict = { 421 | LLM1.get_id(): { 422 | "kwargs": { 423 | "temperature": 0.5, 424 | } 425 | }, 426 | LLM2.get_id(): { 427 | "kwargs": { 428 | "temperature": 0.5, 429 | } 430 | }, 431 | LLM3.get_id(): { 432 | "kwargs": { 433 | "temperature": 0.5, 434 | } 435 | }, 436 | } 437 | 438 | dag.add_node(useless_facts_api_tool) 439 | dag.add_node(LLM1) 440 | dag.add_node(LLM2) 441 | dag.add_node(LLM3) 442 | 443 | dag.add_edge( 444 | useless_facts_api_tool, 445 | LLM1, 446 | fn=lambda x: {"useless_information": x["useless_information"]}, 447 | ) 448 | dag.add_edge( 449 | useless_facts_api_tool, 450 | LLM2, 451 | fn=lambda x: {"useless_information": x["useless_information"]}, 452 | ) 453 | dag.add_edge( 454 | useless_facts_api_tool, 455 | LLM3, 456 | fn=lambda x: {"useless_information": x["useless_information"]}, 457 | ) 458 | 459 | res = await dag.execute(init_source_nodes=init_dict) 460 | 461 | 462 | # DAG with multiple Nodes with multiple incoming and outgoing edges 463 | # A 464 | # / \ 465 | # v v 466 | # B C 467 | # | | \ 468 | # v v v 469 | # D E F 470 | # \ / | 471 | # v v 472 | # G -> H 473 
| # 474 | # A: get a meme type using https://imgflip.com/api call imgflip/get_memes, do result["data"]["memes"][0]["name"] 475 | # B: ask LLM to explain the origin of the meme in one sentence 476 | # C: ask LLM to create a new meme tagline with it 477 | # D: ask LLM to make the origin explanation something a dad would appreciate 478 | # E: ask LLM to make the meme tagline something that a dad would like 479 | # F: ask LLM to make the meme tagline funnier 480 | # G: ask LLM to make an explanation of why the new meme tagline is funny given the context of the origin 481 | # H: ask LLM to judge which one it prefers and print it 482 | @pytest.mark.asyncio 483 | async def test_execute_multiple_incoming_outgoing( 484 | dag: DAG, 485 | dummy_node: Node, 486 | dummy_node_2: Node, 487 | dummy_node_3: Node, 488 | dummy_node_4: Node, 489 | dummy_node_5: Node, 490 | dummy_node_6: Node, 491 | dummy_node_7: Node, 492 | dummy_node_8: Node, 493 | ) -> None: 494 | pass 495 | -------------------------------------------------------------------------------- /trellis_dag/tests/test_llm.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import openai 3 | 4 | from trellis_dag.utils.constants import OPENAI_RESPONSE_SCHEMA, EXCEPTIONS_TO_TEST 5 | from trellis_dag import LLM 6 | 7 | 8 | @pytest.fixture 9 | def car_messages() -> list[dict]: 10 | messages = [] 11 | messages.append({"role": "system", "content": "You are a helpful auto mechanic."}) 12 | messages.append({"role": "user", "content": "Hello my {car} is broken."}) 13 | return messages 14 | 15 | 16 | @pytest.fixture 17 | def llm() -> LLM: 18 | return LLM( 19 | name="test_llm", 20 | model="gpt-3.5-turbo", 21 | max_retries=2, 22 | retry_delay=3, 23 | rate_limit_delay=4, 24 | ) 25 | 26 | 27 | def test_init(llm) -> None: 28 | llm.set_max_retries(4) 29 | llm.set_retry_delay(6) 30 | llm.set_rate_limit_delay(90) 31 | llm.set_model("gpt-4") 32 | assert llm.get_name() == "test_llm" 33 | assert llm.get_status() == "PENDING" 34 | assert llm.get_input() == {} 35 | assert llm.get_output() == {} 36 | assert llm.get_id() is not None 37 | assert llm.get_model() == "gpt-4" 38 | assert llm.get_max_retries() == 4 39 | assert llm.get_retry_delay() == 6 40 | assert llm.get_rate_limit_delay() == 90 41 | 42 | 43 | def test_set_model(llm) -> None: 44 | llm.set_model("gpt-4") 45 | assert llm.get_model() == "gpt-4" 46 | 47 | 48 | def test_set_model_failure(llm) -> None: 49 | with pytest.raises(ValueError, match="is not a valid OpenAI model"): 50 | llm.set_model("gpt-5") 51 | 52 | 53 | def test_set_max_retries(llm) -> None: 54 | llm.set_max_retries(4) 55 | assert llm.get_max_retries() == 4 56 | 57 | 58 | def test_set_max_retries_failure(llm) -> None: 59 | with pytest.raises(ValueError, match="is not a valid int"): 60 | llm.set_max_retries("4") 61 | 62 | with pytest.raises(ValueError, match="is not a valid int"): 63 | llm.set_max_retries(-1) 64 | 65 | 66 | def test_set_retry_delay(llm) -> None: 67 | llm.set_retry_delay(4) 68 | assert llm.get_retry_delay() == 4 69 | 70 | 71 | def test_set_retry_delay_failure(llm) -> None: 72 | with pytest.raises(ValueError, match="is not a valid int"): 73 | llm.set_retry_delay("4") 74 | 75 | with pytest.raises(ValueError, match="is not a valid int"): 76 | llm.set_retry_delay(-1) 77 | 78 | 79 | def test_set_rate_limit_delay(llm) -> None: 80 | llm.set_rate_limit_delay(4) 81 | assert llm.get_rate_limit_delay() == 4 82 | 83 | 84 | def test_set_rate_limit_delay_failure(llm) -> None: 85 | with 
pytest.raises(ValueError, match="is not a valid int"): 86 | llm.set_rate_limit_delay("4") 87 | 88 | with pytest.raises(ValueError, match="is not a valid int"): 89 | llm.set_rate_limit_delay(-1) 90 | 91 | 92 | def test_set_messages(llm, car_messages) -> None: 93 | llm.set_messages(car_messages) 94 | assert llm.get_messages() == car_messages 95 | 96 | 97 | def test_set_messages_failure(llm) -> None: 98 | with pytest.raises(ValueError, match="is not a valid list"): 99 | llm.set_messages("car_messages") 100 | 101 | with pytest.raises(ValueError, match="is not a valid OpenAI message"): 102 | llm.set_messages([{"role": "system"}]) 103 | 104 | with pytest.raises(ValueError, match="is not a valid OpenAI message"): 105 | llm.set_messages([{"role": "system", "content": 1}]) 106 | 107 | 108 | @pytest.mark.asyncio 109 | async def test_paid_execute(llm, car_messages) -> None: 110 | llm.set_input({"car": "Tesla"}) 111 | llm.set_messages(car_messages) 112 | llm.set_output_s(OPENAI_RESPONSE_SCHEMA) 113 | await llm.execute() 114 | assert llm.validate_output() 115 | 116 | 117 | @pytest.mark.asyncio 118 | async def test_free_execute(llm, car_messages) -> None: 119 | llm.set_input({}) 120 | llm.set_messages(car_messages) 121 | llm.set_output_s(OPENAI_RESPONSE_SCHEMA) 122 | await llm.execute() 123 | assert llm.validate_output() 124 | 125 | 126 | @pytest.mark.asyncio 127 | @pytest.mark.parametrize("mocked_exception", EXCEPTIONS_TO_TEST) 128 | async def test_openai_errors(llm, car_messages, mocker, mocked_exception): 129 | mocker.patch.object(openai.ChatCompletion, "create", side_effect=mocked_exception) 130 | 131 | llm.set_input({"car": "Tesla"}) 132 | llm.set_messages(car_messages) 133 | llm.set_output_s(OPENAI_RESPONSE_SCHEMA) 134 | 135 | with pytest.raises(type(mocked_exception)): 136 | await llm.execute() 137 | -------------------------------------------------------------------------------- /trellis_dag/tests/test_node.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from voluptuous import Invalid 3 | 4 | from trellis_dag import Node 5 | 6 | 7 | def test_init(dummy_node: Node) -> None: 8 | assert dummy_node.get_name() == "test" 9 | assert dummy_node.get_status() == "PENDING" 10 | assert dummy_node.get_output() == {} 11 | assert dummy_node.get_input() == {} 12 | assert dummy_node.get_id() is not None 13 | 14 | 15 | def test_validate_input(dummy_node: Node) -> None: 16 | dummy_node.set_input_s({"a": int, "b": str}) 17 | dummy_node.set_input({"a": 1, "b": "2"}) 18 | assert dummy_node.validate_input() 19 | 20 | 21 | def test_validate_input_failure(dummy_node: Node) -> None: 22 | with pytest.raises(ValueError, match="is not a valid dict"): 23 | dummy_node.set_input(2) 24 | assert not dummy_node.validate_input() 25 | 26 | dummy_node.set_input_s({"a": int}) 27 | dummy_node.set_input({"a": "1"}) 28 | assert not dummy_node.validate_input() 29 | 30 | 31 | def test_validate_output(dummy_node: Node) -> None: 32 | dummy_node.set_output_s({"a": int, "b": str}) 33 | dummy_node.set_output({"a": 1, "b": "2"}) 34 | assert dummy_node.validate_output() 35 | 36 | 37 | def test_validate_output_failure(dummy_node: Node) -> None: 38 | with pytest.raises(ValueError, match="is not a valid dict"): 39 | dummy_node.set_output(3) 40 | assert not dummy_node.validate_output() 41 | 42 | dummy_node.set_output_s({"a": int}) 43 | dummy_node.set_output({"a": "1"}) 44 | assert not dummy_node.validate_output() 45 | 46 | 47 | def test_validate_execute_args(dummy_node: Node) -> None: 48 | 
dummy_node.set_execute_args(1, 2, 3, a=1, b=2, c=3) 49 | assert dummy_node.validate_execute_args() 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_pre_hook(dummy_node: Node) -> None: 54 | dummy_node.set_pre_execute_hook(lambda x: {"result": x}) 55 | dummy_node.set_input_s({"result": {"a": int, "b": int}}) 56 | dummy_node.set_input({"a": 1, "b": 2}) 57 | await dummy_node._pre_hook() 58 | assert dummy_node.get_input() == {"result": {"a": 1, "b": 2}} 59 | assert dummy_node.validate_input() 60 | assert dummy_node.get_status() == "EXECUTING" 61 | 62 | 63 | def test_pre_hook_failure(dummy_node: Node) -> None: 64 | with pytest.raises(ValueError, match="is not a callable function"): 65 | dummy_node.set_pre_execute_hook(4) 66 | 67 | 68 | @pytest.mark.asyncio 69 | async def test_post_hook(dummy_node: Node) -> None: 70 | dummy_node.set_post_execute_hook(lambda x: {"result": x}) 71 | dummy_node.set_output_s({"result": {"a": int, "b": int}}) 72 | dummy_node.set_output({"a": 1, "b": 2}) 73 | await dummy_node._post_hook() 74 | assert dummy_node.get_output() == {"result": {"a": 1, "b": 2}} 75 | assert dummy_node.validate_output() 76 | assert dummy_node.get_status() == "SUCCESS" 77 | 78 | 79 | def test_post_hook_failure(dummy_node: Node) -> None: 80 | with pytest.raises(ValueError, match="is not a callable function"): 81 | dummy_node.set_post_execute_hook(4) 82 | -------------------------------------------------------------------------------- /trellis_dag/utils/analyzer.py: -------------------------------------------------------------------------------- 1 | from posthog import Posthog 2 | from dotenv import load_dotenv 3 | import time 4 | import os 5 | 6 | load_dotenv() 7 | p = Posthog( 8 | project_api_key="phc_qLInS8phhqhE7IrHTMxfbm5yBiTSLz30mOQmsrgLaCD", 9 | host="https://app.posthog.com", 10 | ) 11 | 12 | def analyzer(name: str, _input: dict): 13 | if os.getenv("DISABLE_TELEMETRY") == "1":  # getenv returns strings, so compare against "1" rather than the int 1 14 | return 15 | else: 16 | p.capture(str(int(time.time())), name, _input) 17 | -------------------------------------------------------------------------------- /trellis_dag/utils/constants.py: -------------------------------------------------------------------------------- 1 | import openai 2 | from os import getenv 3 | 4 | max_retries = getenv("DEFAULT_MAX_RETRIES") 5 | retry_delay = getenv("DEFAULT_RETRY_DELAY") 6 | rate_limit_delay = getenv("DEFAULT_RATE_LIMIT_DELAY") 7 | 8 | DEFAULT_MAX_RETRIES = int(max_retries) if max_retries else 3 9 | DEFAULT_RETRY_DELAY: int = int(retry_delay) if retry_delay else 5 10 | DEFAULT_RATE_LIMIT_DELAY: int = int(rate_limit_delay) if rate_limit_delay else 60 11 | 12 | 13 | OPENAI_MODELS = [ 14 | "gpt-4", 15 | "gpt-4-0613", 16 | "gpt-4-32k", 17 | "gpt-4-32k-0613", 18 | "gpt-3.5-turbo", 19 | "gpt-3.5-turbo-0613", 20 | "gpt-3.5-turbo-16k", 21 | "gpt-3.5-turbo-16k-0613", 22 | ] 23 | OPENAI_ARGS = { 24 | "functions": [], 25 | "function_call": "", 26 | "temperature": 1, 27 | "top_p": 1, 28 | "n": 1, 29 | "stream": False, 30 | "stop": None, 31 | "max_tokens": float("inf"), 32 | "presence_penalty": 0, 33 | "frequency_penalty": 0, 34 | "logit_bias": {}, 35 | "user": "", 36 | } 37 | OPENAI_RESPONSE_SCHEMA = { 38 | "id": str, 39 | "object": str, 40 | "created": int, 41 | "model": str, 42 | "choices": [ 43 | { 44 | "index": int, 45 | "message": { 46 | "role": str, 47 | "content": str, 48 | }, 49 | "finish_reason": str, 50 | } 51 | ], 52 | "usage": { 53 | "prompt_tokens": int, 54 | "completion_tokens": int, 55 | "total_tokens": int, 56 | }, 57 | } 58 | EXCEPTIONS_TO_TEST = [ 59 |
openai.error.InvalidRequestError("Mocked invalid request", "mocked_param"), 60 | openai.error.AuthenticationError("Mocked authentication error", "mocked_param"), 61 | openai.error.APIConnectionError("Mocked API connection error", "mocked_param"), 62 | openai.error.APIError("Mocked API error", "mocked_param"), 63 | openai.error.RateLimitError("Mocked rate limit error", "mocked_param"), 64 | openai.error.Timeout("Mocked timeout", "mocked_param"), 65 | openai.error.ServiceUnavailableError( 66 | "Mocked service unavailable error", "mocked_param" 67 | ), 68 | ] 69 | -------------------------------------------------------------------------------- /trellis_dag/utils/generate.py: -------------------------------------------------------------------------------- 1 | # write util function generate_data which takes in a voluptuous schema (for a dict)\ 2 | # and returns a dict which matches the schema 3 | 4 | # this is how the input schema is set, from `Node`: 5 | # def set_input(self, input: dict[str:Any], wipe=True) -> None: 6 | # if not isinstance(input, dict): 7 | # raise ValueError(f"Input {input} is not a valid dict") 8 | # self.input = {**input, **self.input} if not wipe else input 9 | 10 | # this is how the input schema is validated, from `Node`: 11 | # def validate_input(self) -> bool: 12 | # try: 13 | # return self._input_s(self.input) 14 | # except Invalid as e: 15 | # raise e -------------------------------------------------------------------------------- /trellis_dag/utils/status.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class Status(Enum): 4 | PENDING = 0 5 | EXECUTING = 1 6 | SUCCESS = 2 7 | FAILED = 3 --------------------------------------------------------------------------------
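trellis_dag/utils/generate.py above is still a comment stub: it describes a generate_data helper that should take a voluptuous-style schema dict and return a dict matching it, but no implementation exists yet. A minimal sketch of one possible implementation, assuming type-valued schemas like those used in the tests (e.g. {"a": int, "b": str}), is shown below; the _SAMPLES table and the example schema are illustrative assumptions, not code from the repository.

from voluptuous import Schema

# Placeholder values for the primitive types used in Node schemas (illustrative choice).
_SAMPLES = {int: 0, float: 0.0, str: "", bool: False}


def generate_data(schema: dict) -> dict:
    """Return a dict of placeholder values that satisfies a type-based schema dict."""
    out = {}
    for key, expected in schema.items():
        if isinstance(expected, dict):
            # Nested schema: build the nested dict recursively.
            out[key] = generate_data(expected)
        elif isinstance(expected, type):
            # Type marker such as int or str: substitute a sample value of that type.
            out[key] = _SAMPLES.get(expected, expected())
        else:
            # Literal value in the schema: reuse it directly.
            out[key] = expected
    return out


# Usage: the generated dict validates against the same schema.
example = {"a": int, "b": str, "nested": {"c": float}}
assert Schema(example)(generate_data(example)) == {"a": 0, "b": "", "nested": {"c": 0.0}}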