├── pytest.ini ├── tests ├── __init__.py ├── utils │ ├── __init__.py │ └── utils.py ├── test_embedding_endpoint.py ├── test_generate_endpoint.py └── test_model_management_endpoint.py ├── ollama_python ├── __init__.py ├── models │ ├── __init__.py │ ├── embedding.py │ ├── model_management.py │ └── generate.py └── endpoints │ ├── __init__.py │ ├── embedding.py │ ├── base.py │ ├── generate.py │ └── model_management.py ├── .coveragerc ├── .pre-commit-config.yaml ├── pyproject.toml ├── setup.py ├── .github └── workflows │ ├── python-test.yml │ └── python-publish.yml ├── .gitignore ├── README.md ├── LICENSE └── poetry.lock /pytest.ini: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ollama_python/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ollama_python/models/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = 3 | ollama_python 4 | omit = 5 | */tests/* 6 | */site-packages/* 7 | 8 | [report] 9 | fail_under = 100 10 | -------------------------------------------------------------------------------- /ollama_python/models/embedding.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field 2 | 3 | 4 | class Embedding(BaseModel): 5 | """A model embedding""" 6 | 7 | embedding: list[float] = Field(..., description="The embedding of the text") 8 | -------------------------------------------------------------------------------- /ollama_python/endpoints/__init__.py: -------------------------------------------------------------------------------- 1 | from ollama_python.endpoints.generate import GenerateAPI # noqa 2 | from ollama_python.endpoints.model_management import ModelManagementAPI # noqa 3 | from ollama_python.endpoints.embedding import EmbeddingAPI # noqa 4 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.5.0 4 | hooks: 5 | - id: check-yaml 6 | - id: end-of-file-fixer 7 | - id: trailing-whitespace 8 | - repo: local 9 | hooks: 10 | - id: black 11 | name: Black 12 | entry: poetry run black 13 | language: system 14 | types: [python] 15 | - id: ruff 16 | name: ruff 17 | entry: poetry run ruff --fix 18 | types: [python] 19 | language: system 20 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "ollama-python" 3 | version = "0.1.2" 4 | description = "" 5 | authors = ["Richard "] 6 | readme = 
"README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.9" 10 | pydantic = "^2.5.3" 11 | requests = "^2.31.0" 12 | httpx = "^0.26.0" 13 | responses = "^0.24.1" 14 | 15 | 16 | [tool.poetry.group.dev.dependencies] 17 | ruff = "^0.1.13" 18 | pytest = "^7.4.4" 19 | pytest-cov = "^4.1.0" 20 | pre-commit = "^3.6.0" 21 | black = "^23.12.1" 22 | 23 | [build-system] 24 | requires = ["poetry-core"] 25 | build-backend = "poetry.core.masonry.api" 26 | -------------------------------------------------------------------------------- /tests/utils/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import responses 3 | from typing import Union, Optional 4 | 5 | 6 | def mock_api_response( 7 | endpoint: str, 8 | response_body: Optional[Union[dict, list[dict]]] = None, 9 | request_type: str = responses.POST, 10 | status: int = 200, 11 | stream: bool = False, 12 | ): 13 | """Mock the API responses for the given endpoint""" 14 | endpoint = f"http://test-servers/api{endpoint}" 15 | if stream: 16 | body = "\n".join(json.dumps(item) for item in response_body).encode() 17 | responses.add(request_type, endpoint, body=body, status=status, stream=True) 18 | else: 19 | responses.add(request_type, endpoint, json=response_body, status=status) 20 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name="ollama_python", 5 | version="0.1.1", 6 | author="Richard Ogunyale", 7 | author_email="kogunyale01@gmail.com", 8 | description="Python Wrapper around Ollama API Endpoints", 9 | long_description=open("README.md").read(), 10 | long_description_content_type="text/markdown", 11 | url="https://github.com/kennyrich/ollama-python", 12 | packages=find_packages(), 13 | install_requires=[ 14 | "httpx >=0.26.0", 15 | "pydantic >=2.5.3", 16 | "requests>=2.31.0", 17 | "responses >=0.24.1", 18 | ], 19 | classifiers=[ 20 | "Programming Language :: Python :: 3", 21 | "License :: OSI Approved :: MIT License", 22 | "Operating System :: OS Independent", 23 | ], 24 | python_requires=">=3.9", 25 | ) 26 | -------------------------------------------------------------------------------- /.github/workflows/python-test.yml: -------------------------------------------------------------------------------- 1 | name: Python Tests 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: ["3.9", "3.10", "3.11", "3.12"] 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | - name: Install Poetry 23 | run: pip install poetry 24 | - name: Install dependencies 25 | run: poetry install 26 | - name: Get changed files 27 | id: changed-files 28 | uses: tj-actions/changed-files@v20 29 | - name: Run Linter 30 | if: ${{ steps.changed-files.outputs.all_changed_files }} 31 | run: poetry run pre-commit run --color=always --files ${{ steps.changed-files.outputs.all_changed_files }} 32 | - name: Run tests 33 | run: poetry run pytest --cov=ollama_python --cov-report=term-missing --cov-fail-under=100 34 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: 
-------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | strategy: 23 | matrix: 24 | python-version: ["3.9"] 25 | 26 | steps: 27 | - uses: actions/checkout@v3 28 | - name: Set up Python 29 | uses: actions/setup-python@v3 30 | with: 31 | python-version: ${{ matrix.python-version }} 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip 35 | pip install build 36 | - name: Build package 37 | run: python -m build 38 | - name: Publish package 39 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 40 | with: 41 | user: __token__ 42 | password: ${{ secrets.PYPI_API_TOKEN }} 43 | -------------------------------------------------------------------------------- /ollama_python/endpoints/embedding.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from ollama_python.endpoints.base import BaseAPI 3 | from ollama_python.models.generate import Options 4 | from ollama_python.models.embedding import Embedding 5 | 6 | 7 | class EmbeddingAPI(BaseAPI): 8 | def __init__(self, model: str, base_url: str = "http://localhost:11434/api"): 9 | """ 10 | Initialize the embedding API 11 | :param base_url: The base URL of the API 12 | """ 13 | super().__init__(base_url=base_url) 14 | self.model = model 15 | 16 | def get_embedding(self, prompt: str, options: Optional[dict] = None) -> Embedding: 17 | """ 18 | Get the embedding for the given prompt 19 | :param prompt: The prompt to get the embedding for 20 | :param options: Additional model parameters listed in the documentation for the Modelfile such as temperature 21 | :return: The embedding 22 | """ 23 | parameters = {"prompt": prompt, "model": self.model} 24 | 25 | if options: 26 | validated_options = Options( 27 | **options 28 | ) # Basically to validate the types by Pydantic 29 | options_dict = validated_options.model_dump(exclude_none=True) 30 | parameters["options"] = options_dict 31 | 32 | return self._post( 33 | parameters=parameters, endpoint="embedding", return_type=Embedding 34 | ) 35 | -------------------------------------------------------------------------------- /tests/test_embedding_endpoint.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import responses 3 | from requests.exceptions import HTTPError 4 | from ollama_python.endpoints.embedding import EmbeddingAPI 5 | from ollama_python.models.embedding import Embedding 6 | from tests.utils.utils import mock_api_response 7 | 8 | 9 | @pytest.fixture 10 | def embedding_api() -> EmbeddingAPI: 11 | return EmbeddingAPI( 12 | model="test-embedding-model", base_url="http://test-servers/api" 13 | ) 14 | 15 | 16 | @responses.activate 17 | def test_get_embedding_success(embedding_api): 18 | result = { 19 | "embedding": [1, 2, 3, 5, 6, 7], 20 | } 21 | 
mock_api_response("/embedding", result) 22 | embedding = embedding_api.get_embedding(prompt="test prompt") 23 | 24 | assert isinstance(embedding, Embedding) 25 | assert embedding.embedding == result["embedding"] 26 | 27 | 28 | @responses.activate 29 | def test_get_embedding_success_with_options(embedding_api): 30 | result = { 31 | "embedding": [1, 2, 3, 5, 6, 7], 32 | } 33 | mock_api_response("/embedding", result) 34 | embedding = embedding_api.get_embedding( 35 | prompt="test prompt", options={"temperature": 0.5} 36 | ) 37 | 38 | assert isinstance(embedding, Embedding) 39 | assert embedding.embedding == result["embedding"] 40 | 41 | 42 | def test_get_embedding_failure_with_invalid_options(embedding_api): 43 | with pytest.raises(ValueError): 44 | embedding_api.get_embedding( 45 | prompt="test prompt", options={"invalid": 0.5, "temperature": 0.5} 46 | ) 47 | 48 | 49 | @responses.activate 50 | def test_get_embedding_failure(embedding_api): 51 | with pytest.raises(HTTPError): 52 | mock_api_response("/embedding", status=400) 53 | embedding_api.get_embedding(prompt="test prompt") 54 | -------------------------------------------------------------------------------- /ollama_python/models/model_management.py: -------------------------------------------------------------------------------- 1 | """Models for OLLAMA model management endpoints""" 2 | from pydantic import BaseModel, Field 3 | from typing import Optional, Union 4 | 5 | 6 | class ResponsePayload(BaseModel): 7 | """A model creation request""" 8 | 9 | status: str = Field(..., description="The status of the request") 10 | digest: Optional[str] = Field(None, description="The digest of the model") 11 | total: Optional[int] = Field(None, description="The total number of models") 12 | completed: Optional[int] = Field(None, description="The number of completed models") 13 | 14 | 15 | class ModelDetails(BaseModel): 16 | """Details about a model""" 17 | 18 | format: str = Field(..., description="The format of the model") 19 | family: str = Field(..., description="The family of the model") 20 | families: Optional[Union[str, list[str]]] = Field( 21 | None, description="The families of the model" 22 | ) 23 | parameter_size: str = Field(..., description="The parameter size of the model") 24 | quantization_level: str = Field( 25 | ..., description="The quantization level of the model" 26 | ) 27 | 28 | 29 | class ModelTag(BaseModel): 30 | """Information about the model""" 31 | 32 | name: str = Field(..., description="The name of the model") 33 | digest: str = Field(..., description="The digest of the model") 34 | size: int = Field(..., description="The size of the model in bytes") 35 | modified_at: str = Field(..., description="The time the model was created") 36 | details: ModelDetails = Field(..., description="Details about the model") 37 | 38 | 39 | class ModelInformation(BaseModel): 40 | """Information about a model""" 41 | 42 | modelfile: str = Field(..., description="The path to the model file") 43 | parameters: str = Field(..., description="The parameters of the model") 44 | template: str = Field(..., description="The template of the model") 45 | details: ModelDetails = Field(..., description="Details about the model") 46 | 47 | 48 | class ModelTagList(BaseModel): 49 | """A list of models""" 50 | 51 | models: list[ModelTag] = Field(..., description="The list of models") 52 | -------------------------------------------------------------------------------- /ollama_python/endpoints/base.py: 
-------------------------------------------------------------------------------- 1 | """Base API for all endpoints""" 2 | import json 3 | import requests 4 | from typing import Callable, Generator, Optional 5 | 6 | 7 | class BaseAPI: 8 | def __init__(self, base_url: str = "http://localhost:11434/api"): 9 | """ 10 | Initialize the base API endpoint 11 | :param base_url: The base URL of the API 12 | """ 13 | self.base_url = self._format_base_url(base_url=base_url) 14 | 15 | def _format_base_url(self, base_url: str) -> str: 16 | """ 17 | Format the base URL 18 | :param base_url: The base URL to format 19 | :return: The formatted base URL 20 | """ 21 | if base_url.endswith("/"): 22 | base_url = base_url[:-1] 23 | return base_url 24 | 25 | def _stream( 26 | self, endpoint: str, parameters: dict, return_type: Optional[Callable] = None 27 | ) -> Generator: 28 | """ 29 | Stream the response from the given endpoint 30 | :param endpoint: The endpoint to stream from 31 | :param parameters: The parameters to send 32 | :return: A generator that yields the response 33 | """ 34 | with requests.post( 35 | f"{self.base_url}/{endpoint}", json=parameters, stream=True 36 | ) as response: 37 | response.raise_for_status() 38 | for line in response.iter_lines(): 39 | if line: 40 | resp = json.loads(line) 41 | yield return_type(**resp) if return_type else resp 42 | 43 | def _post( 44 | self, 45 | endpoint: str, 46 | parameters: Optional[dict] = None, 47 | return_type: Optional[Callable] = None, 48 | ): 49 | """ 50 | Send a POST request to the given endpoint 51 | :param endpoint: 52 | :param parameters: 53 | :param return_type: 54 | :return: 55 | """ 56 | response = requests.post(f"{self.base_url}/{endpoint}", json=parameters) 57 | response.raise_for_status() 58 | return return_type(**response.json()) if return_type else response.status_code 59 | 60 | def _get(self, endpoint: str, return_type: Optional[Callable] = None): 61 | """ 62 | Send a GET request to the given endpoint 63 | :param endpoint: 64 | :param return_type: 65 | :return: 66 | """ 67 | response = requests.get(f"{self.base_url}/{endpoint}") 68 | response.raise_for_status() 69 | return return_type(**response.json()) if return_type else response.status_code 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | .idea/ 161 | -------------------------------------------------------------------------------- /ollama_python/endpoints/generate.py: -------------------------------------------------------------------------------- 1 | from ollama_python.models.generate import ( 2 | Completion, 3 | Options, 4 | StreamCompletion, 5 | ChatCompletion, 6 | Message, 7 | StreamChatCompletion, 8 | ) 9 | from ollama_python.endpoints.base import BaseAPI 10 | from typing import BinaryIO, Optional, Generator, Union 11 | 12 | 13 | class GenerateAPI(BaseAPI): 14 | def __init__(self, model: str, base_url: str = "http://localhost:11434/api"): 15 | """ 16 | Initialize the Generate API endpoint 17 | 18 | :param model: The model to use for generating completions 19 | :param base_url: The base URL of the API 20 | """ 21 | super().__init__(base_url=base_url) 22 | self.model = model 23 | 24 | def generate( 25 | self, 26 | prompt: str, 27 | images: Optional[list[Union[str, BinaryIO]]] = None, 28 | options: Optional[dict] = None, 29 | system: Optional[str] = None, 30 | stream: bool = False, 31 | format: Optional[str] = None, 32 | template: Optional[str] = None, 33 | context: Optional[list[int]] = None, 34 | raw: bool = False, 35 | ) -> Union[Completion, Generator]: 36 | """ 37 | Generate a completion using the given prompt 38 | 39 | :param prompt: The prompt to use for generating the completion 40 | :param images : A list of base64-encoded images (for multimodal models such as llava) 41 | :param options: Additional model parameters listed in the documentation for the Modelfile such as temperature 42 | :param system: System message to (overrides what is defined in the Modelfile) 43 | :param stream: If false the response will be returned as a single response object, rather than a stream of objects 44 | :param format: The format of the response, currently only support "json" 45 | :param template: the prompt template to use (overrides what is defined in the Modelfile) 46 | :param context: The context parameter returned from a previous request to /generate, this can be used to keep a short conversational memory 47 | :param raw: If true no formatting will be applied to the prompt. You may choose to use the raw parameter if you are specifying a full templated prompt in your request to the API. 
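        Example (a minimal sketch; assumes a locally running Ollama server at the default base URL and that the chosen model has already been pulled):
            from ollama_python.endpoints import GenerateAPI
            api = GenerateAPI(model="mistral")
            completion = api.generate(prompt="Why is the sky blue?", options={"temperature": 0.7})
            print(completion.response)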
48 | :return: The completion 49 | """ 50 | if format != "json" and format is not None: 51 | raise ValueError("Only JSON format is supported") 52 | 53 | parameters = { 54 | "prompt": prompt, 55 | "model": self.model, 56 | "raw": raw, 57 | "stream": stream, 58 | "system": system, 59 | "context": context, 60 | "template": template, 61 | } 62 | 63 | if options: 64 | validated_options = Options( 65 | **options 66 | ) # Basically to validate the types by Pydantic 67 | options_dict = validated_options.model_dump(exclude_none=True) 68 | parameters["options"] = options_dict 69 | 70 | if images: 71 | parameters["images"] = images 72 | 73 | if format: 74 | parameters["format"] = format 75 | 76 | if stream: 77 | return self._stream( 78 | parameters=parameters, endpoint="generate", return_type=StreamCompletion 79 | ) 80 | 81 | return self._post( 82 | parameters=parameters, endpoint="generate", return_type=Completion 83 | ) 84 | 85 | def generate_chat_completion( 86 | self, 87 | messages: list[dict], 88 | format: Optional[str] = None, 89 | options: Optional[dict] = None, 90 | template: Optional[str] = None, 91 | stream: bool = False, 92 | ) -> Union[ChatCompletion, Generator]: 93 | """ 94 | Generate a completion using the given prompt 95 | :param messages: The list of messages e.g [{"role": "user", "content": "Hello"}] 96 | :param options: Additional model parameters listed in the documentation for the Modelfile such as temperature 97 | :param stream: If false the response will be returned as a single response object, rather than a stream of objects 98 | :param format: The format of the response, currently only support "json" 99 | :param template: the prompt template to use (overrides what is defined in the Modelfile) 100 | """ 101 | if format != "json" and format is not None: 102 | raise ValueError("Only JSON format is supported") 103 | 104 | # validating the message input 105 | [Message(**message) for message in messages] 106 | 107 | parameters = { 108 | "model": self.model, 109 | "messages": messages, 110 | "stream": stream, 111 | "template": template, 112 | } 113 | 114 | if options: 115 | validated_options = Options(**options) 116 | options_dict = validated_options.model_dump(exclude_none=True) 117 | parameters["options"] = options_dict 118 | 119 | if format: 120 | parameters["format"] = format 121 | 122 | if stream: 123 | return self._stream( 124 | parameters=parameters, endpoint="chat", return_type=StreamChatCompletion 125 | ) 126 | 127 | return self._post( 128 | parameters=parameters, endpoint="chat", return_type=ChatCompletion 129 | ) 130 | -------------------------------------------------------------------------------- /ollama_python/endpoints/model_management.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Generator, Union 2 | import requests 3 | from ollama_python.endpoints.base import BaseAPI 4 | from ollama_python.models.model_management import ( 5 | ResponsePayload, 6 | ModelTagList, 7 | ModelInformation, 8 | ) 9 | 10 | 11 | class ModelManagementAPI(BaseAPI): 12 | """ 13 | A client for the model management endpoints 14 | """ 15 | 16 | def create( 17 | self, 18 | name: str, 19 | model_file: Optional[str] = None, 20 | stream: bool = False, 21 | path: Optional[str] = None, 22 | ) -> Union[ResponsePayload, Generator]: 23 | """ 24 | Create a model 25 | :param name: The name of the model 26 | :param model_file: The path to the model file 27 | :param stream: If false the response will be returned as a single response object, 
rather than a stream of objects 28 | :param path: The path to the model file 29 | :return: 30 | """ 31 | parameters = { 32 | "name": name, 33 | "model_file": model_file, 34 | "stream": stream, 35 | "path": path, 36 | } 37 | 38 | if stream: 39 | return self._stream( 40 | parameters=parameters, endpoint="create", return_type=ResponsePayload 41 | ) 42 | 43 | return self._post( 44 | parameters=parameters, endpoint="create", return_type=ResponsePayload 45 | ) 46 | 47 | def check_blob_exists(self, digest: str) -> int: 48 | """ 49 | Check if a blob exists 50 | :param digest: The digest of the blob to check 51 | :return: The status code of the request 52 | """ 53 | response = requests.head(f"{self.base_url}/blob/{digest}") 54 | response.raise_for_status() 55 | 56 | return response.status_code 57 | 58 | def create_blob(self, digest: str) -> int: 59 | """ 60 | Create a blob 61 | :param digest: The digest of the blob to create 62 | :return: The status code of the request 63 | """ 64 | endpoint = f"blob/{digest}" 65 | 66 | return self._post(endpoint=endpoint, parameters=None) 67 | 68 | def list_local_models(self) -> ModelTagList: 69 | """ 70 | List all tags 71 | :return: A list of local models 72 | """ 73 | return self._get(endpoint="tags", return_type=ModelTagList) 74 | 75 | def show(self, name: str) -> ModelInformation: 76 | """ 77 | Show a model 78 | :param name: The name of the model to show 79 | :return: The status code of the request 80 | """ 81 | 82 | return self._post( 83 | endpoint="show", parameters={"name": name}, return_type=ModelInformation 84 | ) 85 | 86 | def copy(self, source: str, destination: str) -> int: 87 | """ 88 | Copy a model 89 | :param source: The source model to copy 90 | :param destination: The destination model to copy to 91 | :return: The status code of the request 92 | """ 93 | 94 | return self._post( 95 | endpoint="copy", parameters={"source": source, "destination": destination} 96 | ) 97 | 98 | def delete(self, name: str) -> int: 99 | """ 100 | Delete a model 101 | :param name: The name of the model to delete 102 | :return: The status code of the request 103 | """ 104 | 105 | return self._post(endpoint="delete", parameters={"name": name}) 106 | 107 | def pull( 108 | self, name: str, insecure: Optional[bool] = None, stream: bool = False 109 | ) -> Union[ResponsePayload, Generator]: 110 | """ 111 | Download a model from the ollama library. Cancelled pulls are resumed from where they left off, 112 | and multiple calls will share the same download progress. 113 | :param name: The name of the model to pull 114 | :param insecure: Allow insecure connections to the library. Only use this if you are pulling from 115 | your own library during development. 116 | :param stream: if false the response will be returned as a single response object, rather than a stream of objects 117 | :return: ResponsePayload if stream is false, otherwise a generator that yields the response 118 | """ 119 | parameters = { 120 | "name": name, 121 | "stream": stream, 122 | } 123 | if parameters: 124 | parameters["insecure"] = insecure 125 | if stream: 126 | return self._stream( 127 | endpoint="pull", parameters=parameters, return_type=ResponsePayload 128 | ) 129 | return self._post( 130 | endpoint="pull", parameters=parameters, return_type=ResponsePayload 131 | ) 132 | 133 | def push( 134 | self, name: str, insecure: Optional[bool] = None, stream: bool = False 135 | ) -> Union[ResponsePayload, Generator]: 136 | """ 137 | Upload a model to the ollama library. 
Requires registering for ollama.ai and adding a public key first. 138 | :param name: The name of the model to push 139 | :param insecure: Allow insecure connections to the library. Only use this if you are pushing to 140 | your own library during development. 141 | :param stream: if false the response will be returned as a single response object, rather than a stream of objects 142 | :return: ResponsePayload if stream is false, otherwise a generator that yields the response 143 | """ 144 | parameters = { 145 | "name": name, 146 | "stream": stream, 147 | } 148 | if parameters: 149 | parameters["insecure"] = insecure 150 | if stream: 151 | return self._stream( 152 | endpoint="push", parameters=parameters, return_type=ResponsePayload 153 | ) 154 | return self._post( 155 | endpoint="push", parameters=parameters, return_type=ResponsePayload 156 | ) 157 | -------------------------------------------------------------------------------- /tests/test_generate_endpoint.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import responses 3 | from ollama_python.endpoints.generate import GenerateAPI 4 | from ollama_python.models.generate import ( 5 | Completion, 6 | StreamCompletion, 7 | ChatCompletion, 8 | Message, 9 | StreamChatCompletion, 10 | ) 11 | from tests.utils.utils import mock_api_response 12 | 13 | 14 | GENERATE_ENDPOINT = "/generate" 15 | GENERATE_CHAT_ENDPOINT = "/chat" 16 | 17 | 18 | @pytest.fixture 19 | def generate_api() -> GenerateAPI: 20 | return GenerateAPI(model="test-model", base_url="http://test-servers/api") 21 | 22 | 23 | @responses.activate 24 | def test_generate_completion_success_without_streaming(generate_api): 25 | result = { 26 | "model": "test-model", 27 | "created_at": "2023-08-04T19:22:45.499127Z", 28 | "response": "This is a sample response", 29 | "done": True, 30 | "context": [1, 2, 3], 31 | "total_duration": 10706818083, 32 | "load_duration": 6338219291, 33 | "prompt_eval_count": 26, 34 | "prompt_eval_duration": 130079000, 35 | "eval_count": 259, 36 | "eval_duration": 4232710000, 37 | } 38 | mock_api_response(GENERATE_ENDPOINT, result) 39 | completion = generate_api.generate( 40 | prompt="test prompt", images=["random_images"], options={"temperature": 0.5} 41 | ) 42 | 43 | assert isinstance(completion, Completion) 44 | assert completion.response == result["response"] 45 | assert completion.done 46 | assert completion.model == result["model"] 47 | 48 | 49 | @responses.activate 50 | def test_generate_completion_success_with_streaming(generate_api): 51 | result = [ 52 | { 53 | "model": "test-model", 54 | "created_at": "2023-08-04T08:52:19.385406455-07:00", 55 | "response": "The", 56 | "done": False, 57 | }, 58 | { 59 | "model": "test-model", 60 | "created_at": "2023-08-04T19:22:45.499127Z", 61 | "response": "Sun is blue", 62 | "done": True, 63 | "context": [1, 2, 3], 64 | "total_duration": 10706818083, 65 | "load_duration": 6338219291, 66 | "prompt_eval_count": 26, 67 | "prompt_eval_duration": 130079000, 68 | "eval_count": 259, 69 | "eval_duration": 4232710000, 70 | }, 71 | ] 72 | mock_api_response(GENERATE_ENDPOINT, result, stream=True) 73 | results = list( 74 | generate_api.generate(prompt="test prompt", format="json", stream=True) 75 | ) 76 | assert all(isinstance(completion, StreamCompletion) for completion in results) 77 | assert all(completion.model == "test-model" for completion in results) 78 | assert any(completion.done for completion in results) 79 | 80 | 81 | def 
test_generate_completion_failure(generate_api): 82 | with pytest.raises(ValueError): 83 | generate_api.generate(prompt="test prompt", format="csv") 84 | 85 | 86 | def test_generate_completions_with_invalid_options(generate_api): 87 | with pytest.raises(ValueError): 88 | generate_api.generate(prompt="test prompt", options={"invalid_option": "test"}) 89 | 90 | 91 | @responses.activate 92 | def test_generate_chat_completion_success_without_streaming(generate_api): 93 | result = { 94 | "model": "test-model", 95 | "created_at": "2023-08-04T19:22:45.499127Z", 96 | "message": [ 97 | {"role": "user", "content": "Hello"}, 98 | {"role": "assistant", "content": "Hi"}, 99 | ], 100 | "done": True, 101 | "context": [1, 2, 3], 102 | "total_duration": 10706818083, 103 | "load_duration": 6338219291, 104 | "prompt_eval_count": 26, 105 | "prompt_eval_duration": 130079000, 106 | "eval_count": 259, 107 | "eval_duration": 4232710000, 108 | } 109 | mock_api_response(GENERATE_CHAT_ENDPOINT, result) 110 | completion = generate_api.generate_chat_completion( 111 | messages=[{"role": "user", "content": "Hello"}] 112 | ) 113 | 114 | assert isinstance(completion, ChatCompletion) 115 | assert completion.message == [Message(**msg) for msg in result["message"]] 116 | assert completion.done 117 | assert completion.model == result["model"] 118 | 119 | 120 | @responses.activate 121 | def test_generate_chat_completion_success_with_streaming(generate_api): 122 | result = [ 123 | { 124 | "model": "test-model", 125 | "created_at": "2023-08-04T08:52:19.385406455-07:00", 126 | "messages": [{"role": "user", "content": "Hello"}], 127 | "done": False, 128 | }, 129 | { 130 | "model": "test-model", 131 | "created_at": "2023-08-04T19:22:45.499127Z", 132 | "messages": [ 133 | {"role": "user", "content": "Hello"}, 134 | {"role": "assistant", "content": "Hi"}, 135 | ], 136 | "done": True, 137 | "context": [1, 2, 3], 138 | "total_duration": 10706818083, 139 | "load_duration": 6338219291, 140 | "prompt_eval_count": 26, 141 | "prompt_eval_duration": 130079000, 142 | "eval_count": 259, 143 | "eval_duration": 4232710000, 144 | }, 145 | ] 146 | mock_api_response(GENERATE_CHAT_ENDPOINT, result, stream=True) 147 | results = list( 148 | generate_api.generate_chat_completion( 149 | messages=[{"role": "user", "content": "Hello"}], 150 | format="json", 151 | options={"seed": 42}, 152 | stream=True, 153 | ) 154 | ) 155 | 156 | assert all(isinstance(completion, StreamChatCompletion) for completion in results) 157 | assert all(completion.model == "test-model" for completion in results) 158 | assert any(completion.done for completion in results) 159 | 160 | 161 | def test_generate_chat_completion_invalid_format(generate_api): 162 | with pytest.raises(ValueError): 163 | generate_api.generate_chat_completion( 164 | messages=[{"role": "user", "content": "Hello"}], format="csv" 165 | ) 166 | 167 | 168 | def test_generate_chat_completion_invalid_options(generate_api): 169 | with pytest.raises(ValueError): 170 | generate_api.generate_chat_completion( 171 | messages=[{"role": "user", "content": "Hello"}], 172 | options={"invalid_option": "test"}, 173 | ) 174 | 175 | 176 | @pytest.mark.parametrize( 177 | "messages", 178 | [ 179 | [{"role": "user", "content": "Hello", "invalid_field": "test"}], 180 | [{"role": "bot", "content": "Hello"}], 181 | ], 182 | ) 183 | def test_generate_chat_completion_invalid_messages(messages): 184 | with pytest.raises(ValueError): 185 | generate_api = GenerateAPI( 186 | model="test-model", base_url="http://test-servers/api" 187 | ) 188 
| generate_api.generate_chat_completion(messages=messages) 189 | -------------------------------------------------------------------------------- /tests/test_model_management_endpoint.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import responses 3 | from ollama_python.endpoints.model_management import ModelManagementAPI 4 | from ollama_python.models.model_management import ( 5 | ResponsePayload, 6 | ModelTagList, 7 | ModelInformation, 8 | ) 9 | from tests.utils.utils import mock_api_response 10 | from requests.exceptions import HTTPError 11 | 12 | 13 | @pytest.fixture 14 | def model_management_api() -> ModelManagementAPI: 15 | return ModelManagementAPI(base_url="http://test-servers/api/") 16 | 17 | 18 | @responses.activate 19 | def test_create_management_api_success_without_streaming(model_management_api): 20 | result = {"status": "success"} 21 | mock_api_response("/create", result) 22 | response = model_management_api.create( 23 | name="test-model", model_file="sample model file" 24 | ) 25 | 26 | assert isinstance(response, ResponsePayload) 27 | assert response.status == "success" 28 | 29 | 30 | @responses.activate 31 | def test_create_management_api_success_with_streaming(model_management_api): 32 | result = [{"status": "creating system layer"}, {"status": "success"}] 33 | mock_api_response("/create", result, stream=True) 34 | response = list( 35 | model_management_api.create( 36 | name="test-model", model_file="sample model file", stream=True 37 | ) 38 | ) 39 | 40 | assert len(response) == 2 41 | assert response[0].status == "creating system layer" 42 | assert response[1].status == "success" 43 | 44 | 45 | @responses.activate 46 | def test_check_blob_exists(model_management_api): 47 | mock_api_response("/blob/test-digest", status=200, request_type=responses.HEAD) 48 | response = model_management_api.check_blob_exists(digest="test-digest") 49 | 50 | assert response == 200 51 | 52 | 53 | @responses.activate 54 | def test_check_blob_exists_failure(model_management_api): 55 | with pytest.raises(HTTPError): 56 | mock_api_response("/blob/test-digest", status=400, request_type=responses.HEAD) 57 | model_management_api.check_blob_exists(digest="test-digest") 58 | 59 | 60 | @responses.activate 61 | def test_create_blob(model_management_api): 62 | mock_api_response("/blob/test-digest", status=200) 63 | response = model_management_api.create_blob(digest="test-digest") 64 | 65 | assert response == 200 66 | 67 | 68 | @responses.activate 69 | def test_create_blob_failure(model_management_api): 70 | with pytest.raises(HTTPError): 71 | mock_api_response("/blob/test-digest", status=400) 72 | model_management_api.create_blob(digest="test-digest") 73 | 74 | 75 | @responses.activate 76 | def test_list_tags(model_management_api): 77 | result = { 78 | "models": [ 79 | { 80 | "name": "test-model", 81 | "digest": "test-digest", 82 | "size": 100, 83 | "modified_at": "2023-08-04T19:22:45.499127Z", 84 | "details": { 85 | "format": "test-format", 86 | "family": "test-family", 87 | "families": ["test-family"], 88 | "parameter_size": "test-parameter-size", 89 | "quantization_level": "test-quantization-level", 90 | }, 91 | } 92 | ] 93 | } 94 | mock_api_response("/tags", result, request_type=responses.GET) 95 | response = model_management_api.list_local_models() 96 | 97 | assert isinstance(response, ModelTagList) 98 | assert len(response.models) == 1 99 | assert response.models[0].name == "test-model" 100 | assert response.models[0].digest == "test-digest" 
101 | assert response.models[0].size == 100 102 | assert response.models[0].modified_at == "2023-08-04T19:22:45.499127Z" 103 | assert response.models[0].details.format == "test-format" 104 | assert response.models[0].details.family == "test-family" 105 | assert response.models[0].details.families == ["test-family"] 106 | assert response.models[0].details.parameter_size == "test-parameter-size" 107 | assert response.models[0].details.quantization_level == "test-quantization-level" 108 | 109 | 110 | @responses.activate 111 | def test_list_tags_failure(model_management_api): 112 | with pytest.raises(HTTPError): 113 | mock_api_response("/tags", status=400, request_type=responses.GET) 114 | model_management_api.list_local_models() 115 | 116 | 117 | @responses.activate 118 | def test_show(model_management_api): 119 | result = { 120 | "modelfile": "test-modelfile", 121 | "parameters": "test-parameters", 122 | "template": "test-template", 123 | "details": { 124 | "format": "test-format", 125 | "family": "test-family", 126 | "families": ["test-family"], 127 | "parameter_size": "test-parameter-size", 128 | "quantization_level": "test-quantization-level", 129 | }, 130 | } 131 | mock_api_response("/show", result) 132 | response = model_management_api.show(name="test-model") 133 | 134 | assert isinstance(response, ModelInformation) 135 | assert response.modelfile == "test-modelfile" 136 | assert response.parameters == "test-parameters" 137 | assert response.template == "test-template" 138 | assert response.details.format == "test-format" 139 | assert response.details.family == "test-family" 140 | assert response.details.families == ["test-family"] 141 | assert response.details.parameter_size == "test-parameter-size" 142 | assert response.details.quantization_level == "test-quantization-level" 143 | 144 | 145 | @responses.activate 146 | def test_show_failure(model_management_api): 147 | with pytest.raises(HTTPError): 148 | mock_api_response("/show", status=400) 149 | model_management_api.show(name="test-model") 150 | 151 | 152 | @responses.activate 153 | def test_copy(model_management_api): 154 | mock_api_response("/copy", status=200) 155 | response = model_management_api.copy( 156 | source="test-source", destination="test-destination" 157 | ) 158 | 159 | assert response == 200 160 | 161 | 162 | @responses.activate 163 | def test_copy_failure(model_management_api): 164 | with pytest.raises(HTTPError): 165 | mock_api_response("/copy", status=400) 166 | model_management_api.copy(source="test-source", destination="test-destination") 167 | 168 | 169 | @responses.activate 170 | def test_delete(model_management_api): 171 | mock_api_response("/delete", status=200) 172 | response = model_management_api.delete(name="test-model") 173 | 174 | assert response == 200 175 | 176 | 177 | @responses.activate 178 | def test_delete_failure(model_management_api): 179 | with pytest.raises(HTTPError): 180 | mock_api_response("/delete", status=400) 181 | model_management_api.delete(name="test-model") 182 | 183 | 184 | @responses.activate 185 | def test_pull_success_without_streaming(model_management_api): 186 | result = {"status": "success"} 187 | mock_api_response("/pull", result) 188 | response = model_management_api.pull(name="test-model", insecure=True) 189 | 190 | assert isinstance(response, ResponsePayload) 191 | assert response.status == "success" 192 | 193 | 194 | @responses.activate 195 | def test_pull_success_with_streaming(model_management_api): 196 | result = [{"status": "creating system layer"}, {"status": 
"success"}] 197 | mock_api_response("/pull", result, stream=True) 198 | response = list( 199 | model_management_api.pull(name="test-model", insecure=True, stream=True) 200 | ) 201 | 202 | assert len(response) == 2 203 | assert response[0].status == "creating system layer" 204 | assert response[1].status == "success" 205 | 206 | 207 | @responses.activate 208 | def test_pull_failure(model_management_api): 209 | with pytest.raises(HTTPError): 210 | mock_api_response("/pull", status=400) 211 | model_management_api.pull(name="test-model", insecure=True) 212 | 213 | 214 | @responses.activate 215 | def test_push_success_without_streaming(model_management_api): 216 | result = {"status": "success"} 217 | mock_api_response("/push", result) 218 | response = model_management_api.push(name="test-model", insecure=True) 219 | 220 | assert isinstance(response, ResponsePayload) 221 | assert response.status == "success" 222 | 223 | 224 | @responses.activate 225 | def test_push_success_with_streaming(model_management_api): 226 | result = [{"status": "creating system layer"}, {"status": "success"}] 227 | mock_api_response("/push", result, stream=True) 228 | response = list( 229 | model_management_api.push(name="test-model", insecure=True, stream=True) 230 | ) 231 | 232 | assert len(response) == 2 233 | assert response[0].status == "creating system layer" 234 | assert response[1].status == "success" 235 | 236 | 237 | @responses.activate 238 | def test_push_failure(model_management_api): 239 | with pytest.raises(HTTPError): 240 | mock_api_response("/push", status=400) 241 | model_management_api.push(name="test-model", insecure=True) 242 | -------------------------------------------------------------------------------- /ollama_python/models/generate.py: -------------------------------------------------------------------------------- 1 | """Models for the OlLAMA generate endpoint""" 2 | from pydantic import BaseModel, Field, ConfigDict 3 | from typing import Optional, Literal, Union 4 | 5 | 6 | class Message(BaseModel): 7 | """A message prompt sent to the OLLAMA generate chat endpoint""" 8 | 9 | role: Literal["system", "user", "assistant"] = Field( 10 | ..., description="The role of the message" 11 | ) 12 | content: str = Field(..., description="The content of the message") 13 | images: Optional[list[Union[bytes, str]]] = Field( 14 | None, description="A list of base64-encoded images" 15 | ) 16 | 17 | model_config = ConfigDict(extra="forbid") 18 | 19 | 20 | class BaseCompletion(BaseModel): 21 | """A base completion returned by the OLLAMA generate endpoint""" 22 | 23 | model: str = Field(..., description="The model used to generate the response") 24 | created_at: str = Field(..., description="The time the request was made") 25 | done: bool = Field(..., description="Whether the response is complete") 26 | context: list[int] = Field( 27 | ..., 28 | description="An encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory", 29 | ) 30 | total_duration: int = Field(..., description="Time spent generating the response") 31 | load_duration: int = Field( 32 | ..., description="Time spent in nanoseconds loading the model" 33 | ) 34 | prompt_eval_count: Optional[int] = Field( 35 | None, description="Number of tokens in the prompt" 36 | ) 37 | prompt_eval_duration: int = Field( 38 | ..., description="Time spent in nanoseconds evaluating the prompt" 39 | ) 40 | eval_count: int = Field(..., description="Number of tokens in the response") 41 | eval_duration: int = 
Field( 42 | ..., description="Time in nanoseconds spent generating the response" 43 | ) 44 | 45 | 46 | class Completion(BaseCompletion): 47 | """A completion returned by the OlLAMA generate Completion endpoint""" 48 | 49 | response: str = Field(..., description="The generated response") 50 | 51 | 52 | class ChatCompletion(BaseCompletion): 53 | """A completion returned by the OlLAMA generate Chat endpoint""" 54 | 55 | message: list[Message] = Field(..., description="The generated messages") 56 | 57 | 58 | class StreamCompletion(Completion): 59 | """A completion returned by the OlLAMA generate Completion endpoint when streaming the response""" 60 | 61 | context: Optional[list[int]] = Field( 62 | None, 63 | description="An encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory", 64 | ) 65 | total_duration: Optional[int] = Field( 66 | None, description="Time spent generating the response" 67 | ) 68 | load_duration: Optional[int] = Field( 69 | None, description="Time spent in nanoseconds loading the model" 70 | ) 71 | prompt_eval_count: Optional[int] = Field( 72 | None, description="Number of tokens in the prompt" 73 | ) 74 | prompt_eval_duration: Optional[int] = Field( 75 | None, description="Time spent in nanoseconds evaluating the prompt" 76 | ) 77 | eval_count: Optional[int] = Field( 78 | None, description="Number of tokens in the response" 79 | ) 80 | eval_duration: Optional[int] = Field( 81 | None, description="Time in nanoseconds spent generating the response" 82 | ) 83 | 84 | 85 | class StreamChatCompletion(ChatCompletion): 86 | """A completion result returned by the OlLAMA generate Chat endpoint when streaming the response""" 87 | 88 | message: Optional[list[Message]] = None 89 | 90 | context: Optional[list[int]] = Field( 91 | None, 92 | description="An encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory", 93 | ) 94 | total_duration: Optional[int] = Field( 95 | None, description="Time spent generating the response" 96 | ) 97 | load_duration: Optional[int] = Field( 98 | None, description="Time spent in nanoseconds loading the model" 99 | ) 100 | prompt_eval_count: Optional[int] = Field( 101 | None, description="Number of tokens in the prompt" 102 | ) 103 | prompt_eval_duration: Optional[int] = Field( 104 | None, description="Time spent in nanoseconds evaluating the prompt" 105 | ) 106 | eval_count: Optional[int] = Field( 107 | None, description="Number of tokens in the response" 108 | ) 109 | eval_duration: Optional[int] = Field( 110 | None, description="Time in nanoseconds spent generating the response" 111 | ) 112 | 113 | 114 | class Options(BaseModel): 115 | """Valid options for the OlLAMA generate endpoint""" 116 | 117 | num_keep: Optional[int] = None 118 | seed: Optional[int] = Field( 119 | None, 120 | description="Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0)", 121 | ) 122 | num_predict: Optional[int] = Field( 123 | None, 124 | description="Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context)", 125 | ) 126 | top_k: Optional[int] = Field( 127 | None, 128 | description="Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. 
(Default: 40)", 129 | ) 130 | top_p: Optional[float] = Field( 131 | None, 132 | description="Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9)", 133 | ) 134 | tfs_z: Optional[float] = Field( 135 | None, 136 | description="Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1)", 137 | ) 138 | typical_p: Optional[float] = None 139 | repeat_last_n: Optional[int] = Field( 140 | None, 141 | description="Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)", 142 | ) 143 | temperature: Optional[float] = Field( 144 | None, 145 | description="The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8)", 146 | ) 147 | repeat_penalty: Optional[float] = Field( 148 | None, 149 | description="Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1)", 150 | ) 151 | presence_penalty: Optional[float] = None 152 | frequency_penalty: Optional[float] = None 153 | mirostat: Optional[int] = Field( 154 | None, 155 | description="Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)", 156 | ) 157 | mirostat_tau: Optional[float] = Field( 158 | None, 159 | description="Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0)", 160 | ) 161 | mirostat_eta: Optional[float] = Field( 162 | None, 163 | description="Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1)", 164 | ) 165 | penalize_newline: Optional[bool] = None 166 | stop: Optional[list[str]] = Field( 167 | None, 168 | description="Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate stop parameters in a modelfile.", 169 | ) 170 | numa: Optional[bool] = None 171 | num_ctx: Optional[int] = Field( 172 | None, 173 | description="Sets the size of the context window used to generate the next token. (Default: 2048)", 174 | ) 175 | num_batch: Optional[int] = None 176 | num_gqa: Optional[int] = Field( 177 | None, 178 | description="The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b", 179 | ) 180 | num_gpu: Optional[int] = Field( 181 | None, 182 | description="The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable.", 183 | ) 184 | main_gpu: Optional[int] = None 185 | low_vram: Optional[bool] = None 186 | f16_kv: Optional[bool] = None 187 | vocab_only: Optional[bool] = None 188 | use_mmap: Optional[bool] = None 189 | use_mlock: Optional[bool] = None 190 | embedding_only: Optional[bool] = None 191 | rope_frequency_base: Optional[float] = None 192 | rope_frequency_scale: Optional[float] = None 193 | num_thread: Optional[int] = Field( 194 | None, 195 | description="Sets the number of threads to use during computation. 
By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores).", 196 | ) 197 | 198 | model_config = ConfigDict(extra="forbid") 199 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Ollama Python Library 2 | The Ollama Python library provides the easiest way to integrate your Python project with [Ollama](https://ollama.ai). 3 | 4 | 5 | ## Getting Started 6 | This requires Python 3.9 or higher. 7 | 8 | ```shell 9 | pip install ollama-python 10 | ``` 11 | 12 | The Python package splits the functionality into three core endpoints: 13 | 1. Model Management Endpoints: This includes the ability to create, delete, pull, push, and list models, among others 14 | 2. Generate Endpoint: This includes the generate and chat endpoints in Ollama 15 | 3. Embedding Endpoint: This includes the ability to generate embeddings for a given text 16 | 17 | Pydantic is used to validate user input, and responses from the server are parsed into Pydantic models. 18 | 19 | ## Example Usage 20 | ### Generate Endpoint 21 | #### Completions (Generate) 22 | ##### Without Streaming 23 | ```python 24 | from ollama_python.endpoints import GenerateAPI 25 | 26 | api = GenerateAPI(base_url="http://localhost:8000", model="mistral") 27 | result = api.generate(prompt="Hello World", options=dict(num_predict=10), format="json") 28 | ``` 29 | 30 | ##### With Streaming 31 | ```python 32 | from ollama_python.endpoints import GenerateAPI 33 | 34 | api = GenerateAPI(base_url="http://localhost:8000", model="mistral") 35 | for res in api.generate(prompt="Hello World", options=dict(num_predict=10), format="json", stream=True): 36 | print(res.response) 37 | ``` 38 | 39 | #### Chat Completions 40 | ##### Without Streaming 41 | ```python 42 | from ollama_python.endpoints import GenerateAPI 43 | 44 | api = GenerateAPI(base_url="http://localhost:8000", model="mistral") 45 | messages = [{'role': 'user', 'content': 'Why is the sky blue?'}] 46 | 47 | result = api.generate_chat_completion(messages=messages, options=dict(num_predict=10), format="json") 48 | ``` 49 | 50 | ##### With Streaming 51 | ```python 52 | from ollama_python.endpoints import GenerateAPI 53 | 54 | api = GenerateAPI(base_url="http://localhost:8000", model="mistral") 55 | messages = [{'role': 'user', 'content': 'Why is the sky blue?'}] 56 | 57 | for res in api.generate_chat_completion(messages=messages, options=dict(num_predict=10), format="json", stream=True): 58 | print(res.message) 59 | ``` 60 | 61 | ##### Chat request with images 62 | ```python 63 | from ollama_python.endpoints import GenerateAPI 64 | 65 | api = GenerateAPI(base_url="http://localhost:8000", model="llava") 66 | messages = [{'role': 'user', 'content': 'What is in this image?', 'images': ['iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmp']}] 67 | 68 | result = api.generate_chat_completion(messages=messages, options=dict(num_predict=10), format="json") 69 | print(result.message) 70 | ``` 71 | 72 | 73 | ### Embeddings Endpoint 74 | #### Generate Embeddings 75 | ```python 76 | from ollama_python.endpoints import EmbeddingAPI 77 | 78 | api = EmbeddingAPI(base_url="http://localhost:8000", model="mistral") 79 | result = api.get_embedding(prompt="Hello World", options=dict(seed=10)) 80 | ``` 81 |
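The `get_embedding` call returns an `Embedding` model whose `embedding` field holds the raw vector as a list of floats. A small follow-up sketch (illustrative only, reusing `result` from the snippet above):

```python
print(len(result.embedding))  # dimensionality of the embedding vector
print(result.embedding[:5])   # first few components
```

82 | ### Model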
82 | ### Model Management Endpoints 83 | #### Create a model 84 | ##### Without Streaming 85 | ```python 86 | from ollama_python.endpoints import ModelManagementAPI 87 | 88 | api = ModelManagementAPI(base_url="http://localhost:8000") 89 | result = api.create(name="test_model", model_file="random model_file") 90 | ``` 91 | ##### With Streaming 92 | ```python 93 | from ollama_python.endpoints import ModelManagementAPI 94 | 95 | api = ModelManagementAPI(base_url="http://localhost:8000") 96 | for res in api.create(name="test_model", model_file="random model_file", stream=True): 97 | print(res.status) 98 | ``` 99 | 100 | #### Check if a blob exists 101 | ```python 102 | from ollama_python.endpoints import ModelManagementAPI 103 | 104 | api = ModelManagementAPI(base_url="http://localhost:8000") 105 | result = api.check_blob_exists(digest="sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2") 106 | ``` 107 | 108 | #### Create a blob 109 | ```python 110 | from ollama_python.endpoints import ModelManagementAPI 111 | 112 | api = ModelManagementAPI(base_url="http://localhost:8000") 113 | result = api.create_blob(digest="sha256:29fdb92e57cf0827ded04ae6461b5931d01fa595843f55d36f5b275a52087dd2") 114 | ``` 115 | 116 | #### List local models 117 | ```python 118 | from ollama_python.endpoints import ModelManagementAPI 119 | 120 | api = ModelManagementAPI(base_url="http://localhost:8000") 121 | result = api.list_local_models() 122 | 123 | print(result.models) 124 | ``` 125 | 126 | #### Show model information 127 | ```python 128 | from ollama_python.endpoints import ModelManagementAPI 129 | 130 | api = ModelManagementAPI(base_url="http://localhost:8000") 131 | result = api.show(name="mistral") 132 | 133 | print(result.details) 134 | ``` 135 | 136 | #### Copy a model 137 | ```python 138 | from ollama_python.endpoints import ModelManagementAPI 139 | 140 | api = ModelManagementAPI(base_url="http://localhost:8000") 141 | result = api.copy(source="mistral", destination="mistral_copy") 142 | ``` 143 | 144 | #### Delete a model 145 | ```python 146 | from ollama_python.endpoints import ModelManagementAPI 147 | 148 | api = ModelManagementAPI(base_url="http://localhost:8000") 149 | api.delete(name="mistral_copy") 150 | ``` 151 | 152 | #### Pull a model 153 | ##### Without Streaming 154 | ```python 155 | from ollama_python.endpoints import ModelManagementAPI 156 | 157 | api = ModelManagementAPI(base_url="http://localhost:8000") 158 | result = api.pull(name="mistral") 159 | print(result.status) 160 | ``` 161 | 162 | ##### With Streaming 163 | ```python 164 | from ollama_python.endpoints import ModelManagementAPI 165 | 166 | api = ModelManagementAPI(base_url="http://localhost:8000") 167 | for res in api.pull(name="mistral", stream=True): 168 | print(res.status) 169 | ``` 170 | 171 | #### Push a model 172 | ##### Without Streaming 173 | ```python 174 | from ollama_python.endpoints import ModelManagementAPI 175 | 176 | api = ModelManagementAPI(base_url="http://localhost:8000") 177 | result = api.push(name="mistral") 178 | print(result.status) 179 | ``` 180 | 181 | ##### With Streaming 182 | ```python 183 | from ollama_python.endpoints import ModelManagementAPI 184 | 185 | api = ModelManagementAPI(base_url="http://localhost:8000") 186 | for res in api.push(name="mistral", stream=True): 187 | print(res.status) 188 | ``` 189 | 190 | 191 |
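Model management and generation can also be combined. The sketch below pulls a model and then calls `generate` with a few of the options documented in the table that follows; the prompt and the option values are illustrative only.

```python
from ollama_python.endpoints import GenerateAPI, ModelManagementAPI

base_url = "http://localhost:8000"

# Make sure the model is available locally before generating
for res in ModelManagementAPI(base_url=base_url).pull(name="mistral", stream=True):
    print(res.status)

# Generate with explicit sampling options (see the table of valid options below)
api = GenerateAPI(base_url=base_url, model="mistral")
result = api.generate(
    prompt="Write one sentence about the sea",
    options=dict(temperature=0.7, num_predict=64, top_k=40, top_p=0.9, seed=42),
)
print(result.response)
```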
192 | ### Valid Options/Parameters 193 | 194 | | Parameter | Description | Value Type | Example Usage | 195 | | -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- | -------------------- | 196 | | mirostat | Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) | int | mirostat 0 | 197 | | mirostat_eta | Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) | float | mirostat_eta 0.1 | 198 | | mirostat_tau | Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) | float | mirostat_tau 5.0 | 199 | | num_ctx | Sets the size of the context window used to generate the next token. (Default: 2048) | int | num_ctx 4096 | 200 | | num_gqa | The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b | int | num_gqa 1 | 201 | | num_gpu | The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. | int | num_gpu 50 | 202 | | num_thread | Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). | int | num_thread 8 | 203 | | repeat_last_n | Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) | int | repeat_last_n 64 | 204 | | repeat_penalty | Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) | float | repeat_penalty 1.1 | 205 | | temperature | The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) | float | temperature 0.7 | 206 | | seed | Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) | int | seed 42 | 207 | | stop | Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. | string | stop "AI assistant:" | 208 | | tfs_z | Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (default: 1) | float | tfs_z 1 | 209 | | num_predict | Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) | int | num_predict 42 | 210 | | top_k | Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) | int | top_k 40 | 211 | | top_p | Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text.
(Default: 0.9) | float | top_p 0.9 | 212 | 213 | 214 | ## Todo 215 | Add support for Asynchronous version of the library 216 | 217 | ## To Contribute 218 | 1. Clone the repo 219 | 2. Run `poetry install` 220 | 3. Run `pre-commit install` 221 | 222 | Then you're ready to contribute to the repo 223 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [2024] [Richard Ogunyale] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
2 | 3 | [[package]] 4 | name = "annotated-types" 5 | version = "0.6.0" 6 | description = "Reusable constraint types to use with typing.Annotated" 7 | category = "main" 8 | optional = false 9 | python-versions = ">=3.8" 10 | files = [ 11 | {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, 12 | {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, 13 | ] 14 | 15 | [[package]] 16 | name = "anyio" 17 | version = "4.2.0" 18 | description = "High level compatibility layer for multiple asynchronous event loop implementations" 19 | category = "main" 20 | optional = false 21 | python-versions = ">=3.8" 22 | files = [ 23 | {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, 24 | {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, 25 | ] 26 | 27 | [package.dependencies] 28 | exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} 29 | idna = ">=2.8" 30 | sniffio = ">=1.1" 31 | typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} 32 | 33 | [package.extras] 34 | doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] 35 | test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] 36 | trio = ["trio (>=0.23)"] 37 | 38 | [[package]] 39 | name = "black" 40 | version = "23.12.1" 41 | description = "The uncompromising code formatter." 42 | category = "dev" 43 | optional = false 44 | python-versions = ">=3.8" 45 | files = [ 46 | {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, 47 | {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, 48 | {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, 49 | {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, 50 | {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, 51 | {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, 52 | {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, 53 | {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, 54 | {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, 55 | {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, 56 | {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, 57 | {file = 
"black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, 58 | {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, 59 | {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, 60 | {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, 61 | {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, 62 | {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, 63 | {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, 64 | {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, 65 | {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, 66 | {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, 67 | {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, 68 | ] 69 | 70 | [package.dependencies] 71 | click = ">=8.0.0" 72 | mypy-extensions = ">=0.4.3" 73 | packaging = ">=22.0" 74 | pathspec = ">=0.9.0" 75 | platformdirs = ">=2" 76 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 77 | typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} 78 | 79 | [package.extras] 80 | colorama = ["colorama (>=0.4.3)"] 81 | d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] 82 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 83 | uvloop = ["uvloop (>=0.15.2)"] 84 | 85 | [[package]] 86 | name = "certifi" 87 | version = "2023.11.17" 88 | description = "Python package for providing Mozilla's CA Bundle." 89 | category = "main" 90 | optional = false 91 | python-versions = ">=3.6" 92 | files = [ 93 | {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, 94 | {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, 95 | ] 96 | 97 | [[package]] 98 | name = "cfgv" 99 | version = "3.4.0" 100 | description = "Validate configuration and produce human readable error messages." 101 | category = "dev" 102 | optional = false 103 | python-versions = ">=3.8" 104 | files = [ 105 | {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, 106 | {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, 107 | ] 108 | 109 | [[package]] 110 | name = "charset-normalizer" 111 | version = "3.3.2" 112 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
113 | category = "main" 114 | optional = false 115 | python-versions = ">=3.7.0" 116 | files = [ 117 | {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, 118 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, 119 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, 120 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, 121 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, 122 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, 123 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, 124 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, 125 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, 126 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, 127 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, 128 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, 129 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, 130 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, 131 | {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, 132 | {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, 133 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, 134 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, 135 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, 136 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, 137 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, 138 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, 139 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, 140 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, 141 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, 142 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, 143 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, 144 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, 145 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, 146 | {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, 147 | {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, 148 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, 149 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, 150 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, 151 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, 152 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, 153 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, 154 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, 155 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, 156 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, 157 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, 158 | {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, 159 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, 160 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, 161 | {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, 162 | {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, 163 | {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, 164 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, 165 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, 166 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, 167 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, 168 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, 169 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, 170 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, 171 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, 172 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, 173 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, 174 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 175 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, 176 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, 177 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, 178 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, 179 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, 180 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, 181 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, 182 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, 183 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, 184 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, 185 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, 186 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, 187 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, 188 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, 189 | {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, 190 | {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, 191 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, 192 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, 193 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, 194 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, 195 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, 196 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, 197 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, 198 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, 199 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, 200 | {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, 201 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, 202 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, 203 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, 204 | {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, 205 | {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, 206 | {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, 207 | ] 208 | 209 | [[package]] 210 | name = "click" 211 | version = "8.1.7" 212 | description = "Composable command line interface toolkit" 213 | category = "dev" 214 | optional = false 215 | python-versions = ">=3.7" 216 | files = [ 217 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 218 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 219 | ] 220 | 221 | [package.dependencies] 222 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 223 | 224 | [[package]] 225 | name = "colorama" 226 | version = "0.4.6" 227 | description = "Cross-platform colored terminal text." 
228 | category = "dev" 229 | optional = false 230 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 231 | files = [ 232 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 233 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 234 | ] 235 | 236 | [[package]] 237 | name = "coverage" 238 | version = "7.4.0" 239 | description = "Code coverage measurement for Python" 240 | category = "dev" 241 | optional = false 242 | python-versions = ">=3.8" 243 | files = [ 244 | {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, 245 | {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, 246 | {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, 247 | {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, 248 | {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, 249 | {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, 250 | {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, 251 | {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, 252 | {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, 253 | {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, 254 | {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, 255 | {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, 256 | {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, 257 | {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, 258 | {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, 259 | {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, 260 | {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, 261 | {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, 262 | {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, 263 | {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, 264 | {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, 265 | {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, 266 | {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, 267 | {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, 268 | {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, 269 | {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, 270 | {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, 271 | {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, 272 | {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, 273 | {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, 274 | {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, 275 | {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, 276 | {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, 277 | {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, 278 | {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, 279 | {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, 280 | {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, 281 | {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, 282 | {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, 283 | {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, 284 | {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, 285 | {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, 286 | {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, 287 | {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, 288 | {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, 289 | {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, 290 | {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, 291 | {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, 292 | {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, 293 | {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, 294 | {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, 295 | {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, 296 | ] 297 | 298 | [package.dependencies] 299 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 300 | 301 | [package.extras] 302 | toml = ["tomli"] 303 | 304 | [[package]] 305 | name = "distlib" 306 | version = "0.3.8" 307 | description = "Distribution utilities" 308 | category = "dev" 309 | optional = false 310 | python-versions = "*" 311 | files = [ 312 | {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, 313 | {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, 314 | ] 315 | 316 | [[package]] 317 | name = "exceptiongroup" 318 | version = "1.2.0" 319 | description = "Backport of PEP 654 (exception groups)" 320 | category = "main" 321 | optional = false 322 | python-versions = ">=3.7" 323 | files = [ 324 | {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, 325 | {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, 326 | ] 327 | 328 | [package.extras] 329 | test = ["pytest (>=6)"] 330 | 331 | [[package]] 332 | name = "filelock" 333 | version = "3.13.1" 334 | description = "A platform independent file lock." 
335 | category = "dev" 336 | optional = false 337 | python-versions = ">=3.8" 338 | files = [ 339 | {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, 340 | {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, 341 | ] 342 | 343 | [package.extras] 344 | docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] 345 | testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] 346 | typing = ["typing-extensions (>=4.8)"] 347 | 348 | [[package]] 349 | name = "h11" 350 | version = "0.14.0" 351 | description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" 352 | category = "main" 353 | optional = false 354 | python-versions = ">=3.7" 355 | files = [ 356 | {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, 357 | {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, 358 | ] 359 | 360 | [[package]] 361 | name = "httpcore" 362 | version = "1.0.2" 363 | description = "A minimal low-level HTTP client." 364 | category = "main" 365 | optional = false 366 | python-versions = ">=3.8" 367 | files = [ 368 | {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, 369 | {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, 370 | ] 371 | 372 | [package.dependencies] 373 | certifi = "*" 374 | h11 = ">=0.13,<0.15" 375 | 376 | [package.extras] 377 | asyncio = ["anyio (>=4.0,<5.0)"] 378 | http2 = ["h2 (>=3,<5)"] 379 | socks = ["socksio (>=1.0.0,<2.0.0)"] 380 | trio = ["trio (>=0.22.0,<0.23.0)"] 381 | 382 | [[package]] 383 | name = "httpx" 384 | version = "0.26.0" 385 | description = "The next generation HTTP client." 
386 | category = "main" 387 | optional = false 388 | python-versions = ">=3.8" 389 | files = [ 390 | {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, 391 | {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, 392 | ] 393 | 394 | [package.dependencies] 395 | anyio = "*" 396 | certifi = "*" 397 | httpcore = ">=1.0.0,<2.0.0" 398 | idna = "*" 399 | sniffio = "*" 400 | 401 | [package.extras] 402 | brotli = ["brotli", "brotlicffi"] 403 | cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] 404 | http2 = ["h2 (>=3,<5)"] 405 | socks = ["socksio (>=1.0.0,<2.0.0)"] 406 | 407 | [[package]] 408 | name = "identify" 409 | version = "2.5.33" 410 | description = "File identification library for Python" 411 | category = "dev" 412 | optional = false 413 | python-versions = ">=3.8" 414 | files = [ 415 | {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, 416 | {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, 417 | ] 418 | 419 | [package.extras] 420 | license = ["ukkonen"] 421 | 422 | [[package]] 423 | name = "idna" 424 | version = "3.6" 425 | description = "Internationalized Domain Names in Applications (IDNA)" 426 | category = "main" 427 | optional = false 428 | python-versions = ">=3.5" 429 | files = [ 430 | {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, 431 | {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, 432 | ] 433 | 434 | [[package]] 435 | name = "iniconfig" 436 | version = "2.0.0" 437 | description = "brain-dead simple config-ini parsing" 438 | category = "dev" 439 | optional = false 440 | python-versions = ">=3.7" 441 | files = [ 442 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 443 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 444 | ] 445 | 446 | [[package]] 447 | name = "mypy-extensions" 448 | version = "1.0.0" 449 | description = "Type system extensions for programs checked with the mypy type checker." 
450 | category = "dev" 451 | optional = false 452 | python-versions = ">=3.5" 453 | files = [ 454 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 455 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 456 | ] 457 | 458 | [[package]] 459 | name = "nodeenv" 460 | version = "1.8.0" 461 | description = "Node.js virtual environment builder" 462 | category = "dev" 463 | optional = false 464 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" 465 | files = [ 466 | {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, 467 | {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, 468 | ] 469 | 470 | [package.dependencies] 471 | setuptools = "*" 472 | 473 | [[package]] 474 | name = "packaging" 475 | version = "23.2" 476 | description = "Core utilities for Python packages" 477 | category = "dev" 478 | optional = false 479 | python-versions = ">=3.7" 480 | files = [ 481 | {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, 482 | {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, 483 | ] 484 | 485 | [[package]] 486 | name = "pathspec" 487 | version = "0.12.1" 488 | description = "Utility library for gitignore style pattern matching of file paths." 489 | category = "dev" 490 | optional = false 491 | python-versions = ">=3.8" 492 | files = [ 493 | {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, 494 | {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, 495 | ] 496 | 497 | [[package]] 498 | name = "platformdirs" 499 | version = "4.1.0" 500 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
501 | category = "dev" 502 | optional = false 503 | python-versions = ">=3.8" 504 | files = [ 505 | {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, 506 | {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, 507 | ] 508 | 509 | [package.extras] 510 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] 511 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] 512 | 513 | [[package]] 514 | name = "pluggy" 515 | version = "1.3.0" 516 | description = "plugin and hook calling mechanisms for python" 517 | category = "dev" 518 | optional = false 519 | python-versions = ">=3.8" 520 | files = [ 521 | {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, 522 | {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, 523 | ] 524 | 525 | [package.extras] 526 | dev = ["pre-commit", "tox"] 527 | testing = ["pytest", "pytest-benchmark"] 528 | 529 | [[package]] 530 | name = "pre-commit" 531 | version = "3.6.0" 532 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 533 | category = "dev" 534 | optional = false 535 | python-versions = ">=3.9" 536 | files = [ 537 | {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, 538 | {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, 539 | ] 540 | 541 | [package.dependencies] 542 | cfgv = ">=2.0.0" 543 | identify = ">=1.0.0" 544 | nodeenv = ">=0.11.1" 545 | pyyaml = ">=5.1" 546 | virtualenv = ">=20.10.0" 547 | 548 | [[package]] 549 | name = "pydantic" 550 | version = "2.5.3" 551 | description = "Data validation using Python type hints" 552 | category = "main" 553 | optional = false 554 | python-versions = ">=3.7" 555 | files = [ 556 | {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, 557 | {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, 558 | ] 559 | 560 | [package.dependencies] 561 | annotated-types = ">=0.4.0" 562 | pydantic-core = "2.14.6" 563 | typing-extensions = ">=4.6.1" 564 | 565 | [package.extras] 566 | email = ["email-validator (>=2.0.0)"] 567 | 568 | [[package]] 569 | name = "pydantic-core" 570 | version = "2.14.6" 571 | description = "" 572 | category = "main" 573 | optional = false 574 | python-versions = ">=3.7" 575 | files = [ 576 | {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, 577 | {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, 578 | {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, 579 | {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, 580 | {file = 
"pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, 581 | {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, 582 | {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, 583 | {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, 584 | {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, 585 | {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, 586 | {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, 587 | {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, 588 | {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, 589 | {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, 590 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, 591 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, 592 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, 593 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, 594 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, 595 | {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, 596 | {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, 597 | {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, 598 | {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, 599 | {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, 600 | {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, 601 | {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, 602 | {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, 603 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, 604 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, 605 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, 606 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, 607 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, 608 | {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, 609 | {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, 610 | {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, 611 | {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, 612 | {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, 613 | {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, 614 | {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, 615 | {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, 616 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, 617 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, 618 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, 619 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, 620 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, 621 | {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, 622 | {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, 623 | {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, 624 | {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, 625 | {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, 626 | {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, 627 | {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, 628 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, 629 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, 630 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, 631 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, 632 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, 633 | {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, 634 | {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, 635 | {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, 636 | {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, 637 | {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, 638 | {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, 639 | {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, 640 | {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, 641 | {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, 642 | {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, 643 | {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, 644 | {file = 
"pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, 645 | {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, 646 | {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, 647 | {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, 648 | {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, 649 | {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, 650 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, 651 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, 652 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, 653 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, 654 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, 655 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, 656 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, 657 | {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, 658 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, 659 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, 660 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, 661 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, 662 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, 663 | {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, 664 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, 665 | {file = 
"pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, 666 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, 667 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, 668 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, 669 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, 670 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, 671 | {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, 672 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, 673 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, 674 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, 675 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, 676 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, 677 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, 678 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, 679 | {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, 680 | {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, 681 | ] 682 | 683 | [package.dependencies] 684 | typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" 685 | 686 | [[package]] 687 | name = "pytest" 688 | version = "7.4.4" 689 | description = "pytest: simple powerful testing with Python" 690 | category = "dev" 691 | optional = false 692 | python-versions = ">=3.7" 693 | files = [ 694 | {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, 695 | {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, 696 | ] 697 | 698 | [package.dependencies] 699 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 700 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 701 | iniconfig = "*" 702 | packaging = "*" 703 | pluggy = ">=0.12,<2.0" 704 | tomli = {version = ">=1.0.0", markers = 
"python_version < \"3.11\""} 705 | 706 | [package.extras] 707 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 708 | 709 | [[package]] 710 | name = "pytest-cov" 711 | version = "4.1.0" 712 | description = "Pytest plugin for measuring coverage." 713 | category = "dev" 714 | optional = false 715 | python-versions = ">=3.7" 716 | files = [ 717 | {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, 718 | {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, 719 | ] 720 | 721 | [package.dependencies] 722 | coverage = {version = ">=5.2.1", extras = ["toml"]} 723 | pytest = ">=4.6" 724 | 725 | [package.extras] 726 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] 727 | 728 | [[package]] 729 | name = "pyyaml" 730 | version = "6.0.1" 731 | description = "YAML parser and emitter for Python" 732 | category = "main" 733 | optional = false 734 | python-versions = ">=3.6" 735 | files = [ 736 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, 737 | {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, 738 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, 739 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, 740 | {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, 741 | {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, 742 | {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, 743 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, 744 | {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, 745 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, 746 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, 747 | {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, 748 | {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, 749 | {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, 750 | {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, 751 | {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, 752 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, 753 | {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, 754 | {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, 755 | {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, 756 | {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, 757 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, 758 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, 759 | {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, 760 | {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, 761 | {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, 762 | {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, 763 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, 764 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, 765 | {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, 766 | {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, 767 | {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, 768 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, 769 | {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, 770 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, 771 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, 772 | {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, 773 | {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, 774 | {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, 775 | {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, 776 | ] 777 | 778 | [[package]] 779 | name = "requests" 780 | version = "2.31.0" 781 | description = "Python HTTP for Humans." 782 | category = "main" 783 | optional = false 784 | python-versions = ">=3.7" 785 | files = [ 786 | {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, 787 | {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, 788 | ] 789 | 790 | [package.dependencies] 791 | certifi = ">=2017.4.17" 792 | charset-normalizer = ">=2,<4" 793 | idna = ">=2.5,<4" 794 | urllib3 = ">=1.21.1,<3" 795 | 796 | [package.extras] 797 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 798 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] 799 | 800 | [[package]] 801 | name = "responses" 802 | version = "0.24.1" 803 | description = "A utility library for mocking out the `requests` Python library." 804 | category = "main" 805 | optional = false 806 | python-versions = ">=3.8" 807 | files = [ 808 | {file = "responses-0.24.1-py3-none-any.whl", hash = "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9"}, 809 | {file = "responses-0.24.1.tar.gz", hash = "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"}, 810 | ] 811 | 812 | [package.dependencies] 813 | pyyaml = "*" 814 | requests = ">=2.30.0,<3.0" 815 | urllib3 = ">=1.25.10,<3.0" 816 | 817 | [package.extras] 818 | tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] 819 | 820 | [[package]] 821 | name = "ruff" 822 | version = "0.1.13" 823 | description = "An extremely fast Python linter and code formatter, written in Rust." 
824 | category = "dev" 825 | optional = false 826 | python-versions = ">=3.7" 827 | files = [ 828 | {file = "ruff-0.1.13-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e3fd36e0d48aeac672aa850045e784673449ce619afc12823ea7868fcc41d8ba"}, 829 | {file = "ruff-0.1.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9fb6b3b86450d4ec6a6732f9f60c4406061b6851c4b29f944f8c9d91c3611c7a"}, 830 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b13ba5d7156daaf3fd08b6b993360a96060500aca7e307d95ecbc5bb47a69296"}, 831 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9ebb40442f7b531e136d334ef0851412410061e65d61ca8ce90d894a094feb22"}, 832 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226b517f42d59a543d6383cfe03cccf0091e3e0ed1b856c6824be03d2a75d3b6"}, 833 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f0312ba1061e9b8c724e9a702d3c8621e3c6e6c2c9bd862550ab2951ac75c16"}, 834 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2f59bcf5217c661254bd6bc42d65a6fd1a8b80c48763cb5c2293295babd945dd"}, 835 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6894b00495e00c27b6ba61af1fc666f17de6140345e5ef27dd6e08fb987259d"}, 836 | {file = "ruff-0.1.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1600942485c6e66119da294c6294856b5c86fd6df591ce293e4a4cc8e72989"}, 837 | {file = "ruff-0.1.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ee3febce7863e231a467f90e681d3d89210b900d49ce88723ce052c8761be8c7"}, 838 | {file = "ruff-0.1.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dcaab50e278ff497ee4d1fe69b29ca0a9a47cd954bb17963628fa417933c6eb1"}, 839 | {file = "ruff-0.1.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f57de973de4edef3ad3044d6a50c02ad9fc2dff0d88587f25f1a48e3f72edf5e"}, 840 | {file = "ruff-0.1.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a36fa90eb12208272a858475ec43ac811ac37e91ef868759770b71bdabe27b6"}, 841 | {file = "ruff-0.1.13-py3-none-win32.whl", hash = "sha256:a623349a505ff768dad6bd57087e2461be8db58305ebd5577bd0e98631f9ae69"}, 842 | {file = "ruff-0.1.13-py3-none-win_amd64.whl", hash = "sha256:f988746e3c3982bea7f824c8fa318ce7f538c4dfefec99cd09c8770bd33e6539"}, 843 | {file = "ruff-0.1.13-py3-none-win_arm64.whl", hash = "sha256:6bbbc3042075871ec17f28864808540a26f0f79a4478c357d3e3d2284e832998"}, 844 | {file = "ruff-0.1.13.tar.gz", hash = "sha256:e261f1baed6291f434ffb1d5c6bd8051d1c2a26958072d38dfbec39b3dda7352"}, 845 | ] 846 | 847 | [[package]] 848 | name = "setuptools" 849 | version = "69.0.3" 850 | description = "Easily download, build, install, upgrade, and uninstall Python packages" 851 | category = "dev" 852 | optional = false 853 | python-versions = ">=3.8" 854 | files = [ 855 | {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, 856 | {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, 857 | ] 858 | 859 | [package.extras] 860 | docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] 861 | testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] 862 | testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] 863 | 864 | [[package]] 865 | name = "sniffio" 866 | version = "1.3.0" 867 | description = "Sniff out which async library your code is running under" 868 | category = "main" 869 | optional = false 870 | python-versions = ">=3.7" 871 | files = [ 872 | {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, 873 | {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, 874 | ] 875 | 876 | [[package]] 877 | name = "tomli" 878 | version = "2.0.1" 879 | description = "A lil' TOML parser" 880 | category = "dev" 881 | optional = false 882 | python-versions = ">=3.7" 883 | files = [ 884 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 885 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 886 | ] 887 | 888 | [[package]] 889 | name = "typing-extensions" 890 | version = "4.9.0" 891 | description = "Backported and Experimental Type Hints for Python 3.8+" 892 | category = "main" 893 | optional = false 894 | python-versions = ">=3.8" 895 | files = [ 896 | {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, 897 | {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, 898 | ] 899 | 900 | [[package]] 901 | name = "urllib3" 902 | version = "2.1.0" 903 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
904 | category = "main" 905 | optional = false 906 | python-versions = ">=3.8" 907 | files = [ 908 | {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, 909 | {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, 910 | ] 911 | 912 | [package.extras] 913 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] 914 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 915 | zstd = ["zstandard (>=0.18.0)"] 916 | 917 | [[package]] 918 | name = "virtualenv" 919 | version = "20.25.0" 920 | description = "Virtual Python Environment builder" 921 | category = "dev" 922 | optional = false 923 | python-versions = ">=3.7" 924 | files = [ 925 | {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, 926 | {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, 927 | ] 928 | 929 | [package.dependencies] 930 | distlib = ">=0.3.7,<1" 931 | filelock = ">=3.12.2,<4" 932 | platformdirs = ">=3.9.1,<5" 933 | 934 | [package.extras] 935 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] 936 | test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] 937 | 938 | [metadata] 939 | lock-version = "2.0" 940 | python-versions = "^3.9" 941 | content-hash = "16f0bdfdfd35fd7cb07901df5aa159796c53d75cf07c919e555cde26ff109473" 942 | --------------------------------------------------------------------------------