├── .github └── workflows │ ├── backport.yml │ └── ci.yml ├── .gitignore ├── .readthedocs.yml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── NOTICE ├── README.md ├── docs └── sphinx │ ├── client_utils.rst │ ├── conf.py │ ├── exceptions.rst │ ├── index.rst │ ├── installation.rst │ ├── logging.rst │ ├── node_pool.rst │ ├── nodes.rst │ ├── responses.rst │ ├── serializers.rst │ └── transport.rst ├── elastic_transport ├── __init__.py ├── _async_transport.py ├── _compat.py ├── _exceptions.py ├── _models.py ├── _node │ ├── __init__.py │ ├── _base.py │ ├── _base_async.py │ ├── _http_aiohttp.py │ ├── _http_httpx.py │ ├── _http_requests.py │ ├── _http_urllib3.py │ └── _urllib3_chain_certs.py ├── _node_pool.py ├── _otel.py ├── _response.py ├── _serializer.py ├── _transport.py ├── _utils.py ├── _version.py ├── client_utils.py └── py.typed ├── noxfile.py ├── requirements-min.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── async_ │ ├── __init__.py │ ├── test_async_transport.py │ ├── test_httpbin.py │ └── test_httpserver.py ├── conftest.py ├── node │ ├── __init__.py │ ├── test_base.py │ ├── test_http_aiohttp.py │ ├── test_http_httpx.py │ ├── test_http_requests.py │ ├── test_http_urllib3.py │ ├── test_tls_versions.py │ └── test_urllib3_chain_certs.py ├── test_client_utils.py ├── test_exceptions.py ├── test_httpbin.py ├── test_httpserver.py ├── test_logging.py ├── test_models.py ├── test_node_pool.py ├── test_otel.py ├── test_package.py ├── test_response.py ├── test_serializer.py ├── test_transport.py └── test_utils.py └── utils ├── build-dists.py └── license-headers.py /.github/workflows/backport.yml: -------------------------------------------------------------------------------- 1 | name: Backport 2 | on: 3 | pull_request_target: 4 | types: 5 | - closed 6 | - labeled 7 | 8 | jobs: 9 | backport: 10 | name: Backport 11 | runs-on: ubuntu-latest 12 | # Only react to merged PRs for security reasons. 
13 | # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target. 14 | if: > 15 | github.event.pull_request.merged 16 | && ( 17 | github.event.action == 'closed' 18 | || ( 19 | github.event.action == 'labeled' 20 | && contains(github.event.label.name, 'backport') 21 | ) 22 | ) 23 | steps: 24 | - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4 25 | with: 26 | github_token: ${{ secrets.GITHUB_TOKEN }} 27 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: [push, pull_request] 5 | 6 | env: 7 | FORCE_COLOR: 1 8 | 9 | jobs: 10 | package: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout repository 14 | uses: actions/checkout@v1 15 | - name: Set up Python 3.x 16 | uses: actions/setup-python@v5 17 | with: 18 | python-version: 3.x 19 | - name: Install dependencies 20 | run: python3 -m pip install setuptools wheel twine 21 | - name: Build dists 22 | run: python3 utils/build-dists.py 23 | 24 | lint: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - name: Checkout Repository 28 | uses: actions/checkout@v1 29 | - name: Set up Python 3.x 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: 3.x 33 | - name: Install dependencies 34 | run: python3 -m pip install nox 35 | - name: Lint the code 36 | run: nox -s lint 37 | env: 38 | # Workaround for development versions 39 | # https://github.com/aio-libs/aiohttp/issues/7675 40 | AIOHTTP_NO_EXTENSIONS: 1 41 | 42 | test: 43 | strategy: 44 | fail-fast: false 45 | matrix: 46 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] 47 | os: ["ubuntu-latest"] 48 | experimental: [false] 49 | nox-session: [''] 50 | include: 51 | - python-version: "3.8" 52 | os: "ubuntu-latest" 53 | experimental: false 54 | nox-session: "test-min-deps" 55 | 56 | runs-on: ${{ matrix.os }} 57 
| name: test-${{ matrix.python-version }} ${{ matrix.nox-session }} 58 | continue-on-error: ${{ matrix.experimental }} 59 | steps: 60 | - name: Checkout repository 61 | uses: actions/checkout@v2 62 | 63 | - name: Set up Python - ${{ matrix.python-version }} 64 | uses: actions/setup-python@v5 65 | with: 66 | python-version: ${{ matrix.python-version }} 67 | allow-prereleases: true 68 | 69 | - name: Install Dependencies 70 | run: python -m pip install --upgrade nox 71 | 72 | - name: Run tests 73 | run: nox -s ${NOX_SESSION:-test-$PYTHON_VERSION} 74 | env: 75 | PYTHON_VERSION: ${{ matrix.python-version }} 76 | NOX_SESSION: ${{ matrix.nox-session }} 77 | # Required for development versions of Python 78 | AIOHTTP_NO_EXTENSIONS: 1 79 | FROZENLIST_NO_EXTENSIONS: 1 80 | YARL_NO_EXTENSIONS: 1 81 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/sphinx/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | 140 | # sample code for GitHub issues 141 | issues/ 142 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | # To work around https://github.com/aio-libs/aiohttp/issues/7675, we need 7 | # to set AIOHTTP_NO_EXTENSIONS to 1 but it has to be done in 8 | # https://readthedocs.org/dashboard/elastic-transport-python/environmentvariables/ 9 | # because of https://github.com/readthedocs/readthedocs.org/issues/6311 10 | python: "3" 11 | 12 | python: 13 | install: 14 | - method: pip 15 | path: . 
16 | extra_requirements: 17 | - develop 18 | 19 | sphinx: 20 | configuration: docs/sphinx/conf.py 21 | fail_on_warning: true 22 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 8.17.1 (2025-03-12) 4 | 5 | * Ensure compatibility with httpx v0.28.0+ ([#222](https://github.com/elastic/elastic-transport-python/pull/222), contributed by Arch Linux maintainer @carlsmedstad) 6 | * Add missing NOTICE file ([#229](https://github.com/elastic/elastic-transport-python/pull/229), reported by Debian Maintainer @schoekek) 7 | 8 | ## 8.17.0 (2025-01-07) 9 | 10 | * Fix use of SSLContext with sniffing ([#199](https://github.com/elastic/elastic-transport-python/pull/199)) 11 | * Fix enabled_cleanup_closed warning ([#202](https://github.com/elastic/elastic-transport-python/pull/202)) 12 | * Remove unneeded install requirement ([#196](https://github.com/elastic/elastic-transport-python/pull/196)) 13 | * Fix aiohttp call type: ignore differently ([#190](https://github.com/elastic/elastic-transport-python/pull/190)) 14 | 15 | ## 8.15.1 (2024-10-09) 16 | 17 | * Add explicit Python 3.13 support ([#189](https://github.com/elastic/elastic-transport-python/pull/189)) 18 | 19 | ## 8.15.0 (2024-08-09) 20 | 21 | * Removed call to `raise_for_status()` when using `HttpxAsyncHttpNode` to prevent exceptions being raised for 404 responses ([#182](https://github.com/elastic/elastic-transport-python/pull/182)) 22 | * Documented response classes ([#175](https://github.com/elastic/elastic-transport-python/pull/175)) 23 | * Dropped support for Python 3.7 ([#179](https://github.com/elastic/elastic-transport-python/pull/179)) 24 | 25 | ## 8.13.1 (2024-04-28) 26 | 27 | - Fixed requests 2.32 compatibility (#164) 28 | - Fixed TypeError when two nodes are declared dead at the same time (#115, contributed by @floxay) 29 | - Added `TransportApiResponse` 
(#160, #161, contributed by @JessicaGarson) 30 | 31 | ## 8.13.0 32 | 33 | - Added support for the HTTPX client with asyncio (#137, contributed by @b4sus) 34 | - Added optional orjson serializer support (#152) 35 | 36 | ## 8.12.0 37 | 38 | - Fix basic auth built from percent-encoded URLs (#143) 39 | 40 | ## 8.11.0 41 | 42 | - Always set default HTTPS port to 443 (#127) 43 | - Drop support for Python 3.6 (#109) 44 | - Include tests in sdist (#122, contributed by @parona-source) 45 | - Fix `__iter__` return type to Iterator (#129, contributed by @altescy) 46 | 47 | ## 8.10.0 48 | 49 | - Support urllib3 2.x in addition to urllib3 1.26.x ([#121](https://github.com/elastic/elastic-transport-python/pull/121)) 50 | - Add 409 to `NOT_DEAD_NODE_HTTP_STATUSES` ([#120](https://github.com/elastic/elastic-transport-python/pull/120)) 51 | 52 | ## 8.4.1 53 | 54 | - Fixed an issue where a large number of consecutive failures to connect to a node would raise an `OverflowError`. 55 | - Fixed an issue to ensure that `ApiResponse` can be pickled. 56 | 57 | ## 8.4.0 58 | 59 | ### Added 60 | 61 | - Added method for clients to use default ports for URL scheme. 62 | 63 | ## 8.1.2 64 | 65 | ### Fixed 66 | 67 | - Fixed issue when connecting to an IP address with HTTPS enabled would result in a `ValueError` for a mismatch between `check_hostname` and `server_hostname`. 68 | 69 | ## 8.1.1 70 | 71 | ### Fixed 72 | 73 | - Fixed `JsonSerializer` to return `None` if a response using `Content-Type: application/json` is empty instead of raising an error. 74 | 75 | ## 8.1.0 76 | 77 | ### Fixed 78 | 79 | - Fixed `Urllib3HttpNode` and `RequestsHttpNode` to never require a valid certificate chain when using `ssl_assert_fingerprint`. Instead the internal HTTP client libraries will explicitly disable verifying the certificate chain and instead rely only on the certificate fingerprint for verification. 
80 | 81 | ## 8.0.1 82 | 83 | ### Fixed 84 | 85 | - Fixed `AiohttpHttpNode` to close TLS connections that aren't properly shutdown by the server instead of leaking them 86 | - Fixed `Urllib3HttpNode` to respect `path_prefix` setting in `NodeConfig` 87 | 88 | ## 8.0.0 89 | 90 | ### Added 91 | 92 | - Added support for asyncio with `AsyncTransport` and `AiohttpHttpNode` 93 | - Added `JsonSerializer`, `NdjsonSerializer` 94 | - Added `connections_per_node` parameter to `RequestsHttpNode` 95 | - Added support for `ssl_assert_fingerprint` to `RequestsHttpNode` 96 | - Added **experimental** support for pinning non-leaf certificates 97 | via `ssl_assert_fingerprint` when using CPython 3.10+ 98 | - Added support for node discovery via "sniffing" using the 99 | `sniff_callback` transport parameter 100 | - Added ability to specify `ssl_version` via `ssl.TLSVersion` enum 101 | instead of `ssl.PROTOCOL_TLSvX` for Python 3.7+ 102 | - Added `elastic_transport.client_utils` module to help writing API clients 103 | - Added `elastic_transport.debug_logging` method to enable all logging for debugging purposes 104 | - Added option to set `requests.Session.auth` within `RequestsHttpNode` via `NodeConfig._extras['requests.session.auth']` 105 | 106 | ### Changed 107 | 108 | - Changed `*Connection` classes to use `*Node` terminology 109 | - Changed `connection_class` to `node_class` 110 | - Changed `ConnectionPool` to `NodePool` 111 | - Changed `ConnectionSelector` to `NodeSelector` 112 | - Changed `NodeSelector(randomize_hosts)` parameter to `randomize_nodes` 113 | - Changed `NodeSelector.get_connection()` method to `get()` 114 | - Changed `elastic_transport.connection` logger name to `elastic_transport.node` 115 | - Changed `Urllib3HttpNode(connections_per_host)` parameter to `connections_per_node` 116 | - Changed return type of `BaseNode.perform_request()` to `NamedTuple(meta=ApiResponseMeta, body=bytes)` 117 | - Changed return type of `Transport.perform_request()` to 
`NamedTuple(meta=ApiResponseMeta, body=Any)` 118 | - Changed name of `Deserializer` into `SerializersCollection` 119 | - Changed `ssl_version` to denote the minimum TLS version instead of the only TLS version 120 | - Changed the base class for `ApiError` to be `Exception` instead of `TransportError`. 121 | `TransportError` is now only for errors that occur at the transport layer. 122 | - Changed `Urllib3HttpNode` to block on new connections when the internal connection pool is exhausted 123 | 124 | ### Removed 125 | 126 | - Removed support for Python 2.7 127 | - Removed `DummyConnectionPool` and `EmptyConnectionPool` in favor of `NodePool`. 128 | 129 | ### Fixed 130 | 131 | - Fixed a work-around with `AiohttpHttpNode` where `method="HEAD"` requests wouldn't mark the internal connection as reusable. This work-around is no longer needed when `aiohttp>=3.7.0` is installed. 132 | - Fixed logic for splitting `aiohttp.__version__` when determining if `HEAD` bug is fixed. 133 | 134 | ## 7.15.0 (2021-09-20) 135 | 136 | Release created to be compatible with 7.15 clients 137 | 138 | ## 7.14.0 (2021-08-02) 139 | 140 | Release created to be compatible with 7.14 clients 141 | 142 | ## 7.13.0 (2021-05-24) 143 | 144 | Release created to be compatible with 7.13 clients 145 | 146 | ## 7.12.0 (2021-03-22) 147 | 148 | Release created to be compatible with 7.12 clients 149 | 150 | ## 7.11.0 (2021-02-10) 151 | 152 | ### Added 153 | 154 | - Added the `X-Elastic-Client-Meta` HTTP header ([PR #4](https://github.com/elastic/elastic-transport-python/pull/4)) 155 | - Added HTTP response headers to `Response` and `TransportError` 156 | ([PR #5](https://github.com/elastic/elastic-transport-python/pull/5)) 157 | - Added the `QueryParams` data structure for representing 158 | an ordered sequence of key-value pairs for the URL query 159 | ([PR #6](https://github.com/elastic/elastic-transport-python/pull/6)) 160 | 161 | ### Changed 162 | 163 | - Changed `Connection.perform_request()` to take 
`target` instead of 164 | `path` and `params`. Instead `path` and `params` are created within 165 | `Transport.perform_request()` ([PR #6](https://github.com/elastic/elastic-transport-python/pull/6)) 166 | 167 | ## 0.1.0b0 (2020-10-21) 168 | 169 | - Initial beta release of `elastic-transport-python` 170 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include MANIFEST.in 3 | include README.md 4 | include CHANGELOG.md 5 | include setup.py 6 | include elastic_transport/py.typed 7 | 8 | graft tests 9 | 10 | prune docs/_build 11 | recursive-exclude * __pycache__ 12 | recursive-exclude * *.py[co] 13 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Elastic Transport Library for Python 2 | Copyright 2025 Elasticsearch B.V. 
3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # elastic-transport-python 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/elastic-transport)](https://pypi.org/project/elastic-transport) 4 | [![Python Versions](https://img.shields.io/pypi/pyversions/elastic-transport)](https://pypi.org/project/elastic-transport) 5 | [![PyPI Downloads](https://static.pepy.tech/badge/elastic-transport)](https://pepy.tech/project/elastic-transport) 6 | [![CI Status](https://img.shields.io/github/actions/workflow/status/elastic/elastic-transport-python/ci.yml)](https://github.com/elastic/elastic-transport-python/actions) 7 | 8 | Transport classes and utilities shared among Python Elastic client libraries 9 | 10 | This library was lifted from [`elasticsearch-py`](https://github.com/elastic/elasticsearch-py) 11 | and then transformed to be used across all Elastic services 12 | rather than only Elasticsearch. 13 | 14 | ### Installing from PyPI 15 | 16 | ``` 17 | $ python -m pip install elastic-transport 18 | ``` 19 | 20 | Versioning follows the major and minor version of the Elastic Stack version and 21 | the patch number is incremented for bug fixes within a minor release. 22 | 23 | ## Documentation 24 | 25 | Documentation including an API reference is available on [Read the Docs](https://elastic-transport-python.readthedocs.io). 26 | 27 | ## License 28 | 29 | `elastic-transport-python` is available under the Apache-2.0 license. 30 | For more details see [LICENSE](https://github.com/elastic/elastic-transport-python/blob/main/LICENSE). 31 | -------------------------------------------------------------------------------- /docs/sphinx/client_utils.rst: -------------------------------------------------------------------------------- 1 | Client Utilities 2 | ================ 3 | 4 | Reusable utilities for creating API clients using ``elastic_transport``. 
5 | 6 | .. py:currentmodule:: elastic_transport.client_utils 7 | 8 | .. automodule:: elastic_transport.client_utils 9 | :members: 10 | -------------------------------------------------------------------------------- /docs/sphinx/conf.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import datetime 19 | import os 20 | import sys 21 | 22 | sys.path.insert(0, os.path.abspath("../..")) 23 | 24 | from elastic_transport import __version__ # noqa 25 | 26 | project = "elastic-transport" 27 | copyright = f"{datetime.date.today().year} Elasticsearch B.V." 
28 | author = "Seth Michael Larson" 29 | version = __version__ 30 | release = __version__ 31 | 32 | extensions = [ 33 | "sphinx.ext.autodoc", 34 | "sphinx.ext.intersphinx", 35 | "sphinx_autodoc_typehints", 36 | ] 37 | 38 | pygments_style = "sphinx" 39 | pygments_dark_style = "monokai" 40 | 41 | templates_path = [] 42 | exclude_patterns = [] 43 | html_theme = "furo" 44 | html_static_path = [] 45 | 46 | intersphinx_mapping = { 47 | "python": ("https://docs.python.org/3", None), 48 | "requests": ("https://docs.python-requests.org/en/latest", None), 49 | } 50 | -------------------------------------------------------------------------------- /docs/sphinx/exceptions.rst: -------------------------------------------------------------------------------- 1 | Exceptions & Warnings 2 | ===================== 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | Transport Errors 7 | ---------------- 8 | 9 | .. autoclass:: TransportError 10 | :members: 11 | 12 | .. autoclass:: TlsError 13 | :members: 14 | 15 | .. autoclass:: ConnectionError 16 | :members: 17 | 18 | .. autoclass:: ConnectionTimeout 19 | :members: 20 | 21 | .. autoclass:: SerializationError 22 | :members: 23 | 24 | .. autoclass:: SniffingError 25 | :members: 26 | 27 | .. autoclass:: ApiError 28 | :members: 29 | 30 | Warnings 31 | -------- 32 | 33 | .. py:currentmodule:: elastic_transport 34 | 35 | .. autoclass:: TransportWarning 36 | 37 | .. autoclass:: SecurityWarning 38 | -------------------------------------------------------------------------------- /docs/sphinx/index.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. 
toctree:: 5 | :maxdepth: 2 6 | 7 | installation 8 | nodes 9 | responses 10 | exceptions 11 | logging 12 | transport 13 | node_pool 14 | serializers 15 | client_utils 16 | -------------------------------------------------------------------------------- /docs/sphinx/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | Install with ``pip`` like so: 5 | 6 | ``$ python -m pip install elastic-transport`` 7 | 8 | Additional dependencies are required to use some features of the ``elastic-transport`` package. 9 | 10 | Install the ``requests`` package to use :class:`elastic_transport.RequestsHttpNode`. 11 | 12 | Install the ``aiohttp`` package to use :class:`elastic_transport.AiohttpHttpNode`. 13 | 14 | Install the ``httpx`` package to use :class:`elastic_transport.HttpxAsyncHttpNode`. 15 | -------------------------------------------------------------------------------- /docs/sphinx/logging.rst: -------------------------------------------------------------------------------- 1 | Logging 2 | ======= 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | Available loggers 7 | ----------------- 8 | 9 | - ``elastic_transport.node_pool``: Logs activity within the :class:`elastic_transport.NodePool` like nodes switching between "alive" and "dead" 10 | - ``elastic_transport.transport``: Logs requests and responses in addition to retries, errors, and sniffing. 11 | - ``elastic_transport.node``: Logs all network activity for individual :class:`elastic_transport.BaseNode` instances. This logger is recommended only for human debugging as the logs are unstructured and meant primarily for human consumption from the command line. 12 | 13 | Debugging requests and responses 14 | -------------------------------- 15 | 16 | .. autofunction:: elastic_transport.debug_logging 17 | 18 | .. warning:: 19 | 20 | This method shouldn't be enabled in production as it's extremely verbose. 
It should only be used for manual debugging.
autoclass:: NodePool 7 | :members: 8 | 9 | Node selectors 10 | -------------- 11 | 12 | .. autoclass:: NodeSelector 13 | :members: 14 | 15 | .. autoclass:: RandomSelector 16 | .. autoclass:: RoundRobinSelector 17 | -------------------------------------------------------------------------------- /docs/sphinx/nodes.rst: -------------------------------------------------------------------------------- 1 | Nodes 2 | ===== 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | Configuring nodes 7 | ----------------- 8 | 9 | .. autoclass:: elastic_transport::NodeConfig 10 | :members: 11 | 12 | 13 | Node classes 14 | ------------ 15 | 16 | .. autoclass:: Urllib3HttpNode 17 | :members: 18 | 19 | .. autoclass:: RequestsHttpNode 20 | :members: 21 | 22 | .. autoclass:: AiohttpHttpNode 23 | :members: 24 | 25 | .. autoclass:: HttpxAsyncHttpNode 26 | :members: 27 | 28 | Custom node classes 29 | ------------------- 30 | 31 | You can define your own node class like so: 32 | 33 | .. code-block:: python 34 | 35 | from typing import Optional 36 | from elastic_transport import Urllib3HttpNode, NodeConfig, ApiResponseMeta, HttpHeaders 37 | from elastic_transport.client_utils import DefaultType, DEFAULT 38 | 39 | class CustomHttpNode(Urllib3HttpNode): 40 | def perform_request( 41 | self, 42 | method: str, 43 | target: str, 44 | body: Optional[bytes] = None, 45 | headers: Optional[HttpHeaders] = None, 46 | request_timeout: Union[DefaultType, Optional[float]] = DEFAULT, 47 | ) -> Tuple[ApiResponseMeta, bytes]: 48 | # Define your HTTP request method here... 49 | 50 | and once you have a custom node class you can pass the class to :class:`elastic_transport.Transport` or an API client like so: 51 | 52 | .. 
code-block:: python 53 | 54 | # Example using a Transport instance: 55 | from elastic_transport import Transport 56 | 57 | transport = Transport(..., node_class=CustomHttpNode) 58 | 59 | # Example using an API client: 60 | from elasticsearch import Elasticsearch 61 | 62 | client = Elasticsearch(..., node_class=CustomHttpNode) 63 | -------------------------------------------------------------------------------- /docs/sphinx/responses.rst: -------------------------------------------------------------------------------- 1 | Responses 2 | ========= 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | 7 | Response headers 8 | ---------------- 9 | 10 | .. autoclass:: elastic_transport::HttpHeaders 11 | :members: freeze 12 | 13 | Metadata 14 | -------- 15 | 16 | .. autoclass:: ApiResponseMeta 17 | :members: 18 | 19 | Response classes 20 | ---------------- 21 | 22 | .. autoclass:: ApiResponse 23 | :members: 24 | 25 | .. autoclass:: BinaryApiResponse 26 | :members: 27 | :show-inheritance: 28 | 29 | .. autoclass:: HeadApiResponse 30 | :members: 31 | :show-inheritance: 32 | 33 | .. autoclass:: ListApiResponse 34 | :members: 35 | :show-inheritance: 36 | 37 | .. autoclass:: ObjectApiResponse 38 | :members: 39 | :show-inheritance: 40 | 41 | .. autoclass:: TextApiResponse 42 | :members: 43 | :show-inheritance: 44 | -------------------------------------------------------------------------------- /docs/sphinx/serializers.rst: -------------------------------------------------------------------------------- 1 | Serializers 2 | =========== 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | .. autoclass:: Serializer 7 | :members: 8 | 9 | .. autoclass:: JsonSerializer 10 | :members: 11 | 12 | .. autoclass:: OrjsonSerializer 13 | :members: 14 | 15 | .. autoclass:: TextSerializer 16 | :members: 17 | 18 | .. 
autoclass:: NdjsonSerializer 19 | :members: 20 | -------------------------------------------------------------------------------- /docs/sphinx/transport.rst: -------------------------------------------------------------------------------- 1 | Transport 2 | ========= 3 | 4 | .. py:currentmodule:: elastic_transport 5 | 6 | .. autoclass:: Transport 7 | :members: 8 | 9 | .. autoclass:: AsyncTransport 10 | :members: 11 | -------------------------------------------------------------------------------- /elastic_transport/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | """Transport classes and utilities shared among Python Elastic client libraries""" 19 | 20 | import logging 21 | 22 | from ._async_transport import AsyncTransport as AsyncTransport 23 | from ._exceptions import ( 24 | ApiError, 25 | ConnectionError, 26 | ConnectionTimeout, 27 | SecurityWarning, 28 | SerializationError, 29 | SniffingError, 30 | TlsError, 31 | TransportError, 32 | TransportWarning, 33 | ) 34 | from ._models import ApiResponseMeta, HttpHeaders, NodeConfig, SniffOptions 35 | from ._node import ( 36 | AiohttpHttpNode, 37 | BaseAsyncNode, 38 | BaseNode, 39 | HttpxAsyncHttpNode, 40 | RequestsHttpNode, 41 | Urllib3HttpNode, 42 | ) 43 | from ._node_pool import NodePool, NodeSelector, RandomSelector, RoundRobinSelector 44 | from ._otel import OpenTelemetrySpan 45 | from ._response import ApiResponse as ApiResponse 46 | from ._response import BinaryApiResponse as BinaryApiResponse 47 | from ._response import HeadApiResponse as HeadApiResponse 48 | from ._response import ListApiResponse as ListApiResponse 49 | from ._response import ObjectApiResponse as ObjectApiResponse 50 | from ._response import TextApiResponse as TextApiResponse 51 | from ._serializer import ( 52 | JsonSerializer, 53 | NdjsonSerializer, 54 | Serializer, 55 | SerializerCollection, 56 | TextSerializer, 57 | ) 58 | from ._transport import Transport as Transport 59 | from ._transport import TransportApiResponse 60 | from ._utils import fixup_module_metadata 61 | from ._version import __version__ as __version__ # noqa 62 | 63 | __all__ = [ 64 | "AiohttpHttpNode", 65 | "ApiError", 66 | "ApiResponse", 67 | "ApiResponseMeta", 68 | "AsyncTransport", 69 | "BaseAsyncNode", 70 | "BaseNode", 71 | "BinaryApiResponse", 72 | "ConnectionError", 73 | "ConnectionTimeout", 74 | "HeadApiResponse", 75 | "HttpHeaders", 76 | "HttpxAsyncHttpNode", 77 | "JsonSerializer", 78 | "ListApiResponse", 79 | "NdjsonSerializer", 80 | "NodeConfig", 81 | "NodePool", 82 | "NodeSelector", 83 | "ObjectApiResponse", 84 | 
def debug_logging() -> None:
    """Enables logging on all ``elastic_transport.*`` loggers and attaches a
    :class:`logging.StreamHandler` instance to each. This is an easy way to
    visualize the network activity occurring on the client or debug a client issue.
    """
    # One shared handler/formatter pair for all transport loggers.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter("[%(asctime)s] %(message)s", datefmt="%Y-%m-%dT%H:%M:%S")
    )
    for logger_name in (
        "elastic_transport.node",
        "elastic_transport.node_pool",
        "elastic_transport.transport",
    ):
        logger = logging.getLogger(logger_name)
        logger.addHandler(stream_handler)
        logger.setLevel(logging.DEBUG)
Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import inspect 19 | import sys 20 | from pathlib import Path 21 | from typing import Any, Awaitable, TypeVar, Union 22 | from urllib.parse import quote as _quote 23 | from urllib.parse import urlencode, urlparse 24 | 25 | string_types = (str, bytes) 26 | 27 | T = TypeVar("T") 28 | 29 | 30 | async def await_if_coro(coro: Union[T, Awaitable[T]]) -> T: 31 | if inspect.iscoroutine(coro): 32 | return await coro # type: ignore 33 | return coro # type: ignore 34 | 35 | 36 | _QUOTE_ALWAYS_SAFE = frozenset( 37 | "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.-~" 38 | ) 39 | 40 | 41 | def quote(string: str, safe: str = "/") -> str: 42 | # Redefines 'urllib.parse.quote()' to always have the '~' character 43 | # within the 'ALWAYS_SAFE' list. 
The character was added in Python 3.7 44 | safe = "".join(_QUOTE_ALWAYS_SAFE.union(set(safe))) 45 | return _quote(string, safe) 46 | 47 | 48 | try: 49 | from threading import Lock 50 | except ImportError: 51 | 52 | class Lock: # type: ignore 53 | def __enter__(self) -> None: 54 | pass 55 | 56 | def __exit__(self, *_: Any) -> None: 57 | pass 58 | 59 | def acquire(self, _: bool = True) -> bool: 60 | return True 61 | 62 | def release(self) -> None: 63 | pass 64 | 65 | 66 | def warn_stacklevel() -> int: 67 | """Dynamically determine warning stacklevel for warnings based on the call stack""" 68 | try: 69 | # Grab the root module from the current module '__name__' 70 | module_name = __name__.partition(".")[0] 71 | module_path = Path(sys.modules[module_name].__file__) # type: ignore[arg-type] 72 | 73 | # If the module is a folder we're looking at 74 | # subdirectories, otherwise we're looking for 75 | # an exact match. 76 | module_is_folder = module_path.name == "__init__.py" 77 | if module_is_folder: 78 | module_path = module_path.parent 79 | 80 | # Look through frames until we find a file that 81 | # isn't a part of our module, then return that stacklevel. 82 | for level, frame in enumerate(inspect.stack()): 83 | # Garbage collecting frames 84 | frame_filename = Path(frame.filename) 85 | del frame 86 | 87 | if ( 88 | # If the module is a folder we look at subdirectory 89 | module_is_folder 90 | and module_path not in frame_filename.parents 91 | ) or ( 92 | # Otherwise we're looking for an exact match. 
93 | not module_is_folder 94 | and module_path != frame_filename 95 | ): 96 | return level 97 | except KeyError: 98 | pass 99 | return 0 100 | 101 | 102 | __all__ = [ 103 | "await_if_coro", 104 | "quote", 105 | "urlparse", 106 | "urlencode", 107 | "string_types", 108 | "Lock", 109 | ] 110 | -------------------------------------------------------------------------------- /elastic_transport/_exceptions.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | from typing import Any, Tuple 19 | 20 | from ._models import ApiResponseMeta 21 | 22 | 23 | class TransportWarning(Warning): 24 | """Generic warning for the 'elastic-transport' package.""" 25 | 26 | 27 | class SecurityWarning(TransportWarning): 28 | """Warning for potentially insecure configurations.""" 29 | 30 | 31 | class TransportError(Exception): 32 | """Generic exception for the 'elastic-transport' package. 33 | 34 | For the 'errors' attribute, errors are ordered from 35 | most recently raised (index=0) to least recently raised (index=N) 36 | 37 | If an HTTP status code is available with the error it 38 | will be stored under 'status'. 
class TransportError(Exception):
    """Generic exception for the 'elastic-transport' package.

    For the 'errors' attribute, errors are ordered from
    most recently raised (index=0) to least recently raised (index=N)

    If an HTTP status code is available with the error it
    will be stored under 'status'. If HTTP headers are available
    they are stored under 'headers'.
    """

    def __init__(self, message: Any, errors: Tuple[Exception, ...] = ()):
        """
        :param message: Human-readable description of the error.
        :param errors: Underlying exceptions, most recently raised first.
        """
        super().__init__(message)
        # Normalize to a tuple so 'errors' is always immutable.
        self.errors = tuple(errors)
        self.message = message

    def __repr__(self) -> str:
        parts = [repr(self.message)]
        if self.errors:
            parts.append(f"errors={self.errors!r}")
        return "{}({})".format(self.__class__.__name__, ", ".join(parts))

    def __str__(self) -> str:
        return str(self.message)


class SniffingError(TransportError):
    """Error that occurs during the sniffing of nodes"""


class SerializationError(TransportError):
    """Error that occurred during the serialization or
    deserialization of an HTTP message body
    """


class ConnectionError(TransportError):
    """Error raised by the HTTP connection"""

    def __str__(self) -> str:
        if self.errors:
            return f"Connection error caused by: {self.errors[0].__class__.__name__}({self.errors[0]})"
        return "Connection error"


class TlsError(ConnectionError):
    """Error raised by during the TLS handshake"""

    def __str__(self) -> str:
        if self.errors:
            return f"TLS error caused by: {self.errors[0].__class__.__name__}({self.errors[0]})"
        return "TLS error"


class ConnectionTimeout(TransportError):
    """Connection timed out during an operation"""

    def __str__(self) -> str:
        if self.errors:
            return f"Connection timeout caused by: {self.errors[0].__class__.__name__}({self.errors[0]})"
        return "Connection timed out"


class ApiError(Exception):
    """Base-class for clients that raise errors due to a response such as '404 Not Found'"""

    def __init__(
        self,
        message: str,
        meta: ApiResponseMeta,
        body: Any,
        errors: Tuple[Exception, ...] = (),
    ):
        """
        :param message: Human-readable description of the error.
        :param meta: Response metadata (status, headers, node, ...).
        :param body: Deserialized response body that carried the error.
        :param errors: Underlying exceptions, most recently raised first.
        """
        super().__init__(message)
        self.message = message
        # Coerce to a tuple for consistency with TransportError.errors
        # so the attribute is always immutable regardless of caller input.
        self.errors = tuple(errors)
        self.meta = meta
        self.body = body

    def __repr__(self) -> str:
        parts = [repr(self.message)]
        if self.meta:
            parts.append(f"meta={self.meta!r}")
        if self.errors:
            parts.append(f"errors={self.errors!r}")
        if self.body is not None:
            parts.append(f"body={self.body!r}")
        return "{}({})".format(self.__class__.__name__, ", ".join(parts))

    def __str__(self) -> str:
        return f"[{self.meta.status}] {self.message}"
17 | 18 | from ._base import BaseNode, NodeApiResponse 19 | from ._base_async import BaseAsyncNode 20 | from ._http_aiohttp import AiohttpHttpNode 21 | from ._http_httpx import HttpxAsyncHttpNode 22 | from ._http_requests import RequestsHttpNode 23 | from ._http_urllib3 import Urllib3HttpNode 24 | 25 | __all__ = [ 26 | "AiohttpHttpNode", 27 | "BaseNode", 28 | "BaseAsyncNode", 29 | "NodeApiResponse", 30 | "RequestsHttpNode", 31 | "Urllib3HttpNode", 32 | "HttpxAsyncHttpNode", 33 | ] 34 | -------------------------------------------------------------------------------- /elastic_transport/_node/_base_async.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
from typing import Optional, Union

from .._models import HttpHeaders
from ..client_utils import DEFAULT, DefaultType
from ._base import BaseNode, NodeApiResponse


class BaseAsyncNode(BaseNode):
    """Base class for Async HTTP node implementations

    Mirrors the :class:`BaseNode` interface with coroutine methods.
    Concrete subclasses must override both :meth:`perform_request`
    and :meth:`close`.
    """

    async def perform_request(  # type: ignore[override]
        self,
        method: str,
        target: str,
        body: Optional[bytes] = None,
        headers: Optional[HttpHeaders] = None,
        request_timeout: Union[DefaultType, Optional[float]] = DEFAULT,
    ) -> NodeApiResponse:
        """Perform a single HTTP round-trip against this node.

        :param method: HTTP method, e.g. ``"GET"``.
        :param target: Request target (path plus optional query string).
        :param body: Optional pre-serialized request body bytes.
        :param headers: Optional headers merged over the node defaults.
        :param request_timeout: Per-request timeout overriding the node
            default; the ``DEFAULT`` sentinel means "use the node setting".
        :returns: The response metadata and raw body bytes.
        """
        # Abstract coroutine: subclasses implement the actual HTTP call.
        raise NotImplementedError()  # pragma: nocover

    async def close(self) -> None:  # type: ignore[override]
        # Abstract coroutine: subclasses release their HTTP client resources.
        raise NotImplementedError()  # pragma: nocover
import gzip
import os.path
import ssl
import time
import warnings
from typing import Literal, Optional, Union

from .._compat import warn_stacklevel
from .._exceptions import ConnectionError, ConnectionTimeout, SecurityWarning, TlsError
from .._models import ApiResponseMeta, HttpHeaders, NodeConfig
from ..client_utils import DEFAULT, DefaultType, client_meta_version
from ._base import (
    BUILTIN_EXCEPTIONS,
    DEFAULT_CA_CERTS,
    RERAISE_EXCEPTIONS,
    NodeApiResponse,
    ssl_context_from_node_config,
)
from ._base_async import BaseAsyncNode

# 'httpx' is an optional dependency: record availability and its version
# so the client-meta header can report which HTTP client is in use.
try:
    import httpx

    _HTTPX_AVAILABLE = True
    _HTTPX_META_VERSION = client_meta_version(httpx.__version__)
except ImportError:
    _HTTPX_AVAILABLE = False
    _HTTPX_META_VERSION = ""


class HttpxAsyncHttpNode(BaseAsyncNode):
    """Asynchronous node implementation backed by the 'httpx' library."""

    # ("hx", version) identifies httpx in the client metadata header.
    _CLIENT_META_HTTP_CLIENT = ("hx", _HTTPX_META_VERSION)

    def __init__(self, config: NodeConfig):
        """
        :param config: Node configuration (scheme, host, port, TLS options, ...).
        :raises ValueError: if 'httpx' isn't installed, if certificate
            fingerprint pinning is requested (unsupported by httpx), or if
            TLS file paths are invalid.
        """
        if not _HTTPX_AVAILABLE:  # pragma: nocover
            raise ValueError("You must have 'httpx' installed to use HttpxNode")
        super().__init__(config)

        if config.ssl_assert_fingerprint:
            # httpx has no hook for fingerprint pinning, so fail loudly
            # instead of silently skipping the check.
            raise ValueError(
                "httpx does not support certificate pinning. https://github.com/encode/httpx/issues/761"
            )

        # 'False' doubles as the "no TLS" sentinel passed to httpx's 'verify='.
        ssl_context: Union[ssl.SSLContext, Literal[False]] = False
        if config.scheme == "https":
            if config.ssl_context is not None:
                # Caller supplied their own SSLContext: use it as-is and
                # skip the CA-certificate resolution below.
                ssl_context = ssl_context_from_node_config(config)
            else:
                ssl_context = ssl_context_from_node_config(config)

                # Resolve the sentinel 'None' to the certifi bundle (if any).
                ca_certs = (
                    DEFAULT_CA_CERTS if config.ca_certs is None else config.ca_certs
                )
                if config.verify_certs:
                    if not ca_certs:
                        raise ValueError(
                            "Root certificates are missing for certificate "
                            "validation. Either pass them in using the ca_certs parameter or "
                            "install certifi to use it automatically."
                        )
                else:
                    if config.ssl_show_warn:
                        warnings.warn(
                            f"Connecting to {self.base_url!r} using TLS with verify_certs=False is insecure",
                            stacklevel=warn_stacklevel(),
                            category=SecurityWarning,
                        )

                # Load CA material from either a bundle file or a directory.
                if ca_certs is not None:
                    if os.path.isfile(ca_certs):
                        ssl_context.load_verify_locations(cafile=ca_certs)
                    elif os.path.isdir(ca_certs):
                        ssl_context.load_verify_locations(capath=ca_certs)
                    else:
                        raise ValueError("ca_certs parameter is not a path")

            # Use client_cert and client_key variables for SSL certificate configuration.
            if config.client_cert and not os.path.isfile(config.client_cert):
                raise ValueError("client_cert is not a path to a file")
            if config.client_key and not os.path.isfile(config.client_key):
                raise ValueError("client_key is not a path to a file")
            if config.client_cert and config.client_key:
                ssl_context.load_cert_chain(config.client_cert, config.client_key)
            elif config.client_cert:
                # Cert file may bundle the private key when no key is given.
                ssl_context.load_cert_chain(config.client_cert)

        # NOTE(review): unlike Urllib3HttpNode, no 'path_prefix' handling is
        # visible here or in perform_request() — confirm whether httpx's
        # base_url is expected to carry the prefix upstream.
        self.client = httpx.AsyncClient(
            base_url=f"{config.scheme}://{config.host}:{config.port}",
            limits=httpx.Limits(max_connections=config.connections_per_node),
            verify=ssl_context or False,
            timeout=config.request_timeout,
        )

    async def perform_request(  # type: ignore[override]
        self,
        method: str,
        target: str,
        body: Optional[bytes] = None,
        headers: Optional[HttpHeaders] = None,
        request_timeout: Union[DefaultType, Optional[float]] = DEFAULT,
    ) -> NodeApiResponse:
        """Send one HTTP request via the shared httpx client.

        :param method: HTTP method, e.g. ``"GET"``.
        :param target: Request target (path plus optional query string).
        :param body: Optional pre-serialized request body bytes.
        :param headers: Optional headers merged over the node defaults.
        :param request_timeout: Per-request timeout; the ``DEFAULT`` sentinel
            means "use the client-level timeout".
        :raises ConnectionTimeout: on request timeout.
        :raises TlsError: on TLS handshake/verification failure.
        :raises ConnectionError: on any other transport failure.
        """
        # Per-request headers are merged over the node-level defaults.
        resolved_headers = self._headers.copy()
        if headers:
            resolved_headers.update(headers)

        resolved_body: Optional[bytes]
        if body:
            if self._http_compress:
                resolved_body = gzip.compress(body)
                resolved_headers["content-encoding"] = "gzip"
            else:
                resolved_body = body
        else:
            resolved_body = None

        try:
            start = time.perf_counter()
            # Only pass 'timeout=' when explicitly overridden so the
            # client-level default stays in effect otherwise.
            if request_timeout is DEFAULT:
                resp = await self.client.request(
                    method,
                    target,
                    content=resolved_body,
                    headers=dict(resolved_headers),
                )
            else:
                resp = await self.client.request(
                    method,
                    target,
                    content=resolved_body,
                    headers=dict(resolved_headers),
                    timeout=request_timeout,
                )
            response_body = resp.read()
            duration = time.perf_counter() - start
        except RERAISE_EXCEPTIONS + BUILTIN_EXCEPTIONS:
            raise
        except Exception as e:
            # Map library-specific failures onto the transport exception
            # hierarchy so callers don't depend on httpx types.
            err: Exception
            if isinstance(e, (TimeoutError, httpx.TimeoutException)):
                err = ConnectionTimeout(
                    "Connection timed out during request", errors=(e,)
                )
            elif isinstance(e, ssl.SSLError):
                err = TlsError(str(e), errors=(e,))
            # Detect SSL errors for httpx v0.28.0+
            # Needed until https://github.com/encode/httpx/issues/3350 is fixed
            elif isinstance(e, httpx.ConnectError) and e.__cause__:
                context = e.__cause__.__context__
                if isinstance(context, ssl.SSLError):
                    err = TlsError(str(context), errors=(e,))
                else:
                    err = ConnectionError(str(e), errors=(e,))
            else:
                err = ConnectionError(str(e), errors=(e,))
            self._log_request(
                method=method,
                target=target,
                headers=resolved_headers,
                body=body,
                exception=err,
            )
            # 'from None' keeps the mapped error as the sole traceback entry.
            raise err from None

        meta = ApiResponseMeta(
            resp.status_code,
            resp.http_version,
            HttpHeaders(resp.headers),
            duration,
            self.config,
        )

        self._log_request(
            method=method,
            target=target,
            headers=resolved_headers,
            body=body,
            meta=meta,
            response=response_body,
        )

        return NodeApiResponse(meta, response_body)

    async def close(self) -> None:  # type: ignore[override]
        """Close the underlying httpx client and its pooled connections."""
        await self.client.aclose()
-------------------------------------------------------------------------------- /elastic_transport/_node/_http_urllib3.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
import gzip
import ssl
import time
import warnings
from typing import Any, Dict, Optional, Union

try:
    from importlib import metadata
except ImportError:
    import importlib_metadata as metadata  # type: ignore[no-redef]

import urllib3
from urllib3.exceptions import ConnectTimeoutError, NewConnectionError, ReadTimeoutError
from urllib3.util.retry import Retry

from .._compat import warn_stacklevel
from .._exceptions import ConnectionError, ConnectionTimeout, SecurityWarning, TlsError
from .._models import ApiResponseMeta, HttpHeaders, NodeConfig
from ..client_utils import DEFAULT, DefaultType, client_meta_version
from ._base import (
    BUILTIN_EXCEPTIONS,
    DEFAULT_CA_CERTS,
    RERAISE_EXCEPTIONS,
    BaseNode,
    NodeApiResponse,
    ssl_context_from_node_config,
)

# Prefer the chain-fingerprinting pool (CPython 3.10+ only); fall back to
# the stock urllib3 pool everywhere else.
try:
    from ._urllib3_chain_certs import HTTPSConnectionPool
except (ImportError, AttributeError):
    HTTPSConnectionPool = urllib3.HTTPSConnectionPool  # type: ignore[assignment,misc]


class Urllib3HttpNode(BaseNode):
    """Default synchronous node class using the ``urllib3`` library via HTTP"""

    # ("ur", version) identifies urllib3 in the client metadata header.
    _CLIENT_META_HTTP_CLIENT = ("ur", client_meta_version(metadata.version("urllib3")))

    def __init__(self, config: NodeConfig):
        """
        :param config: Node configuration (scheme, host, port, TLS options, ...).
        :raises ValueError: for conflicting or incomplete TLS settings.
        """
        super().__init__(config)

        pool_class = urllib3.HTTPConnectionPool
        kw: Dict[str, Any] = {}

        if config.scheme == "https":
            pool_class = HTTPSConnectionPool
            ssl_context = ssl_context_from_node_config(config)
            kw["ssl_context"] = ssl_context

            if config.ssl_assert_hostname and config.ssl_assert_fingerprint:
                raise ValueError(
                    "Can't specify both 'ssl_assert_hostname' and 'ssl_assert_fingerprint'"
                )

            # Fingerprint verification doesn't require CA certificates being loaded.
            # We also want to disable other verification methods as we only care
            # about the fingerprint of the certificates, not whether they form
            # a verified chain to a trust anchor.
            elif config.ssl_assert_fingerprint:
                # Manually disable these in the right order on the SSLContext
                # so urllib3 won't think we want conflicting things.
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE

                kw.update(
                    {
                        "assert_fingerprint": config.ssl_assert_fingerprint,
                        "assert_hostname": False,
                        "cert_reqs": "CERT_NONE",
                    }
                )

            else:
                kw["assert_hostname"] = config.ssl_assert_hostname

                # Convert all sentinel values to their actual default
                # values if not using an SSLContext.
                ca_certs = (
                    DEFAULT_CA_CERTS if config.ca_certs is None else config.ca_certs
                )
                if config.verify_certs:
                    if not ca_certs:
                        raise ValueError(
                            "Root certificates are missing for certificate "
                            "validation. Either pass them in using the ca_certs parameter or "
                            "install certifi to use it automatically."
                        )

                    kw.update(
                        {
                            "cert_reqs": "CERT_REQUIRED",
                            "ca_certs": ca_certs,
                            "cert_file": config.client_cert,
                            "key_file": config.client_key,
                        }
                    )
                else:
                    kw["cert_reqs"] = "CERT_NONE"

                    if config.ssl_show_warn:
                        warnings.warn(
                            f"Connecting to {self.base_url!r} using TLS with verify_certs=False is insecure",
                            stacklevel=warn_stacklevel(),
                            category=SecurityWarning,
                        )
                    else:
                        # User opted out of the warning entirely: silence
                        # urllib3's own insecure-request warnings too.
                        urllib3.disable_warnings()

        # block=True caps concurrent connections at 'connections_per_node'.
        self.pool = pool_class(
            config.host,
            port=config.port,
            timeout=urllib3.Timeout(total=config.request_timeout),
            maxsize=config.connections_per_node,
            block=True,
            **kw,
        )

    def perform_request(
        self,
        method: str,
        target: str,
        body: Optional[bytes] = None,
        headers: Optional[HttpHeaders] = None,
        request_timeout: Union[DefaultType, Optional[float]] = DEFAULT,
    ) -> NodeApiResponse:
        """Send one HTTP request via the pooled urllib3 connections.

        :param method: HTTP method, e.g. ``"GET"``.
        :param target: Request target (path plus optional query string).
        :param body: Optional pre-serialized request body bytes.
        :param headers: Optional headers merged over the node defaults.
        :param request_timeout: Per-request timeout; the ``DEFAULT`` sentinel
            means "use the pool-level timeout".
        :raises ConnectionTimeout: on connect/read timeout.
        :raises TlsError: on TLS handshake/verification failure.
        :raises ConnectionError: on any other transport failure.
        """
        if self.path_prefix:
            target = f"{self.path_prefix}{target}"

        # Use a monotonic high-resolution clock so the reported duration
        # can't be skewed by system clock adjustments. This also matches
        # HttpxAsyncHttpNode which times requests with time.perf_counter().
        start = time.perf_counter()
        try:
            kw = {}
            if request_timeout is not DEFAULT:
                kw["timeout"] = request_timeout

            request_headers = self._headers.copy()
            if headers:
                request_headers.update(headers)

            body_to_send: Optional[bytes]
            if body:
                if self._http_compress:
                    body_to_send = gzip.compress(body)
                    request_headers["content-encoding"] = "gzip"
                else:
                    body_to_send = body
            else:
                body_to_send = None

            # Retry(False) disables urllib3-level retries: retrying is the
            # Transport's responsibility, not the node's.
            response = self.pool.urlopen(
                method,
                target,
                body=body_to_send,
                retries=Retry(False),
                headers=request_headers,
                **kw,  # type: ignore[arg-type]
            )
            response_headers = HttpHeaders(response.headers)
            data = response.data
            duration = time.perf_counter() - start

        except RERAISE_EXCEPTIONS:
            raise
        except Exception as e:
            # Map urllib3-specific failures onto the transport exception
            # hierarchy so callers don't depend on urllib3 types.
            err: Exception
            if isinstance(e, NewConnectionError):
                err = ConnectionError(str(e), errors=(e,))
            elif isinstance(e, (ConnectTimeoutError, ReadTimeoutError)):
                err = ConnectionTimeout(
                    "Connection timed out during request", errors=(e,)
                )
            elif isinstance(e, (ssl.SSLError, urllib3.exceptions.SSLError)):
                err = TlsError(str(e), errors=(e,))
            elif isinstance(e, BUILTIN_EXCEPTIONS):
                raise
            else:
                err = ConnectionError(str(e), errors=(e,))
            self._log_request(
                method=method,
                target=target,
                headers=request_headers,
                body=body,
                exception=err,
            )
            raise err from e

        meta = ApiResponseMeta(
            node=self.config,
            duration=duration,
            http_version="1.1",
            status=response.status,
            headers=response_headers,
        )
        self._log_request(
            method=method,
            target=target,
            headers=request_headers,
            body=body,
            meta=meta,
            response=data,
        )
        return NodeApiResponse(
            meta,
            data,
        )

    def close(self) -> None:
        """
        Explicitly closes connection
        """
        self.pool.close()
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import hashlib
import sys
from binascii import hexlify, unhexlify
from hmac import compare_digest
from typing import Any, List, Optional

import _ssl  # type: ignore
import urllib3
import urllib3.connection

from ._base import RERAISE_EXCEPTIONS

# This module depends on private CPython ssl internals (see below), so
# refuse to import on other versions/implementations; the caller falls
# back to the stock urllib3 pool on ImportError.
if sys.version_info < (3, 10) or sys.implementation.name != "cpython":
    raise ImportError("Only supported on CPython 3.10+")

_ENCODING_DER: int = _ssl.ENCODING_DER
# Maps fingerprint hex-digest length -> hash constructor (MD5/SHA-1/SHA-256).
_HASHES_BY_LENGTH = {32: hashlib.md5, 40: hashlib.sha1, 64: hashlib.sha256}

__all__ = ["HTTPSConnectionPool"]


class HTTPSConnection(urllib3.connection.HTTPSConnection):
    """HTTPSConnection that marks itself verified when fingerprint
    pinning is handled by the pool instead of urllib3.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Set by HTTPSConnectionPool._new_conn() when pinning is active.
        self._elastic_assert_fingerprint: Optional[str] = None
        super().__init__(*args, **kwargs)

    def connect(self) -> None:
        super().connect()
        # Hack to prevent a warning within HTTPSConnectionPool._validate_conn()
        if self._elastic_assert_fingerprint:
            self.is_verified = True


class HTTPSConnectionPool(urllib3.HTTPSConnectionPool):
    ConnectionCls = HTTPSConnection

    # NOTE(review): this string sits below the ConnectionCls assignment, so
    # it is a bare expression rather than the class __doc__ — kept in place
    # to avoid any behavior change.
    """HTTPSConnectionPool implementation which supports ``assert_fingerprint``
    on certificates within the chain instead of only the leaf cert using private
    APIs in CPython 3.10+
    """

    def __init__(
        self, *args: Any, assert_fingerprint: Optional[str] = None, **kwargs: Any
    ) -> None:
        # Normalize "AA:BB:..." style fingerprints to bare lowercase hex.
        self._elastic_assert_fingerprint = (
            assert_fingerprint.replace(":", "").lower() if assert_fingerprint else None
        )

        # Complain about fingerprint length earlier than urllib3 does.
        if (
            self._elastic_assert_fingerprint
            and len(self._elastic_assert_fingerprint) not in _HASHES_BY_LENGTH
        ):
            valid_lengths = "', '".join(map(str, sorted(_HASHES_BY_LENGTH.keys())))
            raise ValueError(
                f"Fingerprint of invalid length '{len(self._elastic_assert_fingerprint)}'"
                f", should be one of '{valid_lengths}'"
            )

        if self._elastic_assert_fingerprint:
            # Skip fingerprinting by urllib3 as we'll do it ourselves
            kwargs["assert_fingerprint"] = None

        super().__init__(*args, **kwargs)

    def _new_conn(self) -> HTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPSConnection`.
        """
        conn: HTTPSConnection = super()._new_conn()  # type: ignore[assignment]
        # Tell our custom connection if we'll assert fingerprint ourselves
        conn._elastic_assert_fingerprint = self._elastic_assert_fingerprint
        return conn

    def _validate_conn(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """
        Called right before a request is made, after the socket is created.

        Performs fingerprint pinning against every certificate in the
        verified chain (peer first), not just the leaf certificate.

        :raises urllib3.exceptions.SSLError: if no certificate in the
            chain matches the pinned fingerprint.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        if self._elastic_assert_fingerprint:
            # Digest length determines which hash the caller pinned with.
            hash_func = _HASHES_BY_LENGTH[len(self._elastic_assert_fingerprint)]
            assert_fingerprint = unhexlify(
                self._elastic_assert_fingerprint.lower()
                .replace(":", "")
                .encode("ascii")
            )

            fingerprints: List[bytes]
            try:
                if sys.version_info >= (3, 13):
                    # 3.13 exposes get_verified_chain() publicly and returns
                    # DER bytes directly.
                    fingerprints = [
                        hash_func(cert).digest()
                        for cert in conn.sock.get_verified_chain()  # type: ignore
                    ]
                else:
                    # 'get_verified_chain()' and 'Certificate.public_bytes()' are private APIs
                    # in CPython 3.10. They're not documented anywhere yet but seem to work
                    # and we need them for Security on by Default so... onwards we go!
                    # See: https://github.com/python/cpython/pull/25467
                    fingerprints = [
                        hash_func(cert.public_bytes(_ENCODING_DER)).digest()
                        for cert in conn.sock._sslobj.get_verified_chain()  # type: ignore[union-attr]
                    ]
            except RERAISE_EXCEPTIONS:  # pragma: nocover
                raise
            # Because these are private APIs we are super careful here
            # so that if anything "goes wrong" we fallback on the old behavior.
            except Exception:  # pragma: nocover
                fingerprints = []

            # Only add the peercert in front of the chain if it's not there for some reason.
            # This is to make sure old behavior of 'ssl_assert_fingerprint' still works.
            peercert_fingerprint = hash_func(conn.sock.getpeercert(True)).digest()  # type: ignore[union-attr]
            if peercert_fingerprint not in fingerprints:  # pragma: nocover
                fingerprints.insert(0, peercert_fingerprint)

            # If any match then that's a success! We always run them
            # all through though because of constant time concerns.
            success = False
            for fingerprint in fingerprints:
                success |= compare_digest(fingerprint, assert_fingerprint)

            # Give users all the fingerprints we checked against in
            # order of peer -> root CA.
            if not success:
                raise urllib3.exceptions.SSLError(
                    'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
                        self._elastic_assert_fingerprint,
                        '", "'.join([x.decode() for x in map(hexlify, fingerprints)]),
                    )
                )
            conn.is_verified = success
See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | from __future__ import annotations 19 | 20 | from typing import TYPE_CHECKING, Literal, Mapping 21 | 22 | if TYPE_CHECKING: 23 | from opentelemetry.trace import Span 24 | 25 | 26 | # A list of the Elasticsearch endpoints that qualify as "search" endpoints. The search query in 27 | # the request body may be captured for these endpoints, depending on the body capture strategy. 
# Elasticsearch endpoints that qualify as "search" endpoints. The search query
# in the request body may be captured for these endpoints, depending on the
# body capture strategy.
SEARCH_ENDPOINTS = (
    "search",
    "async_search.submit",
    "msearch",
    "eql.search",
    "esql.query",
    "terms_enum",
    "search_template",
    "msearch_template",
    "render_search_template",
)


class OpenTelemetrySpan:
    """Thin wrapper around an optional OpenTelemetry span.

    Every setter is a no-op when no span was provided, so callers never have
    to check whether tracing is enabled before recording attributes.
    """

    def __init__(
        self,
        otel_span: Span | None,
        endpoint_id: str | None = None,
        body_strategy: Literal["omit", "raw"] = "omit",
    ):
        self.otel_span = otel_span
        self.body_strategy = body_strategy
        self.endpoint_id = endpoint_id

    def set_node_metadata(
        self, host: str, port: int, base_url: str, target: str
    ) -> None:
        """Record which node served the request."""
        span = self.otel_span
        if span is None:
            return

        # url.full does not contain auth info, which is passed as headers.
        span.set_attribute("url.full", base_url + target)
        span.set_attribute("server.address", host)
        span.set_attribute("server.port", port)

    def set_elastic_cloud_metadata(self, headers: Mapping[str, str]) -> None:
        """Record Elastic Cloud cluster/node identity from response headers."""
        span = self.otel_span
        if span is None:
            return

        for header, attribute in (
            ("X-Found-Handling-Cluster", "db.elasticsearch.cluster.name"),
            ("X-Found-Handling-Instance", "db.elasticsearch.node.name"),
        ):
            value = headers.get(header)
            if value is not None:
                span.set_attribute(attribute, value)

    def set_db_statement(self, serialized_body: bytes) -> None:
        """Capture the request body for search endpoints when allowed."""
        if self.otel_span is None or self.body_strategy == "omit":
            return
        if self.body_strategy == "raw" and self.endpoint_id in SEARCH_ENDPOINTS:
            self.otel_span.set_attribute(
                "db.statement", serialized_body.decode("utf-8")
            )
# under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from typing import (
    Any,
    Dict,
    Generic,
    Iterator,
    List,
    NoReturn,
    Tuple,
    TypeVar,
    Union,
    overload,
)

from ._models import ApiResponseMeta

_BodyType = TypeVar("_BodyType")
_ObjectBodyType = TypeVar("_ObjectBodyType")
_ListItemBodyType = TypeVar("_ListItemBodyType")


class ApiResponse(Generic[_BodyType]):
    """Base class for all API response classes"""

    # Responses are created in large numbers; __slots__ keeps them small.
    __slots__ = ("_body", "_meta")

    def __init__(
        self,
        *args: Any,
        **kwargs: Any,
    ):
        """Accepts either two positional arguments ``(body, meta)`` or the
        keyword arguments ``body=`` (or legacy ``raw=``) and ``meta=``.
        """

        def _raise_typeerror() -> NoReturn:
            # 'from None' hides the internal KeyError/unpacking context so
            # users only see the friendly TypeError.
            raise TypeError("Must pass 'meta' and 'body' to ApiResponse") from None

        # Working around pre-releases of elasticsearch-python
        # that would use raw=... instead of body=...
        try:
            if bool(args) == bool(kwargs):
                # Mixed positional+keyword calls (or no arguments) are rejected.
                _raise_typeerror()
            elif args and len(args) == 2:
                body, meta = args
            elif kwargs and "raw" in kwargs:
                # Legacy keyword used by elasticsearch-python pre-releases.
                body = kwargs.pop("raw")
                meta = kwargs.pop("meta")
                kwargs.pop("body_cls", None)
            elif kwargs and "body" in kwargs:
                body = kwargs.pop("body")
                meta = kwargs.pop("meta")
                kwargs.pop("body_cls", None)
            else:
                _raise_typeerror()
        except KeyError:
            # 'meta' was missing from the keyword arguments.
            _raise_typeerror()
        # If there are still kwargs left over
        # and we're not in positional mode...
        if not args and kwargs:
            _raise_typeerror()

        self._body = body
        self._meta = meta

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.body!r})"

    def __contains__(self, item: Any) -> bool:
        return item in self._body

    def __eq__(self, other: object) -> bool:
        # Responses compare equal to their raw body as well as to each other.
        if isinstance(other, ApiResponse):
            other = other.body
        return self._body == other  # type: ignore[no-any-return]

    def __ne__(self, other: object) -> bool:
        if isinstance(other, ApiResponse):
            other = other.body
        return self._body != other  # type: ignore[no-any-return]

    def __getitem__(self, item: Any) -> Any:
        return self._body[item]

    def __getattr__(self, attr: str) -> Any:
        # Delegate unknown attribute access to the underlying body so the
        # response can be used like the raw deserialized object.
        return getattr(self._body, attr)

    def __getstate__(self) -> Tuple[_BodyType, ApiResponseMeta]:
        # __slots__ classes have no __dict__, so pickling is explicit.
        return self._body, self._meta

    def __setstate__(self, state: Tuple[_BodyType, ApiResponseMeta]) -> None:
        self._body, self._meta = state

    def __len__(self) -> int:
        return len(self._body)

    def __iter__(self) -> Iterator[Any]:
        return iter(self._body)

    def __str__(self) -> str:
        return str(self._body)

    def __bool__(self) -> bool:
        return bool(self._body)

    @property
    def meta(self) -> ApiResponseMeta:
        """Response metadata"""
        return self._meta  # type: ignore[no-any-return]

    @property
    def body(self) -> _BodyType:
        """User-friendly view into the raw response with type hints if applicable"""
        return self._body  # type: ignore[no-any-return]

    @property
    def raw(self) -> _BodyType:
        """Alias for :attr:`body`, kept for compatibility with pre-releases
        of elasticsearch-python that used ``raw``."""
        return self.body


class TextApiResponse(ApiResponse[str]):
    """API responses which are text such as 'text/plain' or 'text/csv'"""

    def __iter__(self) -> Iterator[str]:
        return iter(self.body)

    def __getitem__(self, item: Union[int, slice]) -> str:
        return self.body[item]

    @property
    def body(self) -> str:
        return self._body  # type: ignore[no-any-return]


class BinaryApiResponse(ApiResponse[bytes]):
    """API responses which are a binary response such as Mapbox vector tiles"""

    def __iter__(self) -> Iterator[int]:
        return iter(self.body)

    # Overloads mirror bytes indexing: int index -> int, slice -> bytes.
    @overload
    def __getitem__(self, item: slice) -> bytes: ...

    @overload
    def __getitem__(self, item: int) -> int: ...

    def __getitem__(self, item: Union[int, slice]) -> Union[int, bytes]:
        return self.body[item]

    @property
    def body(self) -> bytes:
        return self._body  # type: ignore[no-any-return]


class HeadApiResponse(ApiResponse[bool]):
    """API responses which are for an 'exists' / HEAD API request"""

    def __init__(self, meta: ApiResponseMeta):
        # HEAD responses carry no body, so success derives from the status.
        super().__init__(body=200 <= meta.status < 300, meta=meta)

    def __bool__(self) -> bool:
        return 200 <= self.meta.status < 300

    @property
    def body(self) -> bool:
        return bool(self)


class ObjectApiResponse(Generic[_ObjectBodyType], ApiResponse[Dict[str, Any]]):
    """API responses which are for a JSON object"""

    def __getitem__(self, item: str) -> Any:
        return self.body[item]  # type: ignore[index]

    def __iter__(self) -> Iterator[str]:
        return iter(self._body)

    @property
    def body(self) -> _ObjectBodyType:  # type: ignore[override]
        return self._body  # type: ignore[no-any-return]


class ListApiResponse(
    Generic[_ListItemBodyType],
    ApiResponse[List[Any]],
):
    """API responses which are a list of items. Can be NDJSON or a JSON list"""

    # Overloads mirror list indexing: int index -> item, slice -> list.
    @overload
    def __getitem__(self, item: slice) -> List[_ListItemBodyType]: ...

    @overload
    def __getitem__(self, item: int) -> _ListItemBodyType: ...

    def __getitem__(
        self, item: Union[int, slice]
    ) -> Union[_ListItemBodyType, List[_ListItemBodyType]]:
        return self.body[item]

    def __iter__(self) -> Iterator[_ListItemBodyType]:
        return iter(self.body)

    @property
    def body(self) -> List[_ListItemBodyType]:
        return self._body  # type: ignore[no-any-return]
import json
import re
import uuid
from datetime import date
from decimal import Decimal
from typing import Any, ClassVar, Mapping, Optional

from ._exceptions import SerializationError

try:
    import orjson
except ModuleNotFoundError:
    orjson = None  # type: ignore[assignment]


class Serializer:
    """Serializer interface."""

    # Mimetype handled by this serializer, set by subclasses.
    mimetype: ClassVar[str]

    def loads(self, data: bytes) -> Any:  # pragma: nocover
        raise NotImplementedError()

    def dumps(self, data: Any) -> bytes:  # pragma: nocover
        raise NotImplementedError()


class TextSerializer(Serializer):
    """Text serializer to and from UTF-8."""

    mimetype: ClassVar[str] = "text/*"

    def dumps(self, data: str) -> bytes:
        """Encode ``data`` to UTF-8 bytes; bytes pass through unchanged."""
        if isinstance(data, bytes):
            # Already an encoded request body, forward it along.
            return data
        try:
            return data.encode("utf-8", "surrogatepass")
        except (AttributeError, UnicodeError, TypeError) as e:
            raise SerializationError(
                f"Unable to serialize to text: {data!r}", errors=(e,)
            )

    def loads(self, data: bytes) -> str:
        """Decode ``data`` from UTF-8; str values pass through unchanged."""
        if isinstance(data, str):
            return data
        try:
            return data.decode("utf-8", "surrogatepass")
        except UnicodeError as e:
            raise SerializationError(
                f"Unable to deserialize as text: {data!r}", errors=(e,)
            )


class JsonSerializer(Serializer):
    """JSON serializer relying on the standard library json module."""

    mimetype: ClassVar[str] = "application/json"

    def default(self, data: Any) -> Any:
        """Convert values the JSON encoder doesn't support, or raise.

        Dates are checked first since ``datetime`` is a ``date`` subclass.
        """
        converters = (
            (date, lambda value: value.isoformat()),
            (uuid.UUID, str),
            (Decimal, float),
        )
        for type_, convert in converters:
            if isinstance(data, type_):
                return convert(data)
        raise SerializationError(
            message=f"Unable to serialize to JSON: {data!r} (type: {type(data).__name__})",
        )

    def json_dumps(self, data: Any) -> bytes:
        text = json.dumps(
            data, default=self.default, ensure_ascii=False, separators=(",", ":")
        )
        return text.encode("utf-8", "surrogatepass")

    def json_loads(self, data: bytes) -> Any:
        return json.loads(data)

    def loads(self, data: bytes) -> Any:
        # Some responses use Content-Type: json but carry no data at all;
        # return None for those instead of raising.
        if data == b"":
            return None

        try:
            return self.json_loads(data)
        except (ValueError, TypeError) as e:
            raise SerializationError(
                message=f"Unable to deserialize as JSON: {data!r}", errors=(e,)
            )

    def dumps(self, data: Any) -> bytes:
        # Pre-encoded request bodies are forwarded along untouched.
        if isinstance(data, bytes):
            return data
        if isinstance(data, str):
            return data.encode("utf-8", "surrogatepass")

        try:
            return self.json_dumps(data)
        except (ValueError, UnicodeError, TypeError) as e:  # pragma: nocover
            # Normally .default() raises first; this is just a safety net.
            raise SerializationError(
                message=f"Unable to serialize to JSON: {data!r} (type: {type(data).__name__})",
                errors=(e,),
            )


if orjson is not None:

    class OrjsonSerializer(JsonSerializer):
        """JSON serializer relying on the orjson package.

        Only available if orjson is installed. It is faster, especially for
        vectors, but is also stricter.
        """

        def json_dumps(self, data: Any) -> bytes:
            return orjson.dumps(
                data, default=self.default, option=orjson.OPT_SERIALIZE_NUMPY
            )

        def json_loads(self, data: bytes) -> Any:
            return orjson.loads(data)


class NdjsonSerializer(JsonSerializer):
    """Newline delimited JSON (NDJSON) serializer relying on the standard library json module."""

    mimetype: ClassVar[str] = "application/x-ndjson"

    def loads(self, data: bytes) -> Any:
        documents = []
        for line in re.split(b"[\n\r]", data):
            if not line:
                continue
            try:
                documents.append(self.json_loads(line))
            except (ValueError, TypeError) as e:
                raise SerializationError(
                    message=f"Unable to deserialize as NDJSON: {data!r}", errors=(e,)
                )
        return documents

    def dumps(self, data: Any) -> bytes:
        # A single pre-serialized body becomes a one-document stream.
        if isinstance(data, (bytes, str)):
            data = (data,)

        buffer = bytearray()
        for line in data:
            if isinstance(line, str):
                line = line.encode("utf-8", "surrogatepass")
            if isinstance(line, bytes):
                buffer += line
                if not line.endswith(b"\n"):
                    # Every document must be newline-terminated.
                    buffer += b"\n"
            else:
                try:
                    serialized = self.json_dumps(line)
                except (ValueError, UnicodeError, TypeError) as e:  # pragma: nocover
                    # Normally .default() raises first; this is a safety net.
                    raise SerializationError(
                        message=f"Unable to serialize to NDJSON: {data!r} (type: {type(data).__name__})",
                        errors=(e,),
                    )
                buffer += serialized
                buffer += b"\n"

        return bytes(buffer)


DEFAULT_SERIALIZERS = {
    JsonSerializer.mimetype: JsonSerializer(),
    TextSerializer.mimetype: TextSerializer(),
    NdjsonSerializer.mimetype: NdjsonSerializer(),
}


class SerializerCollection:
    """Collection of serializers that can be fetched by mimetype. Used by
    :class:`elastic_transport.Transport` to serialize and deserialize native
    Python types into bytes before passing to a node.
    """

    def __init__(
        self,
        serializers: Optional[Mapping[str, Serializer]] = None,
        default_mimetype: str = "application/json",
    ):
        serializers = DEFAULT_SERIALIZERS if serializers is None else serializers
        if default_mimetype not in serializers:
            raise ValueError(
                f"Must configure a serializer for the default mimetype {default_mimetype!r}"
            ) from None
        self.default_serializer = serializers[default_mimetype]
        self.serializers = dict(serializers)

    def dumps(self, data: Any, mimetype: Optional[str] = None) -> bytes:
        return self.get_serializer(mimetype).dumps(data)

    def loads(self, data: bytes, mimetype: Optional[str] = None) -> Any:
        return self.get_serializer(mimetype).loads(data)

    def get_serializer(self, mimetype: Optional[str]) -> Serializer:
        if mimetype is None:
            return self.default_serializer

        # Strip any parameters (e.g. 'charset') from the mimetype.
        mimetype = mimetype.partition(";")[0]
        serializer = self.serializers.get(mimetype)
        if serializer is None:
            # Fall back to the '<type>/*' wildcard entry.
            mimetype_supertype = mimetype.partition("/")[0]
            serializer = self.serializers.get(f"{mimetype_supertype}/*")
        if serializer is None:
            raise SerializationError(
                f"Unknown mimetype, not able to serialize or deserialize: {mimetype}"
            ) from None
        return serializer
# licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import re
from typing import Any, Dict, Union


def fixup_module_metadata(module_name: str, namespace: Dict[str, Any]) -> None:
    """Rewrite ``__module__`` on public objects so they report ``module_name``.

    Yoinked from python-trio/outcome, thanks Nathaniel! License: MIT
    """

    def _fix(obj: Any) -> None:
        module = getattr(obj, "__module__", None)
        if module is not None and module.startswith("elastic_transport."):
            obj.__module__ = module_name
        if isinstance(obj, type):
            # Recurse into class attributes (methods, nested classes, ...)
            for attribute in obj.__dict__.values():
                _fix(attribute)

    for public_name in namespace["__all__"]:
        _fix(namespace[public_name])


IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
IPV4_RE = re.compile("^" + IPV4_PAT + "$")

HEX_PAT = "[0-9A-Fa-f]{1,4}"
LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
_variations = [
    # 6( h16 ":" ) ls32
    "(?:%(hex)s:){6}%(ls32)s",
    # "::" 5( h16 ":" ) ls32
    "::(?:%(hex)s:){5}%(ls32)s",
    # [ h16 ] "::" 4( h16 ":" ) ls32
    "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
    # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
    "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
    # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
    "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
    # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
    "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
    # [ *4( h16 ":" ) h16 ] "::" ls32
    "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
    # [ *5( h16 ":" ) h16 ] "::" h16
    "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
    # [ *6( h16 ":" ) h16 ] "::"
    "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
BRACELESS_IPV6_ADDRZ_PAT = IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?"
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + BRACELESS_IPV6_ADDRZ_PAT + "$")


def is_ipaddress(hostname: Union[str, bytes]) -> bool:
    """Detects whether the hostname given is an IPv4 or IPv6 address.
    Also detects IPv6 addresses with Zone IDs.
    """
    # Copied from urllib3. License: MIT
    if isinstance(hostname, bytes):
        # IDN A-label bytes are ASCII compatible.
        hostname = hostname.decode("ascii")
    candidate = hostname.strip("[]")
    if IPV4_RE.match(candidate):
        return True
    return BRACELESS_IPV6_ADDRZ_RE.match(candidate) is not None
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | __version__ = "8.17.1" 19 | -------------------------------------------------------------------------------- /elastic_transport/client_utils.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
import base64
import binascii
import dataclasses
import re
import urllib.parse
from platform import python_version
from typing import Optional, Tuple, TypeVar, Union
from urllib.parse import quote as _quote

from urllib3.exceptions import LocationParseError
from urllib3.util import parse_url

from ._models import DEFAULT, DefaultType, NodeConfig
from ._utils import fixup_module_metadata
from ._version import __version__

__all__ = [
    "CloudId",
    "DEFAULT",
    "DefaultType",
    "basic_auth_to_header",
    "client_meta_version",
    "create_user_agent",
    "dataclasses",
    "parse_cloud_id",
    "percent_encode",
    "resolve_default",
    "to_bytes",
    "to_str",
    "url_to_node_config",
]

T = TypeVar("T")


def resolve_default(val: Union[DefaultType, T], default: T) -> T:
    """Resolves a value that could be the ``DEFAULT`` sentinel
    into either the given value or the default value.
    """
    return val if val is not DEFAULT else default


def create_user_agent(name: str, version: str) -> str:
    """Creates the 'User-Agent' header given the library name and version"""
    return (
        f"{name}/{version} (Python/{python_version()}; elastic-transport/{__version__})"
    )


def client_meta_version(version: str) -> str:
    """Converts a Python version into a version string
    compatible with the ``X-Elastic-Client-Meta`` HTTP header.

    :param version: Version string such as ``"1.2.3"``, ``"1.2.3rc1"``,
        or ``"1.2.3.post1"``.
    :returns: The numeric version, with a ``p`` suffix appended for
        pre-release versions. Post-releases are reported as plain releases.
    :raises ValueError: If ``version`` doesn't start with a numeric version.
    """
    match = re.match(r"^([0-9][0-9.]*[0-9]|[0-9])(.*)$", version)
    if match is None:
        # Fix: this message was missing the 'f' prefix, so the '{version!r}'
        # placeholder was emitted literally instead of being interpolated.
        raise ValueError(
            f"Version {version!r} not formatted like a Python version string"
        )
    version, version_suffix = match.groups()

    # Don't treat post-releases as pre-releases.
    if re.search(r"^\.post[0-9]*$", version_suffix):
        return version
    # Any other non-empty suffix (rc1, a1, .dev0, ...) marks a pre-release.
    if version_suffix:
        version += "p"
    return version


@dataclasses.dataclass(frozen=True, repr=True)
class CloudId:
    #: Name of the cluster in Elastic Cloud
    cluster_name: str
    #: Host and port of the Elasticsearch instance
    es_address: Optional[Tuple[str, int]]
    #: Host and port of the Kibana instance
    kibana_address: Optional[Tuple[str, int]]


def parse_cloud_id(cloud_id: str) -> CloudId:
    """Parses an Elastic Cloud ID into its components

    :param cloud_id: Cloud ID in the ``cluster_name:base64-payload`` form,
        where the payload decodes to ``parent-domain$es-uuid$kibana-uuid``.
    :raises ValueError: If the Cloud ID is malformed.
    """
    try:
        cloud_id = to_str(cloud_id)
        cluster_name, _, cloud_id = cloud_id.partition(":")
        parts = to_str(binascii.a2b_base64(to_bytes(cloud_id, "ascii")), "ascii").split(
            "$"
        )
        parent_dn = parts[0]
        if not parent_dn:
            raise ValueError()  # Caught and re-raised properly below

        es_uuid: Optional[str]
        kibana_uuid: Optional[str]
        try:
            es_uuid = parts[1]
        except IndexError:
            es_uuid = None
        try:
            kibana_uuid = parts[2] or None
        except IndexError:
            kibana_uuid = None

        if ":" in parent_dn:
            # An explicit port on the parent domain applies to both services.
            parent_dn, _, parent_port = parent_dn.rpartition(":")
            port = int(parent_port)
        else:
            port = 443
    except (ValueError, IndexError, UnicodeError):
        raise ValueError("Cloud ID is not properly formatted") from None

    es_host = f"{es_uuid}.{parent_dn}" if es_uuid else None
    kibana_host = f"{kibana_uuid}.{parent_dn}" if kibana_uuid else None

    return CloudId(
        cluster_name=cluster_name,
        es_address=(es_host, port) if es_host else None,
        kibana_address=(kibana_host, port) if kibana_host else None,
    )


def to_str(
    value: Union[str, bytes], encoding: str = "utf-8", errors: str = "strict"
) -> str:
    """Coerce ``value`` to ``str``, decoding ``bytes`` with ``encoding``."""
    if isinstance(value, bytes):
        return value.decode(encoding, errors)
    return value
def to_bytes(
    value: Union[str, bytes], encoding: str = "utf-8", errors: str = "strict"
) -> bytes:
    """Coerce ``value`` to ``bytes``, encoding ``str`` values with ``encoding``."""
    if isinstance(value, str):
        return value.encode(encoding, errors)
    return value


# Python 3.7 added '~' to the safe list for urllib.parse.quote()
_QUOTE_ALWAYS_SAFE = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.-~"
)


def percent_encode(
    string: Union[bytes, str],
    safe: str = "/",
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
) -> str:
    """Percent-encodes a string so it can be used in an HTTP request target"""
    # Redefines 'urllib.parse.quote()' to always have the '~' character
    # within the 'ALWAYS_SAFE' list. The character was added in Python 3.7
    safe = "".join(_QUOTE_ALWAYS_SAFE.union(set(safe)))
    return _quote(string, safe, encoding=encoding, errors=errors)  # type: ignore[arg-type]


def basic_auth_to_header(basic_auth: Tuple[str, str]) -> str:
    """Converts a 2-tuple into a 'Basic' HTTP Authorization header

    :raises ValueError: If ``basic_auth`` isn't a 2-tuple of str/bytes.
    """
    if (
        not isinstance(basic_auth, tuple)
        or len(basic_auth) != 2
        or any(not isinstance(item, (str, bytes)) for item in basic_auth)
    ):
        raise ValueError(
            "'basic_auth' must be a 2-tuple of str/bytes (username, password)"
        )
    return (
        f"Basic {base64.b64encode(b':'.join(to_bytes(x) for x in basic_auth)).decode()}"
    )


def url_to_node_config(
    url: str, use_default_ports_for_scheme: bool = False
) -> NodeConfig:
    """Constructs a :class:`elastic_transport.NodeConfig` instance from a URL.
    If a username/password are specified in the URL they are converted to an
    'Authorization' header. Always fills in a default port for HTTPS.

    :param url: URL to transform into a NodeConfig.
    :param use_default_ports_for_scheme: If 'True' will resolve default ports for HTTP.
    :raises ValueError: If the URL can't be parsed or is missing a scheme,
        host, or resolvable port.
    """
    try:
        parsed_url = parse_url(url)
    except LocationParseError:
        raise ValueError(f"Could not parse URL {url!r}") from None

    parsed_port: Optional[int] = parsed_url.port
    if parsed_url.port is None and parsed_url.scheme is not None:
        # Always fill in a default port for HTTPS
        if parsed_url.scheme == "https":
            parsed_port = 443
        # Only fill HTTP default port when asked to explicitly
        elif parsed_url.scheme == "http" and use_default_ports_for_scheme:
            parsed_port = 80

    if any(
        component in (None, "")
        for component in (parsed_url.scheme, parsed_url.host, parsed_port)
    ):
        raise ValueError(
            "URL must include a 'scheme', 'host', and 'port' component (ie 'https://localhost:9200')"
        )
    # Narrow the Optional types for mypy; guaranteed by the check above.
    assert parsed_url.scheme is not None
    assert parsed_url.host is not None
    assert parsed_port is not None

    headers = {}
    if parsed_url.auth:
        # `urllib3.util.url_parse` ensures `parsed_url` is correctly
        # percent-encoded but does not percent-decode userinfo, so we have to
        # do it ourselves to build the basic auth header correctly.
        encoded_username, _, encoded_password = parsed_url.auth.partition(":")
        username = urllib.parse.unquote(encoded_username)
        password = urllib.parse.unquote(encoded_password)

        headers["authorization"] = basic_auth_to_header((username, password))

    # IPv6 hosts are stored bracketed by parse_url; NodeConfig wants them bare.
    host = parsed_url.host.strip("[]")
    if not parsed_url.path or parsed_url.path == "/":
        path_prefix = ""
    else:
        path_prefix = parsed_url.path

    return NodeConfig(
        scheme=parsed_url.scheme,
        host=host,
        port=parsed_port,
        path_prefix=path_prefix,
        headers=headers,
    )


# Make public objects report this module as their home, then drop the
# helper so it doesn't leak into the public namespace.
fixup_module_metadata(__name__, globals())
del fixup_module_metadata
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import nox

# Every path that gets formatted, linted, and license-header-checked.
SOURCE_FILES = (
    "noxfile.py",
    "setup.py",
    "elastic_transport/",
    "utils/",
    "tests/",
    "docs/sphinx/",
)

# The project requires Python >= 3.8 (see 'python_requires' in setup.py and
# the CI matrix), so black must target py38 — the previous py37 target was
# inconsistent with the supported versions.
BLACK_TARGET_VERSION = "--target-version=py38"


@nox.session()
def format(session):
    """Auto-format the source tree, then re-run the linters to verify."""
    # NOTE(review): 'pyupgrade' is installed here but never invoked below.
    # Either run it over the Python files in SOURCE_FILES or drop it from
    # this install list — confirm the intended behavior with the team.
    session.install("black~=24.0", "isort", "pyupgrade")
    session.run("black", BLACK_TARGET_VERSION, *SOURCE_FILES)
    session.run("isort", *SOURCE_FILES)
    session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES)

    # Re-run the lint session so formatting problems surface locally
    # instead of in CI.
    lint(session)


@nox.session()
def lint(session):
    """Static checks: black/isort in check mode, flake8, license headers, mypy."""
    session.install(
        "flake8",
        "black~=24.0",
        "isort",
        "mypy==1.7.1",
        "types-requests",
        "types-certifi",
    )
    # urllib3 ships its own inline types now; the stub package conflicts.
    # https://github.com/python/typeshed/issues/10786
    session.run(
        "python", "-m", "pip", "uninstall", "--yes", "types-urllib3", silent=True
    )
    session.install(".[develop]")
    session.run("black", "--check", BLACK_TARGET_VERSION, *SOURCE_FILES)
    session.run("isort", "--check", *SOURCE_FILES)
    session.run("flake8", "--ignore=E501,W503,E203,E704", *SOURCE_FILES)
    session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES)
    session.run("mypy", "--strict", "--show-error-codes", "elastic_transport/")


@nox.session(python=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"])
def test(session):
    """Run the test suite with coverage on every supported Python version."""
    session.install(".[develop]")
    session.run(
        "pytest",
        "--cov=elastic_transport",
        *(session.posargs or ("tests/",)),
        env={"PYTHONWARNINGS": "always::DeprecationWarning"},
    )
    session.run("coverage", "report", "-m")


@nox.session(name="test-min-deps", python="3.8")
def test_min_deps(session):
    """Run the test suite against the oldest supported dependency pins."""
    session.install("-r", "requirements-min.txt", ".[develop]", silent=False)
    session.run(
        "pytest",
        "--cov=elastic_transport",
        *(session.posargs or ("tests/",)),
        env={"PYTHONWARNINGS": "always::DeprecationWarning"},
    )
    session.run("coverage", "report", "-m")


@nox.session(python="3")
def docs(session):
    """Build the Sphinx HTML documentation into docs/sphinx/_build/html."""
    session.install(".[develop]")

    session.chdir("docs/sphinx")
    session.run(
        "sphinx-build",
        "-T",
        "-E",
        "-b",
        "html",
        "-d",
        "_build/doctrees",
        "-D",
        "language=en",
        ".",
        "_build/html",
    )
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import os
import re

from setuptools import find_packages, setup

# Resolve all paths relative to this file so builds behave the same
# regardless of the current working directory.
here = os.path.dirname(os.path.abspath(__file__))

# Single-source the version string from elastic_transport/_version.py.
with open(os.path.join(here, "elastic_transport/_version.py")) as version_file:
    version = re.search(
        r"__version__\s+=\s+\"([^\"]+)\"", version_file.read()
    ).group(1)

# The README doubles as the PyPI long description.
with open(os.path.join(here, "README.md")) as readme_file:
    long_description = readme_file.read()

# Ship only the library packages; anything under tests/ is excluded.
packages = [
    pkg
    for pkg in find_packages(exclude=["tests"])
    if pkg.startswith("elastic_transport")
]

setup(
    name="elastic-transport",
    description="Transport classes and utilities shared among Python Elastic client libraries",
    long_description=long_description,
    long_description_content_type="text/markdown",
    version=version,
    author="Elastic Client Library Maintainers",
    author_email="client-libs@elastic.co",
    url="https://github.com/elastic/elastic-transport-python",
    project_urls={
        "Source Code": "https://github.com/elastic/elastic-transport-python",
        "Issue Tracker": "https://github.com/elastic/elastic-transport-python/issues",
        "Documentation": "https://elastic-transport-python.readthedocs.io",
    },
    # py.typed marks the package as typed for downstream type checkers.
    package_data={"elastic_transport": ["py.typed"]},
    packages=packages,
    install_requires=[
        "urllib3>=1.26.2, <3",
        "certifi",
    ],
    python_requires=">=3.8",
    extras_require={
        "develop": [
            "pytest",
            "pytest-cov",
            "pytest-mock",
            "pytest-asyncio",
            "pytest-httpserver",
            "trustme",
            "requests",
            "aiohttp",
            "httpx",
            "respx",
            "opentelemetry-api",
            "opentelemetry-sdk",
            "orjson",
            # Override Read the Docs default (sphinx<2)
            "sphinx>2",
            "furo",
            "sphinx-autodoc-typehints",
        ],
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
    ],
)
See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | -------------------------------------------------------------------------------- /tests/async_/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | -------------------------------------------------------------------------------- /tests/async_/test_httpbin.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import dataclasses
import json

import pytest

from elastic_transport import AiohttpHttpNode, AsyncTransport
from elastic_transport._node._base import DEFAULT_USER_AGENT

from ..test_httpbin import parse_httpbin


@pytest.mark.asyncio
async def test_simple_request(httpbin_node_config):
    """End-to-end request against httpbin.org through AsyncTransport."""
    transport = AsyncTransport([httpbin_node_config])

    resp, data = await transport.perform_request(
        "GET",
        "/anything?key[]=1&key[]=2&q1&q2=",
        headers={"Custom": "headeR", "content-type": "application/json"},
        body={"JSON": "body"},
    )
    assert resp.status == 200
    assert data["method"] == "GET"
    assert data["url"] == "https://httpbin.org/anything?key[]=1&key[]=2&q1&q2="

    # httpbin makes no-value query params into ''
    assert data["args"] == {
        "key[]": ["1", "2"],
        "q1": "",
        "q2": "",
    }
    assert data["data"] == '{"JSON":"body"}'
    assert data["json"] == {"JSON": "body"}

    # Each expected header must be echoed back verbatim by httpbin.
    expected_headers = {
        "Content-Type": "application/json",
        "Content-Length": "15",
        "Custom": "headeR",
        "Host": "httpbin.org",
    }
    for header, value in expected_headers.items():
        assert data["headers"][header] == value


@pytest.mark.asyncio
async def test_node(httpbin_node_config):
    """Exercise AiohttpHttpNode directly: plain, compressed, and body requests."""

    def new_node(**overrides):
        return AiohttpHttpNode(dataclasses.replace(httpbin_node_config, **overrides))

    url = "https://httpbin.org/anything"
    base_headers = {"Host": "httpbin.org", "User-Agent": DEFAULT_USER_AGENT}

    # Plain GET without compression: only the default headers are sent.
    node = new_node()
    resp, data = await node.perform_request("GET", "/anything")
    assert resp.status == 200
    assert parse_httpbin(data) == {
        "headers": dict(base_headers),
        "method": "GET",
        "url": url,
    }

    # http_compress=True adds 'Accept-Encoding' even on body-less requests.
    node = new_node(http_compress=True)
    resp, data = await node.perform_request("GET", "/anything")
    assert resp.status == 200
    assert parse_httpbin(data) == {
        "headers": {**base_headers, "Accept-Encoding": "gzip"},
        "method": "GET",
        "url": url,
    }

    # A raw bytes body is gzipped and defaults to octet-stream content type.
    resp, data = await node.perform_request("GET", "/anything", body=b"hello, world!")
    assert resp.status == 200
    assert parse_httpbin(data) == {
        "headers": {
            **base_headers,
            "Accept-Encoding": "gzip",
            "Content-Encoding": "gzip",
            "Content-Type": "application/octet-stream",
            "Content-Length": "33",
        },
        "method": "GET",
        "url": url,
    }

    # POST with a JSON body and an explicit content-type header.
    resp, data = await node.perform_request(
        "POST",
        "/anything",
        body=json.dumps({"key": "value"}).encode("utf-8"),
        headers={"content-type": "application/json"},
    )
    assert resp.status == 200
    assert parse_httpbin(data) == {
        "headers": {
            **base_headers,
            "Accept-Encoding": "gzip",
            "Content-Encoding": "gzip",
            "Content-Length": "36",
            "Content-Type": "application/json",
        },
        "method": "POST",
        "url": url,
    }
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import warnings

import pytest

from elastic_transport import AsyncTransport


@pytest.mark.asyncio
async def test_simple_request(https_server_ip_node_config):
    """A TLS request against the local test server must succeed warning-free."""
    with warnings.catch_warnings():
        # Escalate any warning to an error so accidental TLS warnings
        # (e.g. unverified HTTPS) fail the test.
        warnings.simplefilter("error")

        transport = AsyncTransport([https_server_ip_node_config])

        resp, data = await transport.perform_request("GET", "/foobar")
        assert resp.status == 200
        assert data == {"foo": "bar"}
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import hashlib
import logging
import socket
import ssl

import pytest
import trustme
from pytest_httpserver import HTTPServer

from elastic_transport import ApiResponseMeta, BaseNode, HttpHeaders, NodeConfig
from elastic_transport._node import NodeApiResponse


class DummyNode(BaseNode):
    """Test double for BaseNode: records every call and returns a canned
    response (or raises a configured exception) instead of touching the
    network. Behavior is driven by keys in ``NodeConfig._extras``:
    'exception', 'status', 'body', and 'headers'.
    """

    def __init__(self, config: NodeConfig):
        super().__init__(config)
        self.exception = config._extras.pop("exception", None)
        self.status = config._extras.pop("status", 200)
        self.body = config._extras.pop("body", b"{}")
        self.calls = []  # (args, kwargs) tuple per perform_request call
        self._headers = config._extras.pop("headers", {})

    def _canned_response(self) -> NodeApiResponse:
        # Build the fixed response shared by the sync and async variants.
        meta = ApiResponseMeta(
            node=self.config,
            duration=0.0,
            http_version="1.1",
            status=self.status,
            headers=HttpHeaders(self._headers),
        )
        return NodeApiResponse(meta, self.body)

    def perform_request(self, *args, **kwargs):
        self.calls.append((args, kwargs))
        if self.exception:
            raise self.exception
        return self._canned_response()


class AsyncDummyNode(DummyNode):
    """Awaitable variant of DummyNode for async transport tests."""

    async def perform_request(self, *args, **kwargs):
        self.calls.append((args, kwargs))
        if self.exception:
            raise self.exception
        return self._canned_response()


@pytest.fixture(scope="session", params=[True, False])
def httpbin_cert_fingerprint(request) -> str:
    """Gets the SHA256 fingerprint of the certificate for 'httpbin.org'

    Parametrized to return both accepted formats: the bare hex digest
    and the colon-delimited byte-pair form.
    """
    raw_sock = socket.create_connection(("httpbin.org", 443))
    ctx = ssl.create_default_context()
    # Verification is intentionally disabled: we only want the raw cert bytes.
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    tls_sock = ctx.wrap_socket(raw_sock)
    digest = hashlib.sha256(tls_sock.getpeercert(binary_form=True)).hexdigest()
    assert len(digest) == 64  # SHA-256 is always 32 bytes / 64 hex chars
    tls_sock.close()
    if request.param:
        return digest
    return ":".join(digest[i : i + 2] for i in range(0, len(digest), 2))


@pytest.fixture(scope="session")
def httpbin_node_config() -> NodeConfig:
    """NodeConfig pointing at httpbin.org, skipping when offline."""
    try:
        probe = socket.create_connection(("httpbin.org", 443))
    except Exception as e:
        pytest.skip(f"Couldn't connect to httpbin.org, internet not connected? {e}")
    probe.close()
    return NodeConfig(
        "https", "httpbin.org", 443, verify_certs=False, ssl_show_warn=False
    )


@pytest.fixture(scope="function", autouse=True)
def elastic_transport_logging():
    """Strip handlers from the library loggers so tests start from a clean slate."""
    for name in ("node", "node_pool", "transport"):
        logger = logging.getLogger(f"elastic_transport.{name}")
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)


@pytest.fixture(scope="session")
def https_server_ip_node_config(tmp_path_factory: pytest.TempPathFactory) -> NodeConfig:
    """Spin up a local HTTPS server with a trustme-issued cert for 127.0.0.1
    and yield a NodeConfig that trusts its CA.
    """
    ca = trustme.CA()
    cert_dir = tmp_path_factory.mktemp("certs")
    ca_cert_path = str(cert_dir / "ca.pem")
    ca.cert_pem.write_to_path(ca_cert_path)

    localhost_cert = ca.issue_cert("127.0.0.1")
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)

    crt = localhost_cert.cert_chain_pems[0]
    key = localhost_cert.private_key_pem
    with crt.tempfile() as crt_file, key.tempfile() as key_file:
        context.load_cert_chain(crt_file, key_file)

        server = HTTPServer(ssl_context=context)
        server.expect_request("/foobar").respond_with_json({"foo": "bar"})

        server.start()
        yield NodeConfig("https", "127.0.0.1", server.port, ca_certs=ca_cert_path)
        server.clear()
        if server.is_running():
            server.stop()
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import pytest

from elastic_transport import (
    AiohttpHttpNode,
    HttpxAsyncHttpNode,
    NodeConfig,
    RequestsHttpNode,
    Urllib3HttpNode,
)
from elastic_transport._node._base import ssl_context_from_node_config

# Every concrete node implementation shipped by the package.
ALL_NODE_CLASSES = (
    Urllib3HttpNode,
    RequestsHttpNode,
    AiohttpHttpNode,
    HttpxAsyncHttpNode,
)


@pytest.mark.parametrize("node_cls", list(ALL_NODE_CLASSES))
def test_unknown_parameter(node_cls):
    """Unexpected constructor keywords must raise TypeError on every node class."""
    with pytest.raises(TypeError):
        node_cls(unknown_option=1)


@pytest.mark.parametrize(
    "host, check_hostname",
    [
        # Hostname checking is disabled for IP-literal hosts only.
        ("127.0.0.1", False),
        ("::1", False),
        ("localhost", True),
    ],
)
def test_ssl_context_from_node_config(host, check_hostname):
    """check_hostname on the derived SSLContext tracks whether host is an IP."""
    ctx = ssl_context_from_node_config(NodeConfig("https", host, 443))
    assert ctx.check_hostname == check_hostname
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import gzip
import ssl
import warnings

import pytest
import respx

from elastic_transport import HttpxAsyncHttpNode, NodeConfig
from elastic_transport._node._base import DEFAULT_USER_AGENT


def create_node(node_config: NodeConfig):
    """Small factory so tests read uniformly."""
    return HttpxAsyncHttpNode(node_config)


class TestHttpxAsyncNodeCreation:
    """Constructor-time behavior: TLS options, warnings, and validation."""

    def test_ssl_context(self):
        # A user-supplied SSLContext must be threaded through to the
        # underlying httpx connection pool unchanged and without warnings.
        ssl_context = ssl.create_default_context()
        with warnings.catch_warnings(record=True) as caught:
            node = create_node(
                NodeConfig(
                    scheme="https",
                    host="localhost",
                    port=80,
                    ssl_context=ssl_context,
                )
            )
        assert node.client._transport._pool._ssl_context is ssl_context
        assert len(caught) == 0

    def test_uses_https_if_verify_certs_is_off(self):
        with warnings.catch_warnings(record=True) as caught:
            _ = create_node(NodeConfig("https", "localhost", 443, verify_certs=False))
        assert (
            str(caught[0].message)
            == "Connecting to 'https://localhost:443' using TLS with verify_certs=False is insecure"
        )

    def test_no_warn_when_uses_https_if_verify_certs_is_off(self):
        # ssl_show_warn=False suppresses the insecure-TLS warning.
        with warnings.catch_warnings(record=True) as caught:
            _ = create_node(
                NodeConfig(
                    "https",
                    "localhost",
                    443,
                    verify_certs=False,
                    ssl_show_warn=False,
                )
            )
        assert len(caught) == 0

    def test_ca_certs_with_verify_ssl_false_raises_error(self):
        with pytest.raises(ValueError) as exc:
            create_node(
                NodeConfig(
                    "https",
                    "localhost",
                    443,
                    ca_certs="/ca/certs",
                    verify_certs=False,
                )
            )
        assert str(exc.value) == "You cannot use 'ca_certs' when 'verify_certs=False'"


@pytest.mark.asyncio
class TestHttpxAsyncNode:
    """Request-time behavior, mocked at the wire level with respx."""

    @respx.mock
    async def test_simple_request(self):
        node = create_node(NodeConfig(scheme="http", host="localhost", port=80))
        respx.get("http://localhost/index")
        await node.perform_request(
            "GET", "/index", b"hello world", headers={"key": "value"}
        )
        sent = respx.calls.last.request
        assert sent.content == b"hello world"
        expected = {
            "key": "value",
            "connection": "keep-alive",
            "user-agent": DEFAULT_USER_AGENT,
        }
        assert expected.items() <= sent.headers.items()

    @respx.mock
    async def test_compression(self):
        node = create_node(
            NodeConfig(scheme="http", host="localhost", port=80, http_compress=True)
        )
        respx.get("http://localhost/index")
        await node.perform_request("GET", "/index", b"hello world")
        sent = respx.calls.last.request
        # The body is gzipped on the wire and flagged via content-encoding.
        assert gzip.decompress(sent.content) == b"hello world"
        assert {"content-encoding": "gzip"}.items() <= sent.headers.items()

    @respx.mock
    async def test_default_timeout(self):
        node = create_node(
            NodeConfig(scheme="http", host="localhost", port=80, request_timeout=10)
        )
        respx.get("http://localhost/index")
        await node.perform_request("GET", "/index", b"hello world")
        sent = respx.calls.last.request
        assert sent.extensions["timeout"]["connect"] == 10

    @respx.mock
    async def test_overwritten_timeout(self):
        # A per-request timeout overrides the node-level default.
        node = create_node(
            NodeConfig(scheme="http", host="localhost", port=80, request_timeout=10)
        )
        respx.get("http://localhost/index")
        await node.perform_request("GET", "/index", b"hello world", request_timeout=15)
        sent = respx.calls.last.request
        assert sent.extensions["timeout"]["connect"] == 15

    @respx.mock
    async def test_merge_headers(self):
        # Per-request headers merge over node headers; duplicates win per-request.
        node = create_node(
            NodeConfig("http", "localhost", 80, headers={"h1": "v1", "h2": "v2"})
        )
        respx.get("http://localhost/index")
        await node.perform_request(
            "GET", "/index", b"hello world", headers={"h2": "v2p", "h3": "v3"}
        )
        sent = respx.calls.last.request
        assert sent.headers["h1"] == "v1"
        assert sent.headers["h2"] == "v2p"
        assert sent.headers["h3"] == "v3"


def test_ssl_assert_fingerprint(httpbin_cert_fingerprint):
    """httpx has no certificate-pinning support, so the option must be rejected."""
    with pytest.raises(ValueError, match="httpx does not support certificate pinning"):
        HttpxAsyncHttpNode(
            NodeConfig(
                scheme="https",
                host="httpbin.org",
                port=443,
                ssl_assert_fingerprint=httpbin_cert_fingerprint,
            )
        )
17 | 18 | import gzip 19 | import ssl 20 | import warnings 21 | from unittest.mock import Mock, patch 22 | 23 | import pytest 24 | import requests 25 | from requests.auth import HTTPBasicAuth 26 | 27 | from elastic_transport import NodeConfig, RequestsHttpNode 28 | from elastic_transport._node._base import DEFAULT_USER_AGENT 29 | 30 | 31 | class TestRequestsHttpNode: 32 | def _get_mock_node(self, node_config, response_body=b"{}"): 33 | node = RequestsHttpNode(node_config) 34 | 35 | def _dummy_send(*args, **kwargs): 36 | dummy_response = Mock() 37 | dummy_response.headers = {} 38 | dummy_response.status_code = 200 39 | dummy_response.content = response_body 40 | dummy_response.request = args[0] 41 | dummy_response.cookies = {} 42 | _dummy_send.call_args = (args, kwargs) 43 | return dummy_response 44 | 45 | node.session.send = _dummy_send 46 | return node 47 | 48 | def _get_request(self, node, *args, **kwargs) -> requests.PreparedRequest: 49 | resp, data = node.perform_request(*args, **kwargs) 50 | status = resp.status 51 | assert 200 == status 52 | assert b"{}" == data 53 | 54 | timeout = kwargs.pop("request_timeout", node.config.request_timeout) 55 | args, kwargs = node.session.send.call_args 56 | assert timeout == kwargs["timeout"] 57 | assert 1 == len(args) 58 | return args[0] 59 | 60 | def test_close_session(self): 61 | node = RequestsHttpNode(NodeConfig("http", "localhost", 80)) 62 | with patch.object(node.session, "close") as pool_close: 63 | node.close() 64 | pool_close.assert_called_with() 65 | 66 | def test_ssl_context(self): 67 | ctx = ssl.create_default_context() 68 | node = RequestsHttpNode(NodeConfig("https", "localhost", 80, ssl_context=ctx)) 69 | adapter = node.session.get_adapter("https://localhost:80") 70 | assert adapter.poolmanager.connection_pool_kw["ssl_context"] is ctx 71 | 72 | def test_merge_headers(self): 73 | node = self._get_mock_node( 74 | NodeConfig("http", "localhost", 80, headers={"h1": "v1", "h2": "v2"}) 75 | ) 76 | req = 
self._get_request(node, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) 77 | assert req.headers["h1"] == "v1" 78 | assert req.headers["h2"] == "v2p" 79 | assert req.headers["h3"] == "v3" 80 | 81 | def test_default_headers(self): 82 | node = self._get_mock_node(NodeConfig("http", "localhost", 80)) 83 | req = self._get_request(node, "GET", "/") 84 | assert req.headers == { 85 | "connection": "keep-alive", 86 | "user-agent": DEFAULT_USER_AGENT, 87 | } 88 | 89 | def test_no_http_compression(self): 90 | node = self._get_mock_node( 91 | NodeConfig("http", "localhost", 80, http_compress=False) 92 | ) 93 | assert not node.config.http_compress 94 | assert "accept-encoding" not in node.headers 95 | 96 | node.perform_request("GET", "/") 97 | (req,), _ = node.session.send.call_args 98 | 99 | assert req.body is None 100 | assert "accept-encoding" not in req.headers 101 | assert "content-encoding" not in req.headers 102 | 103 | node.perform_request("GET", "/", body=b"hello, world!") 104 | (req,), _ = node.session.send.call_args 105 | 106 | assert req.body == b"hello, world!" 107 | assert "accept-encoding" not in req.headers 108 | assert "content-encoding" not in req.headers 109 | 110 | @pytest.mark.parametrize("empty_body", [None, b""]) 111 | def test_http_compression(self, empty_body): 112 | node = self._get_mock_node( 113 | NodeConfig("http", "localhost", 80, http_compress=True) 114 | ) 115 | assert node.config.http_compress is True 116 | assert node.headers["accept-encoding"] == "gzip" 117 | 118 | # 'content-encoding' shouldn't be set at a connection level. 119 | # Should be applied only if the request is sent with a body. 
120 | assert "content-encoding" not in node.headers 121 | 122 | node.perform_request("GET", "/", body=b"{}") 123 | (req,), _ = node.session.send.call_args 124 | 125 | assert gzip.decompress(req.body) == b"{}" 126 | assert req.headers["accept-encoding"] == "gzip" 127 | assert req.headers["content-encoding"] == "gzip" 128 | 129 | node.perform_request("GET", "/", body=empty_body) 130 | (req,), _ = node.session.send.call_args 131 | 132 | assert req.body is None 133 | assert req.headers["accept-encoding"] == "gzip" 134 | print(req.headers) 135 | assert "content-encoding" not in req.headers 136 | 137 | @pytest.mark.parametrize("request_timeout", [None, 15]) 138 | def test_timeout_override_default(self, request_timeout): 139 | node = self._get_mock_node( 140 | NodeConfig("http", "localhost", 80, request_timeout=request_timeout) 141 | ) 142 | assert node.config.request_timeout == request_timeout 143 | 144 | node.perform_request("GET", "/") 145 | _, kwargs = node.session.send.call_args 146 | 147 | assert kwargs["timeout"] == request_timeout 148 | 149 | node.perform_request("GET", "/", request_timeout=5) 150 | _, kwargs = node.session.send.call_args 151 | 152 | assert kwargs["timeout"] == 5 153 | 154 | node.perform_request("GET", "/", request_timeout=None) 155 | _, kwargs = node.session.send.call_args 156 | 157 | assert kwargs["timeout"] is None 158 | 159 | def test_uses_https_if_verify_certs_is_off(self): 160 | with warnings.catch_warnings(record=True) as w: 161 | RequestsHttpNode(NodeConfig("https", "localhost", 443, verify_certs=False)) 162 | 163 | assert 1 == len(w) 164 | assert ( 165 | "Connecting to 'https://localhost:443' using TLS with verify_certs=False is insecure" 166 | == str(w[0].message) 167 | ) 168 | 169 | def test_no_warn_when_uses_https_if_verify_certs_is_off(self): 170 | with warnings.catch_warnings(record=True) as w: 171 | RequestsHttpNode( 172 | NodeConfig( 173 | "https", "localhost", 443, verify_certs=False, ssl_show_warn=False 174 | ) 175 | ) 176 | 
assert 0 == len(w) 177 | 178 | def test_no_warning_when_using_ssl_context(self): 179 | ctx = ssl.create_default_context() 180 | with warnings.catch_warnings(record=True) as w: 181 | RequestsHttpNode(NodeConfig("https", "localhost", 443, ssl_context=ctx)) 182 | assert 0 == len(w) 183 | 184 | def test_ca_certs_with_verify_ssl_false_raises_error(self): 185 | with pytest.raises(ValueError) as e: 186 | RequestsHttpNode( 187 | NodeConfig( 188 | "https", "localhost", 443, ca_certs="/ca/certs", verify_certs=False 189 | ) 190 | ) 191 | assert str(e.value) == "You cannot use 'ca_certs' when 'verify_certs=False'" 192 | 193 | def test_client_cert_is_used_as_session_cert(self): 194 | conn = RequestsHttpNode( 195 | NodeConfig("https", "localhost", 443, client_cert="/client/cert") 196 | ) 197 | assert conn.session.cert == "/client/cert" 198 | 199 | conn = RequestsHttpNode( 200 | NodeConfig( 201 | "https", 202 | "localhost", 203 | 443, 204 | client_cert="/client/cert", 205 | client_key="/client/key", 206 | ) 207 | ) 208 | assert conn.session.cert == ("/client/cert", "/client/key") 209 | 210 | def test_ca_certs_is_used_as_session_verify(self): 211 | conn = RequestsHttpNode( 212 | NodeConfig("https", "localhost", 443, ca_certs="/ca/certs") 213 | ) 214 | assert conn.session.verify == "/ca/certs" 215 | 216 | def test_surrogatepass_into_bytes(self): 217 | data = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" 218 | node = self._get_mock_node( 219 | NodeConfig("http", "localhost", 80), response_body=data 220 | ) 221 | _, data = node.perform_request("GET", "/") 222 | assert b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" == data 223 | 224 | @pytest.mark.parametrize("_extras", [None, {}, {"requests.session.auth": None}]) 225 | def test_requests_no_session_auth(self, _extras): 226 | node = self._get_mock_node(NodeConfig("http", "localhost", 80, _extras=_extras)) 227 | assert node.session.auth is None 228 | 229 | def test_requests_custom_auth(self): 230 | auth = HTTPBasicAuth("username", "password") 231 | 
node = self._get_mock_node( 232 | NodeConfig("http", "localhost", 80, _extras={"requests.session.auth": auth}) 233 | ) 234 | assert node.session.auth is auth 235 | node.perform_request("GET", "/") 236 | (request,), _ = node.session.send.call_args 237 | assert request.headers["authorization"] == "Basic dXNlcm5hbWU6cGFzc3dvcmQ=" 238 | -------------------------------------------------------------------------------- /tests/node/test_http_urllib3.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import gzip 19 | import re 20 | import ssl 21 | import warnings 22 | from unittest.mock import Mock, patch 23 | 24 | import pytest 25 | import urllib3 26 | from urllib3.response import HTTPHeaderDict 27 | 28 | from elastic_transport import NodeConfig, TransportError, Urllib3HttpNode 29 | from elastic_transport._node._base import DEFAULT_USER_AGENT 30 | 31 | 32 | class TestUrllib3HttpNode: 33 | def _get_mock_node(self, node_config, response_body=b"{}"): 34 | node = Urllib3HttpNode(node_config) 35 | 36 | def _dummy_urlopen(*args, **kwargs): 37 | dummy_response = Mock() 38 | dummy_response.headers = HTTPHeaderDict({}) 39 | dummy_response.status = 200 40 | dummy_response.data = response_body 41 | _dummy_urlopen.call_args = (args, kwargs) 42 | return dummy_response 43 | 44 | node.pool.urlopen = _dummy_urlopen 45 | return node 46 | 47 | def test_close_pool(self): 48 | node = Urllib3HttpNode(NodeConfig("http", "localhost", 80)) 49 | with patch.object(node.pool, "close") as pool_close: 50 | node.close() 51 | pool_close.assert_called_with() 52 | 53 | def test_ssl_context(self): 54 | ctx = ssl.create_default_context() 55 | node = Urllib3HttpNode(NodeConfig("https", "localhost", 80, ssl_context=ctx)) 56 | assert len(node.pool.conn_kw.keys()) == 1 57 | assert isinstance(node.pool.conn_kw["ssl_context"], ssl.SSLContext) 58 | assert node.scheme == "https" 59 | 60 | def test_no_http_compression(self): 61 | node = self._get_mock_node( 62 | NodeConfig("http", "localhost", 80, http_compress=False) 63 | ) 64 | assert not node.config.http_compress 65 | assert "accept-encoding" not in node.headers 66 | 67 | node.perform_request("GET", "/") 68 | (_, _), kwargs = node.pool.urlopen.call_args 69 | 70 | assert kwargs["body"] is None 71 | assert "accept-encoding" not in kwargs["headers"] 72 | assert "content-encoding" not in kwargs["headers"] 73 | 74 | node.perform_request("GET", "/", body=b"hello, world!") 75 | (_, _), kwargs = node.pool.urlopen.call_args 76 | 77 | assert 
kwargs["body"] == b"hello, world!" 78 | assert "accept-encoding" not in kwargs["headers"] 79 | assert "content-encoding" not in kwargs["headers"] 80 | 81 | @pytest.mark.parametrize( 82 | ["request_target", "expected_target"], 83 | [ 84 | ("/_search", "/prefix/_search"), 85 | ("/?key=val", "/prefix/?key=val"), 86 | ("/_search?key=val/", "/prefix/_search?key=val/"), 87 | ], 88 | ) 89 | def test_path_prefix_applied_to_target(self, request_target, expected_target): 90 | node = self._get_mock_node( 91 | NodeConfig("http", "localhost", 80, path_prefix="/prefix") 92 | ) 93 | 94 | node.perform_request("GET", request_target) 95 | (_, target), _ = node.pool.urlopen.call_args 96 | 97 | assert target == expected_target 98 | 99 | @pytest.mark.parametrize("empty_body", [None, b""]) 100 | def test_http_compression(self, empty_body): 101 | node = self._get_mock_node( 102 | NodeConfig("http", "localhost", 80, http_compress=True) 103 | ) 104 | assert node.config.http_compress is True 105 | assert node.headers["accept-encoding"] == "gzip" 106 | 107 | # 'content-encoding' shouldn't be set at a connection level. 108 | # Should be applied only if the request is sent with a body. 
109 | assert "content-encoding" not in node.headers 110 | 111 | node.perform_request("GET", "/", body=b"{}") 112 | 113 | (_, _), kwargs = node.pool.urlopen.call_args 114 | 115 | body = kwargs["body"] 116 | assert gzip.decompress(body) == b"{}" 117 | assert kwargs["headers"]["accept-encoding"] == "gzip" 118 | assert kwargs["headers"]["content-encoding"] == "gzip" 119 | 120 | node.perform_request("GET", "/", body=empty_body) 121 | 122 | (_, _), kwargs = node.pool.urlopen.call_args 123 | 124 | assert kwargs["body"] is None 125 | assert kwargs["headers"]["accept-encoding"] == "gzip" 126 | assert "content-encoding" not in kwargs["headers"] 127 | 128 | def test_default_headers(self): 129 | node = self._get_mock_node(NodeConfig("http", "localhost", 80)) 130 | node.perform_request("GET", "/") 131 | (_, _), kwargs = node.pool.urlopen.call_args 132 | assert kwargs["headers"] == { 133 | "connection": "keep-alive", 134 | "user-agent": DEFAULT_USER_AGENT, 135 | } 136 | 137 | @pytest.mark.parametrize("request_timeout", [None, 15]) 138 | def test_timeout_override_default(self, request_timeout): 139 | node = Urllib3HttpNode( 140 | NodeConfig("http", "localhost", 80, request_timeout=request_timeout) 141 | ) 142 | assert node.config.request_timeout == request_timeout 143 | assert node.pool.timeout.total == request_timeout 144 | 145 | with patch.object(node.pool, "urlopen") as pool_urlopen: 146 | resp = Mock() 147 | resp.status = 200 148 | resp.headers = {} 149 | pool_urlopen.return_value = resp 150 | 151 | node.perform_request("GET", "/", request_timeout=request_timeout) 152 | _, kwargs = pool_urlopen.call_args 153 | 154 | assert kwargs["timeout"] == request_timeout 155 | 156 | def test_uses_https_if_verify_certs_is_off(self): 157 | with warnings.catch_warnings(record=True) as w: 158 | con = Urllib3HttpNode( 159 | NodeConfig("https", "localhost", 443, verify_certs=False) 160 | ) 161 | assert 1 == len(w) 162 | assert ( 163 | "Connecting to 'https://localhost:443' using TLS with 
verify_certs=False is insecure" 164 | == str(w[0].message) 165 | ) 166 | 167 | assert isinstance(con.pool, urllib3.HTTPSConnectionPool) 168 | 169 | def test_no_warn_when_uses_https_if_verify_certs_is_off(self): 170 | with warnings.catch_warnings(record=True) as w: 171 | con = Urllib3HttpNode( 172 | NodeConfig( 173 | "https", "localhost", 443, verify_certs=False, ssl_show_warn=False 174 | ) 175 | ) 176 | 177 | assert 0 == len(w) 178 | assert isinstance(con.pool, urllib3.HTTPSConnectionPool) 179 | 180 | def test_no_warning_when_using_ssl_context(self): 181 | ctx = ssl.create_default_context() 182 | with warnings.catch_warnings(record=True) as w: 183 | Urllib3HttpNode(NodeConfig("https", "localhost", 443, ssl_context=ctx)) 184 | assert 0 == len(w) 185 | 186 | def test_surrogatepass_into_bytes(self): 187 | data = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" 188 | con = self._get_mock_node( 189 | NodeConfig("http", "localhost", 80), response_body=data 190 | ) 191 | _, data = con.perform_request("GET", "/") 192 | assert b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" == data 193 | 194 | @pytest.mark.xfail 195 | @patch("elastic_transport._node._base.logger") 196 | def test_uncompressed_body_logged(self, logger): 197 | con = self._get_mock_node(connection_params={"http_compress": True}) 198 | con.perform_request("GET", "/", body=b'{"example": "body"}') 199 | 200 | assert 2 == logger.debug.call_count 201 | req, resp = logger.debug.call_args_list 202 | 203 | assert '> {"example": "body"}' == req[0][0] % req[0][1:] 204 | assert "< {}" == resp[0][0] % resp[0][1:] 205 | 206 | @pytest.mark.xfail 207 | @patch("elastic_transport._node._base.logger") 208 | def test_failed_request_logs(self, logger): 209 | conn = Urllib3HttpNode() 210 | 211 | with patch.object(conn.pool, "urlopen") as pool_urlopen: 212 | resp = Mock() 213 | resp.data = b'{"answer":42}' 214 | resp.status = 500 215 | resp.headers = {} 216 | pool_urlopen.return_value = resp 217 | 218 | with pytest.raises(TransportError) as e: 219 | 
conn.perform_request( 220 | "GET", 221 | "/?param=42", 222 | b"{}", 223 | ) 224 | 225 | assert repr(e.value) == "InternalServerError({'answer': 42}, status=500)" 226 | 227 | # log url and duration 228 | assert 1 == logger.warning.call_count 229 | assert re.match( 230 | r"^GET http://localhost/\?param=42 \[status:500 request:0.[0-9]{3}s\]", 231 | logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], 232 | ) 233 | assert 2 == logger.debug.call_count 234 | req, resp = logger.debug.call_args_list 235 | assert "> {}" == req[0][0] % req[0][1:] 236 | assert '< {"answer":42}' == resp[0][0] % resp[0][1:] 237 | -------------------------------------------------------------------------------- /tests/node/test_tls_versions.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import functools 19 | import socket 20 | import ssl 21 | 22 | import pytest 23 | 24 | from elastic_transport import ( 25 | AiohttpHttpNode, 26 | HttpxAsyncHttpNode, 27 | NodeConfig, 28 | RequestsHttpNode, 29 | TlsError, 30 | Urllib3HttpNode, 31 | ) 32 | from elastic_transport._compat import await_if_coro 33 | from elastic_transport.client_utils import url_to_node_config 34 | 35 | TLSv1_0_URL = "https://tls-v1-0.badssl.com:1010" 36 | TLSv1_1_URL = "https://tls-v1-1.badssl.com:1011" 37 | TLSv1_2_URL = "https://tls-v1-2.badssl.com:1012" 38 | 39 | node_classes = pytest.mark.parametrize( 40 | "node_class", 41 | [AiohttpHttpNode, Urllib3HttpNode, RequestsHttpNode, HttpxAsyncHttpNode], 42 | ) 43 | 44 | supported_version_params = [ 45 | (TLSv1_0_URL, ssl.PROTOCOL_TLSv1), 46 | (TLSv1_1_URL, ssl.PROTOCOL_TLSv1_1), 47 | (TLSv1_2_URL, ssl.PROTOCOL_TLSv1_2), 48 | (TLSv1_2_URL, None), 49 | ] 50 | unsupported_version_params = [ 51 | (TLSv1_0_URL, None), 52 | (TLSv1_1_URL, None), 53 | (TLSv1_0_URL, ssl.PROTOCOL_TLSv1_1), 54 | (TLSv1_0_URL, ssl.PROTOCOL_TLSv1_2), 55 | (TLSv1_1_URL, ssl.PROTOCOL_TLSv1_2), 56 | ] 57 | 58 | try: 59 | from ssl import TLSVersion 60 | except ImportError: 61 | pass 62 | else: 63 | supported_version_params.extend( 64 | [ 65 | (TLSv1_0_URL, TLSVersion.TLSv1), 66 | (TLSv1_1_URL, TLSVersion.TLSv1_1), 67 | (TLSv1_2_URL, TLSVersion.TLSv1_2), 68 | ] 69 | ) 70 | unsupported_version_params.extend( 71 | [ 72 | (TLSv1_0_URL, TLSVersion.TLSv1_1), 73 | (TLSv1_0_URL, TLSVersion.TLSv1_2), 74 | (TLSv1_1_URL, TLSVersion.TLSv1_2), 75 | (TLSv1_0_URL, TLSVersion.TLSv1_3), 76 | (TLSv1_1_URL, TLSVersion.TLSv1_3), 77 | (TLSv1_2_URL, TLSVersion.TLSv1_3), 78 | ] 79 | ) 80 | 81 | 82 | @functools.lru_cache() 83 | def tlsv1_1_supported() -> bool: 84 | # OpenSSL distributions on Ubuntu/Debian disable TLSv1.1 and before incorrectly. 85 | # So we try to detect that and skip tests when needed. 
86 | try: 87 | sock = socket.create_connection(("tls-v1-1.badssl.com", 1011)) 88 | ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_1) 89 | sock = ctx.wrap_socket(sock, server_hostname="tls-v1-1.badssl.com") 90 | sock.close() 91 | except ssl.SSLError: 92 | return False 93 | return True 94 | 95 | 96 | @node_classes 97 | @pytest.mark.parametrize( 98 | ["url", "ssl_version"], 99 | supported_version_params, 100 | ) 101 | @pytest.mark.asyncio 102 | async def test_supported_tls_versions(node_class, url: str, ssl_version: int): 103 | if url in (TLSv1_0_URL, TLSv1_1_URL) and not tlsv1_1_supported(): 104 | pytest.skip("TLSv1.1 isn't supported by this OpenSSL distribution") 105 | node_config = url_to_node_config(url).replace(ssl_version=ssl_version) 106 | node = node_class(node_config) 107 | 108 | resp, _ = await await_if_coro(node.perform_request("GET", "/")) 109 | assert resp.status == 200 110 | 111 | 112 | @node_classes 113 | @pytest.mark.parametrize( 114 | ["url", "ssl_version"], 115 | unsupported_version_params, 116 | ) 117 | @pytest.mark.asyncio 118 | async def test_unsupported_tls_versions(node_class, url: str, ssl_version: int): 119 | node_config = url_to_node_config(url).replace(ssl_version=ssl_version) 120 | node = node_class(node_config) 121 | 122 | with pytest.raises(TlsError) as e: 123 | await await_if_coro(node.perform_request("GET", "/")) 124 | assert "unsupported protocol" in str(e.value) or "handshake failure" in str(e.value) 125 | 126 | 127 | @node_classes 128 | @pytest.mark.parametrize("ssl_version", [0, "TLSv1", object()]) 129 | def test_ssl_version_value_error(node_class, ssl_version): 130 | with pytest.raises(ValueError) as e: 131 | node_class(NodeConfig("https", "localhost", 9200, ssl_version=ssl_version)) 132 | assert str(e.value) == ( 133 | f"Unsupported value for 'ssl_version': {ssl_version!r}. 
Must be either " 134 | "'ssl.PROTOCOL_TLSvX' or 'ssl.TLSVersion.TLSvX'" 135 | ) 136 | -------------------------------------------------------------------------------- /tests/node/test_urllib3_chain_certs.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import sys 19 | import warnings 20 | 21 | import pytest 22 | 23 | from elastic_transport import NodeConfig, RequestsHttpNode, TlsError, Urllib3HttpNode 24 | 25 | requires_ssl_assert_fingerprint_in_chain = pytest.mark.skipif( 26 | sys.version_info < (3, 10) or sys.implementation.name != "cpython", 27 | reason="Requires CPython 3.10+", 28 | ) 29 | 30 | 31 | @requires_ssl_assert_fingerprint_in_chain 32 | @pytest.mark.parametrize("node_cls", [Urllib3HttpNode, RequestsHttpNode]) 33 | def test_ssl_assert_fingerprint_invalid_length(node_cls): 34 | with pytest.raises(ValueError) as e: 35 | node_cls( 36 | NodeConfig( 37 | "https", 38 | "httpbin.org", 39 | 443, 40 | ssl_assert_fingerprint="0000", 41 | ) 42 | ) 43 | 44 | assert ( 45 | str(e.value) 46 | == "Fingerprint of invalid length '4', should be one of '32', '40', '64'" 47 | ) 48 | 49 | 50 | @requires_ssl_assert_fingerprint_in_chain 51 | @pytest.mark.parametrize("node_cls", [Urllib3HttpNode, RequestsHttpNode]) 52 | @pytest.mark.parametrize( 53 | "ssl_assert_fingerprint", 54 | [ 55 | "8ecde6884f3d87b1125ba31ac3fcb13d7016de7f57cc904fe1cb97c6ae98196e", 56 | "8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e", 57 | "8ECDE6884F3D87B1125BA31AC3FCB13D7016DE7F57CC904FE1CB97C6AE98196E", 58 | ], 59 | ) 60 | def test_assert_fingerprint_in_cert_chain(node_cls, ssl_assert_fingerprint): 61 | with warnings.catch_warnings(record=True) as w: 62 | node = node_cls( 63 | NodeConfig( 64 | "https", 65 | "httpbin.org", 66 | 443, 67 | ssl_assert_fingerprint=ssl_assert_fingerprint, 68 | ) 69 | ) 70 | meta, _ = node.perform_request("GET", "/") 71 | assert meta.status == 200 72 | 73 | assert w == [] 74 | 75 | 76 | @requires_ssl_assert_fingerprint_in_chain 77 | @pytest.mark.parametrize("node_cls", [Urllib3HttpNode, RequestsHttpNode]) 78 | def test_assert_fingerprint_in_cert_chain_failure(node_cls): 79 | node = node_cls( 80 | NodeConfig( 81 | "https", 82 | "httpbin.org", 83 | 443, 84 | 
ssl_assert_fingerprint="0" * 64, 85 | ) 86 | ) 87 | 88 | with pytest.raises(TlsError) as e: 89 | node.perform_request("GET", "/") 90 | 91 | err = str(e.value) 92 | assert "Fingerprints did not match." in err 93 | # This is the bad value we "expected" 94 | assert ( 95 | 'Expected "0000000000000000000000000000000000000000000000000000000000000000",' 96 | in err 97 | ) 98 | # This is the root CA for httpbin.org with a leading comma to denote more than one cert was listed. 99 | assert ', "8ecde6884f3d87b1125ba31ac3fcb13d7016de7f57cc904fe1cb97c6ae98196e"' in err 100 | -------------------------------------------------------------------------------- /tests/test_client_utils.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | from platform import python_version 19 | 20 | import pytest 21 | 22 | from elastic_transport import Urllib3HttpNode, __version__ 23 | from elastic_transport.client_utils import ( 24 | basic_auth_to_header, 25 | client_meta_version, 26 | create_user_agent, 27 | parse_cloud_id, 28 | url_to_node_config, 29 | ) 30 | 31 | 32 | def test_create_user_agent(): 33 | assert create_user_agent( 34 | "enterprise-search-python", "7.10.0" 35 | ) == "enterprise-search-python/7.10.0 (Python/{}; elastic-transport/{})".format( 36 | python_version(), 37 | __version__, 38 | ) 39 | 40 | 41 | @pytest.mark.parametrize( 42 | ["version", "meta_version"], 43 | [ 44 | ("7.10.0", "7.10.0"), 45 | ("7.10.0-alpha1", "7.10.0p"), 46 | ("3.9.0b1", "3.9.0p"), 47 | ("3.9.pre1", "3.9p"), 48 | ("3.7.4.post1", "3.7.4"), 49 | ("3.7.4.post", "3.7.4"), 50 | ], 51 | ) 52 | def test_client_meta_version(version, meta_version): 53 | assert client_meta_version(version) == meta_version 54 | 55 | 56 | def test_parse_cloud_id(): 57 | cloud_id = parse_cloud_id( 58 | "cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVk" 59 | "MWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==" 60 | ) 61 | assert cloud_id.cluster_name == "cluster" 62 | assert cloud_id.es_address == ( 63 | "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io", 64 | 443, 65 | ) 66 | assert cloud_id.kibana_address == ( 67 | "4fa8821e75634032bed1cf22110e2f96.us-east-1.aws.found.io", 68 | 443, 69 | ) 70 | 71 | 72 | @pytest.mark.parametrize( 73 | ["cloud_id", "port"], 74 | [ 75 | ( 76 | ":dXMtZWFzdC0xLmF3cy5mb3VuZC5pbzo5MjQzJDRmYTg4MjFlNzU2MzQwMzJiZ" 77 | "WQxY2YyMjExMGUyZjk3JDRmYTg4MjFlNzU2MzQwMzJiZWQxY2YyMjExMGUyZjk2", 78 | 9243, 79 | ), 80 | ( 81 | ":dXMtZWFzdC0xLmF3cy5mb3VuZC5pbzo0NDMkNGZhODgyMWU3NTYzNDAzMmJlZD" 82 | "FjZjIyMTEwZTJmOTckNGZhODgyMWU3NTYzNDAzMmJlZDFjZjIyMTEwZTJmOTY=", 83 | 443, 84 | ), 85 | ], 86 | ) 87 | def test_parse_cloud_id_ports(cloud_id, port): 88 | cloud_id = parse_cloud_id(cloud_id) 89 | 
assert cloud_id.cluster_name == "" 90 | assert cloud_id.es_address == ( 91 | "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io", 92 | port, 93 | ) 94 | assert cloud_id.kibana_address == ( 95 | "4fa8821e75634032bed1cf22110e2f96.us-east-1.aws.found.io", 96 | port, 97 | ) 98 | 99 | 100 | @pytest.mark.parametrize( 101 | "cloud_id", 102 | [ 103 | "cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ=", 104 | "cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Nw==", 105 | ], 106 | ) 107 | def test_parse_cloud_id_no_kibana(cloud_id): 108 | cloud_id = parse_cloud_id(cloud_id) 109 | assert cloud_id.cluster_name == "cluster" 110 | assert cloud_id.es_address == ( 111 | "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io", 112 | 443, 113 | ) 114 | assert cloud_id.kibana_address is None 115 | 116 | 117 | @pytest.mark.parametrize( 118 | "cloud_id", 119 | [ 120 | "cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbzo0NDMkJA==", 121 | "cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbzo0NDM=", 122 | ], 123 | ) 124 | def test_parse_cloud_id_no_es(cloud_id): 125 | cloud_id = parse_cloud_id(cloud_id) 126 | assert cloud_id.cluster_name == "cluster" 127 | assert cloud_id.es_address is None 128 | assert cloud_id.kibana_address is None 129 | 130 | 131 | @pytest.mark.parametrize( 132 | "cloud_id", 133 | [ 134 | "cluster:", 135 | "dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ=", 136 | "cluster:ā", 137 | ], 138 | ) 139 | def test_invalid_cloud_id(cloud_id): 140 | with pytest.raises(ValueError) as e: 141 | parse_cloud_id(cloud_id) 142 | assert str(e.value) == "Cloud ID is not properly formatted" 143 | 144 | 145 | @pytest.mark.parametrize( 146 | ["url", "node_base_url", "path_prefix"], 147 | [ 148 | ("https://localhost", "https://localhost:443", ""), 149 | ("http://localhost:3002", "http://localhost:3002", ""), 150 | ("http://127.0.0.1:3002", "http://127.0.0.1:3002", ""), 151 | ("http://127.0.0.1:3002/", 
"http://127.0.0.1:3002", ""), 152 | ( 153 | "http://127.0.0.1:3002/path-prefix", 154 | "http://127.0.0.1:3002/path-prefix", 155 | "/path-prefix", 156 | ), 157 | ( 158 | "http://localhost:3002/url-prefix/", 159 | "http://localhost:3002/url-prefix", 160 | "/url-prefix", 161 | ), 162 | ( 163 | "https://localhost/url-prefix", 164 | "https://localhost:443/url-prefix", 165 | "/url-prefix", 166 | ), 167 | ("http://[::1]:3002/url-prefix", "http://[::1]:3002/url-prefix", "/url-prefix"), 168 | ("https://[::1]:0/", "https://[::1]:0", ""), 169 | ], 170 | ) 171 | def test_url_to_node_config(url, node_base_url, path_prefix): 172 | node_config = url_to_node_config(url) 173 | assert Urllib3HttpNode(node_config).base_url == node_base_url 174 | 175 | assert "[" not in node_config.host 176 | assert isinstance(node_config.port, int) 177 | assert node_config.path_prefix == path_prefix 178 | assert url.lower().startswith(node_config.scheme) 179 | 180 | 181 | @pytest.mark.parametrize( 182 | "url", 183 | [ 184 | "localhost:0", 185 | "[::1]:3002/url-prefix", 186 | "localhost", 187 | "localhost/", 188 | "localhost:3", 189 | "[::1]/url-prefix/", 190 | "[::1]", 191 | "[::1]:3002", 192 | "http://localhost", 193 | "localhost/url-prefix/", 194 | "localhost:3002/url-prefix", 195 | "http://localhost/url-prefix", 196 | ], 197 | ) 198 | def test_url_to_node_config_error_missing_component(url): 199 | with pytest.raises(ValueError) as e: 200 | url_to_node_config(url) 201 | assert ( 202 | str(e.value) 203 | == "URL must include a 'scheme', 'host', and 'port' component (ie 'https://localhost:9200')" 204 | ) 205 | 206 | 207 | @pytest.mark.parametrize( 208 | ["url", "port"], 209 | [ 210 | ("http://127.0.0.1", 80), 211 | ("http://[::1]", 80), 212 | ("HTTPS://localhost", 443), 213 | ("https://localhost/url-prefix", 443), 214 | ], 215 | ) 216 | def test_url_to_node_config_use_default_ports_for_scheme(url, port): 217 | node_config = url_to_node_config(url, use_default_ports_for_scheme=True) 218 | assert 
node_config.port == port 219 | 220 | 221 | def test_url_with_auth_into_authorization(): 222 | node_config = url_to_node_config("http://localhost:9200") 223 | assert node_config.headers == {} 224 | 225 | node_config = url_to_node_config("http://@localhost:9200") 226 | assert node_config.headers == {} 227 | 228 | node_config = url_to_node_config("http://user:pass@localhost:9200") 229 | assert node_config.headers == {"Authorization": "Basic dXNlcjpwYXNz"} 230 | 231 | node_config = url_to_node_config("http://user:@localhost:9200") 232 | assert node_config.headers == {"Authorization": "Basic dXNlcjo="} 233 | 234 | node_config = url_to_node_config("http://user@localhost:9200") 235 | assert node_config.headers == {"Authorization": "Basic dXNlcjo="} 236 | 237 | node_config = url_to_node_config("http://me@example.com:password@localhost:9200") 238 | assert node_config.headers == { 239 | "Authorization": "Basic bWVAZXhhbXBsZS5jb206cGFzc3dvcmQ=" 240 | } 241 | 242 | # ensure username and password are passed to basic auth unmodified 243 | basic_auth = basic_auth_to_header(("user:@", "@password")) 244 | node_config = url_to_node_config("http://user:@:@password@localhost:9200") 245 | assert node_config.headers == {"Authorization": basic_auth} 246 | node_config = url_to_node_config("http://user%3A%40:%40password@localhost:9200") 247 | assert node_config.headers == {"Authorization": basic_auth} 248 | 249 | 250 | @pytest.mark.parametrize( 251 | "basic_auth", ["", b"", ("",), ("", 1), (1, ""), ["", ""], False, object()] 252 | ) 253 | def test_basic_auth_errors(basic_auth): 254 | with pytest.raises(ValueError) as e: 255 | basic_auth_to_header(basic_auth) 256 | assert ( 257 | str(e.value) 258 | == "'basic_auth' must be a 2-tuple of str/bytes (username, password)" 259 | ) 260 | -------------------------------------------------------------------------------- /tests/test_exceptions.py: -------------------------------------------------------------------------------- 1 | # Licensed to 
Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import pytest 19 | 20 | from elastic_transport import ApiError, ApiResponseMeta, TransportError 21 | 22 | 23 | def test_exception_repr_and_str(): 24 | e = TransportError({"errors": [{"status": 500}]}) 25 | assert repr(e) == "TransportError({'errors': [{'status': 500}]})" 26 | assert str(e) == "{'errors': [{'status': 500}]}" 27 | 28 | e = TransportError("error", errors=(ValueError("value error"),)) 29 | assert repr(e) == "TransportError('error', errors={!r})".format( 30 | e.errors, 31 | ) 32 | assert str(e) == "error" 33 | 34 | 35 | def test_api_error_status_repr(): 36 | e = ApiError( 37 | {"errors": [{"status": 500}]}, 38 | body={}, 39 | meta=ApiResponseMeta( 40 | status=500, http_version="1.1", headers={}, duration=0.0, node=None 41 | ), 42 | ) 43 | assert ( 44 | repr(e) 45 | == "ApiError({'errors': [{'status': 500}]}, meta=ApiResponseMeta(status=500, http_version='1.1', headers={}, duration=0.0, node=None), body={})" 46 | ) 47 | assert str(e) == "[500] {'errors': [{'status': 500}]}" 48 | 49 | 50 | def test_api_error_is_not_transport_error(): 51 | with pytest.raises(ApiError): 52 | try: 53 | raise ApiError("", None, None) 54 | except 
TransportError: 55 | pass 56 | 57 | 58 | def test_transport_error_is_not_api_error(): 59 | with pytest.raises(TransportError): 60 | try: 61 | raise TransportError( 62 | "", 63 | ) 64 | except ApiError: 65 | pass 66 | -------------------------------------------------------------------------------- /tests/test_httpbin.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import dataclasses 19 | import json 20 | 21 | import pytest 22 | 23 | from elastic_transport import Transport 24 | from elastic_transport._node._base import DEFAULT_USER_AGENT 25 | from elastic_transport._transport import NODE_CLASS_NAMES 26 | 27 | 28 | @pytest.mark.parametrize("node_class", ["urllib3", "requests"]) 29 | def test_simple_request(node_class, httpbin_node_config): 30 | t = Transport([httpbin_node_config], node_class=node_class) 31 | 32 | resp, data = t.perform_request( 33 | "GET", 34 | "/anything?key[]=1&key[]=2&q1&q2=", 35 | headers={"Custom": "headeR", "content-type": "application/json"}, 36 | body={"JSON": "body"}, 37 | ) 38 | assert resp.status == 200 39 | assert data["method"] == "GET" 40 | assert data["url"] == "https://httpbin.org/anything?key[]=1&key[]=2&q1&q2=" 41 | 42 | # httpbin makes no-value query params into '' 43 | assert data["args"] == { 44 | "key[]": ["1", "2"], 45 | "q1": "", 46 | "q2": "", 47 | } 48 | assert data["data"] == '{"JSON":"body"}' 49 | assert data["json"] == {"JSON": "body"} 50 | 51 | request_headers = { 52 | "Content-Type": "application/json", 53 | "Content-Length": "15", 54 | "Custom": "headeR", 55 | "Host": "httpbin.org", 56 | } 57 | assert all(v == data["headers"][k] for k, v in request_headers.items()) 58 | 59 | 60 | @pytest.mark.parametrize("node_class", ["urllib3", "requests"]) 61 | def test_node(node_class, httpbin_node_config): 62 | def new_node(**kwargs): 63 | return NODE_CLASS_NAMES[node_class]( 64 | dataclasses.replace(httpbin_node_config, **kwargs) 65 | ) 66 | 67 | node = new_node() 68 | resp, data = node.perform_request("GET", "/anything") 69 | assert resp.status == 200 70 | parsed = parse_httpbin(data) 71 | assert parsed == { 72 | "headers": { 73 | "Accept-Encoding": "identity", 74 | "Host": "httpbin.org", 75 | "User-Agent": DEFAULT_USER_AGENT, 76 | }, 77 | "method": "GET", 78 | "url": "https://httpbin.org/anything", 79 | } 80 | 81 | node = new_node(http_compress=True) 82 | resp, data = 
node.perform_request("GET", "/anything") 83 | assert resp.status == 200 84 | parsed = parse_httpbin(data) 85 | assert parsed == { 86 | "headers": { 87 | "Accept-Encoding": "gzip", 88 | "Host": "httpbin.org", 89 | "User-Agent": DEFAULT_USER_AGENT, 90 | }, 91 | "method": "GET", 92 | "url": "https://httpbin.org/anything", 93 | } 94 | 95 | resp, data = node.perform_request("GET", "/anything", body=b"hello, world!") 96 | assert resp.status == 200 97 | parsed = parse_httpbin(data) 98 | assert parsed == { 99 | "headers": { 100 | "Accept-Encoding": "gzip", 101 | "Content-Encoding": "gzip", 102 | "Content-Length": "33", 103 | "Host": "httpbin.org", 104 | "User-Agent": DEFAULT_USER_AGENT, 105 | }, 106 | "method": "GET", 107 | "url": "https://httpbin.org/anything", 108 | } 109 | 110 | resp, data = node.perform_request( 111 | "POST", 112 | "/anything", 113 | body=json.dumps({"key": "value"}).encode("utf-8"), 114 | headers={"content-type": "application/json"}, 115 | ) 116 | assert resp.status == 200 117 | parsed = parse_httpbin(data) 118 | assert parsed == { 119 | "headers": { 120 | "Accept-Encoding": "gzip", 121 | "Content-Encoding": "gzip", 122 | "Content-Length": "36", 123 | "Content-Type": "application/json", 124 | "Host": "httpbin.org", 125 | "User-Agent": DEFAULT_USER_AGENT, 126 | }, 127 | "method": "POST", 128 | "url": "https://httpbin.org/anything", 129 | } 130 | 131 | 132 | def parse_httpbin(value): 133 | """Parses a response from httpbin.org/anything by stripping all the variable things""" 134 | if isinstance(value, bytes): 135 | value = json.loads(value) 136 | else: 137 | value = value.copy() 138 | value.pop("origin", None) 139 | value.pop("data", None) 140 | value["headers"].pop("X-Amzn-Trace-Id", None) 141 | value = {k: v for k, v in value.items() if v} 142 | return value 143 | -------------------------------------------------------------------------------- /tests/test_httpserver.py: -------------------------------------------------------------------------------- 1 
| # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import warnings 19 | 20 | import pytest 21 | 22 | from elastic_transport import Transport 23 | 24 | 25 | @pytest.mark.parametrize("node_class", ["urllib3", "requests"]) 26 | def test_simple_request(node_class, https_server_ip_node_config): 27 | with warnings.catch_warnings(): 28 | warnings.simplefilter("error") 29 | 30 | t = Transport([https_server_ip_node_config], node_class=node_class) 31 | 32 | resp, data = t.perform_request("GET", "/foobar") 33 | assert resp.status == 200 34 | assert data == {"foo": "bar"} 35 | -------------------------------------------------------------------------------- /tests/test_logging.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | import io 19 | import logging 20 | 21 | import pytest 22 | 23 | from elastic_transport import ( 24 | AiohttpHttpNode, 25 | ConnectionError, 26 | HttpHeaders, 27 | RequestsHttpNode, 28 | Urllib3HttpNode, 29 | debug_logging, 30 | ) 31 | from elastic_transport._compat import await_if_coro 32 | from elastic_transport._node._base import DEFAULT_USER_AGENT 33 | 34 | node_class = pytest.mark.parametrize( 35 | "node_class", [Urllib3HttpNode, RequestsHttpNode, AiohttpHttpNode] 36 | ) 37 | 38 | 39 | @node_class 40 | @pytest.mark.asyncio 41 | async def test_debug_logging(node_class, httpbin_node_config): 42 | debug_logging() 43 | 44 | stream = io.StringIO() 45 | logging.getLogger("elastic_transport.node").addHandler( 46 | logging.StreamHandler(stream) 47 | ) 48 | 49 | node = node_class(httpbin_node_config) 50 | await await_if_coro( 51 | node.perform_request( 52 | "GET", 53 | "/anything", 54 | body=b'{"key":"value"}', 55 | headers=HttpHeaders({"Content-Type": "application/json"}), 56 | ) 57 | ) 58 | 59 | print(node_class) 60 | print(stream.getvalue()) 61 | 62 | lines = stream.getvalue().split("\n") 63 | print(lines) 64 | for line in [ 65 | "> GET /anything HTTP/1.1", 66 | "> Connection: keep-alive", 67 | "> Content-Type: application/json", 68 | f"> User-Agent: {DEFAULT_USER_AGENT}", 69 | '> {"key":"value"}', 70 | "< HTTP/1.1 200 OK", 71 | "< Access-Control-Allow-Credentials: true", 72 | "< Access-Control-Allow-Origin: *", 73 | "< Connection: close", 74 | "< Content-Type: application/json", 75 | "< {", 76 | ' "args": {}, 
', 77 | ' "data": "{\\"key\\":\\"value\\"}", ', 78 | ' "files": {}, ', 79 | ' "form": {}, ', 80 | ' "headers": {', 81 | ' "Content-Type": "application/json", ', 82 | ' "Host": "httpbin.org", ', 83 | f' "User-Agent": "{DEFAULT_USER_AGENT}", ', 84 | " }, ", 85 | ' "json": {', 86 | ' "key": "value"', 87 | " }, ", 88 | ' "method": "GET", ', 89 | ' "url": "https://httpbin.org/anything"', 90 | "}", 91 | ]: 92 | assert line in lines 93 | 94 | 95 | @node_class 96 | @pytest.mark.asyncio 97 | async def test_debug_logging_uncompressed_body(httpbin_node_config, node_class): 98 | debug_logging() 99 | stream = io.StringIO() 100 | logging.getLogger("elastic_transport.node").addHandler( 101 | logging.StreamHandler(stream) 102 | ) 103 | 104 | node = node_class(httpbin_node_config.replace(http_compress=True)) 105 | await await_if_coro( 106 | node.perform_request( 107 | "GET", 108 | "/anything", 109 | body=b'{"key":"value"}', 110 | headers=HttpHeaders({"Content-Type": "application/json"}), 111 | ) 112 | ) 113 | 114 | lines = stream.getvalue().split("\n") 115 | print(lines) 116 | assert '> {"key":"value"}' in lines 117 | 118 | 119 | @node_class 120 | @pytest.mark.asyncio 121 | async def test_debug_logging_no_body(httpbin_node_config, node_class): 122 | debug_logging() 123 | stream = io.StringIO() 124 | logging.getLogger("elastic_transport.node").addHandler( 125 | logging.StreamHandler(stream) 126 | ) 127 | 128 | node = node_class(httpbin_node_config) 129 | await await_if_coro( 130 | node.perform_request( 131 | "HEAD", 132 | "/anything", 133 | ) 134 | ) 135 | 136 | lines = stream.getvalue().split("\n")[:-3] 137 | assert "> HEAD /anything HTTP/1.1" in lines 138 | 139 | 140 | @node_class 141 | @pytest.mark.asyncio 142 | async def test_debug_logging_error(httpbin_node_config, node_class): 143 | debug_logging() 144 | stream = io.StringIO() 145 | logging.getLogger("elastic_transport.node").addHandler( 146 | logging.StreamHandler(stream) 147 | ) 148 | 149 | node = 
node_class(httpbin_node_config.replace(host="not.a.valid.host")) 150 | with pytest.raises(ConnectionError): 151 | await await_if_coro( 152 | node.perform_request( 153 | "HEAD", 154 | "/anything", 155 | ) 156 | ) 157 | 158 | lines = stream.getvalue().split("\n")[:-3] 159 | assert "> HEAD /anything HTTP/?.?" in lines 160 | assert all(not line.startswith("<") for line in lines) 161 | -------------------------------------------------------------------------------- /tests/test_models.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import dataclasses 19 | 20 | import pytest 21 | 22 | from elastic_transport import HttpHeaders, NodeConfig 23 | 24 | 25 | def test_empty_node_config(): 26 | config = NodeConfig(scheme="https", host="localhost", port=9200) 27 | 28 | assert dataclasses.asdict(config) == { 29 | "ca_certs": None, 30 | "client_cert": None, 31 | "client_key": None, 32 | "connections_per_node": 10, 33 | "headers": {}, 34 | "host": "localhost", 35 | "http_compress": False, 36 | "path_prefix": "", 37 | "port": 9200, 38 | "request_timeout": 10, 39 | "scheme": "https", 40 | "ssl_assert_fingerprint": None, 41 | "ssl_assert_hostname": None, 42 | "ssl_context": None, 43 | "ssl_show_warn": True, 44 | "ssl_version": None, 45 | "verify_certs": True, 46 | "_extras": {}, 47 | } 48 | 49 | # Default HttpHeaders should be empty and frozen 50 | assert len(config.headers) == 0 51 | assert config.headers.frozen 52 | 53 | 54 | def test_headers_frozen(): 55 | headers = HttpHeaders() 56 | assert headers.frozen is False 57 | 58 | headers["key"] = "value" 59 | headers.pop("Key") 60 | 61 | headers["key"] = "value" 62 | assert headers.freeze() is headers 63 | assert headers.frozen is True 64 | 65 | with pytest.raises(ValueError) as e: 66 | headers["key"] = "value" 67 | assert str(e.value) == "Can't modify headers that have been frozen" 68 | 69 | with pytest.raises(ValueError) as e: 70 | headers.pop("key") 71 | assert str(e.value) == "Can't modify headers that have been frozen" 72 | assert len(headers) == 1 73 | assert headers == {"key": "value"} 74 | 75 | assert headers.copy() is not headers 76 | assert headers.copy().frozen is False 77 | 78 | 79 | @pytest.mark.parametrize( 80 | ["headers", "string"], 81 | [ 82 | ({"field": "value"}, "{'field': 'value'}"), 83 | ({"Authorization": "value"}, "{'Authorization': ''}"), 84 | ({"authorization": "Basic"}, "{'authorization': ''}"), 85 | ({"authorization": "Basic abc"}, "{'authorization': 'Basic '}"), 86 | ({"authorization": "ApiKey abc"}, "{'authorization': 
'ApiKey '}"), 87 | ({"authorization": "Bearer abc"}, "{'authorization': 'Bearer '}"), 88 | ], 89 | ) 90 | def test_headers_hide_auth(headers, string): 91 | assert repr(HttpHeaders(headers)) == string 92 | -------------------------------------------------------------------------------- /tests/test_node_pool.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | 19 | import random 20 | import threading 21 | import time 22 | 23 | import pytest 24 | 25 | from elastic_transport import NodeConfig, NodePool, Urllib3HttpNode 26 | 27 | 28 | def test_node_pool_repr(): 29 | node_configs = [NodeConfig("http", "localhost", x) for x in range(5)] 30 | random.shuffle(node_configs) 31 | pool = NodePool(node_configs, node_class=Urllib3HttpNode) 32 | assert repr(pool) == "" 33 | 34 | 35 | def test_node_pool_empty_error(): 36 | with pytest.raises(ValueError) as e: 37 | NodePool([], node_class=Urllib3HttpNode) 38 | assert str(e.value) == "Must specify at least one NodeConfig" 39 | 40 | 41 | def test_node_pool_duplicate_node_configs(): 42 | node_config = NodeConfig("http", "localhost", 80) 43 | with pytest.raises(ValueError) as e: 44 | NodePool([node_config, node_config], node_class=Urllib3HttpNode) 45 | assert str(e.value) == "Cannot use duplicate NodeConfigs within a NodePool" 46 | 47 | 48 | def test_node_pool_get(): 49 | node_config = NodeConfig("http", "localhost", 80) 50 | pool = NodePool([node_config], node_class=Urllib3HttpNode) 51 | assert pool.get().config is node_config 52 | 53 | 54 | def test_node_pool_remove_seed_node(): 55 | node_config = NodeConfig("http", "localhost", 80) 56 | pool = NodePool([node_config], node_class=Urllib3HttpNode) 57 | 58 | pool.remove(node_config) # Calling .remove() on a seed node is a no-op 59 | assert len(pool._removed_nodes) == 0 60 | 61 | 62 | def test_node_pool_add_and_remove_non_seed_node(): 63 | node_config1 = NodeConfig("http", "localhost", 80) 64 | node_config2 = NodeConfig("http", "localhost", 81) 65 | pool = NodePool([node_config1], node_class=Urllib3HttpNode) 66 | 67 | pool.add(node_config2) 68 | assert any(pool.get().config is node_config2 for _ in range(10)) 69 | 70 | pool.remove(node_config2) 71 | assert len(pool._removed_nodes) == 1 72 | 73 | # We never return a 'removed' node 74 | assert all(pool.get().config is node_config1 for _ in range(10)) 75 | 76 | # We add it back, now we 
should .get() the node again. 77 | pool.add(node_config2) 78 | assert any(pool.get().config is node_config2 for _ in range(10)) 79 | 80 | 81 | def test_added_node_is_used_first(): 82 | node_config1 = NodeConfig("http", "localhost", 80) 83 | node_config2 = NodeConfig("http", "localhost", 81) 84 | pool = NodePool([node_config1], node_class=Urllib3HttpNode) 85 | 86 | node1 = pool.get() 87 | pool.mark_dead(node1) 88 | 89 | pool.add(node_config2) 90 | assert pool.get().config is node_config2 91 | 92 | 93 | def test_round_robin_selector(): 94 | node_configs = [NodeConfig("http", "localhost", x) for x in range(5)] 95 | random.shuffle(node_configs) 96 | pool = NodePool( 97 | node_configs, node_class=Urllib3HttpNode, node_selector_class="round_robin" 98 | ) 99 | 100 | get_node_configs = [pool.get() for _ in node_configs] 101 | for node_config in get_node_configs: 102 | assert pool.get() is node_config 103 | 104 | 105 | @pytest.mark.parametrize( 106 | "node_configs", 107 | [ 108 | [NodeConfig("http", "localhost", 80)], 109 | [NodeConfig("http", "localhost", 80), NodeConfig("http", "localhost", 81)], 110 | ], 111 | ) 112 | def test_all_dead_nodes_still_gets_node(node_configs): 113 | pool = NodePool(node_configs, node_class=Urllib3HttpNode) 114 | 115 | for _ in node_configs: 116 | pool.mark_dead(pool.get()) 117 | assert len(pool._alive_nodes) == 0 118 | 119 | node = pool.get() 120 | assert node.config in node_configs 121 | assert len(pool._alive_nodes) < 2 122 | 123 | 124 | def test_unknown_selector_class(): 125 | with pytest.raises(ValueError) as e: 126 | NodePool( 127 | [NodeConfig("http", "localhost", 80)], 128 | node_class=Urllib3HttpNode, 129 | node_selector_class="unknown", 130 | ) 131 | assert str(e.value) == ( 132 | "Unknown option for selector_class: 'unknown'. 
" 133 | "Available options are: 'random', 'round_robin'" 134 | ) 135 | 136 | 137 | def test_disable_randomize_nodes(): 138 | node_configs = [NodeConfig("http", "localhost", x) for x in range(100)] 139 | pool = NodePool(node_configs, node_class=Urllib3HttpNode, randomize_nodes=False) 140 | 141 | assert [pool.get().config for _ in node_configs] == node_configs 142 | 143 | 144 | def test_nodes_randomized_by_default(): 145 | node_configs = [NodeConfig("http", "localhost", x) for x in range(100)] 146 | pool = NodePool(node_configs, node_class=Urllib3HttpNode) 147 | 148 | assert [pool.get().config for _ in node_configs] != node_configs 149 | 150 | 151 | def test_dead_nodes_are_skipped(): 152 | node_configs = [NodeConfig("http", "localhost", x) for x in range(2)] 153 | pool = NodePool(node_configs, node_class=Urllib3HttpNode) 154 | dead_node = pool.get() 155 | pool.mark_dead(dead_node) 156 | 157 | alive_node = pool.get() 158 | assert dead_node.config != alive_node.config 159 | 160 | assert all([pool.get().config == alive_node.config for _ in range(10)]) 161 | 162 | 163 | def test_dead_node_backoff_calculation(): 164 | node_configs = [NodeConfig("http", "localhost", 9200)] 165 | pool = NodePool( 166 | node_configs, 167 | node_class=Urllib3HttpNode, 168 | dead_node_backoff_factor=0.5, 169 | max_dead_node_backoff=3.5, 170 | ) 171 | node = pool.get() 172 | pool.mark_dead(node, _now=0) 173 | 174 | assert pool._dead_consecutive_failures == {node.config: 1} 175 | assert pool._dead_nodes.queue == [(0.5, node)] 176 | 177 | assert pool.get() is node 178 | pool.mark_dead(node, _now=0) 179 | 180 | assert pool._dead_consecutive_failures == {node.config: 2} 181 | assert pool._dead_nodes.queue == [(1.0, node)] 182 | 183 | assert pool.get() is node 184 | pool.mark_dead(node, _now=0) 185 | 186 | assert pool._dead_consecutive_failures == {node.config: 3} 187 | assert pool._dead_nodes.queue == [(2.0, node)] 188 | 189 | assert pool.get() is node 190 | pool.mark_dead(node, _now=0) 191 | 192 | 
assert pool._dead_consecutive_failures == {node.config: 4} 193 | assert pool._dead_nodes.queue == [(3.5, node)] 194 | 195 | assert pool.get() is node 196 | pool.mark_dead(node, _now=0) 197 | 198 | pool._dead_consecutive_failures = {node.config: 13292} 199 | assert pool._dead_nodes.queue == [(3.5, node)] 200 | 201 | assert pool.get() is node 202 | pool.mark_live(node) 203 | 204 | assert pool._dead_consecutive_failures == {} 205 | assert pool._dead_nodes.queue == [] 206 | 207 | 208 | def test_add_node_after_sniffing(): 209 | node_configs = [NodeConfig("http", "localhost", 9200)] 210 | pool = NodePool( 211 | node_configs, 212 | node_class=Urllib3HttpNode, 213 | ) 214 | 215 | # Initial node is marked as dead 216 | node = pool.get() 217 | pool.mark_dead(node) 218 | 219 | new_node_config = NodeConfig("http", "localhost", 9201) 220 | pool.add(new_node_config) 221 | 222 | # Internal flag is updated properly 223 | assert pool._all_nodes_len_1 is False 224 | 225 | # We get the new node instead of the old one 226 | new_node = pool.get() 227 | assert new_node.config == new_node_config 228 | 229 | # The old node is still on timeout so we should only get the new one. 
230 | for _ in range(10): 231 | assert pool.get() is new_node 232 | 233 | 234 | @pytest.mark.parametrize("pool_size", [1, 8]) 235 | def test_threading_test(pool_size): 236 | pool = NodePool( 237 | [NodeConfig("http", "localhost", x) for x in range(pool_size)], 238 | node_class=Urllib3HttpNode, 239 | ) 240 | start = time.time() 241 | 242 | class ThreadTest(threading.Thread): 243 | def __init__(self): 244 | super().__init__() 245 | self.nodes_gotten = 0 246 | 247 | def run(self) -> None: 248 | nonlocal pool 249 | 250 | while time.time() < start + 2: 251 | node = pool.get() 252 | self.nodes_gotten += 1 253 | if random.random() > 0.9: 254 | pool.mark_dead(node) 255 | else: 256 | pool.mark_live(node) 257 | 258 | threads = [ThreadTest() for _ in range(pool_size * 2)] 259 | [thread.start() for thread in threads] 260 | [thread.join() for thread in threads] 261 | 262 | assert sum(thread.nodes_gotten for thread in threads) >= 10000 263 | -------------------------------------------------------------------------------- /tests/test_otel.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | from opentelemetry.sdk.trace import TracerProvider, export 19 | from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter 20 | 21 | from elastic_transport import JsonSerializer 22 | from elastic_transport._otel import OpenTelemetrySpan 23 | 24 | 25 | def setup_tracing(): 26 | tracer_provider = TracerProvider() 27 | memory_exporter = InMemorySpanExporter() 28 | span_processor = export.SimpleSpanProcessor(memory_exporter) 29 | tracer_provider.add_span_processor(span_processor) 30 | tracer = tracer_provider.get_tracer(__name__) 31 | 32 | return tracer, memory_exporter 33 | 34 | 35 | def test_no_span(): 36 | # With telemetry disabled, those calls should not raise 37 | span = OpenTelemetrySpan(None) 38 | span.set_db_statement(JsonSerializer().dumps({"timeout": "1m"})) 39 | span.set_node_metadata( 40 | "localhost", 41 | 9200, 42 | "http://localhost:9200/", 43 | "_ml/anomaly_detectors/my-job/_open", 44 | ) 45 | span.set_elastic_cloud_metadata( 46 | { 47 | "X-Found-Handling-Cluster": "e9106fc68e3044f0b1475b04bf4ffd5f", 48 | "X-Found-Handling-Instance": "instance-0000000001", 49 | } 50 | ) 51 | 52 | 53 | def test_detailed_span(): 54 | tracer, memory_exporter = setup_tracing() 55 | with tracer.start_as_current_span("ml.open_job") as otel_span: 56 | span = OpenTelemetrySpan( 57 | otel_span, 58 | endpoint_id="my-job/_open", 59 | body_strategy="omit", 60 | ) 61 | 62 | span.set_db_statement(JsonSerializer().dumps({"timeout": "1m"})) 63 | span.set_node_metadata( 64 | "localhost", 65 | 9200, 66 | "http://localhost:9200/", 67 | "_ml/anomaly_detectors/my-job/_open", 68 | ) 69 | span.set_elastic_cloud_metadata( 70 | { 71 | "X-Found-Handling-Cluster": "e9106fc68e3044f0b1475b04bf4ffd5f", 72 | "X-Found-Handling-Instance": "instance-0000000001", 73 | } 74 | ) 75 | 76 | spans = memory_exporter.get_finished_spans() 77 | assert len(spans) == 1 78 | assert spans[0].name == "ml.open_job" 79 | assert spans[0].attributes == { 80 | "url.full": 
"http://localhost:9200/_ml/anomaly_detectors/my-job/_open", 81 | "server.address": "localhost", 82 | "server.port": 9200, 83 | "db.elasticsearch.cluster.name": "e9106fc68e3044f0b1475b04bf4ffd5f", 84 | "db.elasticsearch.node.name": "instance-0000000001", 85 | } 86 | 87 | 88 | def test_db_statement(): 89 | tracer, memory_exporter = setup_tracing() 90 | with tracer.start_as_current_span("search") as otel_span: 91 | span = OpenTelemetrySpan(otel_span, endpoint_id="search", body_strategy="raw") 92 | span.set_db_statement(JsonSerializer().dumps({"query": {"match_all": {}}})) 93 | 94 | spans = memory_exporter.get_finished_spans() 95 | assert len(spans) == 1 96 | assert spans[0].name == "search" 97 | assert spans[0].attributes == { 98 | "db.statement": '{"query":{"match_all":{}}}', 99 | } 100 | -------------------------------------------------------------------------------- /tests/test_package.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import pytest 19 | 20 | import elastic_transport 21 | from elastic_transport import client_utils 22 | 23 | modules = pytest.mark.parametrize("module", [elastic_transport, client_utils]) 24 | 25 | 26 | @modules 27 | def test__all__sorted(module): 28 | module_all = module.__all__.copy() 29 | # Optional dependencies are added at the end 30 | if "OrjsonSerializer" in module_all: 31 | module_all.remove("OrjsonSerializer") 32 | assert module_all == sorted(module_all) 33 | 34 | 35 | @modules 36 | def test__all__is_importable(module): 37 | assert {attr for attr in module.__all__ if hasattr(module, attr)} == set( 38 | module.__all__ 39 | ) 40 | 41 | 42 | def test_module_rewritten(): 43 | assert repr(elastic_transport.Transport) == "" 44 | -------------------------------------------------------------------------------- /tests/test_response.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. Elasticsearch B.V. licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 
import pickle

import pytest

from elastic_transport import (
    ApiResponseMeta,
    BinaryApiResponse,
    HeadApiResponse,
    HttpHeaders,
    ListApiResponse,
    ObjectApiResponse,
    TextApiResponse,
)

# Shared response metadata reused by every test in this module.
meta = ApiResponseMeta(
    status=200, http_version="1.1", headers=HttpHeaders(), duration=0, node=None
)


@pytest.mark.parametrize(
    "response_cls",
    [TextApiResponse, BinaryApiResponse, ObjectApiResponse, ListApiResponse],
)
def test_response_meta(response_cls):
    """Every response type exposes its metadata and compares equal to its body."""
    response = response_cls(meta=meta, body=None)
    assert response.meta is meta

    # Equality is reflexive and symmetric with respect to the wrapped body.
    assert response == response
    assert response.body == response
    assert response == response.body
    assert not response != response
    assert not (response != response.body)


def test_head_response():
    """HEAD responses wrap a boolean body and are truthy."""
    response = HeadApiResponse(meta=meta)

    assert response
    assert response.body is True
    assert bool(response) is True
    assert response.meta is meta

    assert repr(response) == "HeadApiResponse(True)"


def test_text_response():
    """Text responses proxy ``str`` behavior to their body."""
    text = "Hello, world"
    response = TextApiResponse(body=text, meta=meta)
    assert response.body == text
    assert len(response) == len(text)
    assert response.lower() == text.lower()
    assert list(response) == list(text)

    assert repr(response) == "TextApiResponse('Hello, world')"


def test_binary_response():
    """Binary responses proxy ``bytes`` behavior to their body."""
    raw = b"Hello, world"
    response = BinaryApiResponse(body=raw, meta=meta)
    assert response.body == raw
    assert len(response) == len(raw)
    assert response[0] == raw[0]
    assert response[:2] == raw[:2]
    assert response.lower() == raw.lower()
    assert response.decode() == raw.decode()
    assert list(response) == list(raw)

    assert repr(response) == "BinaryApiResponse(b'Hello, world')"


def test_list_response():
    """List responses support iteration and indexing."""
    response = ListApiResponse(body=[1, 2, 3], meta=meta)
    assert list(response) == [1, 2, 3]
    assert response.body == [1, 2, 3]
    assert response[1] == 2

    assert repr(response) == "ListApiResponse([1, 2, 3])"


def test_list_object_response():
    """Object responses behave like read-only mappings."""
    response = ObjectApiResponse(body={"k1": 1, "k2": 2}, meta=meta)
    assert set(response.keys()) == {"k1", "k2"}
    assert response["k2"] == 2
    assert response.body == {"k1": 1, "k2": 2}

    assert repr(response) == "ObjectApiResponse({'k1': 1, 'k2': 2})"


@pytest.mark.parametrize(
    "resp_cls", [ObjectApiResponse, ListApiResponse, TextApiResponse, BinaryApiResponse]
)
@pytest.mark.parametrize(
    ["args", "kwargs"],
    [
        # Missing, duplicated, and unknown constructor arguments must all fail.
        ((), {}),
        ((1,), {}),
        ((1,), {"raw": 1}),
        ((1,), {"body": 1}),
        ((1,), {"meta": 1}),
        ((), {"raw": 1, "body": 1}),
        ((), {"raw": 1, "body": 1, "meta": 1}),
        ((1,), {"raw": 1, "meta": 1}),
        ((1,), {"meta": 1, "body": 1}),
        ((1, 1), {"meta": 1, "body": 1}),
        ((), {"meta": 1, "body": 1, "unk": 1}),
    ],
)
def test_constructor_type_errors(resp_cls, args, kwargs):
    """Invalid constructor argument combinations raise a descriptive TypeError."""
    with pytest.raises(TypeError) as e:
        resp_cls(*args, **kwargs)
    assert str(e.value) == "Must pass 'meta' and 'body' to ApiResponse"


def test_constructor_allowed():
    """All the supported positional/keyword constructor spellings work."""
    resp = HeadApiResponse(meta)
    resp = HeadApiResponse(meta=meta)

    resp = ObjectApiResponse({}, meta)
    assert resp == {}

    # 'raw' is accepted as an alternative spelling of 'body'.
    resp = ObjectApiResponse(meta=meta, raw={})
    assert resp == {}

    resp = ObjectApiResponse(meta=meta, raw={}, body_cls=int)
    assert resp == {}

    resp = ObjectApiResponse(meta=meta, body={}, body_cls=int)
    assert resp == {}


@pytest.mark.parametrize(
    "response_cls, body",
    [
        (TextApiResponse, "Hello World"),
        (BinaryApiResponse, b"Hello World"),
        (ObjectApiResponse, {"Hello": "World"}),
        (ListApiResponse, ["Hello", "World"]),
    ],
)
def test_pickle(response_cls, body):
    """Responses round-trip through pickle with body and metadata intact."""
    response = response_cls(meta=meta, body=body)
    restored = pickle.loads(pickle.dumps(response))
    assert restored == response
    assert restored.meta == response.meta


# ---------------------------------------------------------------------------
# tests/test_serializer.py
# ---------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import uuid
from datetime import date
from decimal import Decimal

import pytest

from elastic_transport import (
    JsonSerializer,
    NdjsonSerializer,
    OrjsonSerializer,
    SerializationError,
    SerializerCollection,
    TextSerializer,
)
from elastic_transport._serializer import DEFAULT_SERIALIZERS

# Collection wired with the library defaults; used by the mimetype tests below.
serializers = SerializerCollection(DEFAULT_SERIALIZERS)


@pytest.fixture(params=[JsonSerializer, OrjsonSerializer])
def json_serializer(request: pytest.FixtureRequest):
    """Run the requesting test once per JSON serializer implementation."""
    yield request.param()


def test_date_serialization(json_serializer):
    assert json_serializer.dumps({"d": date(2010, 10, 1)}) == b'{"d":"2010-10-01"}'


def test_decimal_serialization(json_serializer):
    assert json_serializer.dumps({"d": Decimal("3.8")}) == b'{"d":3.8}'


def test_uuid_serialization(json_serializer):
    assert (
        json_serializer.dumps({"d": uuid.UUID("00000000-0000-0000-0000-000000000003")})
        == b'{"d":"00000000-0000-0000-0000-000000000003"}'
    )


def test_serializes_nan():
    assert JsonSerializer().dumps({"d": float("NaN")}) == b'{"d":NaN}'
    # NaN is invalid JSON, and orjson silently converts it to null
    assert OrjsonSerializer().dumps({"d": float("NaN")}) == b'{"d":null}'


def test_raises_serialization_error_on_dump_error(json_serializer):
    with pytest.raises(SerializationError):
        json_serializer.dumps(object())
    with pytest.raises(SerializationError):
        TextSerializer().dumps({})


def test_raises_serialization_error_on_load_error(json_serializer):
    with pytest.raises(SerializationError):
        json_serializer.loads(object())
    with pytest.raises(SerializationError):
        json_serializer.loads(b"{{")


def test_json_unicode_is_handled(json_serializer):
    # UTF-8 bytes for {"你好":"你好"}
    encoded = b'{"\xe4\xbd\xa0\xe5\xa5\xbd":"\xe4\xbd\xa0\xe5\xa5\xbd"}'
    assert json_serializer.dumps({"你好": "你好"}) == encoded
    assert json_serializer.loads(encoded) == {"你好": "你好"}


def test_text_unicode_is_handled():
    text_serializer = TextSerializer()
    assert text_serializer.dumps("你好") == b"\xe4\xbd\xa0\xe5\xa5\xbd"
    assert text_serializer.loads(b"\xe4\xbd\xa0\xe5\xa5\xbd") == "你好"


def test_json_unicode_surrogates_handled():
    # Payload containing a lone surrogate (\uda6a) encoded with surrogatepass.
    encoded = b'{"key":"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"}'
    assert JsonSerializer().dumps({"key": "你好\uda6a"}) == encoded
    assert JsonSerializer().loads(encoded) == {"key": "你好\uda6a"}

    # orjson is strict about UTF-8
    with pytest.raises(SerializationError):
        OrjsonSerializer().dumps({"key": "你好\uda6a"})

    with pytest.raises(SerializationError):
        OrjsonSerializer().loads(encoded)


def test_text_unicode_surrogates_handled():
    # Fix: this test previously requested the 'json_serializer' fixture it
    # never used, which ran the identical text-serializer assertions once
    # per JSON serializer implementation for no benefit.
    text_serializer = TextSerializer()
    assert (
        text_serializer.dumps("你好\uda6a") == b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
    )
    assert (
        text_serializer.loads(b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa") == "你好\uda6a"
    )


def test_deserializes_json_by_default():
    assert serializers.loads(b'{"some":"data"}') == {"some": "data"}


def test_deserializes_text_with_correct_ct():
    assert serializers.loads(b'{"some":"data"}', "text/plain") == '{"some":"data"}'
    assert (
        serializers.loads(b'{"some":"data"}', "text/plain; charset=whatever")
        == '{"some":"data"}'
    )


def test_raises_serialization_error_on_unknown_mimetype():
    with pytest.raises(SerializationError) as e:
        serializers.loads(b"{}", "fake/type")
    assert (
        str(e.value)
        == "Unknown mimetype, not able to serialize or deserialize: fake/type"
    )


def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized():
    with pytest.raises(ValueError) as e:
        SerializerCollection({})
    assert (
        str(e.value)
        == "Must configure a serializer for the default mimetype 'application/json'"
    )


def test_text_asterisk_works_for_all_text_types():
    assert serializers.loads(b"{}", "text/html") == "{}"
    assert serializers.dumps("{}", "text/html") == b"{}"


@pytest.mark.parametrize("should_strip", [False, b"\n", b"\r\n"])
def test_ndjson_loads(should_strip):
    """NDJSON loading accepts \\n and \\r\\n separators, trailing or not."""
    serializer = NdjsonSerializer()
    data = (
        b'{"key":"value"}\n'
        b'{"number":0.1,"one":1}\n'
        b'{"list":[1,2,3]}\r\n'
        b'{"unicode":"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"}\r\n'
    )
    if should_strip:
        data = data.strip(should_strip)

    assert serializer.loads(data) == [
        {"key": "value"},
        {"number": 0.1, "one": 1},
        {"list": [1, 2, 3]},
        {"unicode": "你好\uda6a"},
    ]


def test_ndjson_dumps():
    """NDJSON dumping serializes dicts and passes str/bytes items through."""
    serializer = NdjsonSerializer()
    data = serializer.dumps(
        [
            {"key": "value"},
            {"number": 0.1, "one": 1},
            {"list": [1, 2, 3]},
            {"unicode": "你好\uda6a"},
            '{"key:"value"}',
            b'{"bytes":"too"}',
        ]
    )
    assert data == (
        b'{"key":"value"}\n'
        b'{"number":0.1,"one":1}\n'
        b'{"list":[1,2,3]}\n'
        b'{"unicode":"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"}\n'
        b'{"key:"value"}\n'
        b'{"bytes":"too"}\n'
    )


# ---------------------------------------------------------------------------
# tests/test_utils.py
# ---------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements.
# See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import pytest

from elastic_transport._utils import is_ipaddress


@pytest.mark.parametrize(
    "address",
    [
        # IPv6
        "::1",
        "::",
        "FE80::8939:7684:D84b:a5A4%251",
        # IPv4
        "127.0.0.1",
        "8.8.8.8",
        b"127.0.0.1",
        # IPv6 w/ Zone IDs
        "FE80::8939:7684:D84b:a5A4%251",
        b"FE80::8939:7684:D84b:a5A4%251",
        "FE80::8939:7684:D84b:a5A4%19",
        b"FE80::8939:7684:D84b:a5A4%19",
    ],
)
def test_is_ipaddress(address):
    """IPv4/IPv6 literals are recognized, whether given as str or bytes."""
    assert is_ipaddress(address)


@pytest.mark.parametrize(
    "address",
    [
        "www.python.org",
        b"www.python.org",
        "v2.sg.media-imdb.com",
        b"v2.sg.media-imdb.com",
    ],
)
def test_is_not_ipaddress(address):
    """Hostnames are never mistaken for IP address literals."""
    assert not is_ipaddress(address)


# ---------------------------------------------------------------------------
# utils/build-dists.py
# ---------------------------------------------------------------------------
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V.
licenses this file to you under 5 | # the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | 18 | """A command line tool for building and verifying releases 19 | Can be used for building both 'elasticsearch' and 'elasticsearchX' dists. 20 | Only requires 'name' in 'setup.py' and the directory to be changed. 21 | """ 22 | 23 | import contextlib 24 | import os 25 | import re 26 | import shutil 27 | import tempfile 28 | 29 | base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 30 | tmp_dir = None 31 | 32 | 33 | def shlex_quote(s): 34 | # Backport of shlex.quote() to Python 2.x 35 | _find_unsafe = re.compile(r"[^\w@%+=:,./-]").search 36 | 37 | if not s: 38 | return "''" 39 | if _find_unsafe(s) is None: 40 | return s 41 | 42 | # use single quotes, and put single quotes into double quotes 43 | # the string $'b is then quoted as '$'"'"'b' 44 | return "'" + s.replace("'", "'\"'\"'") + "'" 45 | 46 | 47 | @contextlib.contextmanager 48 | def set_tmp_dir(): 49 | global tmp_dir 50 | tmp_dir = tempfile.mkdtemp() 51 | yield tmp_dir 52 | shutil.rmtree(tmp_dir) 53 | tmp_dir = None 54 | 55 | 56 | def run(argv, expect_exit_code=0): 57 | global tmp_dir 58 | if tmp_dir is None: 59 | os.chdir(base_dir) 60 | else: 61 | os.chdir(tmp_dir) 62 | 63 | cmd = " ".join(shlex_quote(x) for x in argv) 64 | print("$ " + cmd) 65 | exit_code = os.system(cmd) 66 | if exit_code != expect_exit_code: 67 | print( 68 | "Command exited incorrectly: should have been %d was 
%d" 69 | % (expect_exit_code, exit_code) 70 | ) 71 | exit(exit_code or 1) 72 | 73 | 74 | def test_dist(dist): 75 | with set_tmp_dir() as tmp_dir: 76 | # Build the venv and install the dist 77 | run(("python", "-m", "venv", os.path.join(tmp_dir, "venv"))) 78 | venv_python = os.path.join(tmp_dir, "venv/bin/python") 79 | run((venv_python, "-m", "pip", "install", "-U", "pip")) 80 | run((venv_python, "-m", "pip", "install", dist)) 81 | 82 | # Test out importing from the package 83 | run( 84 | ( 85 | venv_python, 86 | "-c", 87 | "from elastic_transport import Transport, Urllib3HttpNode, RequestsHttpNode", 88 | ) 89 | ) 90 | 91 | # Uninstall the dist, see that we can't import things anymore 92 | run((venv_python, "-m", "pip", "uninstall", "--yes", "elastic-transport")) 93 | run( 94 | (venv_python, "-c", "from elastic_transport import Transport"), 95 | expect_exit_code=256, 96 | ) 97 | 98 | 99 | def main(): 100 | run(("rm", "-rf", "build/", "dist/", "*.egg-info", ".eggs")) 101 | 102 | # Install and run python-build to create sdist/wheel 103 | run(("python", "-m", "pip", "install", "-U", "build")) 104 | run(("python", "-m", "build")) 105 | 106 | for dist in os.listdir(os.path.join(base_dir, "dist")): 107 | test_dist(os.path.join(base_dir, "dist", dist)) 108 | 109 | # After this run 'python -m twine upload dist/*' 110 | print( 111 | "\n\n" 112 | "===============================\n\n" 113 | " * Releases are ready! *\n\n" 114 | "$ python -m twine upload dist/*\n\n" 115 | "===============================" 116 | ) 117 | 118 | 119 | if __name__ == "__main__": 120 | main() 121 | -------------------------------------------------------------------------------- /utils/license-headers.py: -------------------------------------------------------------------------------- 1 | # Licensed to Elasticsearch B.V. under one or more contributor 2 | # license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright 4 | # ownership. 
# Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Script which verifies that all source files have a license header.
Has two modes: 'fix' and 'check'. 'fix' fixes problems, 'check' will
error out if 'fix' would have changed the file.
"""

import os
import sys
from itertools import chain
from typing import Iterator, List

# Lines that may legitimately precede the license header (shebang, coding).
lines_to_keep = ["# -*- coding: utf-8 -*-\n", "#!/usr/bin/env python\n"]
license_header_lines = [
    "# Licensed to Elasticsearch B.V. under one or more contributor\n",
    "# license agreements. See the NOTICE file distributed with\n",
    "# this work for additional information regarding copyright\n",
    "# ownership. Elasticsearch B.V. licenses this file to you under\n",
    '# the Apache License, Version 2.0 (the "License"); you may\n',
    "# not use this file except in compliance with the License.\n",
    "# You may obtain a copy of the License at\n",
    "#\n",
    "# http://www.apache.org/licenses/LICENSE-2.0\n",
    "#\n",
    "# Unless required by applicable law or agreed to in writing,\n",
    "# software distributed under the License is distributed on an\n",
    '# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n',
    "# KIND, either express or implied. See the License for the\n",
    "# specific language governing permissions and limitations\n",
    "# under the License.\n",
    "\n",
]


def find_files_to_fix(sources: List[str]) -> Iterator[str]:
    """Iterates over all files and dirs in 'sources' and returns
    only the filepaths that need fixing.
    """
    for source in sources:
        if os.path.isfile(source) and does_file_need_fix(source):
            yield source
        elif os.path.isdir(source):
            for root, _, filenames in os.walk(source):
                for filename in filenames:
                    filepath = os.path.join(root, filename)
                    if does_file_need_fix(filepath):
                        yield filepath


def does_file_need_fix(filepath: str) -> bool:
    """Return True if *filepath* is a '.py' file missing the license header.

    Non-Python files never need fixing. Shebang/coding lines may precede
    the header; any other leading line means the header is missing.
    """
    if not filepath.endswith(".py"):
        return False
    with open(filepath) as f:
        first_license_line = None
        for line in f:
            if line == license_header_lines[0]:
                first_license_line = line
                break
            elif line not in lines_to_keep:
                return True
        # Compare the remainder of the header line-by-line, starting from
        # the first header line found above (None if we hit EOF first).
        for header_line, line in zip(
            license_header_lines, chain((first_license_line,), f)
        ):
            if line != header_line:
                return True
    return False


def add_header_to_file(filepath: str) -> None:
    """Insert the license header after any shebang/coding lines in *filepath*."""
    with open(filepath) as f:
        lines = list(f)
    # Find the first line that isn't a shebang/coding line; the header goes
    # right before it. If every line is a keep-line (or the file is empty)
    # the header is appended at the end — the previous version inserted it
    # *before* the final keep-line in that edge case.
    insert_at = len(lines)
    for i, line in enumerate(lines):
        if line not in lines_to_keep:
            insert_at = i
            break
    lines[insert_at:insert_at] = license_header_lines
    # Mode "w" already truncates; the old explicit truncate() was redundant.
    with open(filepath, mode="w") as f:
        f.write("".join(lines))
    print(f"Fixed {os.path.relpath(filepath, os.getcwd())}")


def main():
    """CLI entry point: 'fix' rewrites files, 'check' exits 1 on problems."""
    mode = sys.argv[1]
    assert mode in ("fix", "check")
    sources = [os.path.abspath(x) for x in sys.argv[2:]]
    files_to_fix = find_files_to_fix(sources)

    if mode == "fix":
        for filepath in files_to_fix:
            add_header_to_file(filepath)
    else:
        no_license_headers = list(files_to_fix)
        if no_license_headers:
            print("No license header found in:")
            cwd = os.getcwd()
            # Plain loop instead of a side-effect list comprehension.
            for filepath in no_license_headers:
                print(f" - {os.path.relpath(filepath, cwd)}")
            sys.exit(1)
        else:
            print("All files had license header")


if __name__ == "__main__":
    main()