├── docs ├── _static │ ├── .keep │ └── logo-redis.svg ├── _templates │ └── .keep ├── genindex.rst ├── lock.rst ├── backoff.rst ├── examples │ ├── opentelemetry │ │ ├── requirements.txt │ │ ├── image │ │ │ └── redis-py-trace.png │ │ ├── config │ │ │ ├── vector.toml │ │ │ ├── otel-collector.yaml │ │ │ └── alertmanager.yml │ │ ├── README.md │ │ ├── main.py │ │ └── docker-compose.yml │ └── README.md ├── exceptions.rst ├── images │ └── opentelemetry │ │ ├── redis-metrics.png │ │ ├── redis-py-trace.png │ │ ├── tree-of-spans.png │ │ └── distributed-tracing.png ├── requirements.txt ├── examples.rst ├── commands.rst ├── index.rst ├── connections.rst ├── retry.rst └── redismodules.rst ├── redis ├── py.typed ├── auth │ ├── __init__.py │ ├── idp.py │ └── err.py ├── http │ └── __init__.py ├── multidb │ ├── __init__.py │ ├── exception.py │ └── event.py ├── asyncio │ ├── http │ │ └── __init__.py │ ├── multidb │ │ ├── __init__.py │ │ ├── failure_detector.py │ │ ├── database.py │ │ └── event.py │ ├── utils.py │ ├── __init__.py │ └── retry.py ├── commands │ ├── search │ │ ├── dialect.py │ │ ├── _util.py │ │ ├── profile_information.py │ │ ├── document.py │ │ ├── hybrid_result.py │ │ ├── suggestion.py │ │ ├── index_definition.py │ │ └── result.py │ ├── json │ │ ├── _util.py │ │ ├── path.py │ │ └── decoders.py │ ├── __init__.py │ ├── timeseries │ │ ├── utils.py │ │ └── info.py │ ├── vectorset │ │ └── __init__.py │ ├── redismodules.py │ └── helpers.py ├── _parsers │ ├── __init__.py │ └── encoders.py ├── crc.py ├── credentials.py ├── typing.py ├── __init__.py └── data_structure.py ├── tests ├── __init__.py ├── test_auth │ ├── __init__.py │ └── test_token.py ├── test_http │ └── __init__.py ├── test_asyncio │ ├── __init__.py │ ├── test_multidb │ │ └── __init__.py │ ├── test_scenario │ │ └── __init__.py │ ├── testdata │ │ └── will_play_text.csv.bz2 │ ├── test_utils.py │ ├── compat.py │ ├── test_usage_counter.py │ ├── test_sentinel_managed_connection.py │ ├── mocks.py │ ├── helpers.py │ └── test_monitor.py ├── test_multidb │ ├── __init__.py │ └── test_circuit.py ├── test_scenario │ └── __init__.py ├── testdata │ └── will_play_text.csv.bz2 ├── test_backoff.py ├── test_utils.py ├── test_sentinel_managed_connection.py ├── test_helpers.py ├── mocks.py ├── ssl_utils.py ├── helpers.py ├── test_driver_info.py ├── test_event.py ├── test_background.py ├── test_parsers │ └── test_helpers.py ├── test_monitor.py ├── test_data_structure.py └── test_encoding.py ├── benchmarks ├── __init__.py ├── socket_read_size.py ├── base.py ├── cluster_async_pipeline.py └── command_packer_benchmark.py ├── .coveragerc ├── .github ├── CODEOWNERS ├── dependabot.yml ├── workflows │ ├── spellcheck.yml │ ├── release-drafter.yml │ ├── pypi-publish.yaml │ ├── docs.yaml │ ├── install_and_test.sh │ ├── hiredis-py-integration.yaml │ └── codeql-analysis.yml ├── ISSUE_TEMPLATE.md ├── spellcheck-settings.yml ├── PULL_REQUEST_TEMPLATE.md ├── release-drafter-config.yml └── wordlist.txt ├── .dockerignore ├── doctests ├── requirements.txt ├── run_examples.sh ├── cmds_string.py ├── string_set_get.py ├── cmds_servermgmt.py ├── cmds_set.py ├── dt_bitfield.py ├── dt_hll.py ├── cmds_cnxmgmt.py ├── dt_cuckoo.py ├── dt_cms.py ├── dt_topk.py ├── dt_bloom.py ├── dt_geo.py ├── README.md ├── dt_string.py ├── dt_tdigest.py ├── cmds_hash.py ├── cmds_sorted_set.py ├── query_geo.py ├── query_range.py ├── dt_hash.py ├── trans_pipe.py ├── cmds_list.py ├── query_em.py ├── dt_ss.py ├── query_ft.py ├── query_combined.py └── query_agg.py ├── codecov.yml ├── .readthedocs.yml ├── 
dockers └── sentinel.conf ├── .gitignore ├── .mypy.ini ├── dev_requirements.txt ├── LICENSE ├── whitelist.py └── docker-compose.yml /docs/_static/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_templates/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/auth/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/http/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/multidb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_auth/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_http/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/asyncio/http/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /redis/asyncio/multidb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_asyncio/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_multidb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_scenario/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = redis 3 | -------------------------------------------------------------------------------- /tests/test_asyncio/test_multidb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/genindex.rst: 
-------------------------------------------------------------------------------- 1 | Module Index 2 | ============ -------------------------------------------------------------------------------- /tests/test_asyncio/test_scenario/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | doctests/* @dmaier-redislabs 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | **/*.pyc 3 | .coverage 4 | .coverage.* 5 | -------------------------------------------------------------------------------- /docs/lock.rst: -------------------------------------------------------------------------------- 1 | Lock 2 | ######### 3 | 4 | .. automodule:: redis.lock 5 | :members: -------------------------------------------------------------------------------- /docs/backoff.rst: -------------------------------------------------------------------------------- 1 | .. _backoff-label: 2 | 3 | Backoff 4 | ############# 5 | 6 | .. automodule:: redis.backoff 7 | :members: -------------------------------------------------------------------------------- /tests/testdata/will_play_text.csv.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/tests/testdata/will_play_text.csv.bz2 -------------------------------------------------------------------------------- /docs/examples/opentelemetry/requirements.txt: -------------------------------------------------------------------------------- 1 | redis==4.3.4 2 | uptrace==1.14.0 3 | opentelemetry-instrumentation-redis==0.35b0 4 | -------------------------------------------------------------------------------- /docs/exceptions.rst: -------------------------------------------------------------------------------- 1 | .. _exceptions-label: 2 | 3 | Exceptions 4 | ########## 5 | 6 | .. 
automodule:: redis.exceptions 7 | :members: -------------------------------------------------------------------------------- /docs/images/opentelemetry/redis-metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/docs/images/opentelemetry/redis-metrics.png -------------------------------------------------------------------------------- /docs/images/opentelemetry/redis-py-trace.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/docs/images/opentelemetry/redis-py-trace.png -------------------------------------------------------------------------------- /docs/images/opentelemetry/tree-of-spans.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/docs/images/opentelemetry/tree-of-spans.png -------------------------------------------------------------------------------- /doctests/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | pandas 3 | requests 4 | sentence_transformers 5 | tabulate 6 | redis # install the latest stable version 7 | -------------------------------------------------------------------------------- /docs/images/opentelemetry/distributed-tracing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/docs/images/opentelemetry/distributed-tracing.png -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=5.0,<7.0 2 | docutils<0.18 3 | nbsphinx 4 | sphinx_gallery 5 | ipython 6 | sphinx-autodoc-typehints 7 | furo 8 | pandoc 9 | -------------------------------------------------------------------------------- /redis/commands/search/dialect.py: -------------------------------------------------------------------------------- 1 | # Value for the default dialect to be used as part of a 2 | # Search or Aggregate query. 3 | DEFAULT_DIALECT = 2 4 | -------------------------------------------------------------------------------- /tests/test_asyncio/testdata/will_play_text.csv.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/tests/test_asyncio/testdata/will_play_text.csv.bz2 -------------------------------------------------------------------------------- /docs/examples/opentelemetry/image/redis-py-trace.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redis/redis-py/HEAD/docs/examples/opentelemetry/image/redis-py-trace.png -------------------------------------------------------------------------------- /docs/examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | Examples of redis-py usage go here. They are linked from the [generated documentation](https://redis.readthedocs.org).
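A minimal sketch of the kind of example that belongs here, assuming a local Redis server on the default port:

```python
import redis

# Connect to a local server; decode_responses returns str instead of bytes.
r = redis.Redis(host="localhost", port=6379, decode_responses=True)
r.set("hello", "world")
print(r.get("hello"))  # -> world
```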
4 | -------------------------------------------------------------------------------- /redis/commands/json/_util.py: -------------------------------------------------------------------------------- 1 | from typing import List, Mapping, Union 2 | 3 | JsonType = Union[ 4 | str, int, float, bool, None, Mapping[str, "JsonType"], List["JsonType"] 5 | ] 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | labels: 6 | - "maintenance" 7 | schedule: 8 | interval: "monthly" 9 | -------------------------------------------------------------------------------- /redis/commands/search/_util.py: -------------------------------------------------------------------------------- 1 | def to_string(s, encoding: str = "utf-8"): 2 | if isinstance(s, str): 3 | return s 4 | elif isinstance(s, bytes): 5 | return s.decode(encoding, "ignore") 6 | else: 7 | return s # Not a string we care about 8 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "benchmarks/**" 3 | - "tasks.py" 4 | 5 | codecov: 6 | require_ci_to_pass: yes 7 | 8 | coverage: 9 | precision: 2 10 | round: down 11 | range: "80...100" 12 | status: 13 | patch: off # off for now as it yells about everything 14 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | python: 4 | install: 5 | - requirements: docs/requirements.txt 6 | - method: pip 7 | path: . 8 | 9 | build: 10 | os: ubuntu-20.04 11 | tools: 12 | python: "3.10" 13 | 14 | sphinx: 15 | configuration: docs/conf.py 16 | -------------------------------------------------------------------------------- /tests/test_asyncio/test_utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import redis 3 | 4 | 5 | async def redis_server_time(client: redis.Redis): 6 | seconds, milliseconds = await client.time() 7 | timestamp = float(f"{seconds}.{milliseconds}") 8 | return datetime.fromtimestamp(timestamp) 9 | -------------------------------------------------------------------------------- /doctests/run_examples.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | 4 | basepath=`readlink -f $1` 5 | if [ $? 
-ne 0 ]; then 6 | basepath=`readlink -f $(dirname $0)` 7 | echo "No path specified, using ${basepath}" 8 | fi 9 | 10 | set -e 11 | cd ${basepath} 12 | for i in `ls ${basepath}/*.py`; do 13 | redis-cli flushdb 14 | python $i 15 | done 16 | -------------------------------------------------------------------------------- /redis/commands/search/profile_information.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | 4 | class ProfileInformation: 5 | """ 6 | Wrapper around FT.PROFILE response 7 | """ 8 | 9 | def __init__(self, info: Any) -> None: 10 | self._info: Any = info 11 | 12 | @property 13 | def info(self) -> Any: 14 | return self._info 15 | -------------------------------------------------------------------------------- /dockers/sentinel.conf: -------------------------------------------------------------------------------- 1 | sentinel resolve-hostnames yes 2 | sentinel monitor redis-py-test redis 6379 2 3 | # Be much more tolerant to transient stalls (index builds, GC, I/O) 4 | sentinel down-after-milliseconds redis-py-test 60000 5 | # Avoid rapid repeated failover attempts 6 | sentinel failover-timeout redis-py-test 180000 7 | # Keep it conservative: sync one replica at a time 8 | sentinel parallel-syncs redis-py-test 1 -------------------------------------------------------------------------------- /doctests/cmds_string.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_string 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START incr 9 | res = r.set("mykey", "10") 10 | print(res) 11 | # >>> True 12 | res = r.incr("mykey") 13 | print(res) 14 | # >>> 11 15 | # REMOVE_START 16 | assert res == 11 17 | r.delete("mykey") 18 | # REMOVE_END 19 | # STEP_END 20 | -------------------------------------------------------------------------------- /tests/test_asyncio/compat.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | try: 4 | from contextlib import aclosing 5 | except ImportError: 6 | import contextlib 7 | 8 | @contextlib.asynccontextmanager 9 | async def aclosing(thing): 10 | try: 11 | yield thing 12 | finally: 13 | await thing.aclose() 14 | 15 | 16 | def create_task(coroutine): 17 | return asyncio.create_task(coroutine) 18 | -------------------------------------------------------------------------------- /.github/workflows/spellcheck.yml: -------------------------------------------------------------------------------- 1 | name: spellcheck 2 | on: 3 | pull_request: 4 | jobs: 5 | check-spelling: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v6 10 | - name: Check Spelling 11 | uses: rojopolis/spellcheck-github-actions@0.55.0 12 | with: 13 | config_path: .github/spellcheck-settings.yml 14 | task_name: Markdown 15 | -------------------------------------------------------------------------------- /tests/test_asyncio/test_usage_counter.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | 6 | @pytest.mark.asyncio 7 | async def test_usage_counter(r): 8 | async def dummy_task(): 9 | async with r: 10 | await asyncio.sleep(0.01) 11 | 12 | tasks = [dummy_task() for _ in range(20)] 13 | await asyncio.gather(*tasks) 14 | 15 | # After all tasks have completed, the usage counter should be back to zero. 16 | assert r._usage_counter == 0 17 | --------------------------------------------------------------------------------
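The test above leans on the async client's context-manager protocol. A minimal sketch of that pattern, assuming a local Redis server and a redis-py version that tracks concurrent users with a usage counter:

```python
import asyncio

import redis.asyncio as redis


async def main():
    r = redis.Redis()
    # Entering the client bumps an internal usage counter; leaving decrements
    # it, and the underlying connections are only released once the last
    # concurrent user has exited.
    async with r:
        await r.set("greeting", "hello")
        print(await r.get("greeting"))  # b"hello"


asyncio.run(main())
```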
/.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | redis.egg-info 3 | build/ 4 | dist/ 5 | dump.rdb 6 | _build 7 | vagrant/.vagrant 8 | .python-version 9 | .cache 10 | .eggs 11 | .idea 12 | .vscode 13 | .coverage 14 | env 15 | venv 16 | coverage.xml 17 | .venv* 18 | *.xml 19 | .coverage* 20 | prof 21 | profile_output* 22 | docker/stunnel/keys 23 | /dockers/*/node-*/* 24 | /dockers/*/tls/* 25 | /dockers/standalone/ 26 | /dockers/cluster/ 27 | /dockers/replica/ 28 | /dockers/sentinel/ 29 | /dockers/redis-stack/ 30 | -------------------------------------------------------------------------------- /redis/commands/json/path.py: -------------------------------------------------------------------------------- 1 | class Path: 2 | """This class represents a path in a JSON value.""" 3 | 4 | strPath = "" 5 | 6 | @staticmethod 7 | def root_path(): 8 | """Return the root path's string representation.""" 9 | return "." 10 | 11 | def __init__(self, path): 12 | """Make a new path based on the string representation in `path`.""" 13 | self.strPath = path 14 | 15 | def __repr__(self): 16 | return self.strPath 17 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ######## 3 | 4 | .. toctree:: 5 | :maxdepth: 3 6 | :glob: 7 | 8 | examples/connection_examples 9 | examples/ssl_connection_examples 10 | examples/asyncio_examples 11 | examples/search_json_examples 12 | examples/set_and_get_examples 13 | examples/search_vector_similarity_examples 14 | examples/pipeline_examples 15 | examples/timeseries_examples 16 | examples/redis-stream-example 17 | examples/opentelemetry_api_examples 18 | -------------------------------------------------------------------------------- /redis/commands/search/document.py: -------------------------------------------------------------------------------- 1 | class Document: 2 | """ 3 | Represents a single document in a result set 4 | """ 5 | 6 | def __init__(self, id, payload=None, **fields): 7 | self.id = id 8 | self.payload = payload 9 | for k, v in fields.items(): 10 | setattr(self, k, v) 11 | 12 | def __repr__(self): 13 | return f"Document {self.__dict__}" 14 | 15 | def __getitem__(self, item): 16 | value = getattr(self, item) 17 | return value 18 | --------------------------------------------------------------------------------
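`Document` above stores arbitrary result fields as instance attributes. A standalone sketch of how search results read back (no server required; the field names are illustrative):

```python
from redis.commands.search.document import Document

doc = Document("doc:1", payload=None, title="Redis", body="in-memory store")
print(doc.id)       # doc:1
print(doc.title)    # keyword fields become attributes: Redis
print(doc["body"])  # __getitem__ delegates to getattr: in-memory store
```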
/.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thanks for wanting to report an issue you've found in redis-py. Please delete this text and fill in the template below. 2 | It is of course not always possible to reduce your code to a small test case, but it's highly appreciated to have as much data as possible. Thank you! 3 | 4 | **Version**: Which redis-py and Redis versions does the issue happen on? 5 | 6 | **Platform**: What platform / version? (For example Python 3.5.1 on Windows 7 / Ubuntu 15.10 / Azure) 7 | 8 | **Description**: Description of your issue, stack traces from errors and code that reproduces the issue 9 | -------------------------------------------------------------------------------- /doctests/string_set_get.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: set_and_get 2 | # HIDE_START 3 | """ 4 | Code samples for data structure store quickstart pages: 5 | https://redis.io/docs/latest/develop/get-started/data-store/ 6 | """ 7 | 8 | import redis 9 | 10 | r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True) 11 | # HIDE_END 12 | 13 | res = r.set("bike:1", "Process 134") 14 | print(res) 15 | # >>> True 16 | # REMOVE_START 17 | assert res 18 | # REMOVE_END 19 | 20 | res = r.get("bike:1") 21 | print(res) 22 | # >>> "Process 134" 23 | # REMOVE_START 24 | assert res == "Process 134" 25 | # REMOVE_END 26 | -------------------------------------------------------------------------------- /redis/multidb/exception.py: -------------------------------------------------------------------------------- 1 | class NoValidDatabaseException(Exception): 2 | pass 3 | 4 | 5 | class UnhealthyDatabaseException(Exception): 6 | """Exception raised when a database is unhealthy due to an underlying exception.""" 7 | 8 | def __init__(self, message, database, original_exception): 9 | super().__init__(message) 10 | self.database = database 11 | self.original_exception = original_exception 12 | 13 | 14 | class TemporaryUnavailableException(Exception): 15 | """Exception raised when all databases in the setup are temporarily unavailable.""" 16 | 17 | pass 18 | -------------------------------------------------------------------------------- /doctests/cmds_servermgmt.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_servermgmt 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START flushall 9 | # REMOVE_START 10 | r.set("foo", "1") 11 | r.set("bar", "2") 12 | r.set("baz", "3") 13 | # REMOVE_END 14 | res1 = r.flushall(asynchronous=False) 15 | print(res1) # >>> True 16 | 17 | res2 = r.keys() 18 | print(res2) # >>> [] 19 | 20 | # REMOVE_START 21 | assert res1 == True 22 | assert res2 == [] 23 | # REMOVE_END 24 | # STEP_END 25 | 26 | # STEP_START info 27 | res3 = r.info() 28 | print(res3) 29 | # >>> {'redis_version': '7.4.0', 'redis_git_sha1': 'c9d29f6a',...} 30 | # STEP_END -------------------------------------------------------------------------------- /redis/commands/__init__.py: -------------------------------------------------------------------------------- 1 | from .cluster import READ_COMMANDS, AsyncRedisClusterCommands, RedisClusterCommands 2 | from .core import AsyncCoreCommands, CoreCommands 3 | from .helpers import list_or_args 4 | from .redismodules import AsyncRedisModuleCommands, RedisModuleCommands 5 | from .sentinel import AsyncSentinelCommands, SentinelCommands 6 | 7 | __all__ = [ 8 | "AsyncCoreCommands", 9 | "AsyncRedisClusterCommands", 10 | "AsyncRedisModuleCommands", 11 | "AsyncSentinelCommands", 12 | "CoreCommands", 13 | "READ_COMMANDS", 14 | "RedisClusterCommands", 15 | "RedisModuleCommands", 16 | "SentinelCommands", 17 | "list_or_args", 18 | ] 19 | -------------------------------------------------------------------------------- /.github/spellcheck-settings.yml: -------------------------------------------------------------------------------- 1 | matrix: 2 |
- name: Markdown 3 | expect_match: false 4 | aspell: 5 | lang: en 6 | d: en_US 7 | ignore-case: true 8 | dictionary: 9 | wordlists: 10 | - .github/wordlist.txt 11 | output: wordlist.dic 12 | pipeline: 13 | - pyspelling.filters.markdown: 14 | markdown_extensions: 15 | - markdown.extensions.extra: 16 | - pyspelling.filters.html: 17 | comments: false 18 | attributes: 19 | - alt 20 | ignores: 21 | - ':matches(code, pre)' 22 | - code 23 | - pre 24 | - blockquote 25 | - img 26 | sources: 27 | - '*.md' 28 | - 'docs/*.rst' 29 | - 'docs/*.ipynb' 30 | -------------------------------------------------------------------------------- /tests/test_backoff.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | import pytest 4 | 5 | from redis.backoff import ExponentialWithJitterBackoff 6 | 7 | 8 | def test_exponential_with_jitter_backoff(monkeypatch: pytest.MonkeyPatch) -> None: 9 | mock_random = Mock(side_effect=[0.25, 0.5, 0.75, 1.0, 0.9]) 10 | monkeypatch.setattr("random.random", mock_random) 11 | 12 | bo = ExponentialWithJitterBackoff(cap=5, base=1) 13 | 14 | assert bo.compute(0) == 0.25 # min(5, 0.25*2^0) 15 | assert bo.compute(1) == 1.0 # min(5, 0.5*2^1) 16 | assert bo.compute(2) == 3.0 # min(5, 0.75*2^2) 17 | assert bo.compute(3) == 5.0 # min(5, 1*2^3) 18 | assert bo.compute(4) == 5.0 # min(5, 0.9*2^4) 19 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | #, docs/examples, tests 3 | files = redis 4 | check_untyped_defs = True 5 | follow_imports_for_stubs = True 6 | #disallow_any_decorated = True 7 | disallow_subclassing_any = True 8 | #disallow_untyped_calls = True 9 | disallow_untyped_decorators = True 10 | #disallow_untyped_defs = True 11 | implicit_reexport = False 12 | no_implicit_optional = True 13 | show_error_codes = True 14 | strict_equality = True 15 | warn_incomplete_stub = True 16 | warn_redundant_casts = True 17 | warn_unreachable = True 18 | warn_unused_ignores = True 19 | disallow_any_unimported = True 20 | #warn_return_any = True 21 | 22 | [mypy-redis.asyncio.lock] 23 | # TODO: Remove once locks have been rewritten 24 | ignore_errors = True 25 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ### Description of change 2 | 3 | _Please provide a description of the change here._ 4 | 5 | ### Pull Request check-list 6 | 7 | _Please make sure to review and check all of these items:_ 8 | 9 | - [ ] Do tests and lints pass with this change? 10 | - [ ] Do the CI tests pass with this change (enable it first in your forked repo and wait for the github action build to finish)? 11 | - [ ] Is the new or changed code fully tested? 12 | - [ ] Is a documentation update included (if this change modifies existing APIs, or introduces new ones)? 13 | - [ ] Is there an example added to the examples folder (if applicable)?
14 | 15 | _NOTE: these things are not required to open a PR and can be done 16 | afterwards / while the PR is open._ 17 | -------------------------------------------------------------------------------- /redis/auth/idp.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from redis.auth.token import TokenInterface 4 | 5 | """ 6 | This interface is the facade of an identity provider 7 | """ 8 | 9 | 10 | class IdentityProviderInterface(ABC): 11 | """ 12 | Receive a token from the identity provider. 13 | Receiving a token only works when being authenticated. 14 | """ 15 | 16 | @abstractmethod 17 | def request_token(self, force_refresh=False) -> TokenInterface: 18 | pass 19 | 20 | 21 | class IdentityProviderConfigInterface(ABC): 22 | """ 23 | Configuration class that provides a configured identity provider. 24 | """ 25 | 26 | @abstractmethod 27 | def get_provider(self) -> IdentityProviderInterface: 28 | pass 29 | -------------------------------------------------------------------------------- /doctests/cmds_set.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_set 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START sadd 9 | res1 = r.sadd("myset", "Hello", "World") 10 | print(res1) # >>> 2 11 | 12 | res2 = r.sadd("myset", "World") 13 | print(res2) # >>> 0 14 | 15 | res3 = r.smembers("myset") 16 | print(res3) # >>> {'Hello', 'World'} 17 | 18 | # REMOVE_START 19 | assert res3 == {'Hello', 'World'} 20 | r.delete('myset') 21 | # REMOVE_END 22 | # STEP_END 23 | 24 | # STEP_START smembers 25 | res4 = r.sadd("myset", "Hello", "World") 26 | print(res4) # >>> 2 27 | 28 | res5 = r.smembers("myset") 29 | print(res5) # >>> {'Hello', 'World'} 30 | 31 | # REMOVE_START 32 | assert res5 == {'Hello', 'World'} 33 | r.delete('myset') 34 | # REMOVE_END 35 | # STEP_END -------------------------------------------------------------------------------- /redis/_parsers/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import ( 2 | AsyncPushNotificationsParser, 3 | BaseParser, 4 | PushNotificationsParser, 5 | _AsyncRESPBase, 6 | ) 7 | from .commands import AsyncCommandsParser, CommandsParser 8 | from .encoders import Encoder 9 | from .hiredis import _AsyncHiredisParser, _HiredisParser 10 | from .resp2 import _AsyncRESP2Parser, _RESP2Parser 11 | from .resp3 import _AsyncRESP3Parser, _RESP3Parser 12 | 13 | __all__ = [ 14 | "AsyncCommandsParser", 15 | "_AsyncHiredisParser", 16 | "_AsyncRESPBase", 17 | "_AsyncRESP2Parser", 18 | "_AsyncRESP3Parser", 19 | "AsyncPushNotificationsParser", 20 | "CommandsParser", 21 | "Encoder", 22 | "BaseParser", 23 | "_HiredisParser", 24 | "_RESP2Parser", 25 | "_RESP3Parser", 26 | "PushNotificationsParser", 27 | ] 28 | -------------------------------------------------------------------------------- /redis/crc.py: -------------------------------------------------------------------------------- 1 | from binascii import crc_hqx 2 | 3 | from redis.typing import EncodedT 4 | 5 | # Redis Cluster's key space is divided into 16384 slots. 6 | # For more information see: https://github.com/redis/redis/issues/2576 7 | REDIS_CLUSTER_HASH_SLOTS = 16384 8 | 9 | __all__ = ["key_slot", "REDIS_CLUSTER_HASH_SLOTS"] 10 | 11 | 12 | def key_slot(key: EncodedT, bucket: int = REDIS_CLUSTER_HASH_SLOTS) -> int: 13 | """Calculate key slot for a given key. 
14 | See Keys distribution model in https://redis.io/topics/cluster-spec 15 | :param key - bytes 16 | :param bucket - int 17 | """ 18 | start = key.find(b"{") 19 | if start > -1: 20 | end = key.find(b"}", start + 1) 21 | if end > -1 and end != start + 1: 22 | key = key[start + 1 : end] 23 | return crc_hqx(key, 0) % bucket 24 | -------------------------------------------------------------------------------- /redis/auth/err.py: -------------------------------------------------------------------------------- 1 | from typing import Iterable 2 | 3 | 4 | class RequestTokenErr(Exception): 5 | """ 6 | Represents an exception during token request. 7 | """ 8 | 9 | def __init__(self, *args): 10 | super().__init__(*args) 11 | 12 | 13 | class InvalidTokenSchemaErr(Exception): 14 | """ 15 | Represents an exception related to invalid token schema. 16 | """ 17 | 18 | def __init__(self, missing_fields: Iterable[str] = []): 19 | super().__init__( 20 | "Unexpected token schema. Following fields are missing: " 21 | + ", ".join(missing_fields) 22 | ) 23 | 24 | 25 | class TokenRenewalErr(Exception): 26 | """ 27 | Represents an exception during token renewal process. 28 | """ 29 | 30 | def __init__(self, *args): 31 | super().__init__(*args) 32 | -------------------------------------------------------------------------------- /redis/asyncio/utils.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Any 2 | 3 | if TYPE_CHECKING: 4 | from redis.asyncio.client import Pipeline, Redis 5 | 6 | 7 | def from_url(url: str, **kwargs: Any) -> "Redis": 8 | """ 9 | Returns an active Redis client generated from the given database URL. 10 | 11 | Will attempt to extract the database id from the path url fragment, if 12 | none is provided. 13 | """ 14 | from redis.asyncio.client import Redis 15 | 16 | return Redis.from_url(url, **kwargs) 17 | 18 | 19 | class pipeline: # noqa: N801 20 | def __init__(self, redis_obj: "Redis"): 21 | self.p: "Pipeline" = redis_obj.pipeline() 22 | 23 | async def __aenter__(self) -> "Pipeline": 24 | return self.p 25 | 26 | async def __aexit__(self, exc_type, exc_value, traceback): 27 | await self.p.execute() 28 | del self.p 29 | --------------------------------------------------------------------------------
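The `key_slot` helper in `redis/crc.py` above implements the cluster hash-tag rule: when a key contains a non-empty `{...}` section, only that section is hashed. A standalone sketch (no server required):

```python
from redis.crc import key_slot

# A plain key is hashed in full and maps to one of 16384 slots.
assert 0 <= key_slot(b"user:1000") < 16384

# Only the text inside the first {...} is hashed, so these two keys are
# guaranteed to share a slot (and therefore live on the same cluster node).
assert key_slot(b"{user:1000}.followers") == key_slot(b"{user:1000}.following")
```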
/.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | # branches to consider in the event; optional, defaults to all 6 | branches: 7 | - master 8 | 9 | permissions: {} 10 | jobs: 11 | update_release_draft: 12 | permissions: 13 | pull-requests: write # to add label to PR (release-drafter/release-drafter) 14 | contents: write # to create a github release (release-drafter/release-drafter) 15 | 16 | runs-on: ubuntu-latest 17 | steps: 18 | # Drafts your next Release notes as Pull Requests are merged into "master" 19 | - uses: release-drafter/release-drafter@v6 20 | with: 21 | # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml 22 | config-name: release-drafter-config.yml 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | -------------------------------------------------------------------------------- /.github/workflows/pypi-publish.yaml: -------------------------------------------------------------------------------- 1 | name: Publish tag to PyPI 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: read # to fetch code (actions/checkout) 10 | 11 | jobs: 12 | 13 | build_and_package: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v6 17 | - name: install python 18 | uses: actions/setup-python@v6 19 | with: 20 | python-version: "3.10" 21 | - run: pip install build twine 22 | 23 | - name: Build package 24 | run: python -m build . 25 | 26 | - name: Basic package test prior to upload 27 | run: | 28 | twine check dist/* 29 | 30 | - name: Publish to PyPI 31 | uses: pypa/gh-action-pypi-publish@release/v1 32 | with: 33 | user: __token__ 34 | password: ${{ secrets.PYPI_API_TOKEN }} 35 | -------------------------------------------------------------------------------- /doctests/dt_bitfield.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: bitfield_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Bitfield doc pages: 5 | https://redis.io/docs/latest/develop/data-types/bitfields/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | 12 | # REMOVE_START 13 | r.delete("bike:1:stats") 14 | # REMOVE_END 15 | 16 | # STEP_START bf 17 | bf = r.bitfield("bike:1:stats") 18 | res1 = bf.set("u32", "#0", 1000).execute() 19 | print(res1) # >>> [0] 20 | 21 | res2 = bf.incrby("u32", "#0", -50).incrby("u32", "#1", 1).execute() 22 | print(res2) # >>> [950, 1] 23 | 24 | res3 = bf.incrby("u32", "#0", 500).incrby("u32", "#1", 1).execute() 25 | print(res3) # >>> [1450, 2] 26 | 27 | res4 = bf.get("u32", "#0").get("u32", "#1").execute() 28 | print(res4) # >>> [1450, 2] 29 | # STEP_END 30 | 31 | # REMOVE_START 32 | assert res1 == [0] 33 | assert res4 == [1450, 2] 34 | # REMOVE_END 35 | -------------------------------------------------------------------------------- /doctests/dt_hll.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: hll_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for HyperLogLog doc pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/hyperloglogs/ 6 | """ 7 | 8 | import redis 9 | 10 | r = redis.Redis(decode_responses=True) 11 | # HIDE_END 12 | 13 | # REMOVE_START 14 | r.delete("bikes", "commuter_bikes", "all_bikes") 15 | # REMOVE_END 16 | 17 | # STEP_START pfadd 18 | res1 = r.pfadd("bikes", "Hyperion", "Deimos", "Phoebe", "Quaoar") 19 | print(res1) # >>> 1 20 | 21 | res2 = r.pfcount("bikes") 22 | print(res2) # >>> 4 23 | 24 | res3 = r.pfadd("commuter_bikes", "Salacia", "Mimas", "Quaoar") 25 | print(res3) # >>> 1 26 | 27 | res4 = r.pfmerge("all_bikes", "bikes", "commuter_bikes") 28 | print(res4) # >>> True 29 | 30 | res5 = r.pfcount("all_bikes") 31 | print(res5) # >>> 6 32 | # STEP_END 33 | 34 | # REMOVE_START 35 | assert res4 is True 36 | # REMOVE_END 37 | -------------------------------------------------------------------------------- /benchmarks/socket_read_size.py: -------------------------------------------------------------------------------- 1 | from base import Benchmark 2 | 3 | from redis.connection import PythonParser, _HiredisParser 4 | 5 | 6
| class SocketReadBenchmark(Benchmark): 7 | ARGUMENTS = ( 8 | {"name": "parser", "values": [PythonParser, _HiredisParser]}, 9 | { 10 | "name": "value_size", 11 | "values": [10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000], 12 | }, 13 | {"name": "read_size", "values": [4096, 8192, 16384, 32768, 65536, 131072]}, 14 | ) 15 | 16 | def setup(self, value_size, read_size, parser): 17 | r = self.get_client(parser_class=parser, socket_read_size=read_size) 18 | r.set("benchmark", "a" * value_size) 19 | 20 | def run(self, value_size, read_size, parser): 21 | r = self.get_client() 22 | r.get("benchmark") 23 | 24 | 25 | if __name__ == "__main__": 26 | SocketReadBenchmark().run_benchmark() 27 | -------------------------------------------------------------------------------- /doctests/cmds_cnxmgmt.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_cnxmgmt 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START auth1 9 | # REMOVE_START 10 | r.config_set("requirepass", "temp_pass") 11 | # REMOVE_END 12 | res1 = r.auth(password="temp_pass") 13 | print(res1) # >>> True 14 | 15 | res2 = r.auth(password="temp_pass", username="default") 16 | print(res2) # >>> True 17 | 18 | # REMOVE_START 19 | assert res1 == True 20 | assert res2 == True 21 | r.config_set("requirepass", "") 22 | # REMOVE_END 23 | # STEP_END 24 | 25 | # STEP_START auth2 26 | # REMOVE_START 27 | r.acl_setuser("test-user", enabled=True, passwords=["+strong_password"], commands=["+acl"]) 28 | # REMOVE_END 29 | res = r.auth(username="test-user", password="strong_password") 30 | print(res) # >>> True 31 | 32 | # REMOVE_START 33 | assert res == True 34 | r.acl_deluser("test-user") 35 | # REMOVE_END 36 | # STEP_END 37 | -------------------------------------------------------------------------------- /doctests/dt_cuckoo.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cuckoo_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Cuckoo filter doc pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/cuckoo-filter/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | 12 | # REMOVE_START 13 | r.delete("bikes:models") 14 | # REMOVE_END 15 | 16 | # STEP_START cuckoo 17 | res1 = r.cf().reserve("bikes:models", 1000000) 18 | print(res1) # >>> True 19 | 20 | res2 = r.cf().add("bikes:models", "Smoky Mountain Striker") 21 | print(res2) # >>> 1 22 | 23 | res3 = r.cf().exists("bikes:models", "Smoky Mountain Striker") 24 | print(res3) # >>> 1 25 | 26 | res4 = r.cf().exists("bikes:models", "Terrible Bike Name") 27 | print(res4) # >>> 0 28 | 29 | res5 = r.cf().delete("bikes:models", "Smoky Mountain Striker") 30 | print(res5) # >>> 1 31 | # STEP_END 32 | 33 | # REMOVE_START 34 | assert res1 is True 35 | assert res5 == 1 36 | # REMOVE_END 37 | -------------------------------------------------------------------------------- /redis/commands/search/hybrid_result.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Dict, List, Union 3 | 4 | 5 | @dataclass 6 | class HybridResult: 7 | """ 8 | Represents the result of a hybrid search query execution 9 | Returned by the `hybrid_search` command, when using RESP version 2. 
10 | """ 11 | 12 | total_results: int 13 | results: List[Dict[str, Any]] 14 | warnings: List[Union[str, bytes]] 15 | execution_time: float 16 | 17 | 18 | class HybridCursorResult: 19 | def __init__(self, search_cursor_id: int, vsim_cursor_id: int) -> None: 20 | """ 21 | Represents the result of a hybrid search query execution with cursor 22 | 23 | search_cursor_id: int - cursor id for the search query 24 | vsim_cursor_id: int - cursor id for the vector similarity query 25 | """ 26 | self.search_cursor_id = search_cursor_id 27 | self.vsim_cursor_id = vsim_cursor_id 28 | -------------------------------------------------------------------------------- /doctests/dt_cms.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cms_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Count-min sketch doc pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/count-min-sketch/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | # REMOVE_START 12 | r.delete("bikes:profit") 13 | # REMOVE_END 14 | 15 | # STEP_START cms 16 | res1 = r.cms().initbyprob("bikes:profit", 0.001, 0.002) 17 | print(res1) # >>> True 18 | 19 | res2 = r.cms().incrby("bikes:profit", ["Smoky Mountain Striker"], [100]) 20 | print(res2) # >>> [100] 21 | 22 | res3 = r.cms().incrby( 23 | "bikes:profit", ["Rocky Mountain Racer", "Cloudy City Cruiser"], [200, 150] 24 | ) 25 | print(res3) # >>> [200, 150] 26 | 27 | res4 = r.cms().query("bikes:profit", "Smoky Mountain Striker") 28 | print(res4) # >>> [100] 29 | 30 | res5 = r.cms().info("bikes:profit") 31 | print(res5.width, res5.depth, res5.count) # >>> 2000 9 450 32 | # STEP_END 33 | -------------------------------------------------------------------------------- /doctests/dt_topk.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: topk_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Top-K pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/top-k/ 6 | """ 7 | 8 | import redis 9 | 10 | r = redis.Redis(decode_responses=True) 11 | # HIDE_END 12 | 13 | # REMOVE_START 14 | r.delete("bikes:keywords") 15 | # REMOVE_END 16 | 17 | # STEP_START topk 18 | res1 = r.topk().reserve("bikes:keywords", 5, 2000, 7, 0.925) 19 | print(res1) # >>> True 20 | 21 | res2 = r.topk().add( 22 | "bikes:keywords", 23 | "store", 24 | "seat", 25 | "handlebars", 26 | "handles", 27 | "pedals", 28 | "tires", 29 | "store", 30 | "seat", 31 | ) 32 | print(res2) # >>> [None, None, None, None, None, 'handlebars', None, None] 33 | 34 | res3 = r.topk().list("bikes:keywords") 35 | print(res3) # >>> ['store', 'seat', 'pedals', 'tires', 'handles'] 36 | 37 | res4 = r.topk().query("bikes:keywords", "store", "handlebars") 38 | print(res4) # >>> [1, 0] 39 | -------------------------------------------------------------------------------- /doctests/dt_bloom.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: bf_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Bloom filter doc pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/bloom-filter/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | 12 | # STEP_START bloom 13 | res1 = r.bf().reserve("bikes:models", 0.01, 1000) 14 | print(res1) # >>> True 15 | 16 | res2 = r.bf().add("bikes:models", "Smoky Mountain Striker") 17 | print(res2) # >>> True 18 | 19 | res3 = r.bf().exists("bikes:models", "Smoky 
Mountain Striker") 20 | print(res3) # >>> True 21 | 22 | res4 = r.bf().madd( 23 | "bikes:models", 24 | "Rocky Mountain Racer", 25 | "Cloudy City Cruiser", 26 | "Windy City Wippet", 27 | ) 28 | print(res4) # >>> [True, True, True] 29 | 30 | res5 = r.bf().mexists( 31 | "bikes:models", 32 | "Rocky Mountain Racer", 33 | "Cloudy City Cruiser", 34 | "Windy City Wippet", 35 | ) 36 | print(res5) # >>> [True, True, True] 37 | # STEP_END 38 | 39 | # REMOVE_START 40 | assert res1 is True 41 | # REMOVE_END 42 | -------------------------------------------------------------------------------- /dev_requirements.txt: -------------------------------------------------------------------------------- 1 | build 2 | build==1.2.2.post1 ; platform_python_implementation == "PyPy" 3 | click==8.0.4 4 | invoke==2.2.0 5 | packaging>=20.4 6 | packaging==24.2 ; platform_python_implementation == "PyPy" 7 | 8 | pytest 9 | pytest==8.3.4 ; platform_python_implementation == "PyPy" 10 | pytest-asyncio>=0.23.0 11 | pytest-asyncio==1.1.0 ; platform_python_implementation == "PyPy" 12 | pytest-cov 13 | coverage<7.11.1 14 | pytest-cov==6.0.0 ; platform_python_implementation == "PyPy" 15 | coverage==7.6.12 ; platform_python_implementation == "PyPy" 16 | pytest-profiling==1.8.1 17 | pytest-timeout 18 | pytest-timeout==2.3.1 ; platform_python_implementation == "PyPy" 19 | 20 | ruff==0.9.6 21 | ujson>=4.2.0 22 | uvloop<=0.21.0; platform_python_implementation == "CPython" and python_version < "3.14" 23 | uvloop>=0.22; platform_python_implementation == "CPython" and python_version >= "3.14" 24 | vulture>=2.3.0 25 | 26 | numpy>=1.24.0 ; platform_python_implementation == "CPython" 27 | numpy>=1.24.0,<2.0 ; platform_python_implementation == "PyPy" 28 | 29 | redis-entraid==1.0.0 30 | pybreaker>=1.4.0 31 | -------------------------------------------------------------------------------- /docs/commands.rst: -------------------------------------------------------------------------------- 1 | Redis Commands 2 | ############## 3 | 4 | Core Commands 5 | ************* 6 | 7 | The following functions can be used to replicate their equivalent `Redis command `_. Generally they can be used as functions on your redis connection. For the simplest example, see below: 8 | 9 | Getting and settings data in redis:: 10 | 11 | import redis 12 | r = redis.Redis(decode_responses=True) 13 | r.set('mykey', 'thevalueofmykey') 14 | r.get('mykey') 15 | 16 | .. autoclass:: redis.commands.core.CoreCommands 17 | :inherited-members: 18 | 19 | Sentinel Commands 20 | ***************** 21 | .. autoclass:: redis.commands.sentinel.SentinelCommands 22 | :inherited-members: 23 | 24 | Redis Cluster Commands 25 | ********************** 26 | 27 | The following `Redis commands `_ are available within a `Redis Cluster `_. Generally they can be used as functions on your redis connection. 28 | 29 | .. 
autoclass:: redis.commands.cluster.RedisClusterCommands 30 | :inherited-members: 31 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/config/vector.toml: -------------------------------------------------------------------------------- 1 | [sources.syslog_logs] 2 | type = "demo_logs" 3 | format = "syslog" 4 | interval = 0.1 5 | 6 | [sources.apache_common_logs] 7 | type = "demo_logs" 8 | format = "apache_common" 9 | interval = 0.1 10 | 11 | [sources.apache_error_logs] 12 | type = "demo_logs" 13 | format = "apache_error" 14 | interval = 0.1 15 | 16 | [sources.json_logs] 17 | type = "demo_logs" 18 | format = "json" 19 | interval = 0.1 20 | 21 | # Parse Syslog logs 22 | # See the Vector Remap Language reference for more info: https://vrl.dev 23 | [transforms.parse_logs] 24 | type = "remap" 25 | inputs = ["syslog_logs"] 26 | source = ''' 27 | . = parse_syslog!(string!(.message)) 28 | ''' 29 | 30 | # Export data to Uptrace. 31 | [sinks.uptrace] 32 | type = "http" 33 | inputs = ["parse_logs", "apache_common_logs", "apache_error_logs", "json_logs"] 34 | encoding.codec = "json" 35 | framing.method = "newline_delimited" 36 | compression = "gzip" 37 | uri = "http://uptrace:14318/api/v1/vector/logs" 38 | #uri = "https://api.uptrace.dev/api/v1/vector/logs" 39 | headers.uptrace-dsn = "http://project2_secret_token@localhost:14317/2" 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022-2023, Redis, inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
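For the cluster commands described in `docs/commands.rst` above, a minimal usage sketch (the host and port are illustrative; any reachable cluster node works as an entry point):

```python
from redis.cluster import RedisCluster

rc = RedisCluster(host="localhost", port=16379, decode_responses=True)
rc.set("mykey", "thevalueofmykey")  # routed to the node owning mykey's hash slot
print(rc.get("mykey"))
```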
22 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import pytest 3 | from redis.utils import compare_versions 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "version1,version2,expected_res", 8 | [ 9 | ("1.0.0", "0.9.0", -1), 10 | ("1.0.0", "1.0.0", 0), 11 | ("0.9.0", "1.0.0", 1), 12 | ("1.09.0", "1.9.0", 0), 13 | ("1.090.0", "1.9.0", -1), 14 | ("1", "0.9.0", -1), 15 | ("1", "1.0.0", 0), 16 | ], 17 | ids=[ 18 | "version1 > version2", 19 | "version1 == version2", 20 | "version1 < version2", 21 | "version1 == version2 - different minor format", 22 | "version1 > version2 - different minor format", 23 | "version1 > version2 - major version only", 24 | "version1 == version2 - major version only", 25 | ], 26 | ) 27 | def test_compare_versions(version1, version2, expected_res): 28 | assert compare_versions(version1, version2) == expected_res 29 | 30 | 31 | def redis_server_time(client): 32 | seconds, milliseconds = client.time() 33 | timestamp = float(f"{seconds}.{milliseconds}") 34 | return datetime.fromtimestamp(timestamp) 35 | -------------------------------------------------------------------------------- /doctests/dt_geo.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: geo_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Geospatial doc pages: 5 | https://redis.io/docs/latest/develop/data-types/geospatial/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | # REMOVE_START 12 | r.delete("bikes:rentable") 13 | # REMOVE_END 14 | 15 | # STEP_START geoadd 16 | res1 = r.geoadd("bikes:rentable", [-122.27652, 37.805186, "station:1"]) 17 | print(res1) # >>> 1 18 | 19 | res2 = r.geoadd("bikes:rentable", [-122.2674626, 37.8062344, "station:2"]) 20 | print(res2) # >>> 1 21 | 22 | res3 = r.geoadd("bikes:rentable", [-122.2469854, 37.8104049, "station:3"]) 23 | print(res3) # >>> 1 24 | # STEP_END 25 | 26 | # REMOVE_START 27 | assert res1 == 1 28 | assert res2 == 1 29 | assert res3 == 1 30 | # REMOVE_END 31 | 32 | # STEP_START geosearch 33 | res4 = r.geosearch( 34 | "bikes:rentable", 35 | longitude=-122.27652, 36 | latitude=37.805186, 37 | radius=5, 38 | unit="km", 39 | ) 40 | print(res4) # >>> ['station:1', 'station:2', 'station:3'] 41 | # STEP_END 42 | 43 | # REMOVE_START 44 | assert res4 == ["station:1", "station:2", "station:3"] 45 | # REMOVE_END 46 | -------------------------------------------------------------------------------- /tests/test_sentinel_managed_connection.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from redis.retry import Retry 4 | from redis.sentinel import SentinelManagedConnection 5 | from redis.backoff import NoBackoff 6 | from unittest import mock 7 | 8 | 9 | def test_connect_retry_on_timeout_error(master_host): 10 | """Test that the _connect function is retried in case of a timeout""" 11 | connection_pool = mock.Mock() 12 | connection_pool.get_master_address = mock.Mock( 13 | return_value=(master_host[0], master_host[1]) 14 | ) 15 | conn = SentinelManagedConnection( 16 | retry_on_timeout=True, 17 | retry=Retry(NoBackoff(), 3), 18 | connection_pool=connection_pool, 19 | ) 20 | origin_connect = conn._connect 21 | conn._connect = mock.Mock() 22 | 23 | def mock_connect(): 24 | # connect only on the last retry 25 | if conn._connect.call_count <= 2: 26 | raise 
socket.timeout 27 | else: 28 | return origin_connect() 29 | 30 | conn._connect.side_effect = mock_connect 31 | conn.connect() 32 | assert conn._connect.call_count == 3 33 | assert connection_pool.get_master_address.call_count == 3 34 | conn.disconnect() 35 | -------------------------------------------------------------------------------- /tests/test_helpers.py: -------------------------------------------------------------------------------- 1 | import string 2 | 3 | from redis.commands.helpers import ( 4 | delist, 5 | list_or_args, 6 | nativestr, 7 | parse_to_list, 8 | random_string, 9 | ) 10 | 11 | 12 | def test_list_or_args(): 13 | k = ["hello, world"] 14 | a = ["some", "argument", "list"] 15 | assert list_or_args(k, a) == k + a 16 | 17 | for i in ["banana", b"banana"]: 18 | assert list_or_args(i, a) == [i] + a 19 | 20 | 21 | def test_parse_to_list(): 22 | assert parse_to_list(None) == [] 23 | r = ["hello", b"my name", "45", "555.55", "is simon!", None] 24 | assert parse_to_list(r) == ["hello", "my name", 45, 555.55, "is simon!", None] 25 | 26 | 27 | def test_nativestr(): 28 | assert nativestr("teststr") == "teststr" 29 | assert nativestr(b"teststr") == "teststr" 30 | assert nativestr("null") is None 31 | 32 | 33 | def test_delist(): 34 | assert delist(None) is None 35 | assert delist([b"hello", "world", b"banana"]) == ["hello", "world", "banana"] 36 | 37 | 38 | def test_random_string(): 39 | assert len(random_string()) == 10 40 | assert len(random_string(15)) == 15 41 | for a in random_string(): 42 | assert a in string.ascii_lowercase 43 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | name: Docs CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - '[0-9].[0-9]' 8 | pull_request: 9 | branches: 10 | - master 11 | - '[0-9].[0-9]' 12 | schedule: 13 | - cron: '0 1 * * *' # nightly build 14 | 15 | concurrency: 16 | group: ${{ github.event.pull_request.number || github.ref }}-docs 17 | cancel-in-progress: true 18 | 19 | permissions: 20 | contents: read # to fetch code (actions/checkout) 21 | 22 | jobs: 23 | 24 | build-docs: 25 | name: Build docs 26 | runs-on: ubuntu-latest 27 | steps: 28 | - uses: actions/checkout@v6 29 | - uses: actions/setup-python@v6 30 | with: 31 | python-version: "3.10" 32 | cache: 'pip' 33 | - name: install deps 34 | run: | 35 | sudo apt-get update -yqq 36 | sudo apt-get install -yqq pandoc make 37 | - name: build docs 38 | run: | 39 | pip install -r dev_requirements.txt -r docs/requirements.txt 40 | invoke build-docs 41 | 42 | - name: upload docs 43 | uses: actions/upload-artifact@v5 44 | with: 45 | name: redis-py-docs 46 | path: | 47 | docs/_build/html 48 | -------------------------------------------------------------------------------- /tests/mocks.py: -------------------------------------------------------------------------------- 1 | # Various mocks for testing 2 | 3 | 4 | class MockSocket: 5 | """ 6 | A class simulating a readable socket, optionally raising a 7 | special exception every other read.
8 | """ 9 | 10 | class TestError(BaseException): 11 | pass 12 | 13 | def __init__(self, data, interrupt_every=0): 14 | self.data = data 15 | self.counter = 0 16 | self.pos = 0 17 | self.interrupt_every = interrupt_every 18 | 19 | def tick(self): 20 | self.counter += 1 21 | if not self.interrupt_every: 22 | return 23 | if (self.counter % self.interrupt_every) == 0: 24 | raise self.TestError() 25 | 26 | def recv(self, bufsize): 27 | self.tick() 28 | bufsize = min(5, bufsize) # truncate the read size 29 | result = self.data[self.pos : self.pos + bufsize] 30 | self.pos += len(result) 31 | return result 32 | 33 | def recv_into(self, buffer, nbytes=0, flags=0): 34 | self.tick() 35 | if nbytes == 0: 36 | nbytes = len(buffer) 37 | nbytes = min(5, nbytes) # truncate the read size 38 | result = self.data[self.pos : self.pos + nbytes] 39 | self.pos += len(result) 40 | buffer[: len(result)] = result 41 | return len(result) 42 | -------------------------------------------------------------------------------- /.github/release-drafter-config.yml: -------------------------------------------------------------------------------- 1 | name-template: '$NEXT_MINOR_VERSION' 2 | tag-template: 'v$NEXT_MINOR_VERSION' 3 | filter-by-commitish: true 4 | commitish: master 5 | autolabeler: 6 | - label: 'maintenance' 7 | files: 8 | - '*.md' 9 | - '.github/*' 10 | - label: 'bug' 11 | branch: 12 | - '/bug-.+' 13 | - label: 'maintenance' 14 | branch: 15 | - '/maintenance-.+' 16 | - label: 'feature' 17 | branch: 18 | - '/feature-.+' 19 | categories: 20 | - title: '🔥 Breaking Changes' 21 | labels: 22 | - 'breakingchange' 23 | - title: '🧪 Experimental Features' 24 | labels: 25 | - 'experimental' 26 | - title: '🚀 New Features' 27 | labels: 28 | - 'feature' 29 | - 'enhancement' 30 | - title: '🐛 Bug Fixes' 31 | labels: 32 | - 'fix' 33 | - 'bugfix' 34 | - 'bug' 35 | - 'BUG' 36 | - title: '🧰 Maintenance' 37 | labels: 38 | - 'maintenance' 39 | - 'dependencies' 40 | - 'documentation' 41 | - 'docs' 42 | - 'testing' 43 | change-template: '- $TITLE (#$NUMBER)' 44 | exclude-labels: 45 | - 'skip-changelog' 46 | template: | 47 | # Changes 48 | 49 | $CHANGES 50 | 51 | ## Contributors 52 | We'd like to thank all the contributors who worked on this release! 
53 | 54 | $CONTRIBUTORS 55 | 56 | -------------------------------------------------------------------------------- /tests/test_asyncio/test_sentinel_managed_connection.py: -------------------------------------------------------------------------------- 1 | import socket 2 | from unittest import mock 3 | 4 | import pytest 5 | from redis.asyncio.retry import Retry 6 | from redis.asyncio.sentinel import SentinelManagedConnection 7 | from redis.backoff import NoBackoff 8 | 9 | pytestmark = pytest.mark.asyncio 10 | 11 | 12 | async def test_connect_retry_on_timeout_error(connect_args): 13 | """Test that the _connect function is retried in case of a timeout""" 14 | connection_pool = mock.AsyncMock() 15 | connection_pool.get_master_address = mock.AsyncMock( 16 | return_value=(connect_args["host"], connect_args["port"]) 17 | ) 18 | conn = SentinelManagedConnection( 19 | retry_on_timeout=True, 20 | retry=Retry(NoBackoff(), 3), 21 | connection_pool=connection_pool, 22 | ) 23 | origin_connect = conn._connect 24 | conn._connect = mock.AsyncMock() 25 | 26 | async def mock_connect(): 27 | # connect only on the last retry 28 | if conn._connect.call_count <= 2: 29 | raise socket.timeout 30 | else: 31 | return await origin_connect() 32 | 33 | conn._connect.side_effect = mock_connect 34 | await conn.connect() 35 | assert conn._connect.call_count == 3 36 | assert connection_pool.get_master_address.call_count == 3 37 | await conn.disconnect() 38 | -------------------------------------------------------------------------------- /doctests/README.md: -------------------------------------------------------------------------------- 1 | # Command examples for redis.io 2 | 3 | ## How to add an example 4 | 5 | Create a regular Python file in the current folder with a meaningful name. It makes sense to prefix example files with the 6 | command category (e.g. string, set, list, hash, etc.) to make navigation in the folder easier. Files ending in *.py* 7 | are automatically run by the test suite. 8 | 9 | ### Special markup 10 | 11 | See https://github.com/redis-stack/redis-stack-website#readme for more details. 12 | 13 | ## How to test examples 14 | 15 | Examples are standalone Python scripts, committed to the *doctests* directory. These scripts assume that the 16 | ```doctests/requirements.txt``` and ```dev_requirements.txt``` from this repository have been installed, as per below. 17 | 18 | ```bash 19 | pip install -r dev_requirements.txt 20 | pip uninstall -y redis # uninstall Redis package installed via redis-entraid 21 | pip install -r doctests/requirements.txt 22 | ``` 23 | 24 | Note: the CI process runs linters against the examples. Assuming 25 | the requirements above have been installed, you can run ```ruff check yourfile.py``` and ```ruff format yourfile.py``` 26 | locally to validate the linting, prior to CI. 27 | 28 | Just include the necessary assertions in the example file and run 29 | ```bash 30 | sh doctests/run_examples.sh 31 | ``` 32 | to test all examples in the current folder. 33 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/README.md: -------------------------------------------------------------------------------- 1 | # Example for redis-py OpenTelemetry instrumentation 2 | 3 | This example demonstrates how to monitor Redis using [OpenTelemetry](https://opentelemetry.io/) and 4 | [Uptrace](https://github.com/uptrace/uptrace). It requires Docker to start Redis Server and Uptrace.
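Putting the conventions from the doctests README above together, a new example file might look like the following minimal sketch; the file name, key, and step label are hypothetical, and a local Redis on the default port is assumed, like the other files in that folder:

```python
# doctests/string_hello.py -- hypothetical example file following the
# EXAMPLE/STEP/REMOVE markup described in the README above.
# EXAMPLE: string_hello
# HIDE_START
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# STEP_START set_and_get
res = r.set("greeting", "hello")
print(res)  # >>> True
res2 = r.get("greeting")
print(res2)  # >>> hello
# STEP_END

# REMOVE_START
assert res
assert res2 == "hello"
r.delete("greeting")
# REMOVE_END
```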
5 | 6 | See 7 | [Monitoring redis-py performance with OpenTelemetry](https://redis.readthedocs.io/en/latest/opentelemetry.html) 8 | for details. 9 | 10 | **Step 1**. Download the example using Git: 11 | 12 | ```shell 13 | git clone https://github.com/redis/redis-py.git 14 | cd redis-py/docs/examples/opentelemetry 15 | ``` 16 | 17 | **Step 2**. Optionally, create a virtualenv: 18 | 19 | ```shell 20 | python3 -m venv .venv 21 | source .venv/bin/activate 22 | ``` 23 | 24 | **Step 3**. Install dependencies: 25 | 26 | ```shell 27 | pip install -r requirements.txt 28 | ``` 29 | 30 | **Step 4**. Start the services using Docker and make sure Uptrace is running: 31 | 32 | ```shell 33 | docker-compose up -d 34 | docker-compose logs uptrace 35 | ``` 36 | 37 | **Step 5**. Run the Redis client example and follow the link from the CLI to view the trace: 38 | 39 | ```shell 40 | python3 main.py 41 | trace: http://localhost:14318/traces/ee029d8782242c8ed38b16d961093b35 42 | ``` 43 | 44 | ![Redis trace](./image/redis-py-trace.png) 45 | 46 | You can also open Uptrace UI at [http://localhost:14318](http://localhost:14318) to view available 47 | spans, logs, and metrics. 48 | -------------------------------------------------------------------------------- /doctests/dt_string.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: set_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for String doc pages: 5 | https://redis.io/docs/latest/develop/data-types/strings/ 6 | """ 7 | 8 | import redis 9 | 10 | r = redis.Redis(decode_responses=True) 11 | # HIDE_END 12 | 13 | # STEP_START set_get 14 | res1 = r.set("bike:1", "Deimos") 15 | print(res1) # True 16 | res2 = r.get("bike:1") 17 | print(res2) # Deimos 18 | # STEP_END 19 | 20 | # REMOVE_START 21 | assert res1 22 | assert res2 == "Deimos" 23 | # REMOVE_END 24 | 25 | # STEP_START setnx_xx 26 | res3 = r.set("bike:1", "bike", nx=True) 27 | print(res3) # None 28 | print(r.get("bike:1")) # Deimos 29 | res4 = r.set("bike:1", "bike", xx=True) 30 | print(res4) # True 31 | # STEP_END 32 | 33 | # REMOVE_START 34 | assert res3 is None 35 | assert res4 36 | # REMOVE_END 37 | 38 | # STEP_START mset 39 | res5 = r.mset({"bike:1": "Deimos", "bike:2": "Ares", "bike:3": "Vanth"}) 40 | print(res5) # True 41 | res6 = r.mget(["bike:1", "bike:2", "bike:3"]) 42 | print(res6) # ['Deimos', 'Ares', 'Vanth'] 43 | # STEP_END 44 | 45 | # REMOVE_START 46 | assert res5 47 | assert res6 == ["Deimos", "Ares", "Vanth"] 48 | # REMOVE_END 49 | 50 | # STEP_START incr 51 | r.set("total_crashes", 0) 52 | res7 = r.incr("total_crashes") 53 | print(res7) # 1 54 | res8 = r.incrby("total_crashes", 10) 55 | print(res8) # 11 56 | # STEP_END 57 | 58 | # REMOVE_START 59 | assert res7 == 1 60 | assert res8 == 11 61 | # REMOVE_END 62 | -------------------------------------------------------------------------------- /redis/asyncio/multidb/failure_detector.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from redis.multidb.failure_detector import FailureDetector 4 | 5 | 6 | class AsyncFailureDetector(ABC): 7 | @abstractmethod 8 | async def register_failure(self, exception: Exception, cmd: tuple) -> None: 9 | """Register a failure that occurred during command execution.""" 10 | pass 11 | 12 | @abstractmethod 13 | async def register_command_execution(self, cmd: tuple) -> None: 14 | """Register a command execution.""" 15 | pass 16 | 17 | @abstractmethod 18 | def set_command_executor(self, command_executor) -> None: 
"""Set the command executor for this failure.""" 20 | pass 21 | 22 | 23 | class FailureDetectorAsyncWrapper(AsyncFailureDetector): 24 | """ 25 | Async wrapper for the failure detector. 26 | """ 27 | 28 | def __init__(self, failure_detector: FailureDetector) -> None: 29 | self._failure_detector = failure_detector 30 | 31 | async def register_failure(self, exception: Exception, cmd: tuple) -> None: 32 | self._failure_detector.register_failure(exception, cmd) 33 | 34 | async def register_command_execution(self, cmd: tuple) -> None: 35 | self._failure_detector.register_command_execution(cmd) 36 | 37 | def set_command_executor(self, command_executor) -> None: 38 | self._failure_detector.set_command_executor(command_executor) 39 | -------------------------------------------------------------------------------- /whitelist.py: -------------------------------------------------------------------------------- 1 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1045) 2 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1045) 3 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1045) 4 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1211) 5 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1211) 6 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1211) 7 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1589) 8 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1589) 9 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1589) 10 | exc_type # unused variable (/data/repos/redis/redis-py/redis/lock.py:156) 11 | exc_value # unused variable (/data/repos/redis/redis-py/redis/lock.py:156) 12 | traceback # unused variable (/data/repos/redis/redis-py/redis/lock.py:156) 13 | exc_type # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26) 14 | exc_value # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26) 15 | traceback # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26) 16 | AsyncConnectionPool # unused import (//data/repos/redis/redis-py/redis/typing.py:9) 17 | AsyncRedis # unused import (//data/repos/redis/redis-py/redis/commands/core.py:49) 18 | TargetNodesT # unused import (//data/repos/redis/redis-py/redis/commands/cluster.py:46) 19 | -------------------------------------------------------------------------------- /.github/workflows/install_and_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | SUFFIX=$1 6 | if [ -z ${SUFFIX} ]; then 7 | echo "Supply valid python package extension such as whl or tar.gz. Exiting." 
8 | exit 3 9 | fi 10 | 11 | script=`pwd`/${BASH_SOURCE[0]} 12 | HERE=`dirname ${script}` 13 | ROOT=`realpath ${HERE}/../..` 14 | 15 | cd ${ROOT} 16 | DESTENV=${ROOT}/.venvforinstall 17 | if [ -d ${DESTENV} ]; then 18 | rm -rf ${DESTENV} 19 | fi 20 | python -m venv ${DESTENV} 21 | source ${DESTENV}/bin/activate 22 | pip install --upgrade --quiet pip 23 | pip install --quiet -r dev_requirements.txt 24 | pip uninstall -y redis # uninstall Redis package installed via redis-entraid 25 | invoke devenv --endpoints=all-stack 26 | invoke package 27 | 28 | # find packages 29 | PKG=`ls ${ROOT}/dist/*.${SUFFIX}` 30 | ls -l ${PKG} 31 | 32 | TESTDIR=${ROOT}/STAGETESTS 33 | if [ -d ${TESTDIR} ]; then 34 | rm -rf ${TESTDIR} 35 | fi 36 | mkdir ${TESTDIR} 37 | cp -R ${ROOT}/tests ${TESTDIR}/tests 38 | cd ${TESTDIR} 39 | 40 | # install, run tests 41 | pip install ${PKG} 42 | # Redis tests 43 | pytest -m 'not onlycluster' --ignore=tests/test_scenario --ignore=tests/test_asyncio/test_scenario 44 | # RedisCluster tests 45 | CLUSTER_URL="redis://localhost:16379/0" 46 | CLUSTER_SSL_URL="rediss://localhost:27379/0" 47 | pytest -m 'not onlynoncluster and not redismod and not ssl' \ 48 | --ignore=tests/test_scenario \ 49 | --ignore=tests/test_asyncio/test_scenario \ 50 | --redis-url="${CLUSTER_URL}" \ 51 | --redis-ssl-url="${CLUSTER_SSL_URL}" 52 | -------------------------------------------------------------------------------- /redis/commands/timeseries/utils.py: -------------------------------------------------------------------------------- 1 | from ..helpers import nativestr 2 | 3 | 4 | def list_to_dict(aList): 5 | return {nativestr(aList[i][0]): nativestr(aList[i][1]) for i in range(len(aList))} 6 | 7 | 8 | def parse_range(response, **kwargs): 9 | """Parse range response. Used by TS.RANGE and TS.REVRANGE.""" 10 | return [tuple((r[0], float(r[1]))) for r in response] 11 | 12 | 13 | def parse_m_range(response): 14 | """Parse multi range response. Used by TS.MRANGE and TS.MREVRANGE.""" 15 | res = [] 16 | for item in response: 17 | res.append({nativestr(item[0]): [list_to_dict(item[1]), parse_range(item[2])]}) 18 | return sorted(res, key=lambda d: list(d.keys())) 19 | 20 | 21 | def parse_get(response): 22 | """Parse get response. Used by TS.GET.""" 23 | if not response: 24 | return None 25 | return int(response[0]), float(response[1]) 26 | 27 | 28 | def parse_m_get(response): 29 | """Parse multi get response. 
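The time series parsers in this module consume raw reply arrays shaped like the following sketch; the timestamps and values below are made up, not captured from a server:

```python
# Illustrative raw replies for parse_range and parse_get defined above.
from redis.commands.timeseries.utils import parse_get, parse_range

# TS.RANGE replies are [timestamp, value] pairs; values become floats.
assert parse_range([[1657265016, b"5"], [1657265017, b"10.5"]]) == [
    (1657265016, 5.0),
    (1657265017, 10.5),
]

# TS.GET replies are a single [timestamp, value] pair, or empty.
assert parse_get([1657265016, b"5"]) == (1657265016, 5.0)
assert parse_get([]) is None  # empty reply means the key has no sample
```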
Used by TS.MGET.""" 30 | res = [] 31 | for item in response: 32 | if not item[2]: 33 | res.append({nativestr(item[0]): [list_to_dict(item[1]), None, None]}) 34 | else: 35 | res.append( 36 | { 37 | nativestr(item[0]): [ 38 | list_to_dict(item[1]), 39 | int(item[2][0]), 40 | float(item[2][1]), 41 | ] 42 | } 43 | ) 44 | return sorted(res, key=lambda d: list(d.keys())) 45 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import time 4 | 5 | import redis 6 | import uptrace 7 | from opentelemetry import trace 8 | from opentelemetry.instrumentation.redis import RedisInstrumentor 9 | 10 | tracer = trace.get_tracer("app_or_package_name", "1.0.0") 11 | 12 | 13 | def main(): 14 | uptrace.configure_opentelemetry( 15 | dsn="http://project2_secret_token@localhost:14317/2", 16 | service_name="myservice", 17 | service_version="1.0.0", 18 | ) 19 | RedisInstrumentor().instrument() 20 | 21 | client = redis.StrictRedis(host="localhost", port=6379) 22 | 23 | span = handle_request(client) 24 | print("trace:", uptrace.trace_url(span)) 25 | 26 | for i in range(10000): 27 | handle_request(client) 28 | time.sleep(1) 29 | 30 | 31 | def handle_request(client): 32 | with tracer.start_as_current_span( 33 | "handle-request", kind=trace.SpanKind.CLIENT 34 | ) as span: 35 | client.get("my-key") 36 | client.set("hello", "world") 37 | client.mset( 38 | { 39 | "employee_name": "Adam Adams", 40 | "employee_age": 30, 41 | "position": "Software Engineer", 42 | } 43 | ) 44 | 45 | pipe = client.pipeline() 46 | pipe.set("foo", 5) 47 | pipe.set("bar", 18.5) 48 | pipe.set("blee", "hello world!") 49 | pipe.execute() 50 | 51 | return span 52 | 53 | 54 | if __name__ == "__main__": 55 | main() 56 | -------------------------------------------------------------------------------- /redis/commands/vectorset/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from redis._parsers.helpers import pairs_to_dict 4 | from redis.commands.vectorset.utils import ( 5 | parse_vemb_result, 6 | parse_vlinks_result, 7 | parse_vsim_result, 8 | ) 9 | 10 | from ..helpers import get_protocol_version 11 | from .commands import ( 12 | VEMB_CMD, 13 | VGETATTR_CMD, 14 | VINFO_CMD, 15 | VLINKS_CMD, 16 | VSIM_CMD, 17 | VectorSetCommands, 18 | ) 19 | 20 | 21 | class VectorSet(VectorSetCommands): 22 | def __init__(self, client, **kwargs): 23 | """Create a new VectorSet client.""" 24 | # Set the module commands' callbacks 25 | self._MODULE_CALLBACKS = { 26 | VEMB_CMD: parse_vemb_result, 27 | VSIM_CMD: parse_vsim_result, 28 | VGETATTR_CMD: lambda r: r and json.loads(r) or None, 29 | } 30 | 31 | self._RESP2_MODULE_CALLBACKS = { 32 | VINFO_CMD: lambda r: r and pairs_to_dict(r) or None, 33 | VLINKS_CMD: parse_vlinks_result, 34 | } 35 | self._RESP3_MODULE_CALLBACKS = {} 36 | 37 | self.client = client 38 | self.execute_command = client.execute_command 39 | 40 | if get_protocol_version(self.client) in ["3", 3]: 41 | self._MODULE_CALLBACKS.update(self._RESP3_MODULE_CALLBACKS) 42 | else: 43 | self._MODULE_CALLBACKS.update(self._RESP2_MODULE_CALLBACKS) 44 | 45 | for k, v in self._MODULE_CALLBACKS.items(): 46 | self.client.set_response_callback(k, v) 47 | -------------------------------------------------------------------------------- /benchmarks/base.py: 
-------------------------------------------------------------------------------- 1 | import functools 2 | import itertools 3 | import sys 4 | import timeit 5 | 6 | import redis 7 | 8 | 9 | class Benchmark: 10 | ARGUMENTS = () 11 | 12 | def __init__(self): 13 | self._client = None 14 | 15 | def get_client(self, **kwargs): 16 | # eventually make this more robust and take optional args from 17 | # argparse 18 | if self._client is None or kwargs: 19 | defaults = {"db": 9} 20 | defaults.update(kwargs) 21 | pool = redis.ConnectionPool(**defaults) 22 | self._client = redis.Redis(connection_pool=pool) 23 | return self._client 24 | 25 | def setup(self, **kwargs): 26 | pass 27 | 28 | def run(self, **kwargs): 29 | pass 30 | 31 | def run_benchmark(self): 32 | group_names = [group["name"] for group in self.ARGUMENTS] 33 | group_values = [group["values"] for group in self.ARGUMENTS] 34 | for value_set in itertools.product(*group_values): 35 | pairs = list(zip(group_names, value_set)) 36 | arg_string = ", ".join(f"{p[0]}={p[1]}" for p in pairs) 37 | sys.stdout.write(f"Benchmark: {arg_string}... ") 38 | sys.stdout.flush() 39 | kwargs = dict(pairs) 40 | setup = functools.partial(self.setup, **kwargs) 41 | run = functools.partial(self.run, **kwargs) 42 | t = timeit.timeit(stmt=run, setup=setup, number=1000) 43 | sys.stdout.write(f"{t:f}\n") 44 | sys.stdout.flush() 45 | -------------------------------------------------------------------------------- /tests/test_asyncio/mocks.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | # Helper Mocking classes for the tests. 4 | 5 | 6 | class MockStream: 7 | """ 8 | A class simulating an asyncio input buffer, optionally raising a 9 | special exception every other read. 10 | """ 11 | 12 | class TestError(BaseException): 13 | pass 14 | 15 | def __init__(self, data, interrupt_every=0): 16 | self.data = data 17 | self.counter = 0 18 | self.pos = 0 19 | self.interrupt_every = interrupt_every 20 | 21 | def tick(self): 22 | self.counter += 1 23 | if not self.interrupt_every: 24 | return 25 | if (self.counter % self.interrupt_every) == 0: 26 | raise self.TestError() 27 | 28 | async def read(self, want): 29 | self.tick() 30 | want = 5 31 | result = self.data[self.pos : self.pos + want] 32 | self.pos += len(result) 33 | return result 34 | 35 | async def readline(self): 36 | self.tick() 37 | find = self.data.find(b"\n", self.pos) 38 | if find >= 0: 39 | result = self.data[self.pos : find + 1] 40 | else: 41 | result = self.data[self.pos :] 42 | self.pos += len(result) 43 | return result 44 | 45 | async def readexactly(self, length): 46 | self.tick() 47 | result = self.data[self.pos : self.pos + length] 48 | if len(result) < length: 49 | raise asyncio.IncompleteReadError(result, None) 50 | self.pos += len(result) 51 | return result 52 | -------------------------------------------------------------------------------- /tests/ssl_utils.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import os 3 | from collections import namedtuple 4 | 5 | CLIENT_CERT_NAME = "client.crt" 6 | CLIENT_KEY_NAME = "client.key" 7 | SERVER_CERT_NAME = "redis.crt" 8 | SERVER_KEY_NAME = "redis.key" 9 | CA_CERT_NAME = "ca.crt" 10 | 11 | 12 | class CertificateType(str, enum.Enum): 13 | client = "client" 14 | server = "server" 15 | 16 | 17 | TLSFiles = namedtuple("TLSFiles", ["certfile", "keyfile", "ca_certfile"]) 18 | 19 | 20 | def get_tls_certificates( 21 | subdir: str = "standalone", 22 | 
cert_type: CertificateType = CertificateType.client, 23 | ): 24 | root = os.path.join(os.path.dirname(__file__), "..") 25 | cert_subdir = ("dockers", subdir, "tls") 26 | cert_dir = os.path.abspath(os.path.join(root, *cert_subdir)) 27 | if not os.path.isdir(cert_dir): # github actions package validation case 28 | cert_dir = os.path.abspath(os.path.join(root, "..", *cert_subdir)) 29 | if not os.path.isdir(cert_dir): 30 | raise OSError(f"No SSL certificates found. They should be in {cert_dir}") 31 | 32 | if cert_type == CertificateType.client: 33 | return TLSFiles( 34 | os.path.join(cert_dir, CLIENT_CERT_NAME), 35 | os.path.join(cert_dir, CLIENT_KEY_NAME), 36 | os.path.join(cert_dir, CA_CERT_NAME), 37 | ) 38 | elif cert_type == CertificateType.server: 39 | return TLSFiles( 40 | os.path.join(cert_dir, SERVER_CERT_NAME), 41 | os.path.join(cert_dir, SERVER_KEY_NAME), 42 | os.path.join(cert_dir, CA_CERT_NAME), 43 | ) 44 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/config/otel-collector.yaml: -------------------------------------------------------------------------------- 1 | extensions: 2 | health_check: 3 | pprof: 4 | endpoint: 0.0.0.0:1777 5 | zpages: 6 | endpoint: 0.0.0.0:55679 7 | 8 | receivers: 9 | otlp: 10 | protocols: 11 | grpc: 12 | http: 13 | hostmetrics: 14 | collection_interval: 10s 15 | scrapers: 16 | cpu: 17 | disk: 18 | load: 19 | filesystem: 20 | memory: 21 | network: 22 | paging: 23 | redis: 24 | endpoint: "redis-server:6379" 25 | collection_interval: 10s 26 | jaeger: 27 | protocols: 28 | grpc: 29 | 30 | processors: 31 | resourcedetection: 32 | detectors: ["system"] 33 | batch: 34 | send_batch_size: 10000 35 | timeout: 10s 36 | 37 | exporters: 38 | logging: 39 | logLevel: debug 40 | otlp: 41 | endpoint: uptrace:14317 42 | tls: 43 | insecure: true 44 | headers: { "uptrace-dsn": "http://project2_secret_token@localhost:14317/2" } 45 | 46 | service: 47 | # telemetry: 48 | # logs: 49 | # level: DEBUG 50 | pipelines: 51 | traces: 52 | receivers: [otlp, jaeger] 53 | processors: [batch] 54 | exporters: [otlp, logging] 55 | metrics: 56 | receivers: [otlp] 57 | processors: [batch] 58 | exporters: [otlp] 59 | metrics/hostmetrics: 60 | receivers: [hostmetrics, redis] 61 | processors: [batch, resourcedetection] 62 | exporters: [otlp] 63 | logs: 64 | receivers: [otlp] 65 | processors: [batch] 66 | exporters: [otlp] 67 | 68 | extensions: [health_check, pprof, zpages] 69 | -------------------------------------------------------------------------------- /redis/commands/json/decoders.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import re 3 | 4 | from ..helpers import nativestr 5 | 6 | 7 | def bulk_of_jsons(d): 8 | """Replace serialized JSON values with objects in a 9 | bulk array response (list). 10 | """ 11 | 12 | def _f(b): 13 | for index, item in enumerate(b): 14 | if item is not None: 15 | b[index] = d(item) 16 | return b 17 | 18 | return _f 19 | 20 | 21 | def decode_dict_keys(obj): 22 | """Decode the keys of the given dictionary with utf-8.""" 23 | newobj = copy.copy(obj) 24 | for k in obj.keys(): 25 | if isinstance(k, bytes): 26 | newobj[k.decode("utf-8")] = newobj[k] 27 | newobj.pop(k) 28 | return newobj 29 | 30 | 31 | def unstring(obj): 32 | """ 33 | Attempt to parse string to native integer formats. 34 | One can't simply call int/float in a try/catch because there is a 35 | semantic difference between (for example) 15.0 and 15. 
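Illustrating the semantics that docstring describes, in a short sketch with made-up inputs:

```python
# Behavior of unstring as documented above: int where possible, float where
# the string has a digits.digits shape, pass-through otherwise.
from redis.commands.json.decoders import unstring

assert unstring("15") == 15        # all-digit strings parse as int
assert unstring("15.0") == 15.0    # "15.0" parses as float, not int
assert unstring("15a") == "15a"    # anything else passes through unchanged
```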
36 | """ 37 | floatreg = "^\\d+.\\d+$" 38 | match = re.findall(floatreg, obj) 39 | if match != []: 40 | return float(match[0]) 41 | 42 | intreg = "^\\d+$" 43 | match = re.findall(intreg, obj) 44 | if match != []: 45 | return int(match[0]) 46 | return obj 47 | 48 | 49 | def decode_list(b): 50 | """ 51 | Given a non-deserializable object, make a best effort to 52 | return a useful set of results. 53 | """ 54 | if isinstance(b, list): 55 | return [nativestr(obj) for obj in b] 56 | elif isinstance(b, bytes): 57 | return unstring(nativestr(b)) 58 | elif isinstance(b, str): 59 | return unstring(b) 60 | return b 61 | -------------------------------------------------------------------------------- /tests/helpers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from time import sleep 3 | from typing import Callable 4 | 5 | 6 | def wait_for_condition( 7 | predicate: Callable[[], bool], 8 | timeout: float = 0.2, 9 | check_interval: float = 0.01, 10 | error_message: str = "Timeout waiting for condition", 11 | ) -> None: 12 | """ 13 | Poll a condition until it becomes True or timeout is reached. 14 | 15 | Args: 16 | predicate: A callable that returns True when the condition is met 17 | timeout: Maximum time to wait in seconds (default: 0.2s = 20 * 0.01s) 18 | check_interval: Time to sleep between checks in seconds (default: 0.01s) 19 | error_message: Error message to raise if timeout occurs 20 | 21 | Raises: 22 | AssertionError: If the condition is not met within the timeout period 23 | 24 | Example: 25 | # Wait for circuit breaker to open 26 | wait_for_condition( 27 | lambda: cb2.state == CBState.OPEN, 28 | timeout=0.2, 29 | error_message="Timeout waiting for cb2 to open" 30 | ) 31 | 32 | # Wait for failover strategy to select a specific database 33 | wait_for_condition( 34 | lambda: client.command_executor.active_database is mock_db, 35 | timeout=0.2, 36 | error_message="Timeout waiting for active database to change" 37 | ) 38 | """ 39 | max_retries = int(timeout / check_interval) 40 | 41 | for attempt in range(max_retries): 42 | if predicate(): 43 | logging.debug(f"Condition met after {attempt} attempts") 44 | return 45 | sleep(check_interval) 46 | 47 | raise AssertionError(error_message) 48 | -------------------------------------------------------------------------------- /tests/test_asyncio/helpers.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from typing import Callable 4 | 5 | 6 | async def wait_for_condition( 7 | predicate: Callable[[], bool], 8 | timeout: float = 0.2, 9 | check_interval: float = 0.01, 10 | error_message: str = "Timeout waiting for condition", 11 | ) -> None: 12 | """ 13 | Poll a condition until it becomes True or timeout is reached. 
14 | 15 | Args: 16 | predicate: A callable that returns True when the condition is met 17 | timeout: Maximum time to wait in seconds (default: 0.2s = 20 * 0.01s) 18 | check_interval: Time to sleep between checks in seconds (default: 0.01s) 19 | error_message: Error message to raise if timeout occurs 20 | 21 | Raises: 22 | AssertionError: If the condition is not met within the timeout period 23 | 24 | Example: 25 | # Wait for circuit breaker to open 26 | await wait_for_condition( 27 | lambda: cb2.state == CBState.OPEN, 28 | timeout=0.2, 29 | error_message="Timeout waiting for cb2 to open" 30 | ) 31 | 32 | # Wait for failover strategy to select a specific database 33 | await wait_for_condition( 34 | lambda: client.command_executor.active_database is mock_db, 35 | timeout=0.2, 36 | error_message="Timeout waiting for active database to change" 37 | ) 38 | """ 39 | max_retries = int(timeout / check_interval) 40 | 41 | for attempt in range(max_retries): 42 | if predicate(): 43 | logging.debug(f"Condition met after {attempt} attempts") 44 | return 45 | await asyncio.sleep(check_interval) 46 | 47 | raise AssertionError(error_message) 48 | -------------------------------------------------------------------------------- /redis/asyncio/__init__.py: -------------------------------------------------------------------------------- 1 | from redis.asyncio.client import Redis, StrictRedis 2 | from redis.asyncio.cluster import RedisCluster 3 | from redis.asyncio.connection import ( 4 | BlockingConnectionPool, 5 | Connection, 6 | ConnectionPool, 7 | SSLConnection, 8 | UnixDomainSocketConnection, 9 | ) 10 | from redis.asyncio.sentinel import ( 11 | Sentinel, 12 | SentinelConnectionPool, 13 | SentinelManagedConnection, 14 | SentinelManagedSSLConnection, 15 | ) 16 | from redis.asyncio.utils import from_url 17 | from redis.backoff import default_backoff 18 | from redis.exceptions import ( 19 | AuthenticationError, 20 | AuthenticationWrongNumberOfArgsError, 21 | BusyLoadingError, 22 | ChildDeadlockedError, 23 | ConnectionError, 24 | DataError, 25 | InvalidResponse, 26 | OutOfMemoryError, 27 | PubSubError, 28 | ReadOnlyError, 29 | RedisError, 30 | ResponseError, 31 | TimeoutError, 32 | WatchError, 33 | ) 34 | 35 | __all__ = [ 36 | "AuthenticationError", 37 | "AuthenticationWrongNumberOfArgsError", 38 | "BlockingConnectionPool", 39 | "BusyLoadingError", 40 | "ChildDeadlockedError", 41 | "Connection", 42 | "ConnectionError", 43 | "ConnectionPool", 44 | "DataError", 45 | "from_url", 46 | "default_backoff", 47 | "InvalidResponse", 48 | "PubSubError", 49 | "OutOfMemoryError", 50 | "ReadOnlyError", 51 | "Redis", 52 | "RedisCluster", 53 | "RedisError", 54 | "ResponseError", 55 | "Sentinel", 56 | "SentinelConnectionPool", 57 | "SentinelManagedConnection", 58 | "SentinelManagedSSLConnection", 59 | "SSLConnection", 60 | "StrictRedis", 61 | "TimeoutError", 62 | "UnixDomainSocketConnection", 63 | "WatchError", 64 | ] 65 | -------------------------------------------------------------------------------- /redis/commands/search/suggestion.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from ._util import to_string 4 | 5 | 6 | class Suggestion: 7 | """ 8 | Represents a single suggestion being sent or returned from the 9 | autocomplete server 10 | """ 11 | 12 | def __init__( 13 | self, string: str, score: float = 1.0, payload: Optional[str] = None 14 | ) -> None: 15 | self.string = to_string(string) 16 | self.payload = to_string(payload) 17 | self.score = 
score 18 | 19 | def __repr__(self) -> str: 20 | return self.string 21 | 22 | 23 | class SuggestionParser: 24 | """ 25 | Internal class used to parse results from the `SUGGET` command. 26 | This needs to consume either 1, 2, or 3 values at a time from 27 | the return value depending on what objects were requested 28 | """ 29 | 30 | def __init__(self, with_scores: bool, with_payloads: bool, ret) -> None: 31 | self.with_scores = with_scores 32 | self.with_payloads = with_payloads 33 | 34 | if with_scores and with_payloads: 35 | self.sugsize = 3 36 | self._scoreidx = 1 37 | self._payloadidx = 2 38 | elif with_scores: 39 | self.sugsize = 2 40 | self._scoreidx = 1 41 | elif with_payloads: 42 | self.sugsize = 2 43 | self._payloadidx = 1 44 | else: 45 | self.sugsize = 1 46 | self._scoreidx = -1 47 | 48 | self._sugs = ret 49 | 50 | def __iter__(self): 51 | for i in range(0, len(self._sugs), self.sugsize): 52 | ss = self._sugs[i] 53 | score = float(self._sugs[i + self._scoreidx]) if self.with_scores else 1.0 54 | payload = self._sugs[i + self._payloadidx] if self.with_payloads else None 55 | yield Suggestion(ss, score, payload) 56 | -------------------------------------------------------------------------------- /doctests/dt_tdigest.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: tdigest_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for t-digest pages: 5 | https://redis.io/docs/latest/develop/data-types/probabilistic/t-digest/ 6 | """ 7 | 8 | import redis 9 | 10 | r = redis.Redis(decode_responses=True) 11 | # HIDE_END 12 | 13 | # REMOVE_START 14 | r.delete("racer_ages") 15 | r.delete("bikes:sales") 16 | # REMOVE_END 17 | 18 | # STEP_START tdig_start 19 | res1 = r.tdigest().create("bikes:sales", 100) 20 | print(res1) # >>> True 21 | 22 | res2 = r.tdigest().add("bikes:sales", [21]) 23 | print(res2) # >>> OK 24 | 25 | res3 = r.tdigest().add("bikes:sales", [150, 95, 75, 34]) 26 | print(res3) # >>> OK 27 | # STEP_END 28 | 29 | # REMOVE_START 30 | assert res1 is True 31 | # REMOVE_END 32 | 33 | # STEP_START tdig_cdf 34 | res4 = r.tdigest().create("racer_ages") 35 | print(res4) # >>> True 36 | 37 | res5 = r.tdigest().add( 38 | "racer_ages", 39 | [ 40 | 45.88, 41 | 44.2, 42 | 58.03, 43 | 19.76, 44 | 39.84, 45 | 69.28, 46 | 50.97, 47 | 25.41, 48 | 19.27, 49 | 85.71, 50 | 42.63, 51 | ], 52 | ) 53 | print(res5) # >>> OK 54 | 55 | res6 = r.tdigest().rank("racer_ages", 50) 56 | print(res6) # >>> [7] 57 | 58 | res7 = r.tdigest().rank("racer_ages", 50, 40) 59 | print(res7) # >>> [7, 4] 60 | # STEP_END 61 | 62 | # STEP_START tdig_quant 63 | res8 = r.tdigest().quantile("racer_ages", 0.5) 64 | print(res8) # >>> [44.2] 65 | 66 | res9 = r.tdigest().byrank("racer_ages", 4) 67 | print(res9) # >>> [42.63] 68 | # STEP_END 69 | 70 | # STEP_START tdig_min 71 | res10 = r.tdigest().min("racer_ages") 72 | print(res10) # >>> 19.27 73 | 74 | res11 = r.tdigest().max("racer_ages") 75 | print(res11) # >>> 85.71 76 | # STEP_END 77 | 78 | # STEP_START tdig_reset 79 | res12 = r.tdigest().reset("racer_ages") 80 | print(res12) # >>> OK 81 | # STEP_END 82 | -------------------------------------------------------------------------------- /redis/_parsers/encoders.py: -------------------------------------------------------------------------------- 1 | from ..exceptions import DataError 2 | 3 | 4 | class Encoder: 5 | "Encode strings to bytes-like and decode bytes-like to strings" 6 | 7 | __slots__ = "encoding", "encoding_errors", "decode_responses" 8 | 9 | def __init__(self, encoding, 
encoding_errors, decode_responses): 10 | self.encoding = encoding 11 | self.encoding_errors = encoding_errors 12 | self.decode_responses = decode_responses 13 | 14 | def encode(self, value): 15 | "Return a bytestring or bytes-like representation of the value" 16 | if isinstance(value, (bytes, memoryview)): 17 | return value 18 | elif isinstance(value, bool): 19 | # special case bool since it is a subclass of int 20 | raise DataError( 21 | "Invalid input of type: 'bool'. Convert to a " 22 | "bytes, string, int or float first." 23 | ) 24 | elif isinstance(value, (int, float)): 25 | value = repr(value).encode() 26 | elif not isinstance(value, str): 27 | # a value we don't know how to deal with. throw an error 28 | typename = type(value).__name__ 29 | raise DataError( 30 | f"Invalid input of type: '{typename}'. " 31 | f"Convert to a bytes, string, int or float first." 32 | ) 33 | if isinstance(value, str): 34 | value = value.encode(self.encoding, self.encoding_errors) 35 | return value 36 | 37 | def decode(self, value, force=False): 38 | "Return a unicode string from the bytes-like representation" 39 | if self.decode_responses or force: 40 | if isinstance(value, memoryview): 41 | value = value.tobytes() 42 | if isinstance(value, bytes): 43 | value = value.decode(self.encoding, self.encoding_errors) 44 | return value 45 | -------------------------------------------------------------------------------- /tests/test_driver_info.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from redis.driver_info import DriverInfo 4 | from redis.utils import get_lib_version 5 | 6 | 7 | def test_driver_info_default_name_no_upstream(): 8 | info = DriverInfo() 9 | assert info.formatted_name == "redis-py" 10 | assert info.upstream_drivers == [] 11 | assert info.lib_version == get_lib_version() 12 | 13 | 14 | def test_driver_info_custom_lib_version(): 15 | info = DriverInfo(lib_version="5.0.0") 16 | assert info.lib_version == "5.0.0" 17 | assert info.formatted_name == "redis-py" 18 | 19 | 20 | def test_driver_info_single_upstream(): 21 | info = DriverInfo().add_upstream_driver("django-redis", "5.4.0") 22 | assert info.formatted_name == "redis-py(django-redis_v5.4.0)" 23 | 24 | 25 | def test_driver_info_multiple_upstreams_latest_first(): 26 | info = DriverInfo() 27 | info.add_upstream_driver("django-redis", "5.4.0") 28 | info.add_upstream_driver("celery", "5.4.1") 29 | assert info.formatted_name == "redis-py(celery_v5.4.1;django-redis_v5.4.0)" 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "name", 34 | [ 35 | "DjangoRedis", # must start with lowercase 36 | "django redis", # spaces not allowed 37 | "django{redis}", # braces not allowed 38 | "django:redis", # ':' not allowed by validation regex 39 | ], 40 | ) 41 | def test_driver_info_invalid_name(name): 42 | info = DriverInfo() 43 | with pytest.raises(ValueError): 44 | info.add_upstream_driver(name, "3.2.0") 45 | 46 | 47 | @pytest.mark.parametrize( 48 | "version", 49 | [ 50 | "3.2.0 beta", # space not allowed 51 | "3.2.0)", # brace not allowed 52 | "3.2.0\n", # newline not allowed 53 | ], 54 | ) 55 | def test_driver_info_invalid_version(version): 56 | info = DriverInfo() 57 | with pytest.raises(ValueError): 58 | info.add_upstream_driver("django-redis", version) 59 | -------------------------------------------------------------------------------- /tests/test_multidb/test_circuit.py: -------------------------------------------------------------------------------- 1 | import pybreaker 2 | import pytest 3 | 4 | 
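The encode/decode contract above, in miniature; a sketch assuming UTF-8 defaults and `decode_responses=True`:

```python
# Round-tripping values through the Encoder defined above.
from redis._parsers import Encoder

enc = Encoder(encoding="utf-8", encoding_errors="strict", decode_responses=True)
assert enc.encode("héllo") == b"h\xc3\xa9llo"  # str -> bytes via utf-8
assert enc.encode(15) == b"15"                 # ints are repr()-encoded
assert enc.encode(3.5) == b"3.5"               # floats likewise
assert enc.decode(b"h\xc3\xa9llo") == "héllo"  # decoded since decode_responses
```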
from redis.multidb.circuit import ( 5 | PBCircuitBreakerAdapter, 6 | State as CbState, 7 | CircuitBreaker, 8 | ) 9 | 10 | 11 | @pytest.mark.onlynoncluster 12 | class TestPBCircuitBreaker: 13 | @pytest.mark.parametrize( 14 | "mock_db", 15 | [ 16 | {"weight": 0.7, "circuit": {"state": CbState.CLOSED}}, 17 | ], 18 | indirect=True, 19 | ) 20 | def test_cb_correctly_configured(self, mock_db): 21 | pb_circuit = pybreaker.CircuitBreaker(reset_timeout=5) 22 | adapter = PBCircuitBreakerAdapter(cb=pb_circuit) 23 | assert adapter.state == CbState.CLOSED 24 | 25 | adapter.state = CbState.OPEN 26 | assert adapter.state == CbState.OPEN 27 | 28 | adapter.state = CbState.HALF_OPEN 29 | assert adapter.state == CbState.HALF_OPEN 30 | 31 | adapter.state = CbState.CLOSED 32 | assert adapter.state == CbState.CLOSED 33 | 34 | assert adapter.grace_period == 5 35 | adapter.grace_period = 10 36 | 37 | assert adapter.grace_period == 10 38 | 39 | adapter.database = mock_db 40 | assert adapter.database == mock_db 41 | 42 | def test_cb_executes_callback_on_state_changed(self): 43 | pb_circuit = pybreaker.CircuitBreaker(reset_timeout=5) 44 | adapter = PBCircuitBreakerAdapter(cb=pb_circuit) 45 | called_count = 0 46 | 47 | def callback(cb: CircuitBreaker, old_state: CbState, new_state: CbState): 48 | nonlocal called_count 49 | assert old_state == CbState.CLOSED 50 | assert new_state == CbState.HALF_OPEN 51 | assert isinstance(cb, PBCircuitBreakerAdapter) 52 | called_count += 1 53 | 54 | adapter.on_state_changed(callback) 55 | adapter.state = CbState.HALF_OPEN 56 | 57 | assert called_count == 1 58 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/config/alertmanager.yml: -------------------------------------------------------------------------------- 1 | # See https://prometheus.io/docs/alerting/latest/configuration/ for details. 2 | 3 | global: 4 | # The smarthost and SMTP sender used for mail notifications. 5 | smtp_smarthost: "mailhog:1025" 6 | smtp_from: "alertmanager@example.com" 7 | smtp_require_tls: false 8 | 9 | receivers: 10 | - name: "team-X" 11 | email_configs: 12 | - to: "some-receiver@example.com" 13 | send_resolved: true 14 | 15 | # The root route on which each incoming alert enters. 16 | route: 17 | # The labels by which incoming alerts are grouped together. For example, 18 | # multiple alerts coming in for cluster=A and alertname=LatencyHigh would 19 | # be batched into a single group. 20 | group_by: ["alertname", "cluster", "service"] 21 | 22 | # When a new group of alerts is created by an incoming alert, wait at 23 | # least 'group_wait' to send the initial notification. 24 | # This ensures that multiple alerts for the same group that start 25 | # firing shortly after one another are batched together on the first 26 | # notification. 27 | group_wait: 30s 28 | 29 | # When the first notification was sent, wait 'group_interval' to send a batch 30 | # of new alerts that started firing for that group. 31 | group_interval: 5m 32 | 33 | # If an alert has successfully been sent, wait 'repeat_interval' to 34 | # resend them. 35 | repeat_interval: 3h 36 | 37 | # A default receiver 38 | receiver: team-X 39 | 40 | # All the above attributes are inherited by all child routes and can 41 | # be overwritten on each. 42 | 43 | # The child route trees. 44 | routes: 45 | # This route matches error alerts created from spans or logs. 
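Outside the test suite, the state-change hook exercised above can be used for simple observability; a sketch assuming the PBCircuitBreakerAdapter API shown in the tests:

```python
# Hooking circuit state transitions, mirroring the adapter API tested above.
import pybreaker

from redis.multidb.circuit import PBCircuitBreakerAdapter, State as CbState

adapter = PBCircuitBreakerAdapter(cb=pybreaker.CircuitBreaker(reset_timeout=5))
adapter.on_state_changed(
    lambda cb, old_state, new_state: print(f"circuit: {old_state} -> {new_state}")
)
adapter.state = CbState.HALF_OPEN  # fires the callback with CLOSED -> HALF_OPEN
```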
46 | - matchers: 47 | - alert_kind="error" 48 | group_interval: 24h 49 | receiver: team-X 50 | 51 | # The directory from which notification templates are read. 52 | templates: 53 | - "/etc/alertmanager/template/*.tmpl" 54 | -------------------------------------------------------------------------------- /doctests/cmds_hash.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_hash 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START hset 9 | res1 = r.hset("myhash", "field1", "Hello") 10 | print(res1) 11 | # >>> 1 12 | 13 | res2 = r.hget("myhash", "field1") 14 | print(res2) 15 | # >>> Hello 16 | 17 | res3 = r.hset("myhash", mapping={"field2": "Hi", "field3": "World"}) 18 | print(res3) 19 | # >>> 2 20 | 21 | res4 = r.hget("myhash", "field2") 22 | print(res4) 23 | # >>> Hi 24 | 25 | res5 = r.hget("myhash", "field3") 26 | print(res5) 27 | # >>> World 28 | 29 | res6 = r.hgetall("myhash") 30 | print(res6) 31 | # >>> { "field1": "Hello", "field2": "Hi", "field3": "World" } 32 | 33 | # REMOVE_START 34 | assert res1 == 1 35 | assert res2 == "Hello" 36 | assert res3 == 2 37 | assert res4 == "Hi" 38 | assert res5 == "World" 39 | assert res6 == { "field1": "Hello", "field2": "Hi", "field3": "World" } 40 | r.delete("myhash") 41 | # REMOVE_END 42 | # STEP_END 43 | 44 | # STEP_START hget 45 | res7 = r.hset("myhash", "field1", "foo") 46 | print(res7) 47 | # >>> 1 48 | 49 | res8 = r.hget("myhash", "field1") 50 | print(res8) 51 | # >>> foo 52 | 53 | res9 = r.hget("myhash", "field2") 54 | print(res9) 55 | # >>> None 56 | 57 | # REMOVE_START 58 | assert res7 == 1 59 | assert res8 == "foo" 60 | assert res9 is None 61 | r.delete("myhash") 62 | # REMOVE_END 63 | # STEP_END 64 | 65 | # STEP_START hgetall 66 | res10 = r.hset("myhash", mapping={"field1": "Hello", "field2": "World"}) 67 | 68 | res11 = r.hgetall("myhash") 69 | print(res11) # >>> { "field1": "Hello", "field2": "World" } 70 | 71 | # REMOVE_START 72 | assert res11 == { "field1": "Hello", "field2": "World" } 73 | r.delete("myhash") 74 | # REMOVE_END 75 | # STEP_END 76 | 77 | # STEP_START hvals 78 | res10 = r.hset("myhash", mapping={"field1": "Hello", "field2": "World"}) 79 | 80 | res11 = r.hvals("myhash") 81 | print(res11) # >>> [ "Hello", "World" ] 82 | 83 | # REMOVE_START 84 | assert res11 == [ "Hello", "World" ] 85 | r.delete("myhash") 86 | # REMOVE_END 87 | # STEP_END -------------------------------------------------------------------------------- /redis/credentials.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from abc import ABC, abstractmethod 3 | from typing import Any, Callable, Optional, Tuple, Union 4 | 5 | logger = logging.getLogger(__name__) 6 | 7 | 8 | class CredentialProvider: 9 | """ 10 | Credentials Provider. 11 | """ 12 | 13 | def get_credentials(self) -> Union[Tuple[str], Tuple[str, str]]: 14 | raise NotImplementedError("get_credentials must be implemented") 15 | 16 | async def get_credentials_async(self) -> Union[Tuple[str], Tuple[str, str]]: 17 | logger.warning( 18 | "This method is added for backward compatibility. " 19 | "Please override it in your implementation." 20 | ) 21 | return self.get_credentials() 22 | 23 | 24 | class StreamingCredentialProvider(CredentialProvider, ABC): 25 | """ 26 | Credential provider that streams credentials in the background. 
27 | """ 28 | 29 | @abstractmethod 30 | def on_next(self, callback: Callable[[Any], None]): 31 | """ 32 | Specifies the callback that should be invoked 33 | when the next credentials will be retrieved. 34 | 35 | :param callback: Callback with 36 | :return: 37 | """ 38 | pass 39 | 40 | @abstractmethod 41 | def on_error(self, callback: Callable[[Exception], None]): 42 | pass 43 | 44 | @abstractmethod 45 | def is_streaming(self) -> bool: 46 | pass 47 | 48 | 49 | class UsernamePasswordCredentialProvider(CredentialProvider): 50 | """ 51 | Simple implementation of CredentialProvider that just wraps static 52 | username and password. 53 | """ 54 | 55 | def __init__(self, username: Optional[str] = None, password: Optional[str] = None): 56 | self.username = username or "" 57 | self.password = password or "" 58 | 59 | def get_credentials(self): 60 | if self.username: 61 | return self.username, self.password 62 | return (self.password,) 63 | 64 | async def get_credentials_async(self) -> Union[Tuple[str], Tuple[str, str]]: 65 | return self.get_credentials() 66 | -------------------------------------------------------------------------------- /redis/asyncio/retry.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep 2 | from typing import TYPE_CHECKING, Any, Awaitable, Callable, Tuple, Type, TypeVar 3 | 4 | from redis.exceptions import ConnectionError, RedisError, TimeoutError 5 | from redis.retry import AbstractRetry 6 | 7 | T = TypeVar("T") 8 | 9 | if TYPE_CHECKING: 10 | from redis.backoff import AbstractBackoff 11 | 12 | 13 | class Retry(AbstractRetry[RedisError]): 14 | __hash__ = AbstractRetry.__hash__ 15 | 16 | def __init__( 17 | self, 18 | backoff: "AbstractBackoff", 19 | retries: int, 20 | supported_errors: Tuple[Type[RedisError], ...] = ( 21 | ConnectionError, 22 | TimeoutError, 23 | ), 24 | ): 25 | super().__init__(backoff, retries, supported_errors) 26 | 27 | def __eq__(self, other: Any) -> bool: 28 | if not isinstance(other, Retry): 29 | return NotImplemented 30 | 31 | return ( 32 | self._backoff == other._backoff 33 | and self._retries == other._retries 34 | and set(self._supported_errors) == set(other._supported_errors) 35 | ) 36 | 37 | async def call_with_retry( 38 | self, do: Callable[[], Awaitable[T]], fail: Callable[[RedisError], Any] 39 | ) -> T: 40 | """ 41 | Execute an operation that might fail and returns its result, or 42 | raise the exception that was thrown depending on the `Backoff` object. 43 | `do`: the operation to call. Expects no argument. 
44 | `fail`: the failure handler, expects the last error that was thrown 45 | """ 46 | self._backoff.reset() 47 | failures = 0 48 | while True: 49 | try: 50 | return await do() 51 | except self._supported_errors as error: 52 | failures += 1 53 | await fail(error) 54 | if self._retries >= 0 and failures > self._retries: 55 | raise error 56 | backoff = self._backoff.compute(failures) 57 | if backoff > 0: 58 | await sleep(backoff) 59 | -------------------------------------------------------------------------------- /docs/examples/opentelemetry/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | 3 | services: 4 | clickhouse: 5 | image: clickhouse/clickhouse-server:22.7 6 | restart: on-failure 7 | environment: 8 | CLICKHOUSE_DB: uptrace 9 | healthcheck: 10 | test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"] 11 | interval: 1s 12 | timeout: 1s 13 | retries: 30 14 | volumes: 15 | - ch_data:/var/lib/clickhouse 16 | ports: 17 | - "8123:8123" 18 | - "9000:9000" 19 | 20 | uptrace: 21 | image: "uptrace/uptrace:1.2.0" 22 | #image: 'uptrace/uptrace-dev:latest' 23 | restart: on-failure 24 | volumes: 25 | - uptrace_data:/var/lib/uptrace 26 | - ./uptrace.yml:/etc/uptrace/uptrace.yml 27 | #environment: 28 | # - DEBUG=2 29 | ports: 30 | - "14317:14317" 31 | - "14318:14318" 32 | depends_on: 33 | clickhouse: 34 | condition: service_healthy 35 | 36 | otel-collector: 37 | image: otel/opentelemetry-collector-contrib:0.58.0 38 | restart: on-failure 39 | volumes: 40 | - ./config/otel-collector.yaml:/etc/otelcol-contrib/config.yaml 41 | ports: 42 | - "4317:4317" 43 | - "4318:4318" 44 | 45 | vector: 46 | image: timberio/vector:0.24.X-alpine 47 | volumes: 48 | - ./config/vector.toml:/etc/vector/vector.toml:ro 49 | 50 | alertmanager: 51 | image: prom/alertmanager:v0.24.0 52 | restart: on-failure 53 | volumes: 54 | - ./config/alertmanager.yml:/etc/alertmanager/config.yml 55 | - alertmanager_data:/alertmanager 56 | ports: 57 | - 9093:9093 58 | command: 59 | - "--config.file=/etc/alertmanager/config.yml" 60 | - "--storage.path=/alertmanager" 61 | 62 | mailhog: 63 | image: mailhog/mailhog:v1.0.1 64 | restart: on-failure 65 | ports: 66 | - "8025:8025" 67 | 68 | redis-server: 69 | image: redis 70 | ports: 71 | - "6379:6379" 72 | redis-cli: 73 | image: redis 74 | 75 | volumes: 76 | uptrace_data: 77 | driver: local 78 | ch_data: 79 | driver: local 80 | alertmanager_data: 81 | driver: local 82 | -------------------------------------------------------------------------------- /redis/asyncio/multidb/database.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | from typing import Optional, Union 3 | 4 | from redis.asyncio import Redis, RedisCluster 5 | from redis.data_structure import WeightedList 6 | from redis.multidb.circuit import CircuitBreaker 7 | from redis.multidb.database import AbstractDatabase, BaseDatabase 8 | from redis.typing import Number 9 | 10 | 11 | class AsyncDatabase(AbstractDatabase): 12 | """Database with an underlying asynchronous redis client.""" 13 | 14 | @property 15 | @abstractmethod 16 | def client(self) -> Union[Redis, RedisCluster]: 17 | """The underlying redis client.""" 18 | pass 19 | 20 | @client.setter 21 | @abstractmethod 22 | def client(self, client: Union[Redis, RedisCluster]): 23 | """Set the underlying redis client.""" 24 | pass 25 | 26 | @property 27 | @abstractmethod 28 | def circuit(self) -> CircuitBreaker: 29 | """Circuit breaker for the 
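A minimal sketch of driving the async `call_with_retry` above; the flaky coroutine and its failure handler are hypothetical, and `NoBackoff` keeps the retries immediate:

```python
# Exercising the async retry loop defined above with a flaky operation.
import asyncio

from redis.asyncio.retry import Retry
from redis.backoff import NoBackoff
from redis.exceptions import ConnectionError


async def main():
    attempts = 0

    async def flaky():
        nonlocal attempts
        attempts += 1
        if attempts <= 2:
            raise ConnectionError("transient failure")  # supported error type
        return "ok"

    async def on_fail(error):
        pass  # e.g. disconnect the connection before the next attempt

    retry = Retry(NoBackoff(), retries=3)
    assert await retry.call_with_retry(flaky, on_fail) == "ok"
    assert attempts == 3  # two failures, then success


asyncio.run(main())
```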
current database.""" 30 | pass 31 | 32 | @circuit.setter 33 | @abstractmethod 34 | def circuit(self, circuit: CircuitBreaker): 35 | """Set the circuit breaker for the current database.""" 36 | pass 37 | 38 | 39 | Databases = WeightedList[tuple[AsyncDatabase, Number]] 40 | 41 | 42 | class Database(BaseDatabase, AsyncDatabase): 43 | def __init__( 44 | self, 45 | client: Union[Redis, RedisCluster], 46 | circuit: CircuitBreaker, 47 | weight: float, 48 | health_check_url: Optional[str] = None, 49 | ): 50 | self._client = client 51 | self._cb = circuit 52 | self._cb.database = self 53 | super().__init__(weight, health_check_url) 54 | 55 | @property 56 | def client(self) -> Union[Redis, RedisCluster]: 57 | return self._client 58 | 59 | @client.setter 60 | def client(self, client: Union[Redis, RedisCluster]): 61 | self._client = client 62 | 63 | @property 64 | def circuit(self) -> CircuitBreaker: 65 | return self._cb 66 | 67 | @circuit.setter 68 | def circuit(self, circuit: CircuitBreaker): 69 | self._cb = circuit 70 | -------------------------------------------------------------------------------- /doctests/cmds_sorted_set.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: cmds_sorted_set 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START zadd 9 | res = r.zadd("myzset", {"one": 1}) 10 | print(res) 11 | # >>> 1 12 | # REMOVE_START 13 | assert res == 1 14 | # REMOVE_END 15 | 16 | res = r.zadd("myzset", {"uno": 1}) 17 | print(res) 18 | # >>> 1 19 | # REMOVE_START 20 | assert res == 1 21 | # REMOVE_END 22 | 23 | res = r.zadd("myzset", {"two": 2, "three": 3}) 24 | print(res) 25 | # >>> 2 26 | # REMOVE_START 27 | assert res == 2 28 | # REMOVE_END 29 | 30 | res = r.zrange("myzset", 0, -1, withscores=True) 31 | # >>> [('one', 1.0), ('uno', 1.0), ('two', 2.0), ('three', 3.0)] 32 | # REMOVE_START 33 | assert res == [('one', 1.0), ('uno', 1.0), ('two', 2.0), ('three', 3.0)] 34 | # REMOVE_END 35 | 36 | # REMOVE_START 37 | r.delete("myzset") 38 | # REMOVE_END 39 | # STEP_END 40 | 41 | # STEP_START zrange1 42 | res = r.zadd("myzset", {"one": 1, "two":2, "three":3}) 43 | print(res) 44 | # >>> 3 45 | 46 | res = r.zrange("myzset", 0, -1) 47 | print(res) 48 | # >>> ['one', 'two', 'three'] 49 | # REMOVE_START 50 | assert res == ['one', 'two', 'three'] 51 | # REMOVE_END 52 | 53 | res = r.zrange("myzset", 2, 3) 54 | print(res) 55 | # >>> ['three'] 56 | # REMOVE_START 57 | assert res == ['three'] 58 | # REMOVE_END 59 | 60 | res = r.zrange("myzset", -2, -1) 61 | print(res) 62 | # >>> ['two', 'three'] 63 | # REMOVE_START 64 | assert res == ['two', 'three'] 65 | r.delete("myzset") 66 | # REMOVE_END 67 | # STEP_END 68 | 69 | # STEP_START zrange2 70 | res = r.zadd("myzset", {"one": 1, "two":2, "three":3}) 71 | res = r.zrange("myzset", 0, 1, withscores=True) 72 | print(res) 73 | # >>> [('one', 1.0), ('two', 2.0)] 74 | # REMOVE_START 75 | assert res == [('one', 1.0), ('two', 2.0)] 76 | r.delete("myzset") 77 | # REMOVE_END 78 | # STEP_END 79 | 80 | # STEP_START zrange3 81 | res = r.zadd("myzset", {"one": 1, "two":2, "three":3}) 82 | res = r.zrange("myzset", 2, 3, byscore=True, offset=1, num=1) 83 | print(res) 84 | # >>> ['three'] 85 | # REMOVE_START 86 | assert res == ['three'] 87 | r.delete("myzset") 88 | # REMOVE_END 89 | # STEP_END 90 | -------------------------------------------------------------------------------- /redis/typing.py: 
-------------------------------------------------------------------------------- 1 | # from __future__ import annotations 2 | 3 | from datetime import datetime, timedelta 4 | from typing import ( 5 | TYPE_CHECKING, 6 | Any, 7 | Awaitable, 8 | Iterable, 9 | Mapping, 10 | Protocol, 11 | Type, 12 | TypeVar, 13 | Union, 14 | ) 15 | 16 | if TYPE_CHECKING: 17 | from redis._parsers import Encoder 18 | 19 | 20 | Number = Union[int, float] 21 | EncodedT = Union[bytes, bytearray, memoryview] 22 | DecodedT = Union[str, int, float] 23 | EncodableT = Union[EncodedT, DecodedT] 24 | AbsExpiryT = Union[int, datetime] 25 | ExpiryT = Union[int, timedelta] 26 | ZScoreBoundT = Union[float, str] # str allows for the [ or ( prefix 27 | BitfieldOffsetT = Union[int, str] # str allows for #x syntax 28 | _StringLikeT = Union[bytes, str, memoryview] 29 | KeyT = _StringLikeT # Main redis key space 30 | PatternT = _StringLikeT # Patterns matched against keys, fields etc 31 | FieldT = EncodableT # Fields within hash tables, streams and geo commands 32 | KeysT = Union[KeyT, Iterable[KeyT]] 33 | ResponseT = Union[Awaitable[Any], Any] 34 | ChannelT = _StringLikeT 35 | GroupT = _StringLikeT # Consumer group 36 | ConsumerT = _StringLikeT # Consumer name 37 | StreamIdT = Union[int, _StringLikeT] 38 | ScriptTextT = _StringLikeT 39 | TimeoutSecT = Union[int, float, _StringLikeT] 40 | # Mapping is not covariant in the key type, which prevents 41 | # Mapping[_StringLikeT, X] from accepting arguments of type Dict[str, X]. Using 42 | # a TypeVar instead of a Union allows mappings with any of the permitted types 43 | # to be passed. Care is needed if there is more than one such mapping in a 44 | # type signature because they will all be required to be the same key type. 45 | AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview) 46 | AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview) 47 | AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview) 48 | 49 | ExceptionMappingT = Mapping[str, Union[Type[Exception], Mapping[str, Type[Exception]]]] 50 | 51 | 52 | class CommandsProtocol(Protocol): 53 | def execute_command(self, *args, **options) -> ResponseT: ... 
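As a sketch of how these aliases are meant to appear in signatures; the function itself is hypothetical:

```python
# Hypothetical signature built from the aliases defined above.
from typing import Optional

from redis.typing import EncodableT, ExpiryT, KeyT, ResponseT


def set_with_expiry(key: KeyT, value: EncodableT, ex: Optional[ExpiryT] = None) -> ResponseT:
    """Accepts str/bytes/memoryview keys and str/bytes/int/float values."""
    ...
```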
54 | 55 | 56 | class ClusterCommandsProtocol(CommandsProtocol): 57 | encoder: "Encoder" 58 | -------------------------------------------------------------------------------- /doctests/query_geo.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_geo 2 | # HIDE_START 3 | import json 4 | import sys 5 | import redis 6 | from redis.commands.json.path import Path 7 | from redis.commands.search.field import GeoField, GeoShapeField 8 | from redis.commands.search.index_definition import IndexDefinition, IndexType 9 | from redis.commands.search.query import Query 10 | 11 | r = redis.Redis(decode_responses=True) 12 | 13 | # create index 14 | schema = ( 15 | GeoField("$.store_location", as_name="store_location"), 16 | GeoShapeField("$.pickup_zone", coord_system=GeoShapeField.FLAT, as_name="pickup_zone") 17 | ) 18 | 19 | index = r.ft("idx:bicycle") 20 | index.create_index( 21 | schema, 22 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 23 | ) 24 | 25 | # load data 26 | with open("data/query_em.json") as f: 27 | bicycles = json.load(f) 28 | 29 | pipeline = r.pipeline(transaction=False) 30 | for bid, bicycle in enumerate(bicycles): 31 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 32 | pipeline.execute() 33 | # HIDE_END 34 | 35 | # STEP_START geo1 36 | params_dict = {"lon": -0.1778, "lat": 51.5524, "radius": 20, "units": "mi"} 37 | q = Query("@store_location:[$lon $lat $radius $units]").dialect(2) 38 | res = index.search(q, query_params=params_dict) 39 | print(res) 40 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:5', ... 41 | # REMOVE_START 42 | assert res.total == 1 43 | # REMOVE_END 44 | # STEP_END 45 | 46 | # STEP_START geo2 47 | params_dict = {"bike": "POINT(-0.1278 51.5074)"} 48 | q = Query("@pickup_zone:[CONTAINS $bike]").dialect(3) 49 | res = index.search(q, query_params=params_dict) 50 | print(res.total) # >>> 1 51 | # REMOVE_START 52 | assert res.total == 1 53 | # REMOVE_END 54 | # STEP_END 55 | 56 | # STEP_START geo3 57 | params_dict = {"europe": "POLYGON((-25 35, 40 35, 40 70, -25 70, -25 35))"} 58 | q = Query("@pickup_zone:[WITHIN $europe]").dialect(3) 59 | res = index.search(q, query_params=params_dict) 60 | print(res.total) # >>> 5 61 | # REMOVE_START 62 | assert res.total == 5 63 | # REMOVE_END 64 | # STEP_END 65 | 66 | # REMOVE_START 67 | # destroy index and data 68 | r.ft("idx:bicycle").dropindex(delete_documents=True) 69 | # REMOVE_END 70 | -------------------------------------------------------------------------------- /doctests/query_range.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_range 2 | # HIDE_START 3 | import json 4 | import sys 5 | import redis 6 | from redis.commands.json.path import Path 7 | from redis.commands.search.field import TextField, NumericField, TagField 8 | from redis.commands.search.index_definition import IndexDefinition, IndexType 9 | from redis.commands.search.query import NumericFilter, Query 10 | 11 | r = redis.Redis(decode_responses=True) 12 | 13 | # create index 14 | schema = ( 15 | TextField("$.description", as_name="description"), 16 | NumericField("$.price", as_name="price"), 17 | TagField("$.condition", as_name="condition"), 18 | ) 19 | 20 | index = r.ft("idx:bicycle") 21 | index.create_index( 22 | schema, 23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 24 | ) 25 | 26 | # load data 27 | with open("data/query_em.json") as f: 28 | bicycles = 
json.load(f) 29 | 30 | pipeline = r.pipeline(transaction=False) 31 | for bid, bicycle in enumerate(bicycles): 32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 33 | pipeline.execute() 34 | # HIDE_END 35 | 36 | # STEP_START range1 37 | res = index.search(Query("@price:[500 1000]")) 38 | print(res.total) 39 | # >>> 3 40 | # REMOVE_START 41 | assert res.total == 3 42 | # REMOVE_END 43 | # STEP_END 44 | 45 | # STEP_START range2 46 | query = Query("*").add_filter(NumericFilter("price", 500, 1000)) 47 | res = index.search(query) 48 | print(res.total) 49 | # >>> 3 50 | # REMOVE_START 51 | assert res.total == 3 52 | # REMOVE_END 53 | # STEP_END 54 | 55 | # STEP_START range3 56 | query = Query("*").add_filter(NumericFilter("price", "(1000", "+inf")) 57 | res = index.search(query) 58 | print(res.total) 59 | # >>> 5 60 | # REMOVE_START 61 | assert res.total == 5 62 | # REMOVE_END 63 | # STEP_END 64 | 65 | # STEP_START range4 66 | query = Query('@price:[-inf 2000]').sort_by('price').paging(0, 5) 67 | res = index.search(query) 68 | print(res.total) 69 | print(res) 70 | # >>> Result{7 total, docs: [Document {'id': 'bicycle:0', ... }, Document {'id': 'bicycle:7', ... }, Document {'id': 'bicycle:5', ... }, ...] 71 | # REMOVE_START 72 | assert res.total == 7 73 | # REMOVE_END 74 | # STEP_END 75 | 76 | # REMOVE_START 77 | # destroy index and data 78 | r.ft("idx:bicycle").dropindex(delete_documents=True) 79 | # REMOVE_END 80 | -------------------------------------------------------------------------------- /tests/test_event.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock, AsyncMock 2 | 3 | from redis.event import ( 4 | EventListenerInterface, 5 | EventDispatcher, 6 | AsyncEventListenerInterface, 7 | ) 8 | 9 | 10 | class TestEventDispatcher: 11 | def test_register_listeners(self): 12 | mock_event = Mock(spec=object) 13 | mock_event_listener = Mock(spec=EventListenerInterface) 14 | listener_called = 0 15 | 16 | def callback(event): 17 | nonlocal listener_called 18 | listener_called += 1 19 | 20 | mock_event_listener.listen = callback 21 | 22 | # Register via constructor 23 | dispatcher = EventDispatcher( 24 | event_listeners={type(mock_event): [mock_event_listener]} 25 | ) 26 | dispatcher.dispatch(mock_event) 27 | 28 | assert listener_called == 1 29 | 30 | # Register additional listener for the same event 31 | mock_another_event_listener = Mock(spec=EventListenerInterface) 32 | mock_another_event_listener.listen = callback 33 | dispatcher.register_listeners( 34 | mappings={type(mock_event): [mock_another_event_listener]} 35 | ) 36 | dispatcher.dispatch(mock_event) 37 | 38 | assert listener_called == 3 39 | 40 | async def test_register_listeners_async(self): 41 | mock_event = Mock(spec=object) 42 | mock_event_listener = AsyncMock(spec=AsyncEventListenerInterface) 43 | listener_called = 0 44 | 45 | async def callback(event): 46 | nonlocal listener_called 47 | listener_called += 1 48 | 49 | mock_event_listener.listen = callback 50 | 51 | # Register via constructor 52 | dispatcher = EventDispatcher( 53 | event_listeners={type(mock_event): [mock_event_listener]} 54 | ) 55 | await dispatcher.dispatch_async(mock_event) 56 | 57 | assert listener_called == 1 58 | 59 | # Register additional listener for the same event 60 | mock_another_event_listener = Mock(spec=AsyncEventListenerInterface) 61 | mock_another_event_listener.listen = callback 62 | dispatcher.register_listeners( 63 | mappings={type(mock_event): 
[mock_another_event_listener]} 64 | ) 65 | await dispatcher.dispatch_async(mock_event) 66 | 67 | assert listener_called == 3 68 | -------------------------------------------------------------------------------- /.github/workflows/hiredis-py-integration.yaml: -------------------------------------------------------------------------------- 1 | name: Hiredis-py integration tests 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | redis-py-branch: 7 | description: 'redis-py branch to run tests on' 8 | required: true 9 | default: 'master' 10 | hiredis-branch: 11 | description: 'hiredis-py branch to run tests on' 12 | required: true 13 | default: 'master' 14 | 15 | concurrency: 16 | group: ${{ github.event.pull_request.number || github.ref }}-hiredis-integration 17 | cancel-in-progress: true 18 | 19 | permissions: 20 | contents: read # to fetch code (actions/checkout) 21 | 22 | env: 23 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 24 | # this speeds up coverage with Python 3.12: https://github.com/nedbat/coveragepy/issues/1665 25 | COVERAGE_CORE: sysmon 26 | CURRENT_CLIENT_LIBS_TEST_STACK_IMAGE_TAG: '8.4.0' 27 | CURRENT_REDIS_VERSION: '8.4.0' 28 | 29 | jobs: 30 | redis_version: 31 | runs-on: ubuntu-latest 32 | outputs: 33 | CURRENT: ${{ env.CURRENT_REDIS_VERSION }} 34 | steps: 35 | - name: Compute outputs 36 | run: | 37 | echo "CURRENT=${{ env.CURRENT_REDIS_VERSION }}" >> $GITHUB_OUTPUT 38 | 39 | hiredis-tests: 40 | runs-on: ubuntu-latest 41 | needs: [redis_version] 42 | timeout-minutes: 60 43 | strategy: 44 | max-parallel: 15 45 | fail-fast: false 46 | matrix: 47 | redis-version: [ '${{ needs.redis_version.outputs.CURRENT }}' ] 48 | python-version: [ '3.10', '3.14'] 49 | parser-backend: [ 'hiredis' ] 50 | hiredis-version: [ 'unstable' ] 51 | event-loop: [ 'asyncio' ] 52 | env: 53 | ACTIONS_ALLOW_UNSECURE_COMMANDS: true 54 | name: Redis ${{ matrix.redis-version }}; Python ${{ matrix.python-version }}; RESP Parser:${{matrix.parser-backend}} (${{ matrix.hiredis-version }}); EL:${{matrix.event-loop}} 55 | steps: 56 | - uses: actions/checkout@v6 57 | with: 58 | ref: ${{ inputs.redis-py-branch }} 59 | - name: Run tests 60 | uses: ./.github/actions/run-tests 61 | with: 62 | python-version: ${{ matrix.python-version }} 63 | parser-backend: ${{ matrix.parser-backend }} 64 | redis-version: ${{ matrix.redis-version }} 65 | hiredis-version: ${{ matrix.hiredis-version }} 66 | hiredis-branch: ${{ inputs.hiredis-branch }} 67 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. redis-py documentation master file, created by 2 | sphinx-quickstart on Thu Jul 28 13:55:57 2011. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | redis-py - Python Client for Redis 7 | ==================================== 8 | 9 | Getting Started 10 | **************** 11 | 12 | `redis-py `_ requires a running Redis server, and Python 3.7+. See the `Redis 13 | quickstart `_ for Redis installation instructions. 14 | 15 | redis-py can be installed using pip via ``pip install redis``. 16 | 17 | 18 | Quickly connecting to redis 19 | *************************** 20 | 21 | There are two quick ways to connect to Redis. 22 | 23 | **Assuming you run Redis on localhost:6379 (the default)** 24 | 25 | .. 
code-block:: python 26 | 27 | import redis 28 | r = redis.Redis() 29 | r.ping() 30 | 31 | **Running redis on foo.bar.com, port 12345** 32 | 33 | .. code-block:: python 34 | 35 | import redis 36 | r = redis.Redis(host='foo.bar.com', port=12345) 37 | r.ping() 38 | 39 | **Another example with foo.bar.com, port 12345** 40 | 41 | .. code-block:: python 42 | 43 | import redis 44 | r = redis.from_url('redis://foo.bar.com:12345') 45 | r.ping() 46 | 47 | After that, you probably want to `run redis commands `_. 48 | 49 | .. toctree:: 50 | :hidden: 51 | 52 | genindex 53 | 54 | Redis Command Functions 55 | *********************** 56 | .. toctree:: 57 | :maxdepth: 2 58 | 59 | commands 60 | redismodules 61 | 62 | Module Documentation 63 | ******************** 64 | .. toctree:: 65 | :maxdepth: 1 66 | 67 | connections 68 | clustering 69 | multi_database 70 | exceptions 71 | backoff 72 | lock 73 | retry 74 | lua_scripting 75 | opentelemetry 76 | resp3_features 77 | advanced_features 78 | examples 79 | 80 | Contributing 81 | ************* 82 | 83 | - `How to contribute `_ 84 | - `Issue Tracker `_ 85 | - `Source Code `_ 86 | - `Release History `_ 87 | 88 | License 89 | ******* 90 | 91 | This project is licensed under the `MIT license `_. 92 | -------------------------------------------------------------------------------- /.github/wordlist.txt: -------------------------------------------------------------------------------- 1 | APM 2 | ARGV 3 | BFCommands 4 | balancer 5 | CacheImpl 6 | cancelling 7 | CAS 8 | CFCommands 9 | CMSCommands 10 | ClusterNode 11 | ClusterNodes 12 | ClusterPipeline 13 | ClusterPubSub 14 | ConnectionPool 15 | config 16 | CoreCommands 17 | DatabaseConfig 18 | DNS 19 | EchoHealthCheck 20 | EVAL 21 | EVALSHA 22 | failover 23 | FQDN 24 | Grokzen's 25 | Healthcheck 26 | HealthCheckPolicies 27 | healthcheck 28 | healthchecks 29 | INCR 30 | IOError 31 | Instrumentations 32 | JSONCommands 33 | Jaeger 34 | Ludovico 35 | Magnocavallo 36 | MultiDbConfig 37 | MultiDBClient 38 | McCurdy 39 | NOSCRIPT 40 | NoValidDatabaseException 41 | NUMPAT 42 | NUMPT 43 | NUMSUB 44 | OSS 45 | OpenCensus 46 | OpenTelemetry 47 | OpenTracing 48 | Otel 49 | PubSub 50 | READONLY 51 | RediSearch 52 | RedisBloom 53 | RedisCluster 54 | RedisClusterCommands 55 | RedisClusterException 56 | RedisClusters 57 | RedisInstrumentor 58 | RedisJSON 59 | RedisTimeSeries 60 | SHA 61 | SLA 62 | SearchCommands 63 | SentinelCommands 64 | SentinelConnectionPool 65 | Sharded 66 | Solovyov 67 | SpanKind 68 | Specfiying 69 | StatusCode 70 | TCP 71 | TemporaryUnavailableException 72 | TLS 73 | TOPKCommands 74 | TimeSeriesCommands 75 | Uptrace 76 | ValueError 77 | WATCHed 78 | WatchError 79 | api 80 | args 81 | async 82 | asyncio 83 | autoclass 84 | automodule 85 | backoff 86 | bdb 87 | behaviour 88 | bool 89 | boolean 90 | booleans 91 | bysource 92 | charset 93 | del 94 | dev 95 | docstring 96 | docstrings 97 | eg 98 | exc 99 | firsttimersonly 100 | fo 101 | genindex 102 | gmail 103 | hiredis 104 | http 105 | idx 106 | iff 107 | ini 108 | json 109 | keyslot 110 | keyspace 111 | kwarg 112 | kwargs 113 | linters 114 | localhost 115 | lua 116 | makeapullrequest 117 | maxdepth 118 | mget 119 | microservice 120 | microservices 121 | mset 122 | multikey 123 | mykey 124 | nonatomic 125 | observability 126 | opentelemetry 127 | oss 128 | performant 129 | pmessage 130 | png 131 | pre 132 | psubscribe 133 | pubsub 134 | punsubscribe 135 | py 136 | pypi 137 | quickstart 138 | readonly 139 | readwrite 140 | redis 141 | redismodules 142 | 
reinitialization 143 | replicaof 144 | repo 145 | runtime 146 | sedrik 147 | sharded 148 | ssl 149 | str 150 | stunnel 151 | subcommands 152 | thevalueofmykey 153 | timeseries 154 | toctree 155 | topk 156 | triaging 157 | txt 158 | un 159 | unicode 160 | url 161 | virtualenv 162 | www 163 | yaml 164 | -------------------------------------------------------------------------------- /redis/__init__.py: -------------------------------------------------------------------------------- 1 | from redis import asyncio # noqa 2 | from redis.backoff import default_backoff 3 | from redis.client import Redis, StrictRedis 4 | from redis.driver_info import DriverInfo 5 | from redis.cluster import RedisCluster 6 | from redis.connection import ( 7 | BlockingConnectionPool, 8 | Connection, 9 | ConnectionPool, 10 | SSLConnection, 11 | UnixDomainSocketConnection, 12 | ) 13 | from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider 14 | from redis.exceptions import ( 15 | AuthenticationError, 16 | AuthenticationWrongNumberOfArgsError, 17 | BusyLoadingError, 18 | ChildDeadlockedError, 19 | ConnectionError, 20 | CrossSlotTransactionError, 21 | DataError, 22 | InvalidPipelineStack, 23 | InvalidResponse, 24 | MaxConnectionsError, 25 | OutOfMemoryError, 26 | PubSubError, 27 | ReadOnlyError, 28 | RedisClusterException, 29 | RedisError, 30 | ResponseError, 31 | TimeoutError, 32 | WatchError, 33 | ) 34 | from redis.sentinel import ( 35 | Sentinel, 36 | SentinelConnectionPool, 37 | SentinelManagedConnection, 38 | SentinelManagedSSLConnection, 39 | ) 40 | from redis.utils import from_url 41 | 42 | 43 | def int_or_str(value): 44 | try: 45 | return int(value) 46 | except ValueError: 47 | return value 48 | 49 | 50 | __version__ = "7.0.1" 51 | 52 | VERSION = tuple(map(int_or_str, __version__.split("."))) 53 | 54 | 55 | __all__ = [ 56 | "AuthenticationError", 57 | "AuthenticationWrongNumberOfArgsError", 58 | "BlockingConnectionPool", 59 | "BusyLoadingError", 60 | "ChildDeadlockedError", 61 | "Connection", 62 | "ConnectionError", 63 | "ConnectionPool", 64 | "CredentialProvider", 65 | "CrossSlotTransactionError", 66 | "DataError", 67 | "DriverInfo", 68 | "from_url", 69 | "default_backoff", 70 | "InvalidPipelineStack", 71 | "InvalidResponse", 72 | "MaxConnectionsError", 73 | "OutOfMemoryError", 74 | "PubSubError", 75 | "ReadOnlyError", 76 | "Redis", 77 | "RedisCluster", 78 | "RedisClusterException", 79 | "RedisError", 80 | "ResponseError", 81 | "Sentinel", 82 | "SentinelConnectionPool", 83 | "SentinelManagedConnection", 84 | "SentinelManagedSSLConnection", 85 | "SSLConnection", 86 | "UsernamePasswordCredentialProvider", 87 | "StrictRedis", 88 | "TimeoutError", 89 | "UnixDomainSocketConnection", 90 | "WatchError", 91 | ] 92 | -------------------------------------------------------------------------------- /doctests/dt_hash.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: hash_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Hash doc pages: 5 | https://redis.io/docs/latest/develop/data-types/hashes/ 6 | """ 7 | import redis 8 | 9 | r = redis.Redis(decode_responses=True) 10 | # HIDE_END 11 | # STEP_START set_get_all 12 | res1 = r.hset( 13 | "bike:1", 14 | mapping={ 15 | "model": "Deimos", 16 | "brand": "Ergonom", 17 | "type": "Enduro bikes", 18 | "price": 4972, 19 | }, 20 | ) 21 | print(res1) 22 | # >>> 4 23 | 24 | res2 = r.hget("bike:1", "model") 25 | print(res2) 26 | # >>> 'Deimos' 27 | 28 | res3 = r.hget("bike:1", "price") 29 | print(res3) 30 | 
# >>> '4972' 31 | 32 | res4 = r.hgetall("bike:1") 33 | print(res4) 34 | # >>> {'model': 'Deimos', 'brand': 'Ergonom', 'type': 'Enduro bikes', 'price': '4972'} 35 | 36 | # STEP_END 37 | 38 | # REMOVE_START 39 | assert res1 == 4 40 | assert res2 == "Deimos" 41 | assert res3 == "4972" 42 | assert res4 == { 43 | "model": "Deimos", 44 | "brand": "Ergonom", 45 | "type": "Enduro bikes", 46 | "price": "4972", 47 | } 48 | # REMOVE_END 49 | 50 | # STEP_START hmget 51 | res5 = r.hmget("bike:1", ["model", "price"]) 52 | print(res5) 53 | # >>> ['Deimos', '4972'] 54 | # STEP_END 55 | 56 | # REMOVE_START 57 | assert res5 == ["Deimos", "4972"] 58 | # REMOVE_END 59 | 60 | # STEP_START hincrby 61 | res6 = r.hincrby("bike:1", "price", 100) 62 | print(res6) 63 | # >>> 5072 64 | res7 = r.hincrby("bike:1", "price", -100) 65 | print(res7) 66 | # >>> 4972 67 | # STEP_END 68 | 69 | # REMOVE_START 70 | assert res6 == 5072 71 | assert res7 == 4972 72 | # REMOVE_END 73 | 74 | 75 | # STEP_START incrby_get_mget 76 | res11 = r.hincrby("bike:1:stats", "rides", 1) 77 | print(res11) 78 | # >>> 1 79 | res12 = r.hincrby("bike:1:stats", "rides", 1) 80 | print(res12) 81 | # >>> 2 82 | res13 = r.hincrby("bike:1:stats", "rides", 1) 83 | print(res13) 84 | # >>> 3 85 | res14 = r.hincrby("bike:1:stats", "crashes", 1) 86 | print(res14) 87 | # >>> 1 88 | res15 = r.hincrby("bike:1:stats", "owners", 1) 89 | print(res15) 90 | # >>> 1 91 | res16 = r.hget("bike:1:stats", "rides") 92 | print(res16) 93 | # >>> 3 94 | res17 = r.hmget("bike:1:stats", ["crashes", "owners"]) 95 | print(res17) 96 | # >>> ['1', '1'] 97 | # STEP_END 98 | 99 | # REMOVE_START 100 | assert res11 == 1 101 | assert res12 == 2 102 | assert res13 == 3 103 | assert res14 == 1 104 | assert res15 == 1 105 | assert res16 == "3" 106 | assert res17 == ["1", "1"] 107 | # REMOVE_END 108 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ master ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ master ] 20 | 21 | jobs: 22 | analyze: 23 | name: Analyze 24 | runs-on: ubuntu-latest 25 | permissions: 26 | actions: read 27 | contents: read 28 | security-events: write 29 | 30 | strategy: 31 | fail-fast: false 32 | matrix: 33 | language: [ 'python' ] 34 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 35 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v6 40 | 41 | # Initializes the CodeQL tools for scanning. 42 | - name: Initialize CodeQL 43 | uses: github/codeql-action/init@v4 44 | with: 45 | languages: ${{ matrix.language }} 46 | # If you wish to specify custom queries, you can do so here or in a config file. 
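        # For instance, a hypothetical configuration that additionally runs
        # one of CodeQL's built-in suites would be:
        #   queries: +security-extended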
47 | # By default, queries listed here will override any specified in a config file. 48 | # Prefix the list here with "+" to use these queries and those in the config file. 49 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 50 | 51 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 52 | # If this step fails, then you should remove it and run the build manually (see below) 53 | - name: Autobuild 54 | uses: github/codeql-action/autobuild@v4 55 | 56 | # ℹ️ Command-line programs to run using the OS shell. 57 | # 📚 https://git.io/JvXDl 58 | 59 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 60 | # and modify them (or add more) to build your code if your project 61 | # uses a compiled language 62 | 63 | #- run: | 64 | # make bootstrap 65 | # make release 66 | 67 | - name: Perform CodeQL Analysis 68 | uses: github/codeql-action/analyze@v4 69 | -------------------------------------------------------------------------------- /tests/test_background.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from time import sleep 3 | 4 | import pytest 5 | 6 | from redis.background import BackgroundScheduler 7 | 8 | 9 | class TestBackgroundScheduler: 10 | def test_run_once(self): 11 | execute_counter = 0 12 | one = "arg1" 13 | two = 9999 14 | 15 | def callback(arg1: str, arg2: int): 16 | nonlocal execute_counter 17 | nonlocal one 18 | nonlocal two 19 | 20 | execute_counter += 1 21 | 22 | assert arg1 == one 23 | assert arg2 == two 24 | 25 | scheduler = BackgroundScheduler() 26 | scheduler.run_once(0.1, callback, one, two) 27 | assert execute_counter == 0 28 | 29 | sleep(0.15) 30 | 31 | assert execute_counter == 1 32 | 33 | @pytest.mark.parametrize( 34 | "interval,timeout,call_count", 35 | [ 36 | (0.012, 0.04, 3), 37 | (0.035, 0.04, 1), 38 | (0.045, 0.04, 0), 39 | ], 40 | ) 41 | def test_run_recurring(self, interval, timeout, call_count): 42 | execute_counter = [] 43 | one = "arg1" 44 | two = 9999 45 | 46 | def callback(arg1: str, arg2: int): 47 | nonlocal execute_counter 48 | nonlocal one 49 | nonlocal two 50 | 51 | execute_counter.append(1) 52 | 53 | assert arg1 == one 54 | assert arg2 == two 55 | 56 | scheduler = BackgroundScheduler() 57 | scheduler.run_recurring(interval, callback, one, two) 58 | assert len(execute_counter) == 0 59 | 60 | sleep(timeout) 61 | 62 | assert len(execute_counter) == call_count 63 | 64 | @pytest.mark.asyncio 65 | @pytest.mark.parametrize( 66 | "interval,timeout,call_count", 67 | [ 68 | (0.012, 0.04, 3), 69 | (0.035, 0.04, 1), 70 | (0.045, 0.04, 0), 71 | ], 72 | ) 73 | async def test_run_recurring_async(self, interval, timeout, call_count): 74 | execute_counter = [] 75 | one = "arg1" 76 | two = 9999 77 | 78 | async def callback(arg1: str, arg2: int): 79 | nonlocal execute_counter 80 | nonlocal one 81 | nonlocal two 82 | 83 | execute_counter.append(1) 84 | 85 | assert arg1 == one 86 | assert arg2 == two 87 | 88 | scheduler = BackgroundScheduler() 89 | await scheduler.run_recurring_async(interval, callback, one, two) 90 | assert len(execute_counter) == 0 91 | 92 | await asyncio.sleep(timeout) 93 | 94 | assert len(execute_counter) == call_count 95 | -------------------------------------------------------------------------------- /tests/test_parsers/test_helpers.py: -------------------------------------------------------------------------------- 1 | from redis._parsers.helpers import parse_info, parse_client_list 2 | 3 | 4 | def 
test_parse_info(): 5 | info_output = """ 6 | # Modules 7 | module:name=search,ver=999999,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors] 8 | 9 | # search_fields_statistics 10 | search_fields_text:Text=3 11 | search_fields_tag:Tag=2,Sortable=1 12 | 13 | # search_version 14 | search_version:99.99.99 15 | search_redis_version:7.2.2 - oss 16 | 17 | # search_runtime_configurations 18 | search_query_timeout_ms:500 19 | """ 20 | info = parse_info(info_output) 21 | 22 | assert isinstance(info["modules"], list) 23 | assert isinstance(info["modules"][0], dict) 24 | assert info["modules"][0]["name"] == "search" 25 | 26 | assert isinstance(info["search_fields_text"], dict) 27 | assert info["search_fields_text"]["Text"] == 3 28 | 29 | assert isinstance(info["search_fields_tag"], dict) 30 | assert info["search_fields_tag"]["Tag"] == 2 31 | assert info["search_fields_tag"]["Sortable"] == 1 32 | 33 | assert info["search_version"] == "99.99.99" 34 | assert info["search_redis_version"] == "7.2.2 - oss" 35 | assert info["search_query_timeout_ms"] == 500 36 | 37 | 38 | def test_parse_info_list(): 39 | info_output = """ 40 | list_one:a, 41 | list_two:a b,,c,10,1.1 42 | """ 43 | info = parse_info(info_output) 44 | 45 | assert isinstance(info["list_one"], list) 46 | assert info["list_one"] == ["a"] 47 | 48 | assert isinstance(info["list_two"], list) 49 | assert info["list_two"] == ["a b", "c", 10, 1.1] 50 | 51 | 52 | def test_parse_info_list_dict_mixed(): 53 | info_output = """ 54 | list_one:a,b=1 55 | list_two:a b=foo,,c,d=bar,e, 56 | """ 57 | info = parse_info(info_output) 58 | 59 | assert isinstance(info["list_one"], dict) 60 | assert info["list_one"] == {"a": True, "b": 1} 61 | 62 | assert isinstance(info["list_two"], dict) 63 | assert info["list_two"] == {"a b": "foo", "c": True, "d": "bar", "e": True} 64 | 65 | 66 | def test_parse_client_list(): 67 | response = "id=7 addr=/tmp/redis sock/redis.sock:0 fd=9 name=test=_complex_[name] age=-1 idle=0 cmd=client|list user=default lib-name=go-redis(,go1.24.4) lib-ver=" 68 | expected = [ 69 | { 70 | "id": "7", 71 | "addr": "/tmp/redis sock/redis.sock:0", 72 | "fd": "9", 73 | "name": "test=_complex_[name]", 74 | "age": "-1", 75 | "idle": "0", 76 | "cmd": "client|list", 77 | "user": "default", 78 | "lib-name": "go-redis(,go1.24.4)", 79 | "lib-ver": "", 80 | } 81 | ] 82 | clients = parse_client_list(response) 83 | assert clients == expected 84 | -------------------------------------------------------------------------------- /tests/test_monitor.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from .conftest import ( 4 | skip_if_redis_enterprise, 5 | skip_ifnot_redis_enterprise, 6 | wait_for_command, 7 | ) 8 | 9 | 10 | @pytest.mark.onlynoncluster 11 | class TestMonitor: 12 | def test_wait_command_not_found(self, r): 13 | "Make sure the wait_for_command func works when command is not found" 14 | with r.monitor() as m: 15 | response = wait_for_command(r, m, "nothing") 16 | assert response is None 17 | 18 | def test_response_values(self, r): 19 | db = r.connection_pool.connection_kwargs.get("db", 0) 20 | with r.monitor() as m: 21 | r.ping() 22 | response = wait_for_command(r, m, "PING") 23 | assert isinstance(response["time"], float) 24 | assert response["db"] == db 25 | assert response["client_type"] in ("tcp", "unix") 26 | assert isinstance(response["client_address"], str) 27 | assert isinstance(response["client_port"], str) 28 | assert response["command"] == "PING" 29 | 30 | def 
test_command_with_quoted_key(self, r): 31 | with r.monitor() as m: 32 | r.get('foo"bar') 33 | response = wait_for_command(r, m, 'GET foo"bar') 34 | assert response["command"] == 'GET foo"bar' 35 | 36 | def test_command_with_binary_data(self, r): 37 | with r.monitor() as m: 38 | byte_string = b"foo\x92" 39 | r.get(byte_string) 40 | response = wait_for_command(r, m, "GET foo\\x92") 41 | assert response["command"] == "GET foo\\x92" 42 | 43 | def test_command_with_escaped_data(self, r): 44 | with r.monitor() as m: 45 | byte_string = b"foo\\x92" 46 | r.get(byte_string) 47 | response = wait_for_command(r, m, "GET foo\\\\x92") 48 | assert response["command"] == "GET foo\\\\x92" 49 | 50 | @skip_if_redis_enterprise() 51 | def test_lua_script(self, r): 52 | with r.monitor() as m: 53 | script = 'return redis.call("GET", "foo")' 54 | assert r.eval(script, 0) is None 55 | response = wait_for_command(r, m, "GET foo") 56 | assert response["command"] == "GET foo" 57 | assert response["client_type"] == "lua" 58 | assert response["client_address"] == "lua" 59 | assert response["client_port"] == "" 60 | 61 | @skip_ifnot_redis_enterprise() 62 | def test_lua_script_in_enterprise(self, r): 63 | with r.monitor() as m: 64 | script = 'return redis.call("GET", "foo")' 65 | assert r.eval(script, 0) is None 66 | response = wait_for_command(r, m, "GET foo") 67 | assert response is None 68 | -------------------------------------------------------------------------------- /redis/commands/search/index_definition.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class IndexType(Enum): 5 | """Enum of the currently supported index types.""" 6 | 7 | HASH = 1 8 | JSON = 2 9 | 10 | 11 | class IndexDefinition: 12 | """IndexDefinition is used to define an index definition for automatic 13 | indexing on Hash or JSON update.""" 14 | 15 | def __init__( 16 | self, 17 | prefix=[], 18 | filter=None, 19 | language_field=None, 20 | language=None, 21 | score_field=None, 22 | score=1.0, 23 | payload_field=None, 24 | index_type=None, 25 | ): 26 | self.args = [] 27 | self._append_index_type(index_type) 28 | self._append_prefix(prefix) 29 | self._append_filter(filter) 30 | self._append_language(language_field, language) 31 | self._append_score(score_field, score) 32 | self._append_payload(payload_field) 33 | 34 | def _append_index_type(self, index_type): 35 | """Append `ON HASH` or `ON JSON` according to the enum.""" 36 | if index_type is IndexType.HASH: 37 | self.args.extend(["ON", "HASH"]) 38 | elif index_type is IndexType.JSON: 39 | self.args.extend(["ON", "JSON"]) 40 | elif index_type is not None: 41 | raise RuntimeError(f"index_type must be one of {list(IndexType)}") 42 | 43 | def _append_prefix(self, prefix): 44 | """Append PREFIX.""" 45 | if len(prefix) > 0: 46 | self.args.append("PREFIX") 47 | self.args.append(len(prefix)) 48 | for p in prefix: 49 | self.args.append(p) 50 | 51 | def _append_filter(self, filter): 52 | """Append FILTER.""" 53 | if filter is not None: 54 | self.args.append("FILTER") 55 | self.args.append(filter) 56 | 57 | def _append_language(self, language_field, language): 58 | """Append LANGUAGE_FIELD and LANGUAGE.""" 59 | if language_field is not None: 60 | self.args.append("LANGUAGE_FIELD") 61 | self.args.append(language_field) 62 | if language is not None: 63 | self.args.append("LANGUAGE") 64 | self.args.append(language) 65 | 66 | def _append_score(self, score_field, score): 67 | """Append SCORE_FIELD and SCORE.""" 68 | if score_field is not
None: 69 | self.args.append("SCORE_FIELD") 70 | self.args.append(score_field) 71 | if score is not None: 72 | self.args.append("SCORE") 73 | self.args.append(score) 74 | 75 | def _append_payload(self, payload_field): 76 | """Append PAYLOAD_FIELD.""" 77 | if payload_field is not None: 78 | self.args.append("PAYLOAD_FIELD") 79 | self.args.append(payload_field) 80 | -------------------------------------------------------------------------------- /redis/data_structure.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from typing import Any, Generic, List, TypeVar 3 | 4 | from redis.typing import Number 5 | 6 | T = TypeVar("T") 7 | 8 | 9 | class WeightedList(Generic[T]): 10 | """ 11 | Thread-safe list of (item, weight) pairs, kept sorted by weight in descending order. 12 | """ 13 | 14 | def __init__(self): 15 | self._items: List[tuple[Any, Number]] = [] 16 | self._lock = threading.RLock() 17 | 18 | def add(self, item: Any, weight: float) -> None: 19 | """Add an item with the given weight, maintaining descending sorted order.""" 20 | with self._lock: 21 | # Binary search for the insertion point: the list is descending, so insert before the first item with a strictly smaller weight (equal weights keep insertion order) 22 | left, right = 0, len(self._items) 23 | while left < right: 24 | mid = (left + right) // 2 25 | if self._items[mid][1] < weight: 26 | right = mid 27 | else: 28 | left = mid + 1 29 | 30 | self._items.insert(left, (item, weight)) 31 | 32 | def remove(self, item): 33 | """Remove the first occurrence of item and return its weight.""" 34 | with self._lock: 35 | for i, (stored_item, weight) in enumerate(self._items): 36 | if stored_item == item: 37 | self._items.pop(i) 38 | return weight 39 | raise ValueError("Item not found") 40 | 41 | def get_by_weight_range( 42 | self, min_weight: float, max_weight: float 43 | ) -> List[tuple[Any, Number]]: 44 | """Get all items within the given weight range.""" 45 | with self._lock: 46 | result = [] 47 | for item, weight in self._items: 48 | if min_weight <= weight <= max_weight: 49 | result.append((item, weight)) 50 | return result 51 | 52 | def get_top_n(self, n: int) -> List[tuple[Any, Number]]: 53 | """Get the top N highest-weighted items.""" 54 | with self._lock: 55 | return [(item, weight) for item, weight in self._items[:n]] 56 | 57 | def update_weight(self, item, new_weight: float): 58 | """Update the weight of an item, preserving sorted order.""" 59 | with self._lock: 60 | old_weight = self.remove(item) 61 | self.add(item, new_weight) 62 | return old_weight 63 | 64 | def __iter__(self): 65 | """Iterate in descending weight order""" 66 | with self._lock: 67 | items_copy = ( 68 | self._items.copy() 69 | ) # Create a snapshot, since the lock is released after each 'yield' 70 | 71 | for item, weight in items_copy: 72 | yield item, weight 73 | 74 | def __len__(self): 75 | with self._lock: 76 | return len(self._items) 77 | 78 | def __getitem__(self, index) -> tuple[Any, Number]: 79 | with self._lock: 80 | item, weight = self._items[index] 81 | return item, weight 82 | -------------------------------------------------------------------------------- /tests/test_auth/test_token.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | import pytest 4 | from redis.auth.err import InvalidTokenSchemaErr 5 | from redis.auth.token import JWToken, SimpleToken 6 | 7 | 8 | class TestToken: 9 | def test_simple_token(self): 10 | token = SimpleToken( 11 | "value", 12 | (datetime.now(timezone.utc).timestamp() * 1000) + 1000, 13 | (datetime.now(timezone.utc).timestamp() * 1000), 14 | {"key": "value"}, 15 | ) 16 | 17 | assert token.ttl() == pytest.approx(1000, 10) 18 | assert token.is_expired() is False 19
| assert token.try_get("key") == "value" 20 | assert token.get_value() == "value" 21 | assert token.get_expires_at_ms() == pytest.approx( 22 | (datetime.now(timezone.utc).timestamp() * 1000) + 1000, 10 23 | ) 24 | assert token.get_received_at_ms() == pytest.approx( 25 | (datetime.now(timezone.utc).timestamp() * 1000), 10 26 | ) 27 | 28 | token = SimpleToken( 29 | "value", 30 | -1, 31 | (datetime.now(timezone.utc).timestamp() * 1000), 32 | {"key": "value"}, 33 | ) 34 | 35 | assert token.ttl() == -1 36 | assert token.is_expired() is False 37 | assert token.get_expires_at_ms() == -1 38 | 39 | def test_jwt_token(self): 40 | jwt = pytest.importorskip("jwt") 41 | 42 | token = { 43 | "exp": datetime.now(timezone.utc).timestamp() + 100, 44 | "iat": datetime.now(timezone.utc).timestamp(), 45 | "key": "value", 46 | } 47 | encoded = jwt.encode(token, "secret", algorithm="HS256") 48 | jwt_token = JWToken(encoded) 49 | 50 | assert jwt_token.ttl() == pytest.approx(100000, 10) 51 | assert jwt_token.is_expired() is False 52 | assert jwt_token.try_get("key") == "value" 53 | assert jwt_token.get_value() == encoded 54 | assert jwt_token.get_expires_at_ms() == pytest.approx( 55 | (datetime.now(timezone.utc).timestamp() * 1000) + 100000, 10 56 | ) 57 | assert jwt_token.get_received_at_ms() == pytest.approx( 58 | (datetime.now(timezone.utc).timestamp() * 1000), 10 59 | ) 60 | 61 | token = { 62 | "exp": -1, 63 | "iat": datetime.now(timezone.utc).timestamp(), 64 | "key": "value", 65 | } 66 | encoded = jwt.encode(token, "secret", algorithm="HS256") 67 | jwt_token = JWToken(encoded) 68 | 69 | assert jwt_token.ttl() == -1 70 | assert jwt_token.is_expired() is False 71 | assert jwt_token.get_expires_at_ms() == -1000 72 | 73 | with pytest.raises(InvalidTokenSchemaErr): 74 | token = {"key": "value"} 75 | encoded = jwt.encode(token, "secret", algorithm="HS256") 76 | JWToken(encoded) 77 | -------------------------------------------------------------------------------- /tests/test_asyncio/test_monitor.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from tests.conftest import skip_if_redis_enterprise, skip_ifnot_redis_enterprise 3 | 4 | from .conftest import wait_for_command 5 | 6 | 7 | @pytest.mark.onlynoncluster 8 | class TestMonitor: 9 | async def test_wait_command_not_found(self, r): 10 | """Make sure the wait_for_command func works when command is not found""" 11 | async with r.monitor() as m: 12 | response = await wait_for_command(r, m, "nothing") 13 | assert response is None 14 | 15 | async def test_response_values(self, r): 16 | db = r.connection_pool.connection_kwargs.get("db", 0) 17 | async with r.monitor() as m: 18 | await r.ping() 19 | response = await wait_for_command(r, m, "PING") 20 | assert isinstance(response["time"], float) 21 | assert response["db"] == db 22 | assert response["client_type"] in ("tcp", "unix") 23 | assert isinstance(response["client_address"], str) 24 | assert isinstance(response["client_port"], str) 25 | assert response["command"] == "PING" 26 | 27 | async def test_command_with_quoted_key(self, r): 28 | async with r.monitor() as m: 29 | await r.get('foo"bar') 30 | response = await wait_for_command(r, m, 'GET foo"bar') 31 | assert response["command"] == 'GET foo"bar' 32 | 33 | async def test_command_with_binary_data(self, r): 34 | async with r.monitor() as m: 35 | byte_string = b"foo\x92" 36 | await r.get(byte_string) 37 | response = await wait_for_command(r, m, "GET foo\\x92") 38 | assert response["command"] == "GET foo\\x92" 39 |
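    # MONITOR escapes non-printable bytes in its output: the raw byte 0x92
    # sent above is reported as the four characters "\x92". The next test
    # stores a key that really contains a backslash, which MONITOR escapes
    # as "\\", hence the doubled backslashes in the expected command.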
40 | async def test_command_with_escaped_data(self, r): 41 | async with r.monitor() as m: 42 | byte_string = b"foo\\x92" 43 | await r.get(byte_string) 44 | response = await wait_for_command(r, m, "GET foo\\\\x92") 45 | assert response["command"] == "GET foo\\\\x92" 46 | 47 | @skip_if_redis_enterprise() 48 | async def test_lua_script(self, r): 49 | async with r.monitor() as m: 50 | script = 'return redis.call("GET", "foo")' 51 | assert await r.eval(script, 0) is None 52 | response = await wait_for_command(r, m, "GET foo") 53 | assert response["command"] == "GET foo" 54 | assert response["client_type"] == "lua" 55 | assert response["client_address"] == "lua" 56 | assert response["client_port"] == "" 57 | 58 | @skip_ifnot_redis_enterprise() 59 | async def test_lua_script_in_enterprise(self, r): 60 | async with r.monitor() as m: 61 | script = 'return redis.call("GET", "foo")' 62 | assert await r.eval(script, 0) is None 63 | response = await wait_for_command(r, m, "GET foo") 64 | assert response is None 65 | -------------------------------------------------------------------------------- /doctests/trans_pipe.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: pipe_trans_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for vector database quickstart pages: 5 | https://redis.io/docs/latest/develop/get-started/vector-database/ 6 | """ 7 | # HIDE_END 8 | import redis 9 | 10 | # STEP_START basic_pipe 11 | r = redis.Redis(decode_responses=True) 12 | # REMOVE_START 13 | for i in range(5): 14 | r.delete(f"seat:{i}") 15 | 16 | r.delete("shellpath") 17 | # REMOVE_END 18 | 19 | pipe = r.pipeline() 20 | 21 | for i in range(5): 22 | pipe.set(f"seat:{i}", f"#{i}") 23 | 24 | set_5_result = pipe.execute() 25 | print(set_5_result) # >>> [True, True, True, True, True] 26 | 27 | pipe = r.pipeline() 28 | 29 | # "Chain" pipeline commands together. 30 | get_3_result = pipe.get("seat:0").get("seat:3").get("seat:4").execute() 31 | print(get_3_result) # >>> ['#0', '#3', '#4'] 32 | # STEP_END 33 | # REMOVE_START 34 | assert set_5_result == [True, True, True, True, True] 35 | assert get_3_result == ['#0', '#3', '#4'] 36 | # REMOVE_END 37 | 38 | # STEP_START trans_watch 39 | r.set("shellpath", "/usr/syscmds/") 40 | 41 | with r.pipeline() as pipe: 42 | # Repeat until successful. 43 | while True: 44 | try: 45 | # Watch the key we are about to change. 46 | pipe.watch("shellpath") 47 | 48 | # The pipeline executes commands directly (instead of 49 | # buffering them) from immediately after the `watch()` 50 | # call until we begin the transaction. 51 | current_path = pipe.get("shellpath") 52 | new_path = current_path + ":/usr/mycmds/" 53 | 54 | # Start the transaction, which will enable buffering 55 | # again for the remaining commands. 56 | pipe.multi() 57 | 58 | pipe.set("shellpath", new_path) 59 | 60 | pipe.execute() 61 | 62 | # The transaction succeeded, so break out of the loop. 63 | break 64 | except redis.WatchError: 65 | # The transaction failed, so continue with the next attempt. 
66 | continue 67 | 68 | get_path_result = r.get("shellpath") 69 | print(get_path_result) # >>> '/usr/syscmds/:/usr/mycmds/' 70 | # STEP_END 71 | # REMOVE_START 72 | assert get_path_result == '/usr/syscmds/:/usr/mycmds/' 73 | r.delete("shellpath") 74 | # REMOVE_END 75 | 76 | # STEP_START watch_conv_method 77 | r.set("shellpath", "/usr/syscmds/") 78 | 79 | 80 | def watched_sequence(pipe): 81 | current_path = pipe.get("shellpath") 82 | new_path = current_path + ":/usr/mycmds/" 83 | 84 | pipe.multi() 85 | 86 | pipe.set("shellpath", new_path) 87 | 88 | 89 | trans_result = r.transaction(watched_sequence, "shellpath") 90 | print(trans_result) # True 91 | 92 | get_path_result = r.get("shellpath") 93 | print(get_path_result) # >>> '/usr/syscmds/:/usr/mycmds/' 94 | # REMOVE_START 95 | assert trans_result 96 | assert get_path_result == '/usr/syscmds/:/usr/mycmds/' 97 | # REMOVE_END 98 | # STEP_END 99 | -------------------------------------------------------------------------------- /redis/commands/search/result.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from ._util import to_string 4 | from .document import Document 5 | 6 | 7 | class Result: 8 | """ 9 | Represents the result of a search query, and has an array of Document 10 | objects 11 | """ 12 | 13 | def __init__( 14 | self, 15 | res, 16 | hascontent, 17 | duration=0, 18 | has_payload=False, 19 | with_scores=False, 20 | field_encodings: Optional[dict] = None, 21 | ): 22 | """ 23 | - duration: the execution time of the query 24 | - has_payload: whether the query has payloads 25 | - with_scores: whether the query has scores 26 | - field_encodings: a dictionary of field encodings if any is provided 27 | """ 28 | 29 | self.total = res[0] 30 | self.duration = duration 31 | self.docs = [] 32 | 33 | step = 1 34 | if hascontent: 35 | step = step + 1 36 | if has_payload: 37 | step = step + 1 38 | if with_scores: 39 | step = step + 1 40 | 41 | offset = 2 if with_scores else 1 42 | 43 | for i in range(1, len(res), step): 44 | id = to_string(res[i]) 45 | payload = to_string(res[i + offset]) if has_payload else None 46 | # fields_offset = 2 if has_payload else 1 47 | fields_offset = offset + 1 if has_payload else offset 48 | score = float(res[i + 1]) if with_scores else None 49 | 50 | fields = {} 51 | if hascontent and res[i + fields_offset] is not None: 52 | keys = map(to_string, res[i + fields_offset][::2]) 53 | values = res[i + fields_offset][1::2] 54 | 55 | for key, value in zip(keys, values): 56 | if field_encodings is None or key not in field_encodings: 57 | fields[key] = to_string(value) 58 | continue 59 | 60 | encoding = field_encodings[key] 61 | 62 | # If the encoding is None, we don't need to decode the value 63 | if encoding is None: 64 | fields[key] = value 65 | else: 66 | fields[key] = to_string(value, encoding=encoding) 67 | 68 | try: 69 | del fields["id"] 70 | except KeyError: 71 | pass 72 | 73 | try: 74 | fields["json"] = fields["$"] 75 | del fields["$"] 76 | except KeyError: 77 | pass 78 | 79 | doc = ( 80 | Document(id, score=score, payload=payload, **fields) 81 | if with_scores 82 | else Document(id, payload=payload, **fields) 83 | ) 84 | self.docs.append(doc) 85 | 86 | def __repr__(self) -> str: 87 | return f"Result{{{self.total} total, docs: {self.docs}}}" 88 | -------------------------------------------------------------------------------- /doctests/cmds_list.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: 
cmds_list 2 | # HIDE_START 3 | import redis 4 | 5 | r = redis.Redis(decode_responses=True) 6 | # HIDE_END 7 | 8 | # STEP_START lpush 9 | res1 = r.lpush("mylist", "world") 10 | print(res1) # >>> 1 11 | 12 | res2 = r.lpush("mylist", "hello") 13 | print(res2) # >>> 2 14 | 15 | res3 = r.lrange("mylist", 0, -1) 16 | print(res3) # >>> [ "hello", "world" ] 17 | 18 | # REMOVE_START 19 | assert res3 == [ "hello", "world" ] 20 | r.delete("mylist") 21 | # REMOVE_END 22 | # STEP_END 23 | 24 | # STEP_START lrange 25 | res4 = r.rpush("mylist", "one"); 26 | print(res4) # >>> 1 27 | 28 | res5 = r.rpush("mylist", "two") 29 | print(res5) # >>> 2 30 | 31 | res6 = r.rpush("mylist", "three") 32 | print(res6) # >>> 3 33 | 34 | res7 = r.lrange('mylist', 0, 0) 35 | print(res7) # >>> [ 'one' ] 36 | 37 | res8 = r.lrange('mylist', -3, 2) 38 | print(res8) # >>> [ 'one', 'two', 'three' ] 39 | 40 | res9 = r.lrange('mylist', -100, 100) 41 | print(res9) # >>> [ 'one', 'two', 'three' ] 42 | 43 | res10 = r.lrange('mylist', 5, 10) 44 | print(res10) # >>> [] 45 | 46 | # REMOVE_START 47 | assert res7 == [ 'one' ] 48 | assert res8 == [ 'one', 'two', 'three' ] 49 | assert res9 == [ 'one', 'two', 'three' ] 50 | assert res10 == [] 51 | r.delete('mylist') 52 | # REMOVE_END 53 | # STEP_END 54 | 55 | # STEP_START llen 56 | res11 = r.lpush("mylist", "World") 57 | print(res11) # >>> 1 58 | 59 | res12 = r.lpush("mylist", "Hello") 60 | print(res12) # >>> 2 61 | 62 | res13 = r.llen("mylist") 63 | print(res13) # >>> 2 64 | 65 | # REMOVE_START 66 | assert res13 == 2 67 | r.delete("mylist") 68 | # REMOVE_END 69 | # STEP_END 70 | 71 | # STEP_START rpush 72 | res14 = r.rpush("mylist", "hello") 73 | print(res14) # >>> 1 74 | 75 | res15 = r.rpush("mylist", "world") 76 | print(res15) # >>> 2 77 | 78 | res16 = r.lrange("mylist", 0, -1) 79 | print(res16) # >>> [ "hello", "world" ] 80 | 81 | # REMOVE_START 82 | assert res16 == [ "hello", "world" ] 83 | r.delete("mylist") 84 | # REMOVE_END 85 | # STEP_END 86 | 87 | # STEP_START lpop 88 | res17 = r.rpush("mylist", *["one", "two", "three", "four", "five"]) 89 | print(res17) # >>> 5 90 | 91 | res18 = r.lpop("mylist") 92 | print(res18) # >>> "one" 93 | 94 | res19 = r.lpop("mylist", 2) 95 | print(res19) # >>> ['two', 'three'] 96 | 97 | res17 = r.lrange("mylist", 0, -1) 98 | print(res17) # >>> [ "four", "five" ] 99 | 100 | # REMOVE_START 101 | assert res17 == [ "four", "five" ] 102 | r.delete("mylist") 103 | # REMOVE_END 104 | # STEP_END 105 | 106 | # STEP_START rpop 107 | res18 = r.rpush("mylist", *["one", "two", "three", "four", "five"]) 108 | print(res18) # >>> 5 109 | 110 | res19 = r.rpop("mylist") 111 | print(res19) # >>> "five" 112 | 113 | res20 = r.rpop("mylist", 2) 114 | print(res20) # >>> ['four', 'three'] 115 | 116 | res21 = r.lrange("mylist", 0, -1) 117 | print(res21) # >>> [ "one", "two" ] 118 | 119 | # REMOVE_START 120 | assert res21 == [ "one", "two" ] 121 | r.delete("mylist") 122 | # REMOVE_END 123 | # STEP_END -------------------------------------------------------------------------------- /tests/test_data_structure.py: -------------------------------------------------------------------------------- 1 | import concurrent 2 | import random 3 | from concurrent.futures import ThreadPoolExecutor 4 | from time import sleep 5 | 6 | from redis.data_structure import WeightedList 7 | 8 | 9 | class TestWeightedList: 10 | def test_add_items(self): 11 | wlist = WeightedList() 12 | 13 | wlist.add("item1", 3.0) 14 | wlist.add("item2", 2.0) 15 | wlist.add("item3", 4.0) 16 | wlist.add("item4", 4.0) 17 | 
18 | assert wlist.get_top_n(4) == [ 19 | ("item3", 4.0), 20 | ("item4", 4.0), 21 | ("item1", 3.0), 22 | ("item2", 2.0), 23 | ] 24 | 25 | def test_remove_items(self): 26 | wlist = WeightedList() 27 | wlist.add("item1", 3.0) 28 | wlist.add("item2", 2.0) 29 | wlist.add("item3", 4.0) 30 | wlist.add("item4", 4.0) 31 | 32 | assert wlist.remove("item2") == 2.0 33 | assert wlist.remove("item4") == 4.0 34 | 35 | assert wlist.get_top_n(4) == [("item3", 4.0), ("item1", 3.0)] 36 | 37 | def test_get_by_weight_range(self): 38 | wlist = WeightedList() 39 | wlist.add("item1", 3.0) 40 | wlist.add("item2", 2.0) 41 | wlist.add("item3", 4.0) 42 | wlist.add("item4", 4.0) 43 | 44 | assert wlist.get_by_weight_range(2.0, 3.0) == [("item1", 3.0), ("item2", 2.0)] 45 | 46 | def test_update_weights(self): 47 | wlist = WeightedList() 48 | wlist.add("item1", 3.0) 49 | wlist.add("item2", 2.0) 50 | wlist.add("item3", 4.0) 51 | wlist.add("item4", 4.0) 52 | 53 | assert wlist.get_top_n(4) == [ 54 | ("item3", 4.0), 55 | ("item4", 4.0), 56 | ("item1", 3.0), 57 | ("item2", 2.0), 58 | ] 59 | 60 | wlist.update_weight("item2", 5.0) 61 | 62 | assert wlist.get_top_n(4) == [ 63 | ("item2", 5.0), 64 | ("item3", 4.0), 65 | ("item4", 4.0), 66 | ("item1", 3.0), 67 | ] 68 | 69 | def test_thread_safety(self) -> None: 70 | """Test thread safety with concurrent operations""" 71 | wl = WeightedList() 72 | 73 | def worker(worker_id): 74 | for i in range(100): 75 | # Add items 76 | wl.add(f"item_{worker_id}_{i}", random.uniform(0, 100)) 77 | 78 | # Read operations 79 | try: 80 | length = len(wl) 81 | if length > 0: 82 | wl.get_top_n(min(5, length)) 83 | wl.get_by_weight_range(20, 80) 84 | except Exception as e: 85 | print(f"Error in worker {worker_id}: {e}") 86 | 87 | sleep(0.001) # Small delay 88 | 89 | # Run multiple workers concurrently 90 | with ThreadPoolExecutor(max_workers=5) as executor: 91 | futures = [executor.submit(worker, i) for i in range(5)] 92 | concurrent.futures.wait(futures) 93 | 94 | assert len(wl) == 500 95 | -------------------------------------------------------------------------------- /redis/commands/redismodules.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from json import JSONDecoder, JSONEncoder 4 | from typing import TYPE_CHECKING 5 | 6 | if TYPE_CHECKING: 7 | from .bf import BFBloom, CFBloom, CMSBloom, TDigestBloom, TOPKBloom 8 | from .json import JSON 9 | from .search import AsyncSearch, Search 10 | from .timeseries import TimeSeries 11 | from .vectorset import VectorSet 12 | 13 | 14 | class RedisModuleCommands: 15 | """This class contains the wrapper functions to bring supported redis 16 | modules into the command namespace. 17 | """ 18 | 19 | def json(self, encoder=JSONEncoder(), decoder=JSONDecoder()) -> JSON: 20 | """Access the json namespace, providing support for redis json.""" 21 | 22 | from .json import JSON 23 | 24 | jj = JSON(client=self, encoder=encoder, decoder=decoder) 25 | return jj 26 | 27 | def ft(self, index_name="idx") -> Search: 28 | """Access the search namespace, providing support for redis search.""" 29 | 30 | from .search import Search 31 | 32 | s = Search(client=self, index_name=index_name) 33 | return s 34 | 35 | def ts(self) -> TimeSeries: 36 | """Access the timeseries namespace, providing support for 37 | redis timeseries data. 
38 | """ 39 | 40 | from .timeseries import TimeSeries 41 | 42 | s = TimeSeries(client=self) 43 | return s 44 | 45 | def bf(self) -> BFBloom: 46 | """Access the Bloom filter namespace.""" 47 | 48 | from .bf import BFBloom 49 | 50 | bf = BFBloom(client=self) 51 | return bf 52 | 53 | def cf(self) -> CFBloom: 54 | """Access the cuckoo filter namespace.""" 55 | 56 | from .bf import CFBloom 57 | 58 | cf = CFBloom(client=self) 59 | return cf 60 | 61 | def cms(self) -> CMSBloom: 62 | """Access the count-min sketch namespace.""" 63 | 64 | from .bf import CMSBloom 65 | 66 | cms = CMSBloom(client=self) 67 | return cms 68 | 69 | def topk(self) -> TOPKBloom: 70 | """Access the top-k namespace.""" 71 | 72 | from .bf import TOPKBloom 73 | 74 | topk = TOPKBloom(client=self) 75 | return topk 76 | 77 | def tdigest(self) -> TDigestBloom: 78 | """Access the t-digest namespace.""" 79 | 80 | from .bf import TDigestBloom 81 | 82 | tdigest = TDigestBloom(client=self) 83 | return tdigest 84 | 85 | def vset(self) -> VectorSet: 86 | """Access the VectorSet commands namespace.""" 87 | 88 | from .vectorset import VectorSet 89 | 90 | vset = VectorSet(client=self) 91 | return vset 92 | 93 | 94 | class AsyncRedisModuleCommands(RedisModuleCommands): 95 | def ft(self, index_name="idx") -> AsyncSearch: 96 | """Access the search namespace, providing support for redis search.""" 97 | 98 | from .search import AsyncSearch 99 | 100 | s = AsyncSearch(client=self, index_name=index_name) 101 | return s 102 | -------------------------------------------------------------------------------- /redis/asyncio/multidb/event.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from redis.asyncio import Redis 4 | from redis.asyncio.multidb.database import AsyncDatabase 5 | from redis.asyncio.multidb.failure_detector import AsyncFailureDetector 6 | from redis.event import AsyncEventListenerInterface, AsyncOnCommandsFailEvent 7 | 8 | 9 | class AsyncActiveDatabaseChanged: 10 | """ 11 | Event fired when an async active database has been changed. 12 | """ 13 | 14 | def __init__( 15 | self, 16 | old_database: AsyncDatabase, 17 | new_database: AsyncDatabase, 18 | command_executor, 19 | **kwargs, 20 | ): 21 | self._old_database = old_database 22 | self._new_database = new_database 23 | self._command_executor = command_executor 24 | self._kwargs = kwargs 25 | 26 | @property 27 | def old_database(self) -> AsyncDatabase: 28 | return self._old_database 29 | 30 | @property 31 | def new_database(self) -> AsyncDatabase: 32 | return self._new_database 33 | 34 | @property 35 | def command_executor(self): 36 | return self._command_executor 37 | 38 | @property 39 | def kwargs(self): 40 | return self._kwargs 41 | 42 | 43 | class ResubscribeOnActiveDatabaseChanged(AsyncEventListenerInterface): 44 | """ 45 | Re-subscribe the currently active pub/sub to a new active database. 46 | """ 47 | 48 | async def listen(self, event: AsyncActiveDatabaseChanged): 49 | old_pubsub = event.command_executor.active_pubsub 50 | 51 | if old_pubsub is not None: 52 | # Re-assign old channels and patterns so they will be automatically subscribed on connection.
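            # Copying the old subscription state onto a PubSub created from
            # the new database's client and then calling on_connect()
            # re-issues SUBSCRIBE/PSUBSCRIBE on the new connection; the old
            # PubSub is closed only after the swap, so no channel or pattern
            # subscriptions are lost while switching databases.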
53 | new_pubsub = event.new_database.client.pubsub(**event.kwargs) 54 | new_pubsub.channels = old_pubsub.channels 55 | new_pubsub.patterns = old_pubsub.patterns 56 | await new_pubsub.on_connect(None) 57 | event.command_executor.active_pubsub = new_pubsub 58 | await old_pubsub.aclose() 59 | 60 | 61 | class CloseConnectionOnActiveDatabaseChanged(AsyncEventListenerInterface): 62 | """ 63 | Close connection to the old active database. 64 | """ 65 | 66 | async def listen(self, event: AsyncActiveDatabaseChanged): 67 | await event.old_database.client.aclose() 68 | 69 | if isinstance(event.old_database.client, Redis): 70 | await event.old_database.client.connection_pool.update_active_connections_for_reconnect() 71 | await event.old_database.client.connection_pool.disconnect() 72 | 73 | 74 | class RegisterCommandFailure(AsyncEventListenerInterface): 75 | """ 76 | Event listener that registers command failures and passes them to the failure detectors. 77 | """ 78 | 79 | def __init__(self, failure_detectors: List[AsyncFailureDetector]): 80 | self._failure_detectors = failure_detectors 81 | 82 | async def listen(self, event: AsyncOnCommandsFailEvent) -> None: 83 | for failure_detector in self._failure_detectors: 84 | await failure_detector.register_failure(event.exception, event.commands) 85 | -------------------------------------------------------------------------------- /docs/connections.rst: -------------------------------------------------------------------------------- 1 | Connecting to Redis 2 | ################### 3 | 4 | 5 | Generic Client 6 | ************** 7 | 8 | This is the client used to connect directly to a standard Redis node. 9 | 10 | .. autoclass:: redis.Redis 11 | :members: 12 | 13 | 14 | Sentinel Client 15 | *************** 16 | 17 | Redis `Sentinel `_ provides high availability for Redis. There are commands that can only be executed against a Redis node running in sentinel mode. Connecting to those nodes and executing commands against them requires a Sentinel connection. 18 | 19 | Connection example (assumes Redis exists on the ports listed below): 20 | 21 | >>> from redis import Sentinel 22 | >>> sentinel = Sentinel([('localhost', 26379)], socket_timeout=0.1) 23 | >>> sentinel.discover_master('mymaster') 24 | ('127.0.0.1', 6379) 25 | >>> sentinel.discover_slaves('mymaster') 26 | [('127.0.0.1', 6380)] 27 | 28 | Sentinel 29 | ======== 30 | .. autoclass:: redis.sentinel.Sentinel 31 | :members: 32 | 33 | SentinelConnectionPool 34 | ====================== 35 | .. autoclass:: redis.sentinel.SentinelConnectionPool 36 | :members: 37 | 38 | 39 | Cluster Client 40 | ************** 41 | 42 | This client is used for connecting to a Redis Cluster. 43 | 44 | RedisCluster 45 | ============ 46 | .. autoclass:: redis.cluster.RedisCluster 47 | :members: 48 | 49 | ClusterNode 50 | =========== 51 | .. autoclass:: redis.cluster.ClusterNode 52 | :members: 53 | 54 | 55 | Async Client 56 | ************ 57 | 58 | See complete example: `here `__ 59 | 60 | This client is used for communicating with Redis asynchronously. 61 | 62 | .. autoclass:: redis.asyncio.client.Redis 63 | :members: 64 | 65 | 66 | Async Cluster Client 67 | ******************** 68 | 69 | RedisCluster (Async) 70 | ==================== 71 | .. autoclass:: redis.asyncio.cluster.RedisCluster 72 | :members: 73 | :member-order: bysource 74 | 75 | ClusterNode (Async) 76 | =================== 77 | ..
autoclass:: redis.asyncio.cluster.ClusterNode 78 | :members: 79 | :member-order: bysource 80 | 81 | ClusterPipeline (Async) 82 | ======================= 83 | .. autoclass:: redis.asyncio.cluster.ClusterPipeline 84 | :members: execute_command, execute 85 | :member-order: bysource 86 | 87 | 88 | Connection 89 | ********** 90 | 91 | See complete example: `here `__ 92 | 93 | Connection 94 | ========== 95 | .. autoclass:: redis.connection.Connection 96 | :members: 97 | 98 | Connection (Async) 99 | ================== 100 | .. autoclass:: redis.asyncio.connection.Connection 101 | :members: 102 | 103 | 104 | Connection Pools 105 | **************** 106 | 107 | See complete example: `here `__ 108 | 109 | ConnectionPool 110 | ============== 111 | .. autoclass:: redis.connection.ConnectionPool 112 | :members: 113 | 114 | ConnectionPool (Async) 115 | ====================== 116 | .. autoclass:: redis.asyncio.connection.ConnectionPool 117 | :members: 118 | -------------------------------------------------------------------------------- /benchmarks/cluster_async_pipeline.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import functools 3 | import time 4 | 5 | import aioredis_cluster 6 | import aredis 7 | import uvloop 8 | 9 | import redis.asyncio as redispy 10 | 11 | 12 | def timer(func): 13 | @functools.wraps(func) 14 | async def wrapper(*args, **kwargs): 15 | tic = time.perf_counter() 16 | await func(*args, **kwargs) 17 | toc = time.perf_counter() 18 | return f"{toc - tic:.4f}" 19 | 20 | return wrapper 21 | 22 | 23 | @timer 24 | async def warmup(client): 25 | await asyncio.gather( 26 | *(asyncio.create_task(client.exists(f"bench:warmup_{i}")) for i in range(100)) 27 | ) 28 | 29 | 30 | @timer 31 | async def run(client): 32 | data_str = "a" * size 33 | data_int = int("1" * size) 34 | 35 | for i in range(count): 36 | async with client.pipeline() as pipe: 37 | await ( 38 | pipe.set(f"bench:str_{i}", data_str) 39 | .set(f"bench:int_{i}", data_int) 40 | .get(f"bench:str_{i}") 41 | .get(f"bench:int_{i}") 42 | .hset("bench:hset", str(i), data_str) 43 | .hget("bench:hset", str(i)) 44 | .incr("bench:incr") 45 | .lpush("bench:lpush", data_int) 46 | .lrange("bench:lpush", 0, 300) 47 | .lpop("bench:lpush") 48 | .execute() 49 | ) 50 | 51 | 52 | async def main(loop): 53 | arc = aredis.StrictRedisCluster( 54 | host=host, 55 | port=port, 56 | password=password, 57 | max_connections=2**31, 58 | max_connections_per_node=2**31, 59 | readonly=False, 60 | reinitialize_steps=count, 61 | skip_full_coverage_check=True, 62 | decode_responses=False, 63 | max_idle_time=count, 64 | idle_check_interval=count, 65 | ) 66 | print(f"{loop} {await warmup(arc)} aredis") 67 | print(await run(arc)) 68 | arc.connection_pool.disconnect() 69 | 70 | aiorc = await aioredis_cluster.create_redis_cluster( 71 | [(host, port)], 72 | password=password, 73 | state_reload_interval=count, 74 | idle_connection_timeout=count, 75 | pool_maxsize=2**31, 76 | ) 77 | print(f"{loop} {await warmup(aiorc)} aioredis-cluster") 78 | print(await run(aiorc)) 79 | aiorc.close() 80 | await aiorc.wait_closed() 81 | 82 | async with redispy.RedisCluster( 83 | host=host, 84 | port=port, 85 | password=password, 86 | reinitialize_steps=count, 87 | read_from_replicas=False, 88 | decode_responses=False, 89 | max_connections=2**31, 90 | ) as rca: 91 | print(f"{loop} {await warmup(rca)} redispy") 92 | print(await run(rca)) 93 | 94 | 95 | if __name__ == "__main__": 96 | host = "localhost" 97 | port = 16379 98 | password = None 99
| 100 | count = 10000 101 | size = 256 102 | 103 | asyncio.run(main("asyncio")) 104 | 105 | uvloop.install() 106 | 107 | asyncio.run(main("uvloop")) 108 | -------------------------------------------------------------------------------- /doctests/query_em.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_em 2 | # HIDE_START 3 | import json 4 | import redis 5 | from redis.commands.json.path import Path 6 | from redis.commands.search.field import TextField, NumericField, TagField 7 | from redis.commands.search.index_definition import IndexDefinition, IndexType 8 | from redis.commands.search.query import NumericFilter, Query 9 | 10 | r = redis.Redis(decode_responses=True) 11 | 12 | # create index 13 | schema = ( 14 | TextField("$.description", as_name="description"), 15 | NumericField("$.price", as_name="price"), 16 | TagField("$.condition", as_name="condition"), 17 | ) 18 | 19 | index = r.ft("idx:bicycle") 20 | index.create_index( 21 | schema, 22 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 23 | ) 24 | 25 | # load data 26 | with open("data/query_em.json") as f: 27 | bicycles = json.load(f) 28 | 29 | pipeline = r.pipeline(transaction=False) 30 | for bid, bicycle in enumerate(bicycles): 31 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 32 | pipeline.execute() 33 | # HIDE_END 34 | 35 | # STEP_START em1 36 | res = index.search(Query("@price:[270 270]")) 37 | print(res.total) 38 | # >>> 1 39 | # REMOVE_START 40 | assert res.total == 1 41 | # REMOVE_END 42 | 43 | try: 44 | res = index.search(Query("@price:[270]")) # not yet supported in redis-py 45 | print(res.total) 46 | # >>> 1 47 | assert res.total == 1 48 | except Exception: 49 | print("'@price:[270]' syntax not yet supported.") 50 | 51 | try: 52 | res = index.search(Query("@price==270")) # not yet supported in redis-py 53 | print(res.total) 54 | # >>> 1 55 | assert res.total == 1 56 | except Exception: 57 | print("'@price==270' syntax not yet supported.") 58 | 59 | query = Query("*").add_filter(NumericFilter("price", 270, 270)) 60 | res = index.search(query) 61 | print(res.total) 62 | # >>> 1 63 | # REMOVE_START 64 | assert res.total == 1 65 | # REMOVE_END 66 | # STEP_END 67 | 68 | # STEP_START em2 69 | res = index.search(Query("@condition:{new}")) 70 | print(res.total) 71 | # >>> 5 72 | # REMOVE_START 73 | assert res.total == 5 74 | # REMOVE_END 75 | # STEP_END 76 | 77 | # STEP_START em3 78 | schema = ( 79 | TagField("$.email", as_name="email"), 80 | ) 81 | 82 | idx_email = r.ft("idx:email") 83 | idx_email.create_index( 84 | schema, 85 | definition=IndexDefinition(prefix=["key:"], index_type=IndexType.JSON), 86 | ) 87 | r.json().set('key:1', Path.root_path(), {"email": "test@redis.com"}) 88 | 89 | try: 90 | res = idx_email.search(Query("test@redis.com").dialect(2)) 91 | print(res) 92 | except Exception: 93 | print("'test@redis.com' syntax not yet supported.") 94 | # REMOVE_START 95 | r.ft("idx:email").dropindex(delete_documents=True) 96 | # REMOVE_END 97 | # STEP_END 98 | 99 | # STEP_START em4 100 | res = index.search(Query("@description:\"rough terrain\"")) 101 | print(res.total) 102 | # >>> 1 (Result{1 total, docs: [Document {'id': 'bicycle:8'...
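# Note: the escaped double quotes above turn "rough terrain" into an exact-phrase match on the description field; without them the two words would be matched as independent terms that may occur anywhere in the field.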
103 | # REMOVE_START 104 | assert res.total == 1 105 | # REMOVE_END 106 | # STEP_END 107 | 108 | # REMOVE_START 109 | # destroy index and data 110 | r.ft("idx:bicycle").dropindex(delete_documents=True) 111 | # REMOVE_END 112 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # image tag 8.0-RC2-pre is the one matching the 8.0 GA release 3 | x-client-libs-stack-image: &client-libs-stack-image 4 | image: "redislabs/client-libs-test:${CLIENT_LIBS_TEST_STACK_IMAGE_TAG:-8.4.0}" 5 | 6 | x-client-libs-image: &client-libs-image 7 | image: "redislabs/client-libs-test:${CLIENT_LIBS_TEST_IMAGE_TAG:-8.4.0}" 8 | 9 | services: 10 | 11 | redis: 12 | <<: *client-libs-image 13 | container_name: redis-standalone 14 | environment: 15 | - TLS_ENABLED=yes 16 | - REDIS_CLUSTER=no 17 | - PORT=6379 18 | - TLS_PORT=6666 19 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --tls-auth-clients optional --save ""} 20 | ports: 21 | - 6379:6379 22 | - 6666:6666 # TLS port 23 | volumes: 24 | - "./dockers/standalone:/redis/work" 25 | profiles: 26 | - standalone 27 | - sentinel 28 | - replica 29 | - all-stack 30 | - all 31 | 32 | replica: 33 | <<: *client-libs-image 34 | container_name: redis-replica 35 | depends_on: 36 | - redis 37 | environment: 38 | - TLS_ENABLED=no 39 | - REDIS_CLUSTER=no 40 | - PORT=6380 41 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --replicaof redis 6379 --protected-mode no --save ""} 42 | ports: 43 | - 6380:6380 44 | volumes: 45 | - "./dockers/replica:/redis/work" 46 | profiles: 47 | - replica 48 | - all-stack 49 | - all 50 | 51 | cluster: 52 | <<: *client-libs-image 53 | container_name: redis-cluster 54 | environment: 55 | - REDIS_CLUSTER=yes 56 | - NODES=6 57 | - REPLICAS=1 58 | - TLS_ENABLED=yes 59 | - PORT=16379 60 | - TLS_PORT=27379 61 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --tls-auth-clients optional --save "" --tls-cluster yes} 62 | ports: 63 | - "16379-16384:16379-16384" 64 | - "27379-27384:27379-27384" 65 | volumes: 66 | - "./dockers/cluster:/redis/work" 67 | profiles: 68 | - cluster 69 | - all-stack 70 | - all 71 | 72 | sentinel: 73 | <<: *client-libs-image 74 | container_name: redis-sentinel 75 | depends_on: 76 | - redis 77 | environment: 78 | - REDIS_CLUSTER=no 79 | - NODES=3 80 | - PORT=26379 81 | command: ${REDIS_EXTRA_ARGS:---sentinel} 82 | ports: 83 | - 26379:26379 84 | - 26380:26380 85 | - 26381:26381 86 | volumes: 87 | - "./dockers/sentinel.conf:/redis/config-default/redis.conf" 88 | - "./dockers/sentinel:/redis/work" 89 | profiles: 90 | - sentinel 91 | - all-stack 92 | - all 93 | 94 | redis-stack: 95 | <<: *client-libs-stack-image 96 | container_name: redis-stack 97 | environment: 98 | - REDIS_CLUSTER=no 99 | - PORT=6379 100 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --save ""} 101 | ports: 102 | - 6479:6379 103 | volumes: 104 | - "./dockers/redis-stack:/redis/work" 105 | profiles: 106 | - standalone 107 | - all-stack 108 | - all 109 | -------------------------------------------------------------------------------- /doctests/dt_ss.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: ss_tutorial 2 | # HIDE_START 3 | """ 4 | Code samples for Sorted set doc pages: 5 | https://redis.io/docs/latest/develop/data-types/sorted-sets/ 6 | """ 7 | 8 | import redis 
9 | 10 | r = redis.Redis(decode_responses=True) 11 | # HIDE_END 12 | 13 | # REMOVE_START 14 | r.delete("racer_scores") 15 | # REMOVE_END 16 | 17 | # STEP_START zadd 18 | res1 = r.zadd("racer_scores", {"Norem": 10}) 19 | print(res1) # >>> 1 20 | 21 | res2 = r.zadd("racer_scores", {"Castilla": 12}) 22 | print(res2) # >>> 1 23 | 24 | res3 = r.zadd( 25 | "racer_scores", 26 | {"Sam-Bodden": 8, "Royce": 10, "Ford": 6, "Prickett": 14, "Castilla": 12}, 27 | ) 28 | print(res3) # >>> 4 29 | # STEP_END 30 | 31 | # REMOVE_START 32 | assert r.zcard("racer_scores") == 6 33 | # REMOVE_END 34 | 35 | # STEP_START zrange 36 | res4 = r.zrange("racer_scores", 0, -1) 37 | print(res4) # >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce', 'Castilla', 'Prickett'] 38 | 39 | res5 = r.zrevrange("racer_scores", 0, -1) 40 | print(res5) # >>> ['Prickett', 'Castilla', 'Royce', 'Norem', 'Sam-Bodden', 'Ford'] 41 | # STEP_END 42 | 43 | # STEP_START zrange_withscores 44 | res6 = r.zrange("racer_scores", 0, -1, withscores=True) 45 | print( 46 | res6 47 | ) 48 | # >>> [ 49 | # ('Ford', 6.0), ('Sam-Bodden', 8.0), ('Norem', 10.0), ('Royce', 10.0), 50 | # ('Castilla', 12.0), ('Prickett', 14.0) 51 | # ] 52 | # STEP_END 53 | 54 | # STEP_START zrangebyscore 55 | res7 = r.zrangebyscore("racer_scores", "-inf", 10) 56 | print(res7) # >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce'] 57 | # STEP_END 58 | 59 | # STEP_START zremrangebyscore 60 | res8 = r.zrem("racer_scores", "Castilla") 61 | print(res8) # >>> 1 62 | 63 | res9 = r.zremrangebyscore("racer_scores", "-inf", 9) 64 | print(res9) # >>> 2 65 | 66 | res10 = r.zrange("racer_scores", 0, -1) 67 | print(res10) # >>> ['Norem', 'Royce', 'Prickett'] 68 | # STEP_END 69 | 70 | # REMOVE_START 71 | assert r.zcard("racer_scores") == 3 72 | # REMOVE_END 73 | 74 | # STEP_START zrank 75 | res11 = r.zrank("racer_scores", "Norem") 76 | print(res11) # >>> 0 77 | 78 | res12 = r.zrevrank("racer_scores", "Norem") 79 | print(res12) # >>> 2 80 | # STEP_END 81 | 82 | # STEP_START zadd_lex 83 | res13 = r.zadd( 84 | "racer_scores", 85 | { 86 | "Norem": 0, 87 | "Sam-Bodden": 0, 88 | "Royce": 0, 89 | "Ford": 0, 90 | "Prickett": 0, 91 | "Castilla": 0, 92 | }, 93 | ) 94 | print(res13) # >>> 3 95 | 96 | res14 = r.zrange("racer_scores", 0, -1) 97 | print(res14) # >>> ['Castilla', 'Ford', 'Norem', 'Prickett', 'Royce', 'Sam-Bodden'] 98 | 99 | res15 = r.zrangebylex("racer_scores", "[A", "[L") 100 | print(res15) # >>> ['Castilla', 'Ford'] 101 | # STEP_END 102 | 103 | # STEP_START leaderboard 104 | res16 = r.zadd("racer_scores", {"Wood": 100}) 105 | print(res16) # >>> 1 106 | 107 | res17 = r.zadd("racer_scores", {"Henshaw": 100}) 108 | print(res17) # >>> 1 109 | 110 | res18 = r.zadd("racer_scores", {"Henshaw": 150}) 111 | print(res18) # >>> 0 112 | 113 | res19 = r.zincrby("racer_scores", 50, "Wood") 114 | print(res19) # >>> 150.0 115 | 116 | res20 = r.zincrby("racer_scores", 50, "Henshaw") 117 | print(res20) # >>> 200.0 118 | # STEP_END 119 | -------------------------------------------------------------------------------- /redis/multidb/event.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from redis.client import Redis 4 | from redis.event import EventListenerInterface, OnCommandsFailEvent 5 | from redis.multidb.database import SyncDatabase 6 | from redis.multidb.failure_detector import FailureDetector 7 | 8 | 9 | class ActiveDatabaseChanged: 10 | """ 11 | Event fired when an active database has been changed. 
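Listeners receive the old and the new database plus the command executor, so state such as pub/sub subscriptions and pooled connections can be migrated from one database to the other.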
12 | """ 13 | 14 | def __init__( 15 | self, 16 | old_database: SyncDatabase, 17 | new_database: SyncDatabase, 18 | command_executor, 19 | **kwargs, 20 | ): 21 | self._old_database = old_database 22 | self._new_database = new_database 23 | self._command_executor = command_executor 24 | self._kwargs = kwargs 25 | 26 | @property 27 | def old_database(self) -> SyncDatabase: 28 | return self._old_database 29 | 30 | @property 31 | def new_database(self) -> SyncDatabase: 32 | return self._new_database 33 | 34 | @property 35 | def command_executor(self): 36 | return self._command_executor 37 | 38 | @property 39 | def kwargs(self): 40 | return self._kwargs 41 | 42 | 43 | class ResubscribeOnActiveDatabaseChanged(EventListenerInterface): 44 | """ 45 | Re-subscribe the currently active pub/sub to a new active database. 46 | """ 47 | 48 | def listen(self, event: ActiveDatabaseChanged): 49 | old_pubsub = event.command_executor.active_pubsub 50 | 51 | if old_pubsub is not None: 52 | # Re-assign old channels and patterns so they will be automatically subscribed on connection. 53 | new_pubsub = event.new_database.client.pubsub(**event.kwargs) 54 | new_pubsub.channels = old_pubsub.channels 55 | new_pubsub.patterns = old_pubsub.patterns 56 | new_pubsub.shard_channels = old_pubsub.shard_channels 57 | new_pubsub.on_connect(None) 58 | event.command_executor.active_pubsub = new_pubsub 59 | old_pubsub.close() 60 | 61 | 62 | class CloseConnectionOnActiveDatabaseChanged(EventListenerInterface): 63 | """ 64 | Close connection to the old active database. 65 | """ 66 | 67 | def listen(self, event: ActiveDatabaseChanged): 68 | event.old_database.client.close() 69 | 70 | if isinstance(event.old_database.client, Redis): 71 | event.old_database.client.connection_pool.update_active_connections_for_reconnect() 72 | event.old_database.client.connection_pool.disconnect() 73 | else: 74 | for node in event.old_database.client.nodes_manager.nodes_cache.values(): 75 | node.redis_connection.connection_pool.update_active_connections_for_reconnect() 76 | node.redis_connection.connection_pool.disconnect() 77 | 78 | 79 | class RegisterCommandFailure(EventListenerInterface): 80 | """ 81 | Event listener that registers command failures and passes them to the failure detectors. 82 | """ 83 | 84 | def __init__(self, failure_detectors: List[FailureDetector]): 85 | self._failure_detectors = failure_detectors 86 | 87 | def listen(self, event: OnCommandsFailEvent) -> None: 88 | for failure_detector in self._failure_detectors: 89 | failure_detector.register_failure(event.exception, event.commands) 90 | -------------------------------------------------------------------------------- /redis/commands/timeseries/info.py: -------------------------------------------------------------------------------- 1 | from ..helpers import nativestr 2 | from .utils import list_to_dict 3 | 4 | 5 | class TSInfo: 6 | """ 7 | Hold information and statistics on the time-series. 8 | Can be created using the ``tsinfo`` command 9 | https://redis.io/docs/latest/commands/ts.info/ 10 | """ 11 | 12 | rules = [] 13 | labels = [] 14 | sourceKey = None 15 | chunk_count = None 16 | memory_usage = None 17 | total_samples = None 18 | retention_msecs = None 19 | last_time_stamp = None 20 | first_time_stamp = None 21 | 22 | max_samples_per_chunk = None 23 | chunk_size = None 24 | duplicate_policy = None 25 | 26 | def __init__(self, args): 27 | """ 28 | Hold information and statistics on the time-series.
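A minimal usage sketch (the key name is illustrative):
    info = client.ts().info("sensor:temperature")
    print(info.total_samples, info.retention_msecs)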
29 | 30 | The supported params that can be passed as args: 31 | 32 | rules: 33 | A list of compaction rules of the time series. 34 | sourceKey: 35 | Key name for source time series in case the current series 36 | is a target of a rule. 37 | chunkCount: 38 | Number of Memory Chunks used for the time series. 39 | memoryUsage: 40 | Total number of bytes allocated for the time series. 41 | totalSamples: 42 | Total number of samples in the time series. 43 | labels: 44 | A list of label-value pairs that represent the metadata 45 | labels of the time series. 46 | retentionTime: 47 | Retention time, in milliseconds, for the time series. 48 | lastTimestamp: 49 | Last timestamp present in the time series. 50 | firstTimestamp: 51 | First timestamp present in the time series. 52 | maxSamplesPerChunk: 53 | Deprecated. 54 | chunkSize: 55 | Amount of memory, in bytes, allocated for data. 56 | duplicatePolicy: 57 | Policy that will define handling of duplicate samples. 58 | 59 | You can read more about it at 60 | https://redis.io/docs/latest/develop/data-types/timeseries/configuration/#duplicate_policy 61 | """ 62 | response = dict(zip(map(nativestr, args[::2]), args[1::2])) 63 | self.rules = response.get("rules") 64 | self.source_key = response.get("sourceKey") 65 | self.chunk_count = response.get("chunkCount") 66 | self.memory_usage = response.get("memoryUsage") 67 | self.total_samples = response.get("totalSamples") 68 | self.labels = list_to_dict(response.get("labels")) 69 | self.retention_msecs = response.get("retentionTime") 70 | self.last_timestamp = response.get("lastTimestamp") 71 | self.first_timestamp = response.get("firstTimestamp") 72 | if "maxSamplesPerChunk" in response: 73 | self.max_samples_per_chunk = response["maxSamplesPerChunk"] 74 | self.chunk_size = ( 75 | self.max_samples_per_chunk * 16 76 | ) # backward-compatible chunk size estimate 77 | if "chunkSize" in response: 78 | self.chunk_size = response["chunkSize"] 79 | if "duplicatePolicy" in response: 80 | self.duplicate_policy = response["duplicatePolicy"] 81 | if isinstance(self.duplicate_policy, bytes): 82 | self.duplicate_policy = self.duplicate_policy.decode() 83 | 84 | def get(self, item): 85 | try: 86 | return self.__getitem__(item) 87 | except AttributeError: 88 | return None 89 | 90 | def __getitem__(self, item): 91 | return getattr(self, item) 92 | -------------------------------------------------------------------------------- /doctests/query_ft.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_ft 2 | # HIDE_START 3 | import json 4 | import sys 5 | import redis 6 | from redis.commands.json.path import Path 7 | from redis.commands.search.field import TextField, NumericField, TagField 8 | from redis.commands.search.index_definition import IndexDefinition, IndexType 9 | from redis.commands.search.query import NumericFilter, Query 10 | 11 | r = redis.Redis(decode_responses=True) 12 | 13 | # create index 14 | schema = ( 15 | TextField("$.brand", as_name="brand"), 16 | TextField("$.model", as_name="model"), 17 | TextField("$.description", as_name="description"), 18 | ) 19 | 20 | index = r.ft("idx:bicycle") 21 | index.create_index( 22 | schema, 23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 24 | ) 25 | 26 | # load data 27 | with open("data/query_em.json") as f: 28 | bicycles = json.load(f) 29 | 30 | pipeline = r.pipeline(transaction=False) 31 | for bid, bicycle in enumerate(bicycles): 32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 33 |
pipeline.execute() 34 | # HIDE_END 35 | 36 | # STEP_START ft1 37 | res = index.search(Query("@description: kids")) 38 | print(res.total) 39 | # >>> 2 40 | # REMOVE_START 41 | assert res.total == 2 42 | # REMOVE_END 43 | # STEP_END 44 | 45 | # STEP_START ft2 46 | res = index.search(Query("@model: ka*")) 47 | print(res.total) 48 | # >>> 1 49 | # REMOVE_START 50 | assert res.total == 1 51 | # REMOVE_END 52 | # STEP_END 53 | 54 | # STEP_START ft3 55 | res = index.search(Query("@brand: *bikes")) 56 | print(res.total) 57 | # >>> 2 58 | # REMOVE_START 59 | assert res.total == 2 60 | # REMOVE_END 61 | # STEP_END 62 | 63 | # STEP_START ft4 64 | res = index.search(Query("%optamized%")) 65 | print(res) 66 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:3', 'payload': None, 'json': '{"pickup_zone":"POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))","store_location":"-80.1918,25.7617","brand":"Eva","model":"Eva 291","price":3400,"description":"The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It’s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!","condition":"used"}'}]} 67 | # REMOVE_START 68 | assert res.total == 1 69 | # REMOVE_END 70 | # STEP_END 71 | 72 | # STEP_START ft5 73 | res = index.search(Query("%%optamised%%")) 74 | print(res) 75 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:3', 'payload': None, 'json': '{"pickup_zone":"POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))","store_location":"-80.1918,25.7617","brand":"Eva","model":"Eva 291","price":3400,"description":"The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It’s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!","condition":"used"}'}]} 76 | # REMOVE_START 77 | assert res.total == 1 78 | # REMOVE_END 79 | # STEP_END 80 | 81 | # REMOVE_START 82 | # destroy index and data 83 | r.ft("idx:bicycle").dropindex(delete_documents=True) 84 | # REMOVE_END 85 | -------------------------------------------------------------------------------- /docs/retry.rst: -------------------------------------------------------------------------------- 1 | Retry Helpers 2 | ############# 3 | 4 | .. 
automodule:: redis.retry 5 | :members: 6 | 7 | 8 | Retry in Redis Standalone 9 | ************************** 10 | 11 | >>> from redis.backoff import ExponentialBackoff 12 | >>> from redis.retry import Retry 13 | >>> from redis.client import Redis 14 | >>> from redis.exceptions import ( 15 | >>> BusyLoadingError, 16 | >>> RedisError, 17 | >>> ) 18 | >>> 19 | >>> # Run 3 retries with exponential backoff strategy 20 | >>> retry = Retry(ExponentialBackoff(), 3) 21 | >>> # Redis client with retries on custom errors in addition to the errors 22 | >>> # that are already retried by default 23 | >>> r = Redis(host='localhost', port=6379, retry=retry, retry_on_error=[BusyLoadingError, RedisError]) 24 | 25 | As you can see from the example above, the Redis client supports two parameters to configure the retry behaviour: 26 | 27 | * ``retry``: :class:`~.Retry` instance with a :ref:`backoff-label` strategy and the max number of retries 28 | * The :class:`~.Retry` instance has a default set of :ref:`exceptions-label` to retry on, 29 | which can be overridden by passing a tuple with :ref:`exceptions-label` to the ``supported_errors`` parameter. 30 | * ``retry_on_error``: list of additional :ref:`exceptions-label` to retry on 31 | 32 | 33 | If no ``retry`` is provided, a default one is created with :class:`~.ExponentialWithJitterBackoff` as backoff strategy 34 | and 3 retries. 35 | 36 | 37 | Retry in Redis Cluster 38 | ************************** 39 | 40 | >>> from redis.backoff import ExponentialBackoff 41 | >>> from redis.retry import Retry 42 | >>> from redis.cluster import RedisCluster 43 | >>> 44 | >>> # Run 3 retries with exponential backoff strategy 45 | >>> retry = Retry(ExponentialBackoff(), 3) 46 | >>> # Redis Cluster client with retries 47 | >>> rc = RedisCluster(host='localhost', port=6379, retry=retry) 48 | 49 | Retry behaviour in Redis Cluster differs slightly from Standalone: 50 | 51 | * ``retry``: :class:`~.Retry` instance with a :ref:`backoff-label` strategy and the max number of retries, default value is ``Retry(ExponentialWithJitterBackoff(base=1, cap=10), cluster_error_retry_attempts)`` 52 | * ``cluster_error_retry_attempts``: number of times to retry before raising an error when :class:`~.TimeoutError`, :class:`~.ConnectionError`, :class:`~.ClusterDownError` or :class:`~.SlotNotCoveredError` are encountered, default value is ``3`` 53 | * This argument is deprecated - it is used to initialize the number of retries for the retry object, 54 | only in the case when the ``retry`` object is not provided. 55 | When the ``retry`` argument is provided, the ``cluster_error_retry_attempts`` argument is ignored! 56 | 57 | * The retry object is not yet fully utilized in the cluster client. 58 | The retry object is used only to determine the number of retries for the cluster-level calls. 59 | 60 | Let's consider the following example: 61 | 62 | >>> from redis.backoff import ExponentialBackoff 63 | >>> from redis.retry import Retry 64 | >>> from redis.cluster import RedisCluster 65 | >>> 66 | >>> rc = RedisCluster(host='localhost', port=6379, retry=Retry(ExponentialBackoff(), 6)) 67 | >>> rc.set('foo', 'bar') 68 | 69 | #. the client library calculates the hash slot for key 'foo'. 70 | #. given the hash slot, it then determines which node to connect to, in order to execute the command. 71 | #. during the connection, a :class:`~.ConnectionError` is raised. 72 | #.
because we set ``retry=Retry(ExponentialBackoff(), 6)``, the cluster client starts a cluster update, removes the failed node from the startup nodes, and re-initializes the cluster. 73 | #. the cluster client retries the command until it either succeeds or the max number of retries is reached. -------------------------------------------------------------------------------- /doctests/query_combined.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_combined 2 | # HIDE_START 3 | import json 4 | import numpy as np 5 | import redis 6 | import warnings 7 | from redis.commands.json.path import Path 8 | from redis.commands.search.field import NumericField, TagField, TextField, VectorField 9 | from redis.commands.search.index_definition import IndexDefinition, IndexType 10 | from redis.commands.search.query import Query 11 | from sentence_transformers import SentenceTransformer 12 | 13 | 14 | def embed_text(model, text): 15 | return np.array(model.encode(text)).astype(np.float32).tobytes() 16 | 17 | warnings.filterwarnings("ignore", category=FutureWarning, message=r".*clean_up_tokenization_spaces.*") 18 | model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2') 19 | query = "Bike for small kids" 20 | query_vector = embed_text(model, query) 21 | 22 | r = redis.Redis(decode_responses=True) 23 | 24 | # create index 25 | schema = ( 26 | TextField("$.description", no_stem=True, as_name="model"), 27 | TagField("$.condition", as_name="condition"), 28 | NumericField("$.price", as_name="price"), 29 | VectorField( 30 | "$.description_embeddings", 31 | "FLAT", 32 | { 33 | "TYPE": "FLOAT32", 34 | "DIM": 384, 35 | "DISTANCE_METRIC": "COSINE", 36 | }, 37 | as_name="vector", 38 | ), 39 | ) 40 | 41 | index = r.ft("idx:bicycle") 42 | index.create_index( 43 | schema, 44 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 45 | ) 46 | 47 | # load data 48 | with open("data/query_vector.json") as f: 49 | bicycles = json.load(f) 50 | 51 | pipeline = r.pipeline(transaction=False) 52 | for bid, bicycle in enumerate(bicycles): 53 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 54 | pipeline.execute() 55 | # HIDE_END 56 | 57 | # STEP_START combined1 58 | q = Query("@price:[500 1000] @condition:{new}") 59 | res = index.search(q) 60 | print(res.total) # >>> 1 61 | # REMOVE_START 62 | assert res.total == 1 63 | # REMOVE_END 64 | # STEP_END 65 | 66 | # STEP_START combined2 67 | q = Query("kids @price:[500 1000] @condition:{used}") 68 | res = index.search(q) 69 | print(res.total) # >>> 1 70 | # REMOVE_START 71 | assert res.total == 1 72 | # REMOVE_END 73 | # STEP_END 74 | 75 | # STEP_START combined3 76 | q = Query("(kids | small) @condition:{used}") 77 | res = index.search(q) 78 | print(res.total) # >>> 2 79 | # REMOVE_START 80 | assert res.total == 2 81 | # REMOVE_END 82 | # STEP_END 83 | 84 | # STEP_START combined4 85 | q = Query("@description:(kids | small) @condition:{used}") 86 | res = index.search(q) 87 | print(res.total) # >>> 0 88 | # REMOVE_START 89 | assert res.total == 0 90 | # REMOVE_END 91 | # STEP_END 92 | 93 | # STEP_START combined5 94 | q = Query("@description:(kids | small) @condition:{new | used}") 95 | res = index.search(q) 96 | print(res.total) # >>> 0 97 | # REMOVE_START 98 | assert res.total == 0 99 | # REMOVE_END 100 | # STEP_END 101 | 102 | # STEP_START combined6 103 | q = Query("@price:[500 1000] -@condition:{new}") 104 | res = index.search(q) 105 | print(res.total) # >>> 2 106 | # REMOVE_START 
107 | assert res.total == 2 108 | # REMOVE_END 109 | # STEP_END 110 | 111 | # STEP_START combined7 112 | q = Query("(@price:[500 1000] -@condition:{new})=>[KNN 3 @vector $query_vector]").dialect(2) 113 | # pass the query vector as a query parameter 114 | res = index.search(q, {"query_vector": query_vector}) 115 | print(res.total) # >>> 2 116 | # REMOVE_START 117 | assert res.total == 2 118 | # REMOVE_END 119 | # STEP_END 120 | 121 | # REMOVE_START 122 | # destroy index and data 123 | r.ft("idx:bicycle").dropindex(delete_documents=True) 124 | # REMOVE_END 125 | -------------------------------------------------------------------------------- /redis/commands/helpers.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import random 3 | import string 4 | from typing import Any, Iterable, List, Tuple 5 | 6 | import redis 7 | from redis.typing import KeysT, KeyT 8 | 9 | 10 | def list_or_args(keys: KeysT, args: Tuple[KeyT, ...]) -> List[KeyT]: 11 | # returns a single new list combining keys and args 12 | try: 13 | iter(keys) 14 | # a string or bytes instance can be iterated, but indicates 15 | # keys wasn't passed as a list 16 | if isinstance(keys, (bytes, str)): 17 | keys = [keys] 18 | else: 19 | keys = list(keys) 20 | except TypeError: 21 | keys = [keys] 22 | if args: 23 | keys.extend(args) 24 | return keys 25 | 26 | 27 | def nativestr(x): 28 | """Return the decoded binary string, or a string, depending on type.""" 29 | r = x.decode("utf-8", "replace") if isinstance(x, bytes) else x 30 | if r == "null": 31 | return 32 | return r 33 | 34 | 35 | def delist(x): 36 | """Given a list of binaries, return the stringified version.""" 37 | if x is None: 38 | return x 39 | return [nativestr(obj) for obj in x] 40 | 41 | 42 | def parse_to_list(response): 43 | """Optimistically parse the response to a list.""" 44 | res = [] 45 | 46 | special_values = {"infinity", "nan", "-infinity"} 47 | 48 | if response is None: 49 | return res 50 | 51 | for item in response: 52 | if item is None: 53 | res.append(None) 54 | continue 55 | try: 56 | item_str = nativestr(item) 57 | except TypeError: 58 | res.append(None) 59 | continue 60 | 61 | if isinstance(item_str, str) and item_str.lower() in special_values: 62 | res.append(item_str) # Keep as string 63 | else: 64 | try: 65 | res.append(int(item)) 66 | except ValueError: 67 | try: 68 | res.append(float(item)) 69 | except ValueError: 70 | res.append(item_str) 71 | 72 | return res 73 | 74 | 75 | def random_string(length=10): 76 | """ 77 | Return a random lowercase string of the given length. 78 | """ 79 | return "".join( # nosec 80 | random.choice(string.ascii_lowercase) for _ in range(length) 81 | ) 82 | 83 | 84 | def decode_dict_keys(obj): 85 | """Decode the keys of the given dictionary with utf-8.""" 86 | newobj = copy.copy(obj) 87 | for k in obj.keys(): 88 | if isinstance(k, bytes): 89 | newobj[k.decode("utf-8")] = newobj[k] 90 | newobj.pop(k) 91 | return newobj 92 | 93 | 94 | def get_protocol_version(client): 95 | if isinstance(client, (redis.Redis, redis.asyncio.Redis)): 96 | return client.connection_pool.connection_kwargs.get("protocol") 97 | elif isinstance(client, redis.cluster.AbstractRedisCluster): 98 | return client.nodes_manager.connection_kwargs.get("protocol") 99 | 100 | 101 | def at_most_one_value_set(iterable: Iterable[Any]): 102 | """ 103 | Checks that at most one of the values in the iterable is truthy. 104 | 105 | Args: 106 | iterable: An iterable of values to check.
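Values are evaluated with bool(), so any truthy object counts toward the limit, not only a literal True.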
107 | 108 | Returns: 109 | True if at most one value is truthy, False otherwise. 110 | 111 | Raises: 112 | May raise an error if the values in the iterable are not boolean-compatible, 113 | for example if a value's type implements 114 | __len__ or __bool__ and those methods raise an error. 115 | """ 116 | values = (bool(x) for x in iterable) 117 | return sum(values) <= 1 118 | -------------------------------------------------------------------------------- /benchmarks/command_packer_benchmark.py: -------------------------------------------------------------------------------- 1 | from base import Benchmark 2 | 3 | from redis.connection import SYM_CRLF, SYM_DOLLAR, SYM_EMPTY, SYM_STAR, Connection 4 | 5 | 6 | class StringJoiningConnection(Connection): 7 | def send_packed_command(self, command, check_health=True): 8 | "Send an already packed command to the Redis server" 9 | if not self._sock: 10 | self.connect() 11 | try: 12 | self._sock.sendall(command) 13 | except OSError as e: 14 | self.disconnect() 15 | if len(e.args) == 1: 16 | _errno, errmsg = "UNKNOWN", e.args[0] 17 | else: 18 | _errno, errmsg = e.args 19 | raise ConnectionError(f"Error {_errno} while writing to socket. {errmsg}.") 20 | except Exception: 21 | self.disconnect() 22 | raise 23 | 24 | def pack_command(self, *args): 25 | "Pack a series of arguments into a valid Redis command" 26 | args_output = SYM_EMPTY.join( 27 | [ 28 | SYM_EMPTY.join( 29 | (SYM_DOLLAR, str(len(k)).encode(), SYM_CRLF, k, SYM_CRLF) 30 | ) 31 | for k in map(self.encoder.encode, args) 32 | ] 33 | ) 34 | output = SYM_EMPTY.join( 35 | (SYM_STAR, str(len(args)).encode(), SYM_CRLF, args_output) 36 | ) 37 | return output 38 | 39 | 40 | class ListJoiningConnection(Connection): 41 | def send_packed_command(self, command, check_health=True): 42 | if not self._sock: 43 | self.connect() 44 | try: 45 | if isinstance(command, str): 46 | command = [command] 47 | for item in command: 48 | self._sock.sendall(item) 49 | except OSError as e: 50 | self.disconnect() 51 | if len(e.args) == 1: 52 | _errno, errmsg = "UNKNOWN", e.args[0] 53 | else: 54 | _errno, errmsg = e.args 55 | raise ConnectionError(f"Error {_errno} while writing to socket.
{errmsg}.") 56 | except Exception: 57 | self.disconnect() 58 | raise 59 | 60 | def pack_command(self, *args): 61 | output = [] 62 | buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF)) 63 | 64 | for k in map(self.encoder.encode, args): 65 | if len(buff) > 6000 or len(k) > 6000: 66 | buff = SYM_EMPTY.join( 67 | (buff, SYM_DOLLAR, str(len(k)).encode(), SYM_CRLF) 68 | ) 69 | output.append(buff) 70 | output.append(k) 71 | buff = SYM_CRLF 72 | else: 73 | buff = SYM_EMPTY.join( 74 | (buff, SYM_DOLLAR, str(len(k)).encode(), SYM_CRLF, k, SYM_CRLF) 75 | ) 76 | output.append(buff) 77 | return output 78 | 79 | 80 | class CommandPackerBenchmark(Benchmark): 81 | ARGUMENTS = ( 82 | { 83 | "name": "connection_class", 84 | "values": [StringJoiningConnection, ListJoiningConnection], 85 | }, 86 | { 87 | "name": "value_size", 88 | "values": [10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000], 89 | }, 90 | ) 91 | 92 | def setup(self, connection_class, value_size): 93 | self.get_client(connection_class=connection_class) 94 | 95 | def run(self, connection_class, value_size): 96 | r = self.get_client() 97 | x = "a" * value_size 98 | r.set("benchmark", x) 99 | 100 | 101 | if __name__ == "__main__": 102 | CommandPackerBenchmark().run_benchmark() 103 | -------------------------------------------------------------------------------- /tests/test_encoding.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import redis 3 | 4 | from .conftest import _get_client 5 | 6 | 7 | class TestEncoding: 8 | @pytest.fixture() 9 | def r(self, request): 10 | return _get_client(redis.Redis, request=request, decode_responses=True) 11 | 12 | @pytest.fixture() 13 | def r_no_decode(self, request): 14 | return _get_client(redis.Redis, request=request, decode_responses=False) 15 | 16 | def test_simple_encoding(self, r_no_decode): 17 | unicode_string = chr(3456) + "abcd" + chr(3421) 18 | r_no_decode["unicode-string"] = unicode_string.encode("utf-8") 19 | cached_val = r_no_decode["unicode-string"] 20 | assert isinstance(cached_val, bytes) 21 | assert unicode_string == cached_val.decode("utf-8") 22 | 23 | def test_simple_encoding_and_decoding(self, r): 24 | unicode_string = chr(3456) + "abcd" + chr(3421) 25 | r["unicode-string"] = unicode_string 26 | cached_val = r["unicode-string"] 27 | assert isinstance(cached_val, str) 28 | assert unicode_string == cached_val 29 | 30 | def test_memoryview_encoding(self, r_no_decode): 31 | unicode_string = chr(3456) + "abcd" + chr(3421) 32 | unicode_string_view = memoryview(unicode_string.encode("utf-8")) 33 | r_no_decode["unicode-string-memoryview"] = unicode_string_view 34 | cached_val = r_no_decode["unicode-string-memoryview"] 35 | # The cached value won't be a memoryview because it's a copy from Redis 36 | assert isinstance(cached_val, bytes) 37 | assert unicode_string == cached_val.decode("utf-8") 38 | 39 | def test_memoryview_encoding_and_decoding(self, r): 40 | unicode_string = chr(3456) + "abcd" + chr(3421) 41 | unicode_string_view = memoryview(unicode_string.encode("utf-8")) 42 | r["unicode-string-memoryview"] = unicode_string_view 43 | cached_val = r["unicode-string-memoryview"] 44 | assert isinstance(cached_val, str) 45 | assert unicode_string == cached_val 46 | 47 | def test_list_encoding(self, r): 48 | unicode_string = chr(3456) + "abcd" + chr(3421) 49 | result = [unicode_string, unicode_string, unicode_string] 50 | r.rpush("a", *result) 51 | assert r.lrange("a", 0, -1) == result 52 | 53 | 54 | class 
TestEncodingErrors: 55 | def test_ignore(self, request): 56 | r = _get_client( 57 | redis.Redis, 58 | request=request, 59 | decode_responses=True, 60 | encoding_errors="ignore", 61 | ) 62 | r.set("a", b"foo\xff") 63 | assert r.get("a") == "foo" 64 | 65 | def test_replace(self, request): 66 | r = _get_client( 67 | redis.Redis, 68 | request=request, 69 | decode_responses=True, 70 | encoding_errors="replace", 71 | ) 72 | r.set("a", b"foo\xff") 73 | assert r.get("a") == "foo\ufffd" 74 | 75 | 76 | class TestCommandsAreNotEncoded: 77 | @pytest.fixture() 78 | def r(self, request): 79 | return _get_client(redis.Redis, request=request, encoding="utf-8") 80 | 81 | def test_basic_command(self, r): 82 | r.set("hello", "world") 83 | 84 | 85 | class TestInvalidUserInput: 86 | def test_boolean_fails(self, r): 87 | with pytest.raises(redis.DataError): 88 | r.set("a", True) 89 | 90 | def test_none_fails(self, r): 91 | with pytest.raises(redis.DataError): 92 | r.set("a", None) 93 | 94 | def test_user_type_fails(self, r): 95 | class Foo: 96 | def __str__(self): 97 | return "Foo" 98 | 99 | with pytest.raises(redis.DataError): 100 | r.set("a", Foo()) 101 | -------------------------------------------------------------------------------- /docs/redismodules.rst: -------------------------------------------------------------------------------- 1 | Redis Modules Commands 2 | ###################### 3 | 4 | Accessing redis module commands requires the installation of the supported `Redis module `_. For a quick start with redis modules, try the `Redismod docker `_. 5 | 6 | 7 | RedisBloom Commands 8 | ******************* 9 | 10 | These are the commands for interacting with the `RedisBloom module `_. Below is a brief example, as well as documentation on the commands themselves. 11 | 12 | **Create and add to a bloom filter** 13 | 14 | .. code-block:: python 15 | 16 | import redis 17 | r = redis.Redis() 18 | r.bf().create("bloom", 0.01, 1000) 19 | r.bf().add("bloom", "foo") 20 | 21 | **Create and add to a cuckoo filter** 22 | 23 | .. code-block:: python 24 | 25 | import redis 26 | r = redis.Redis() 27 | r.cf().create("cuckoo", 1000) 28 | r.cf().add("cuckoo", "filter") 29 | 30 | **Create Count-Min Sketch and get information** 31 | 32 | .. code-block:: python 33 | 34 | import redis 35 | r = redis.Redis() 36 | r.cms().initbydim("dim", 1000, 5) 37 | r.cms().incrby("dim", ["foo"], [5]) 38 | r.cms().info("dim") 39 | 40 | **Create a topk list, and access the results** 41 | 42 | .. code-block:: python 43 | 44 | import redis 45 | r = redis.Redis() 46 | r.topk().reserve("mytopk", 3, 50, 4, 0.9) 47 | r.topk().info("mytopk") 48 | 49 | .. automodule:: redis.commands.bf.commands 50 | :members: BFCommands, CFCommands, CMSCommands, TOPKCommands 51 | 52 | ------ 53 | 54 | RedisJSON Commands 55 | ****************** 56 | 57 | These are the commands for interacting with the `RedisJSON module `_. Below is a brief example, as well as documentation on the commands themselves. 58 | 59 | **Create a json object** 60 | 61 | .. code-block:: python 62 | 63 | import redis 64 | r = redis.Redis() 65 | r.json().set("mykey", ".", {"hello": "world", "i am": ["a", "json", "object!"]}) 66 | 67 | Examples of how to combine search and json can be found `here `_. 68 | 69 | .. automodule:: redis.commands.json.commands 70 | :members: JSONCommands 71 | 72 | ----- 73 | 74 | RediSearch Commands 75 | ******************* 76 | 77 | These are the commands for interacting with the `RediSearch module `_. 
Below is a brief example, as well as documentation on the commands themselves. In the example 78 | below, an index named *my_index* is being created. When an index name is not specified, an index named *idx* is created. 79 | 80 | **Create a search index, and display its information** 81 | 82 | .. code-block:: python 83 | 84 | import redis 85 | from redis.commands.search.field import TextField 86 | 87 | r = redis.Redis() 88 | index_name = "my_index" 89 | schema = ( 90 | TextField("play", weight=5.0), 91 | TextField("ball"), 92 | ) 93 | r.ft(index_name).create_index(schema) 94 | print(r.ft(index_name).info()) 95 | 96 | 97 | .. automodule:: redis.commands.search.commands 98 | :members: SearchCommands 99 | 100 | ----- 101 | 102 | RedisTimeSeries Commands 103 | ************************ 104 | 105 | These are the commands for interacting with the `RedisTimeSeries module `_. Below is a brief example, as well as documentation on the commands themselves. 106 | 107 | 108 | **Create a timeseries object with 5 second retention** 109 | 110 | .. code-block:: python 111 | 112 | import redis 113 | r = redis.Redis() 114 | r.ts().create(2, retention_msecs=5000) 115 | 116 | .. automodule:: redis.commands.timeseries.commands 117 | :members: TimeSeriesCommands 118 | 119 | 120 | -------------------------------------------------------------------------------- /docs/_static/logo-redis.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /doctests/query_agg.py: -------------------------------------------------------------------------------- 1 | # EXAMPLE: query_agg 2 | # HIDE_START 3 | import json 4 | import redis 5 | from redis.commands.json.path import Path 6 | from redis.commands.search import Search 7 | from redis.commands.search.aggregation import AggregateRequest 8 | from redis.commands.search.field import NumericField, TagField 9 | from redis.commands.search.index_definition import IndexDefinition, IndexType 10 | import redis.commands.search.reducers as reducers 11 | 12 | r = redis.Redis(decode_responses=True) 13 | 14 | # create index 15 | schema = ( 16 | TagField("$.condition", as_name="condition"), 17 | NumericField("$.price", as_name="price"), 18 | ) 19 | 20 | index = r.ft("idx:bicycle") 21 | index.create_index( 22 | schema, 23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON), 24 | ) 25 | 26 | # load data 27 | with open("data/query_em.json") as f: 28 | bicycles = json.load(f) 29 | 30 | pipeline = r.pipeline(transaction=False) 31 | for bid, bicycle in enumerate(bicycles): 32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle) 33 | pipeline.execute() 34 | # HIDE_END 35 | 36 | # STEP_START agg1 37 | search = Search(r, index_name="idx:bicycle") 38 | aggregate_request = AggregateRequest(query='@condition:{new}') \ 39 | .load('__key', 'price') \ 40 | .apply(discounted='@price - (@price * 0.1)') 41 | res = search.aggregate(aggregate_request) 42 | print(len(res.rows)) # >>> 5 43 | print(res.rows) # >>> [['__key', 'bicycle:0', ... 
44 | #[['__key', 'bicycle:0', 'price', '270', 'discounted', '243'], 45 | # ['__key', 'bicycle:5', 'price', '810', 'discounted', '729'], 46 | # ['__key', 'bicycle:6', 'price', '2300', 'discounted', '2070'], 47 | # ['__key', 'bicycle:7', 'price', '430', 'discounted', '387'], 48 | # ['__key', 'bicycle:8', 'price', '1200', 'discounted', '1080']] 49 | # REMOVE_START 50 | assert len(res.rows) == 5 51 | # REMOVE_END 52 | # STEP_END 53 | 54 | # STEP_START agg2 55 | search = Search(r, index_name="idx:bicycle") 56 | aggregate_request = AggregateRequest(query='*') \ 57 | .load('price') \ 58 | .apply(price_category='@price<1000') \ 59 | .group_by('@condition', reducers.sum('@price_category').alias('num_affordable')) 60 | res = search.aggregate(aggregate_request) 61 | print(len(res.rows)) # >>> 3 62 | print(res.rows) # >>> 63 | #[['condition', 'refurbished', 'num_affordable', '1'], 64 | # ['condition', 'used', 'num_affordable', '1'], 65 | # ['condition', 'new', 'num_affordable', '3']] 66 | # REMOVE_START 67 | assert len(res.rows) == 3 68 | # REMOVE_END 69 | # STEP_END 70 | 71 | # STEP_START agg3 72 | search = Search(r, index_name="idx:bicycle") 73 | aggregate_request = AggregateRequest(query='*') \ 74 | .apply(type="'bicycle'") \ 75 | .group_by('@type', reducers.count().alias('num_total')) 76 | res = search.aggregate(aggregate_request) 77 | print(len(res.rows)) # >>> 1 78 | print(res.rows) # >>> [['type', 'bicycle', 'num_total', '10']] 79 | # REMOVE_START 80 | assert len(res.rows) == 1 81 | # REMOVE_END 82 | # STEP_END 83 | 84 | # STEP_START agg4 85 | search = Search(r, index_name="idx:bicycle") 86 | aggregate_request = AggregateRequest(query='*') \ 87 | .load('__key') \ 88 | .group_by('@condition', reducers.tolist('__key').alias('bicycles')) 89 | res = search.aggregate(aggregate_request) 90 | print(len(res.rows)) # >>> 3 91 | print(res.rows) # >>> 92 | #[['condition', 'refurbished', 'bicycles', ['bicycle:9']], 93 | # ['condition', 'used', 'bicycles', ['bicycle:1', 'bicycle:2', 'bicycle:3', 'bicycle:4']], 94 | # ['condition', 'new', 'bicycles', ['bicycle:5', 'bicycle:6', 'bicycle:7', 'bicycle:0', 'bicycle:8']]] 95 | # REMOVE_START 96 | assert len(res.rows) == 3 97 | # REMOVE_END 98 | # STEP_END 99 | 100 | # REMOVE_START 101 | # destroy index and data 102 | r.ft("idx:bicycle").dropindex(delete_documents=True) 103 | # REMOVE_END 104 | --------------------------------------------------------------------------------