├── .coveragerc
├── .dockerignore
├── .github
├── CODEOWNERS
├── ISSUE_TEMPLATE.md
├── PULL_REQUEST_TEMPLATE.md
├── actions
│ └── run-tests
│ │ └── action.yml
├── dependabot.yml
├── release-drafter-config.yml
├── spellcheck-settings.yml
├── wordlist.txt
└── workflows
│ ├── codeql-analysis.yml
│ ├── docs.yaml
│ ├── hiredis-py-integration.yaml
│ ├── install_and_test.sh
│ ├── integration.yaml
│ ├── pypi-publish.yaml
│ ├── release-drafter.yml
│ ├── spellcheck.yml
│ └── stale-issues.yml
├── .gitignore
├── .mypy.ini
├── .readthedocs.yml
├── CHANGES
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── benchmarks
├── __init__.py
├── base.py
├── basic_operations.py
├── cluster_async.py
├── cluster_async_pipeline.py
├── command_packer_benchmark.py
└── socket_read_size.py
├── codecov.yml
├── dev_requirements.txt
├── docker-compose.yml
├── dockers
└── sentinel.conf
├── docs
├── Makefile
├── _static
│ ├── .keep
│ └── logo-redis.svg
├── _templates
│ └── .keep
├── advanced_features.rst
├── backoff.rst
├── clustering.rst
├── commands.rst
├── conf.py
├── connections.rst
├── examples.rst
├── examples
│ ├── README.md
│ ├── asyncio_examples.ipynb
│ ├── connection_examples.ipynb
│ ├── opentelemetry
│ │ ├── README.md
│ │ ├── config
│ │ │ ├── alertmanager.yml
│ │ │ ├── otel-collector.yaml
│ │ │ └── vector.toml
│ │ ├── docker-compose.yml
│ │ ├── image
│ │ │ └── redis-py-trace.png
│ │ ├── main.py
│ │ ├── requirements.txt
│ │ └── uptrace.yml
│ ├── opentelemetry_api_examples.ipynb
│ ├── pipeline_examples.ipynb
│ ├── redis-stream-example.ipynb
│ ├── search_json_examples.ipynb
│ ├── search_vector_similarity_examples.ipynb
│ ├── set_and_get_examples.ipynb
│ ├── ssl_connection_examples.ipynb
│ └── timeseries_examples.ipynb
├── exceptions.rst
├── genindex.rst
├── images
│ └── opentelemetry
│ │ ├── distributed-tracing.png
│ │ ├── redis-metrics.png
│ │ ├── redis-py-trace.png
│ │ └── tree-of-spans.png
├── index.rst
├── lock.rst
├── lua_scripting.rst
├── opentelemetry.rst
├── redismodules.rst
├── requirements.txt
├── resp3_features.rst
└── retry.rst
├── doctests
├── README.md
├── cmds_cnxmgmt.py
├── cmds_generic.py
├── cmds_hash.py
├── cmds_list.py
├── cmds_servermgmt.py
├── cmds_set.py
├── cmds_sorted_set.py
├── cmds_string.py
├── data
│ ├── query_em.json
│ └── query_vector.json
├── dt_bitfield.py
├── dt_bitmap.py
├── dt_bloom.py
├── dt_cms.py
├── dt_cuckoo.py
├── dt_geo.py
├── dt_hash.py
├── dt_hll.py
├── dt_json.py
├── dt_list.py
├── dt_set.py
├── dt_ss.py
├── dt_stream.py
├── dt_string.py
├── dt_tdigest.py
├── dt_topk.py
├── dt_vec_set.py
├── geo_index.py
├── home_json.py
├── query_agg.py
├── query_combined.py
├── query_em.py
├── query_ft.py
├── query_geo.py
├── query_range.py
├── requirements.txt
├── run_examples.sh
├── search_quickstart.py
├── search_vss.py
├── string_set_get.py
└── trans_pipe.py
├── pyproject.toml
├── redis
├── __init__.py
├── _parsers
│ ├── __init__.py
│ ├── base.py
│ ├── commands.py
│ ├── encoders.py
│ ├── helpers.py
│ ├── hiredis.py
│ ├── resp2.py
│ ├── resp3.py
│ └── socket.py
├── asyncio
│ ├── __init__.py
│ ├── client.py
│ ├── cluster.py
│ ├── connection.py
│ ├── lock.py
│ ├── retry.py
│ ├── sentinel.py
│ └── utils.py
├── auth
│ ├── __init__.py
│ ├── err.py
│ ├── idp.py
│ ├── token.py
│ └── token_manager.py
├── backoff.py
├── cache.py
├── client.py
├── cluster.py
├── commands
│ ├── __init__.py
│ ├── bf
│ │ ├── __init__.py
│ │ ├── commands.py
│ │ └── info.py
│ ├── cluster.py
│ ├── core.py
│ ├── helpers.py
│ ├── json
│ │ ├── __init__.py
│ │ ├── _util.py
│ │ ├── commands.py
│ │ ├── decoders.py
│ │ └── path.py
│ ├── redismodules.py
│ ├── search
│ │ ├── __init__.py
│ │ ├── _util.py
│ │ ├── aggregation.py
│ │ ├── commands.py
│ │ ├── dialect.py
│ │ ├── document.py
│ │ ├── field.py
│ │ ├── index_definition.py
│ │ ├── profile_information.py
│ │ ├── query.py
│ │ ├── querystring.py
│ │ ├── reducers.py
│ │ ├── result.py
│ │ └── suggestion.py
│ ├── sentinel.py
│ ├── timeseries
│ │ ├── __init__.py
│ │ ├── commands.py
│ │ ├── info.py
│ │ └── utils.py
│ └── vectorset
│ │ ├── __init__.py
│ │ ├── commands.py
│ │ └── utils.py
├── connection.py
├── crc.py
├── credentials.py
├── event.py
├── exceptions.py
├── lock.py
├── ocsp.py
├── py.typed
├── retry.py
├── sentinel.py
├── typing.py
└── utils.py
├── tasks.py
├── tests
├── __init__.py
├── conftest.py
├── entraid_utils.py
├── mocks.py
├── ssl_utils.py
├── test_asyncio
│ ├── __init__.py
│ ├── compat.py
│ ├── conftest.py
│ ├── mocks.py
│ ├── test_bloom.py
│ ├── test_cluster.py
│ ├── test_cluster_transaction.py
│ ├── test_commands.py
│ ├── test_connect.py
│ ├── test_connection.py
│ ├── test_connection_pool.py
│ ├── test_credentials.py
│ ├── test_cwe_404.py
│ ├── test_encoding.py
│ ├── test_hash.py
│ ├── test_json.py
│ ├── test_lock.py
│ ├── test_monitor.py
│ ├── test_pipeline.py
│ ├── test_pubsub.py
│ ├── test_retry.py
│ ├── test_scripting.py
│ ├── test_search.py
│ ├── test_sentinel.py
│ ├── test_sentinel_managed_connection.py
│ ├── test_ssl.py
│ ├── test_timeseries.py
│ ├── test_utils.py
│ ├── test_vsets.py
│ └── testdata
│ │ ├── jsontestdata.py
│ │ ├── titles.csv
│ │ └── will_play_text.csv.bz2
├── test_auth
│ ├── __init__.py
│ ├── test_token.py
│ └── test_token_manager.py
├── test_backoff.py
├── test_bloom.py
├── test_cache.py
├── test_cluster.py
├── test_cluster_transaction.py
├── test_command_parser.py
├── test_commands.py
├── test_connect.py
├── test_connection.py
├── test_connection_pool.py
├── test_credentials.py
├── test_encoding.py
├── test_function.py
├── test_hash.py
├── test_helpers.py
├── test_json.py
├── test_lock.py
├── test_monitor.py
├── test_multiprocessing.py
├── test_parsers
│ └── test_helpers.py
├── test_pipeline.py
├── test_pubsub.py
├── test_retry.py
├── test_scripting.py
├── test_search.py
├── test_sentinel.py
├── test_ssl.py
├── test_timeseries.py
├── test_utils.py
├── test_vsets.py
└── testdata
│ ├── jsontestdata.py
│ ├── titles.csv
│ └── will_play_text.csv.bz2
├── util
└── wait-for-it.sh
└── whitelist.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = redis
3 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | **/__pycache__
2 | **/*.pyc
3 | .coverage
4 | .coverage.*
5 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | doctests/* @dmaier-redislabs
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | Thanks for wanting to report an issue you've found in redis-py. Please delete this text and fill in the template below.
2 | It is of course not always possible to reduce your code to a small test case, but it's highly appreciated to have as much data as possible. Thank you!
3 |
4 | **Version**: What redis-py and what redis version is the issue happening on?
5 |
6 | **Platform**: What platform / version? (For example Python 3.5.1 on Windows 7 / Ubuntu 15.10 / Azure)
7 |
8 | **Description**: Description of your issue, stack traces from errors and code that reproduces the issue
9 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Pull Request check-list
2 |
3 | _Please make sure to review and check all of these items:_
4 |
5 | - [ ] Do tests and lints pass with this change?
6 | - [ ] Do the CI tests pass with this change (enable it first in your forked repo and wait for the github action build to finish)?
7 | - [ ] Is the new or changed code fully tested?
8 | - [ ] Is a documentation update included (if this change modifies existing APIs, or introduces new ones)?
9 | - [ ] Is there an example added to the examples folder (if applicable)?
10 |
11 | _NOTE: these things are not required to open a PR and can be done
12 | afterwards / while the PR is open._
13 |
14 | ### Description of change
15 |
16 | _Please provide a description of the change here._
17 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | labels:
6 | - "maintenance"
7 | schedule:
8 | interval: "monthly"
9 |
--------------------------------------------------------------------------------
/.github/release-drafter-config.yml:
--------------------------------------------------------------------------------
1 | name-template: '$NEXT_MINOR_VERSION'
2 | tag-template: 'v$NEXT_MINOR_VERSION'
3 | filter-by-commitish: true
4 | commitish: master
5 | autolabeler:
6 | - label: 'maintenance'
7 | files:
8 | - '*.md'
9 | - '.github/*'
10 | - label: 'bug'
11 | branch:
12 | - '/bug-.+'
13 | - label: 'maintenance'
14 | branch:
15 | - '/maintenance-.+'
16 | - label: 'feature'
17 | branch:
18 | - '/feature-.+'
19 | categories:
20 | - title: '🔥 Breaking Changes'
21 | labels:
22 | - 'breakingchange'
23 | - title: '🧪 Experimental Features'
24 | labels:
25 | - 'experimental'
26 | - title: '🚀 New Features'
27 | labels:
28 | - 'feature'
29 | - 'enhancement'
30 | - title: '🐛 Bug Fixes'
31 | labels:
32 | - 'fix'
33 | - 'bugfix'
34 | - 'bug'
35 | - 'BUG'
36 | - title: '🧰 Maintenance'
37 | labels:
38 | - 'maintenance'
39 | - 'dependencies'
40 | - 'documentation'
41 | - 'docs'
42 | - 'testing'
43 | change-template: '- $TITLE (#$NUMBER)'
44 | exclude-labels:
45 | - 'skip-changelog'
46 | template: |
47 | # Changes
48 |
49 | $CHANGES
50 |
51 | ## Contributors
52 | We'd like to thank all the contributors who worked on this release!
53 |
54 | $CONTRIBUTORS
55 |
56 |
--------------------------------------------------------------------------------
/.github/spellcheck-settings.yml:
--------------------------------------------------------------------------------
1 | matrix:
2 | - name: Markdown
3 | expect_match: false
4 | aspell:
5 | lang: en
6 | d: en_US
7 | ignore-case: true
8 | dictionary:
9 | wordlists:
10 | - .github/wordlist.txt
11 | output: wordlist.dic
12 | pipeline:
13 | - pyspelling.filters.markdown:
14 | markdown_extensions:
15 | - markdown.extensions.extra:
16 | - pyspelling.filters.html:
17 | comments: false
18 | attributes:
19 | - alt
20 | ignores:
21 | - ':matches(code, pre)'
22 | - code
23 | - pre
24 | - blockquote
25 | - img
26 | sources:
27 | - '*.md'
28 | - 'docs/*.rst'
29 | - 'docs/*.ipynb'
30 |
--------------------------------------------------------------------------------
/.github/wordlist.txt:
--------------------------------------------------------------------------------
1 | APM
2 | ARGV
3 | BFCommands
4 | CacheImpl
5 | CAS
6 | CFCommands
7 | CMSCommands
8 | ClusterNode
9 | ClusterNodes
10 | ClusterPipeline
11 | ClusterPubSub
12 | ConnectionPool
13 | CoreCommands
14 | EVAL
15 | EVALSHA
16 | Grokzen's
17 | INCR
18 | IOError
19 | Instrumentations
20 | JSONCommands
21 | Jaeger
22 | Ludovico
23 | Magnocavallo
24 | McCurdy
25 | NOSCRIPT
26 | NUMPAT
27 | NUMPT
28 | NUMSUB
29 | OSS
30 | OpenCensus
31 | OpenTelemetry
32 | OpenTracing
33 | Otel
34 | PubSub
35 | READONLY
36 | RediSearch
37 | RedisBloom
38 | RedisCluster
39 | RedisClusterCommands
40 | RedisClusterException
41 | RedisClusters
42 | RedisInstrumentor
43 | RedisJSON
44 | RedisTimeSeries
45 | SHA
46 | SearchCommands
47 | SentinelCommands
48 | SentinelConnectionPool
49 | Sharded
50 | Solovyov
51 | SpanKind
52 | Specfiying
53 | StatusCode
54 | TCP
55 | TOPKCommands
56 | TimeSeriesCommands
57 | Uptrace
58 | ValueError
59 | WATCHed
60 | WatchError
61 | api
62 | args
63 | async
64 | asyncio
65 | autoclass
66 | automodule
67 | backoff
68 | bdb
69 | behaviour
70 | bool
71 | boolean
72 | booleans
73 | bysource
74 | charset
75 | del
76 | dev
77 | docstring
78 | docstrings
79 | eg
80 | exc
81 | firsttimersonly
82 | fo
83 | genindex
84 | gmail
85 | hiredis
86 | http
87 | idx
88 | iff
89 | ini
90 | json
91 | keyslot
92 | keyspace
93 | kwarg
94 | linters
95 | localhost
96 | lua
97 | makeapullrequest
98 | maxdepth
99 | mget
100 | microservice
101 | microservices
102 | mset
103 | multikey
104 | mykey
105 | nonatomic
106 | observability
107 | opentelemetry
108 | oss
109 | performant
110 | pmessage
111 | png
112 | pre
113 | psubscribe
114 | pubsub
115 | punsubscribe
116 | py
117 | pypi
118 | quickstart
119 | readonly
120 | readwrite
121 | redis
122 | redismodules
123 | reinitialization
124 | replicaof
125 | repo
126 | runtime
127 | sedrik
128 | sharded
129 | ssl
130 | str
131 | stunnel
132 | subcommands
133 | thevalueofmykey
134 | timeseries
135 | toctree
136 | topk
137 | triaging
138 | txt
139 | un
140 | unicode
141 | url
142 | virtualenv
143 | www
144 | yaml
145 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ master ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ master ]
20 |
21 | jobs:
22 | analyze:
23 | name: Analyze
24 | runs-on: ubuntu-latest
25 | permissions:
26 | actions: read
27 | contents: read
28 | security-events: write
29 |
30 | strategy:
31 | fail-fast: false
32 | matrix:
33 | language: [ 'python' ]
34 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
35 | # Learn more about CodeQL language support at https://git.io/codeql-language-support
36 |
37 | steps:
38 | - name: Checkout repository
39 | uses: actions/checkout@v4
40 |
41 | # Initializes the CodeQL tools for scanning.
42 | - name: Initialize CodeQL
43 | uses: github/codeql-action/init@v3
44 | with:
45 | languages: ${{ matrix.language }}
46 | # If you wish to specify custom queries, you can do so here or in a config file.
47 | # By default, queries listed here will override any specified in a config file.
48 | # Prefix the list here with "+" to use these queries and those in the config file.
49 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
50 |
51 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
52 | # If this step fails, then you should remove it and run the build manually (see below)
53 | - name: Autobuild
54 | uses: github/codeql-action/autobuild@v3
55 |
56 | # ℹ️ Command-line programs to run using the OS shell.
57 | # 📚 https://git.io/JvXDl
58 |
59 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
60 | # and modify them (or add more) to build your code if your project
61 | # uses a compiled language
62 |
63 | #- run: |
64 | # make bootstrap
65 | # make release
66 |
67 | - name: Perform CodeQL Analysis
68 | uses: github/codeql-action/analyze@v3
69 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yaml:
--------------------------------------------------------------------------------
1 | name: Docs CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | - '[0-9].[0-9]'
8 | pull_request:
9 | branches:
10 | - master
11 | - '[0-9].[0-9]'
12 | schedule:
13 | - cron: '0 1 * * *' # nightly build
14 |
15 | concurrency:
16 | group: ${{ github.event.pull_request.number || github.ref }}-docs
17 | cancel-in-progress: true
18 |
19 | permissions:
20 | contents: read # to fetch code (actions/checkout)
21 |
22 | jobs:
23 |
24 | build-docs:
25 | name: Build docs
26 | runs-on: ubuntu-latest
27 | steps:
28 | - uses: actions/checkout@v4
29 | - uses: actions/setup-python@v5
30 | with:
31 | python-version: 3.9
32 | cache: 'pip'
33 | - name: install deps
34 | run: |
35 | sudo apt-get update -yqq
36 | sudo apt-get install -yqq pandoc make
37 | - name: run code linters
38 | run: |
39 | pip install -r dev_requirements.txt -r docs/requirements.txt
40 | invoke build-docs
41 |
42 | - name: upload docs
43 | uses: actions/upload-artifact@v4
44 | with:
45 | name: redis-py-docs
46 | path: |
47 | docs/_build/html
48 |
--------------------------------------------------------------------------------
/.github/workflows/hiredis-py-integration.yaml:
--------------------------------------------------------------------------------
1 | name: Hiredis-py integration tests
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | redis-py-branch:
7 | description: 'redis-py branch to run tests on'
8 | required: true
9 | default: 'master'
10 | hiredis-branch:
11 | description: 'hiredis-py branch to run tests on'
12 | required: true
13 | default: 'master'
14 |
15 | concurrency:
16 | group: ${{ github.event.pull_request.number || github.ref }}-hiredis-integration
17 | cancel-in-progress: true
18 |
19 | permissions:
20 | contents: read # to fetch code (actions/checkout)
21 |
22 | env:
23 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
24 | # this speeds up coverage with Python 3.12: https://github.com/nedbat/coveragepy/issues/1665
25 | COVERAGE_CORE: sysmon
26 | CURRENT_CLIENT_LIBS_TEST_STACK_IMAGE_TAG: 'rs-7.4.0-v2'
27 | CURRENT_REDIS_VERSION: '7.4.2'
28 |
29 | jobs:
30 | redis_version:
31 | runs-on: ubuntu-latest
32 | outputs:
33 | CURRENT: ${{ env.CURRENT_REDIS_VERSION }}
34 | steps:
35 | - name: Compute outputs
36 | run: |
37 | echo "CURRENT=${{ env.CURRENT_REDIS_VERSION }}" >> $GITHUB_OUTPUT
38 |
39 | hiredis-tests:
40 | runs-on: ubuntu-latest
41 | needs: [redis_version]
42 | timeout-minutes: 60
43 | strategy:
44 | max-parallel: 15
45 | fail-fast: false
46 | matrix:
47 | redis-version: [ '${{ needs.redis_version.outputs.CURRENT }}' ]
48 | python-version: [ '3.9', '3.13']
49 | parser-backend: [ 'hiredis' ]
50 | hiredis-version: [ 'unstable' ]
51 | event-loop: [ 'asyncio' ]
52 | env:
53 | ACTIONS_ALLOW_UNSECURE_COMMANDS: true
54 | name: Redis ${{ matrix.redis-version }}; Python ${{ matrix.python-version }}; RESP Parser:${{matrix.parser-backend}} (${{ matrix.hiredis-version }}); EL:${{matrix.event-loop}}
55 | steps:
56 | - uses: actions/checkout@v4
57 | with:
58 | ref: ${{ inputs.redis-py-branch }}
59 | - name: Run tests
60 | uses: ./.github/actions/run-tests
61 | with:
62 | python-version: ${{ matrix.python-version }}
63 | parser-backend: ${{ matrix.parser-backend }}
64 | redis-version: ${{ matrix.redis-version }}
65 | hiredis-version: ${{ matrix.hiredis-version }}
66 | hiredis-branch: ${{ inputs.hiredis-branch }}
--------------------------------------------------------------------------------
/.github/workflows/install_and_test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | SUFFIX=$1
6 | if [ -z ${SUFFIX} ]; then
7 | echo "Supply valid python package extension such as whl or tar.gz. Exiting."
8 | exit 3
9 | fi
10 |
11 | script=`pwd`/${BASH_SOURCE[0]}
12 | HERE=`dirname ${script}`
13 | ROOT=`realpath ${HERE}/../..`
14 |
15 | cd ${ROOT}
16 | DESTENV=${ROOT}/.venvforinstall
17 | if [ -d ${DESTENV} ]; then
18 | rm -rf ${DESTENV}
19 | fi
20 | python -m venv ${DESTENV}
21 | source ${DESTENV}/bin/activate
22 | pip install --upgrade --quiet pip
23 | pip install --quiet -r dev_requirements.txt
24 | pip uninstall -y redis # uninstall Redis package installed via redis-entraid
25 | invoke devenv --endpoints=all-stack
26 | invoke package
27 |
28 | # find packages
29 | PKG=`ls ${ROOT}/dist/*.${SUFFIX}`
30 | ls -l ${PKG}
31 |
32 | TESTDIR=${ROOT}/STAGETESTS
33 | if [ -d ${TESTDIR} ]; then
34 | rm -rf ${TESTDIR}
35 | fi
36 | mkdir ${TESTDIR}
37 | cp -R ${ROOT}/tests ${TESTDIR}/tests
38 | cd ${TESTDIR}
39 |
40 | # install, run tests
41 | pip install ${PKG}
42 | # Redis tests
43 | pytest -m 'not onlycluster'
44 | # RedisCluster tests
45 | CLUSTER_URL="redis://localhost:16379/0"
46 | CLUSTER_SSL_URL="rediss://localhost:27379/0"
47 | pytest -m 'not onlynoncluster and not redismod and not ssl' \
48 | --redis-url="${CLUSTER_URL}" --redis-ssl-url="${CLUSTER_SSL_URL}"
49 |
--------------------------------------------------------------------------------
/.github/workflows/pypi-publish.yaml:
--------------------------------------------------------------------------------
1 | name: Publish tag to Pypi
2 |
3 | on:
4 | release:
5 | types: [published]
6 | workflow_dispatch:
7 |
8 | permissions:
9 | contents: read # to fetch code (actions/checkout)
10 |
11 | jobs:
12 |
13 | build_and_package:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - name: install python
18 | uses: actions/setup-python@v5
19 | with:
20 | python-version: 3.9
21 | - run: pip install build twine
22 |
23 | - name: Build package
24 | run: python -m build .
25 |
26 | - name: Basic package test prior to upload
27 | run: |
28 | twine check dist/*
29 |
30 | - name: Publish to Pypi
31 | uses: pypa/gh-action-pypi-publish@release/v1
32 | with:
33 | user: __token__
34 | password: ${{ secrets.PYPI_API_TOKEN }}
35 |
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 |
3 | on:
4 | push:
5 | # branches to consider in the event; optional, defaults to all
6 | branches:
7 | - master
8 |
9 | permissions: {}
10 | jobs:
11 | update_release_draft:
12 | permissions:
13 | pull-requests: write # to add label to PR (release-drafter/release-drafter)
14 | contents: write # to create a github release (release-drafter/release-drafter)
15 |
16 | runs-on: ubuntu-latest
17 | steps:
18 | # Drafts your next Release notes as Pull Requests are merged into "master"
19 | - uses: release-drafter/release-drafter@v6
20 | with:
21 | # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
22 | config-name: release-drafter-config.yml
23 | env:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 |
--------------------------------------------------------------------------------
/.github/workflows/spellcheck.yml:
--------------------------------------------------------------------------------
1 | name: spellcheck
2 | on:
3 | pull_request:
4 | jobs:
5 | check-spelling:
6 | runs-on: ubuntu-latest
7 | steps:
8 | - name: Checkout
9 | uses: actions/checkout@v4
10 | - name: Check Spelling
11 | uses: rojopolis/spellcheck-github-actions@0.48.0
12 | with:
13 | config_path: .github/spellcheck-settings.yml
14 | task_name: Markdown
15 |
--------------------------------------------------------------------------------
/.github/workflows/stale-issues.yml:
--------------------------------------------------------------------------------
1 | name: "Close stale issues"
2 | on:
3 | schedule:
4 | - cron: "0 0 * * *"
5 |
6 | permissions: {}
7 | jobs:
8 | stale:
9 | permissions:
10 | issues: write # to close stale issues (actions/stale)
11 | pull-requests: write # to close stale PRs (actions/stale)
12 |
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/stale@v9
16 | with:
17 | repo-token: ${{ secrets.GITHUB_TOKEN }}
18 | stale-issue-message: 'This issue is marked stale. It will be closed in 30 days if it is not updated.'
19 | stale-pr-message: 'This pull request is marked stale. It will be closed in 30 days if it is not updated.'
20 | days-before-stale: 365
21 | days-before-close: 30
22 | stale-issue-label: "Stale"
23 | stale-pr-label: "Stale"
24 | operations-per-run: 20
25 | remove-stale-when-updated: true
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | redis.egg-info
3 | build/
4 | dist/
5 | dump.rdb
6 | _build
7 | vagrant/.vagrant
8 | .python-version
9 | .cache
10 | .eggs
11 | .idea
12 | .vscode
13 | .coverage
14 | env
15 | venv
16 | coverage.xml
17 | .venv*
18 | *.xml
19 | .coverage*
20 | prof
21 | profile_output*
22 | docker/stunnel/keys
23 | /dockers/*/node-*/*
24 | /dockers/*/tls/*
25 | /dockers/standalone/
26 | /dockers/cluster/
27 | /dockers/replica/
28 | /dockers/sentinel/
29 | /dockers/redis-stack/
30 |
--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | #, docs/examples, tests
3 | files = redis
4 | check_untyped_defs = True
5 | follow_imports_for_stubs = True
6 | #disallow_any_decorated = True
7 | disallow_subclassing_any = True
8 | #disallow_untyped_calls = True
9 | disallow_untyped_decorators = True
10 | #disallow_untyped_defs = True
11 | implicit_reexport = False
12 | no_implicit_optional = True
13 | show_error_codes = True
14 | strict_equality = True
15 | warn_incomplete_stub = True
16 | warn_redundant_casts = True
17 | warn_unreachable = True
18 | warn_unused_ignores = True
19 | disallow_any_unimported = True
20 | #warn_return_any = True
21 |
22 | [mypy-redis.asyncio.lock]
23 | # TODO: Remove once locks has been rewritten
24 | ignore_errors = True
25 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | python:
4 | install:
5 | - requirements: docs/requirements.txt
6 | - method: pip
7 | path: .
8 |
9 | build:
10 | os: ubuntu-20.04
11 | tools:
12 | python: "3.9"
13 |
14 | sphinx:
15 | configuration: docs/conf.py
16 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022-2023, Redis, inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/benchmarks/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/benchmarks/__init__.py
--------------------------------------------------------------------------------
/benchmarks/base.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import itertools
3 | import sys
4 | import timeit
5 |
6 | import redis
7 |
8 |
9 | class Benchmark:
10 | ARGUMENTS = ()
11 |
12 | def __init__(self):
13 | self._client = None
14 |
15 | def get_client(self, **kwargs):
16 | # eventually make this more robust and take optional args from
17 | # argparse
18 | if self._client is None or kwargs:
19 | defaults = {"db": 9}
20 | defaults.update(kwargs)
21 | pool = redis.ConnectionPool(**kwargs)
22 | self._client = redis.Redis(connection_pool=pool)
23 | return self._client
24 |
25 | def setup(self, **kwargs):
26 | pass
27 |
28 | def run(self, **kwargs):
29 | pass
30 |
31 | def run_benchmark(self):
32 | group_names = [group["name"] for group in self.ARGUMENTS]
33 | group_values = [group["values"] for group in self.ARGUMENTS]
34 | for value_set in itertools.product(*group_values):
35 | pairs = list(zip(group_names, value_set))
36 | arg_string = ", ".join(f"{p[0]}={p[1]}" for p in pairs)
37 | sys.stdout.write(f"Benchmark: {arg_string}... ")
38 | sys.stdout.flush()
39 | kwargs = dict(pairs)
40 | setup = functools.partial(self.setup, **kwargs)
41 | run = functools.partial(self.run, **kwargs)
42 | t = timeit.timeit(stmt=run, setup=setup, number=1000)
43 | sys.stdout.write(f"{t:f}\n")
44 | sys.stdout.flush()
45 |
--------------------------------------------------------------------------------
/benchmarks/cluster_async_pipeline.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import functools
3 | import time
4 |
5 | import aioredis_cluster
6 | import aredis
7 | import uvloop
8 |
9 | import redis.asyncio as redispy
10 |
11 |
12 | def timer(func):
13 | @functools.wraps(func)
14 | async def wrapper(*args, **kwargs):
15 | tic = time.perf_counter()
16 | await func(*args, **kwargs)
17 | toc = time.perf_counter()
18 | return f"{toc - tic:.4f}"
19 |
20 | return wrapper
21 |
22 |
23 | @timer
24 | async def warmup(client):
25 | await asyncio.gather(
26 | *(asyncio.create_task(client.exists(f"bench:warmup_{i}")) for i in range(100))
27 | )
28 |
29 |
30 | @timer
31 | async def run(client):
32 | data_str = "a" * size
33 | data_int = int("1" * size)
34 |
35 | for i in range(count):
36 | with client.pipeline() as pipe:
37 | await (
38 | pipe.set(f"bench:str_{i}", data_str)
39 | .set(f"bench:int_{i}", data_int)
40 | .get(f"bench:str_{i}")
41 | .get(f"bench:int_{i}")
42 | .hset("bench:hset", str(i), data_str)
43 | .hget("bench:hset", str(i))
44 | .incr("bench:incr")
45 | .lpush("bench:lpush", data_int)
46 | .lrange("bench:lpush", 0, 300)
47 | .lpop("bench:lpush")
48 | .execute()
49 | )
50 |
51 |
52 | async def main(loop):
53 | arc = aredis.StrictRedisCluster(
54 | host=host,
55 | port=port,
56 | password=password,
57 | max_connections=2**31,
58 | max_connections_per_node=2**31,
59 | readonly=False,
60 | reinitialize_steps=count,
61 | skip_full_coverage_check=True,
62 | decode_responses=False,
63 | max_idle_time=count,
64 | idle_check_interval=count,
65 | )
66 | print(f"{loop} {await warmup(arc)} aredis")
67 | print(await run(arc))
68 | arc.connection_pool.disconnect()
69 |
70 | aiorc = await aioredis_cluster.create_redis_cluster(
71 | [(host, port)],
72 | password=password,
73 | state_reload_interval=count,
74 | idle_connection_timeout=count,
75 | pool_maxsize=2**31,
76 | )
77 | print(f"{loop} {await warmup(aiorc)} aioredis-cluster")
78 | print(await run(aiorc))
79 | aiorc.close()
80 | await aiorc.wait_closed()
81 |
82 | async with redispy.RedisCluster(
83 | host=host,
84 | port=port,
85 | password=password,
86 | reinitialize_steps=count,
87 | read_from_replicas=False,
88 | decode_responses=False,
89 | max_connections=2**31,
90 | ) as rca:
91 | print(f"{loop} {await warmup(rca)} redispy")
92 | print(await run(rca))
93 |
94 |
95 | if __name__ == "__main__":
96 | host = "localhost"
97 | port = 16379
98 | password = None
99 |
100 | count = 10000
101 | size = 256
102 |
103 | asyncio.run(main("asyncio"))
104 |
105 | uvloop.install()
106 |
107 | asyncio.run(main("uvloop"))
108 |
--------------------------------------------------------------------------------
/benchmarks/command_packer_benchmark.py:
--------------------------------------------------------------------------------
1 | from base import Benchmark
2 |
3 | from redis.connection import SYM_CRLF, SYM_DOLLAR, SYM_EMPTY, SYM_STAR, Connection
4 |
5 |
class StringJoiningConnection(Connection):
    """Connection variant that packs each command into one joined byte string."""

    def send_packed_command(self, command, check_health=True):
        "Send an already packed command to the Redis server"
        if not self._sock:
            self.connect()
        try:
            self._sock.sendall(command)
        except OSError as e:
            self.disconnect()
            # Socket errors may carry (errno, message) or just (message,).
            _errno, errmsg = ("UNKNOWN", e.args[0]) if len(e.args) == 1 else e.args
            raise ConnectionError(f"Error {_errno} while writing to socket. {errmsg}.")
        except Exception:
            self.disconnect()
            raise

    def pack_command(self, *args):
        "Pack a series of arguments into a value Redis command"
        # RESP array header followed by one bulk-string frame per argument,
        # accumulated in a flat list and joined once at the end.
        pieces = [SYM_STAR, str(len(args)).encode(), SYM_CRLF]
        for item in map(self.encoder.encode, args):
            pieces.extend(
                (SYM_DOLLAR, str(len(item)).encode(), SYM_CRLF, item, SYM_CRLF)
            )
        return SYM_EMPTY.join(pieces)
38 |
39 |
class ListJoiningConnection(Connection):
    """Connection variant that packs a command as a list of byte chunks,
    sending each chunk separately instead of joining them all up front."""

    def send_packed_command(self, command, check_health=True):
        if not self._sock:
            self.connect()
        try:
            # Accept either a single pre-packed string or a list of chunks.
            chunks = [command] if isinstance(command, str) else command
            for chunk in chunks:
                self._sock.sendall(chunk)
        except OSError as e:
            self.disconnect()
            # Socket errors may carry (errno, message) or just (message,).
            _errno, errmsg = ("UNKNOWN", e.args[0]) if len(e.args) == 1 else e.args
            raise ConnectionError(f"Error {_errno} while writing to socket. {errmsg}.")
        except Exception:
            self.disconnect()
            raise

    def pack_command(self, *args):
        chunks = []
        pending = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))

        for item in map(self.encoder.encode, args):
            header = SYM_EMPTY.join((SYM_DOLLAR, str(len(item)).encode(), SYM_CRLF))
            if len(pending) > 6000 or len(item) > 6000:
                # Flush the accumulated buffer plus this item's header, then
                # emit the large value as its own chunk; only its trailing
                # CRLF is carried over into the next buffer.
                chunks.append(pending + header)
                chunks.append(item)
                pending = SYM_CRLF
            else:
                pending = SYM_EMPTY.join((pending, header, item, SYM_CRLF))
        chunks.append(pending)
        return chunks
78 |
79 |
class CommandPackerBenchmark(Benchmark):
    """Time SET calls under each command-packing strategy and value size."""

    ARGUMENTS = (
        {
            "name": "connection_class",
            "values": [StringJoiningConnection, ListJoiningConnection],
        },
        {
            "name": "value_size",
            "values": [10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000],
        },
    )

    def setup(self, connection_class, value_size):
        # Prime the cached client with the connection class under test.
        self.get_client(connection_class=connection_class)

    def run(self, connection_class, value_size):
        client = self.get_client()
        payload = "a" * value_size
        client.set("benchmark", payload)
99 |
100 |
if __name__ == "__main__":
    # Run the full argument matrix and report timings.
    CommandPackerBenchmark().run_benchmark()
103 |
--------------------------------------------------------------------------------
/benchmarks/socket_read_size.py:
--------------------------------------------------------------------------------
1 | from base import Benchmark
2 |
3 | from redis.connection import PythonParser, _HiredisParser
4 |
5 |
class SocketReadBenchmark(Benchmark):
    """Time GET calls across parser implementations, value sizes, and
    socket read-buffer sizes."""

    ARGUMENTS = (
        {"name": "parser", "values": [PythonParser, _HiredisParser]},
        {
            "name": "value_size",
            "values": [10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000],
        },
        {"name": "read_size", "values": [4096, 8192, 16384, 32768, 65536, 131072]},
    )

    def setup(self, value_size, read_size, parser):
        # Seed the key once with the configured parser and read size.
        client = self.get_client(parser_class=parser, socket_read_size=read_size)
        client.set("benchmark", "a" * value_size)

    def run(self, value_size, read_size, parser):
        client = self.get_client()
        client.get("benchmark")
23 |
24 |
if __name__ == "__main__":
    # Run the full argument matrix and report timings.
    SocketReadBenchmark().run_benchmark()
27 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | ignore:
2 | - "benchmarks/**"
3 | - "tasks.py"
4 |
5 | codecov:
6 | require_ci_to_pass: yes
7 |
8 | coverage:
9 | precision: 2
10 | round: down
11 | range: "80...100"
12 | status:
13 | patch: off # off for now as it yells about everything
14 |
--------------------------------------------------------------------------------
/dev_requirements.txt:
--------------------------------------------------------------------------------
1 | build
2 | click==8.0.4
3 | invoke==2.2.0
4 | mock
5 | packaging>=20.4
6 | pytest
7 | pytest-asyncio>=0.23.0
8 | pytest-cov
9 | pytest-profiling==1.8.1
10 | pytest-timeout
11 | ruff==0.9.6
12 | ujson>=4.2.0
13 | uvloop
14 | vulture>=2.3.0
15 | numpy>=1.24.0
16 | redis-entraid==1.0.0
17 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # image tag 8.0-RC2-pre is the one matching the 8.0 GA release
3 | x-client-libs-stack-image: &client-libs-stack-image
4 | image: "redislabs/client-libs-test:${CLIENT_LIBS_TEST_STACK_IMAGE_TAG:-8.0-RC2-pre}"
5 |
6 | x-client-libs-image: &client-libs-image
7 | image: "redislabs/client-libs-test:${CLIENT_LIBS_TEST_IMAGE_TAG:-8.0-RC2-pre}"
8 |
9 | services:
10 |
11 | redis:
12 | <<: *client-libs-image
13 | container_name: redis-standalone
14 | environment:
15 | - TLS_ENABLED=yes
16 | - REDIS_CLUSTER=no
17 | - PORT=6379
18 | - TLS_PORT=6666
19 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --tls-auth-clients optional --save ""}
20 | ports:
21 | - 6379:6379
22 | - 6666:6666 # TLS port
23 | volumes:
24 | - "./dockers/standalone:/redis/work"
25 | profiles:
26 | - standalone
27 | - sentinel
28 | - replica
29 | - all-stack
30 | - all
31 |
32 | replica:
33 | <<: *client-libs-image
34 | container_name: redis-replica
35 | depends_on:
36 | - redis
37 | environment:
38 | - TLS_ENABLED=no
39 | - REDIS_CLUSTER=no
40 | - PORT=6380
41 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --replicaof redis 6379 --protected-mode no --save ""}
42 | ports:
43 | - 6380:6380
44 | volumes:
45 | - "./dockers/replica:/redis/work"
46 | profiles:
47 | - replica
48 | - all-stack
49 | - all
50 |
51 | cluster:
52 | <<: *client-libs-image
53 | container_name: redis-cluster
54 | environment:
55 | - REDIS_CLUSTER=yes
56 | - NODES=6
57 | - REPLICAS=1
58 | - TLS_ENABLED=yes
59 | - PORT=16379
60 | - TLS_PORT=27379
61 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --tls-auth-clients optional --save ""}
62 | ports:
63 | - "16379-16384:16379-16384"
64 | - "27379-27384:27379-27384"
65 | volumes:
66 | - "./dockers/cluster:/redis/work"
67 | profiles:
68 | - cluster
69 | - all-stack
70 | - all
71 |
72 | sentinel:
73 | <<: *client-libs-image
74 | container_name: redis-sentinel
75 | depends_on:
76 | - redis
77 | environment:
78 | - REDIS_CLUSTER=no
79 | - NODES=3
80 | - PORT=26379
81 | command: ${REDIS_EXTRA_ARGS:---sentinel}
82 | ports:
83 | - 26379:26379
84 | - 26380:26380
85 | - 26381:26381
86 | volumes:
87 | - "./dockers/sentinel.conf:/redis/config-default/redis.conf"
88 | - "./dockers/sentinel:/redis/work"
89 | profiles:
90 | - sentinel
91 | - all-stack
92 | - all
93 |
94 | redis-stack:
95 | <<: *client-libs-stack-image
96 | container_name: redis-stack
97 | environment:
98 | - REDIS_CLUSTER=no
99 | - PORT=6379
100 | command: ${REDIS_EXTRA_ARGS:---enable-debug-command yes --enable-module-command yes --save ""}
101 | ports:
102 | - 6479:6379
103 | volumes:
104 | - "./dockers/redis-stack:/redis/work"
105 | profiles:
106 | - standalone
107 | - all-stack
108 | - all
109 |
--------------------------------------------------------------------------------
/dockers/sentinel.conf:
--------------------------------------------------------------------------------
1 | sentinel resolve-hostnames yes
2 | sentinel monitor redis-py-test redis 6379 2
3 | sentinel down-after-milliseconds redis-py-test 5000
4 | sentinel failover-timeout redis-py-test 60000
5 | sentinel parallel-syncs redis-py-test 1
--------------------------------------------------------------------------------
/docs/_static/.keep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/_static/.keep
--------------------------------------------------------------------------------
/docs/_static/logo-redis.svg:
--------------------------------------------------------------------------------
1 |
11 |
--------------------------------------------------------------------------------
/docs/_templates/.keep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/_templates/.keep
--------------------------------------------------------------------------------
/docs/backoff.rst:
--------------------------------------------------------------------------------
1 | .. _backoff-label:
2 |
3 | Backoff
4 | #############
5 |
6 | .. automodule:: redis.backoff
7 | :members:
--------------------------------------------------------------------------------
/docs/commands.rst:
--------------------------------------------------------------------------------
1 | Redis Commands
2 | ##############
3 |
4 | Core Commands
5 | *************
6 |
7 | The following functions can be used to replicate their equivalent `Redis command <https://redis.io/commands>`_. Generally they can be used as functions on your redis connection. For the simplest example, see below:
8 |
9 | Getting and setting data in redis::
10 |
11 | import redis
12 | r = redis.Redis(decode_responses=True)
13 | r.set('mykey', 'thevalueofmykey')
14 | r.get('mykey')
15 |
16 | .. autoclass:: redis.commands.core.CoreCommands
17 | :inherited-members:
18 |
19 | Sentinel Commands
20 | *****************
21 | .. autoclass:: redis.commands.sentinel.SentinelCommands
22 | :inherited-members:
23 |
24 | Redis Cluster Commands
25 | **********************
26 |
27 | The following `Redis commands `_ are available within a `Redis Cluster `_. Generally they can be used as functions on your redis connection.
28 |
29 | .. autoclass:: redis.commands.cluster.RedisClusterCommands
30 | :inherited-members:
31 |
--------------------------------------------------------------------------------
/docs/connections.rst:
--------------------------------------------------------------------------------
1 | Connecting to Redis
2 | ###################
3 |
4 |
5 | Generic Client
6 | **************
7 |
8 | This is the client used to connect directly to a standard Redis node.
9 |
10 | .. autoclass:: redis.Redis
11 | :members:
12 |
13 |
14 | Sentinel Client
15 | ***************
16 |
17 | Redis `Sentinel <https://redis.io/topics/sentinel>`_ provides high availability for Redis. There are commands that can only be executed against a Redis node running in sentinel mode. Connecting to those nodes, and executing commands against them requires a Sentinel connection.
18 |
19 | Connection example (assumes Redis exists on the ports listed below):
20 |
21 | >>> from redis import Sentinel
22 | >>> sentinel = Sentinel([('localhost', 26379)], socket_timeout=0.1)
23 | >>> sentinel.discover_master('mymaster')
24 | ('127.0.0.1', 6379)
25 | >>> sentinel.discover_slaves('mymaster')
26 | [('127.0.0.1', 6380)]
27 |
28 | Sentinel
29 | ========
30 | .. autoclass:: redis.sentinel.Sentinel
31 | :members:
32 |
33 | SentinelConnectionPool
34 | ======================
35 | .. autoclass:: redis.sentinel.SentinelConnectionPool
36 | :members:
37 |
38 |
39 | Cluster Client
40 | **************
41 |
42 | This client is used for connecting to a Redis Cluster.
43 |
44 | RedisCluster
45 | ============
46 | .. autoclass:: redis.cluster.RedisCluster
47 | :members:
48 |
49 | ClusterNode
50 | ===========
51 | .. autoclass:: redis.cluster.ClusterNode
52 | :members:
53 |
54 |
55 | Async Client
56 | ************
57 |
58 | See complete example: `here `__
59 |
60 | This client is used for communicating with Redis, asynchronously.
61 |
62 | .. autoclass:: redis.asyncio.client.Redis
63 | :members:
64 |
65 |
66 | Async Cluster Client
67 | ********************
68 |
69 | RedisCluster (Async)
70 | ====================
71 | .. autoclass:: redis.asyncio.cluster.RedisCluster
72 | :members:
73 | :member-order: bysource
74 |
75 | ClusterNode (Async)
76 | ===================
77 | .. autoclass:: redis.asyncio.cluster.ClusterNode
78 | :members:
79 | :member-order: bysource
80 |
81 | ClusterPipeline (Async)
82 | =======================
83 | .. autoclass:: redis.asyncio.cluster.ClusterPipeline
84 | :members: execute_command, execute
85 | :member-order: bysource
86 |
87 |
88 | Connection
89 | **********
90 |
91 | See complete example: `here `__
92 |
93 | Connection
94 | ==========
95 | .. autoclass:: redis.connection.Connection
96 | :members:
97 |
98 | Connection (Async)
99 | ==================
100 | .. autoclass:: redis.asyncio.connection.Connection
101 | :members:
102 |
103 |
104 | Connection Pools
105 | ****************
106 |
107 | See complete example: `here `__
108 |
109 | ConnectionPool
110 | ==============
111 | .. autoclass:: redis.connection.ConnectionPool
112 | :members:
113 |
114 | ConnectionPool (Async)
115 | ======================
116 | .. autoclass:: redis.asyncio.connection.ConnectionPool
117 | :members:
118 |
--------------------------------------------------------------------------------
/docs/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ########
3 |
4 | .. toctree::
5 | :maxdepth: 3
6 | :glob:
7 |
8 | examples/connection_examples
9 | examples/ssl_connection_examples
10 | examples/asyncio_examples
11 | examples/search_json_examples
12 | examples/set_and_get_examples
13 | examples/search_vector_similarity_examples
14 | examples/pipeline_examples
15 | examples/timeseries_examples
16 | examples/redis-stream-example
17 | examples/opentelemetry_api_examples
18 |
--------------------------------------------------------------------------------
/docs/examples/README.md:
--------------------------------------------------------------------------------
1 | # Examples
2 |
3 | Examples of redis-py usage go here. They're being linked to the [generated documentation](https://redis-py.readthedocs.org).
4 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/README.md:
--------------------------------------------------------------------------------
1 | # Example for redis-py OpenTelemetry instrumentation
2 |
3 | This example demonstrates how to monitor Redis using [OpenTelemetry](https://opentelemetry.io/) and
4 | [Uptrace](https://github.com/uptrace/uptrace). It requires Docker to start Redis Server and Uptrace.
5 |
6 | See
7 | [Monitoring redis-py performance with OpenTelemetry](https://redis-py.readthedocs.io/en/latest/opentelemetry.html)
8 | for details.
9 |
10 | **Step 1**. Download the example using Git:
11 |
12 | ```shell
13 | git clone https://github.com/redis/redis-py.git
14 | cd redis-py/docs/examples/opentelemetry
15 | ```
16 |
17 | **Step 2**. Optionally, create a virtualenv:
18 |
19 | ```shell
20 | python3 -m venv .venv
21 | source .venv/bin/activate
22 | ```
23 |
24 | **Step 3**. Install dependencies:
25 |
26 | ```shell
27 | pip install -e .
28 | ```
29 |
30 | **Step 4**. Start the services using Docker and make sure Uptrace is running:
31 |
32 | ```shell
33 | docker-compose up -d
34 | docker-compose logs uptrace
35 | ```
36 |
37 | **Step 5**. Run the Redis client example and follow the link from the CLI to view the trace:
38 |
39 | ```shell
40 | python3 main.py
41 | trace: http://localhost:14318/traces/ee029d8782242c8ed38b16d961093b35
42 | ```
43 |
44 | 
45 |
46 | You can also open Uptrace UI at [http://localhost:14318](http://localhost:14318) to view available
47 | spans, logs, and metrics.
48 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/config/alertmanager.yml:
--------------------------------------------------------------------------------
1 | # See https://prometheus.io/docs/alerting/latest/configuration/ for details.
2 |
3 | global:
4 | # The smarthost and SMTP sender used for mail notifications.
5 | smtp_smarthost: "mailhog:1025"
6 | smtp_from: "alertmanager@example.com"
7 | smtp_require_tls: false
8 |
9 | receivers:
10 | - name: "team-X"
11 | email_configs:
12 | - to: "some-receiver@example.com"
13 | send_resolved: true
14 |
15 | # The root route on which each incoming alert enters.
16 | route:
17 | # The labels by which incoming alerts are grouped together. For example,
18 | # multiple alerts coming in for cluster=A and alertname=LatencyHigh would
19 | # be batched into a single group.
20 | group_by: ["alertname", "cluster", "service"]
21 |
22 | # When a new group of alerts is created by an incoming alert, wait at
23 | # least 'group_wait' to send the initial notification.
24 | # This way ensures that you get multiple alerts for the same group that start
25 | # firing shortly after another are batched together on the first
26 | # notification.
27 | group_wait: 30s
28 |
29 | # When the first notification was sent, wait 'group_interval' to send a batch
30 | # of new alerts that started firing for that group.
31 | group_interval: 5m
32 |
33 | # If an alert has successfully been sent, wait 'repeat_interval' to
34 | # resend them.
35 | repeat_interval: 3h
36 |
37 | # A default receiver
38 | receiver: team-X
39 |
40 | # All the above attributes are inherited by all child routes and can
41 | # be overwritten on each.
42 |
43 | # The child route trees.
44 | routes:
45 | # This route matches error alerts created from spans or logs.
46 | - matchers:
47 | - alert_kind="error"
48 | group_interval: 24h
49 | receiver: team-X
50 |
51 | # The directory from which notification templates are read.
52 | templates:
53 | - "/etc/alertmanager/template/*.tmpl"
54 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/config/otel-collector.yaml:
--------------------------------------------------------------------------------
1 | extensions:
2 | health_check:
3 | pprof:
4 | endpoint: 0.0.0.0:1777
5 | zpages:
6 | endpoint: 0.0.0.0:55679
7 |
8 | receivers:
9 | otlp:
10 | protocols:
11 | grpc:
12 | http:
13 | hostmetrics:
14 | collection_interval: 10s
15 | scrapers:
16 | cpu:
17 | disk:
18 | load:
19 | filesystem:
20 | memory:
21 | network:
22 | paging:
23 | redis:
24 | endpoint: "redis-server:6379"
25 | collection_interval: 10s
26 | jaeger:
27 | protocols:
28 | grpc:
29 |
30 | processors:
31 | resourcedetection:
32 | detectors: ["system"]
33 | batch:
34 | send_batch_size: 10000
35 | timeout: 10s
36 |
37 | exporters:
38 | logging:
39 | logLevel: debug
40 | otlp:
41 | endpoint: uptrace:14317
42 | tls:
43 | insecure: true
44 | headers: { "uptrace-dsn": "http://project2_secret_token@localhost:14317/2" }
45 |
46 | service:
47 | # telemetry:
48 | # logs:
49 | # level: DEBUG
50 | pipelines:
51 | traces:
52 | receivers: [otlp, jaeger]
53 | processors: [batch]
54 | exporters: [otlp, logging]
55 | metrics:
56 | receivers: [otlp]
57 | processors: [batch]
58 | exporters: [otlp]
59 | metrics/hostmetrics:
60 | receivers: [hostmetrics, redis]
61 | processors: [batch, resourcedetection]
62 | exporters: [otlp]
63 | logs:
64 | receivers: [otlp]
65 | processors: [batch]
66 | exporters: [otlp]
67 |
68 | extensions: [health_check, pprof, zpages]
69 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/config/vector.toml:
--------------------------------------------------------------------------------
1 | [sources.syslog_logs]
2 | type = "demo_logs"
3 | format = "syslog"
4 | interval = 0.1
5 |
6 | [sources.apache_common_logs]
7 | type = "demo_logs"
8 | format = "apache_common"
9 | interval = 0.1
10 |
11 | [sources.apache_error_logs]
12 | type = "demo_logs"
13 | format = "apache_error"
14 | interval = 0.1
15 |
16 | [sources.json_logs]
17 | type = "demo_logs"
18 | format = "json"
19 | interval = 0.1
20 |
21 | # Parse Syslog logs
22 | # See the Vector Remap Language reference for more info: https://vrl.dev
23 | [transforms.parse_logs]
24 | type = "remap"
25 | inputs = ["syslog_logs"]
26 | source = '''
27 | . = parse_syslog!(string!(.message))
28 | '''
29 |
30 | # Export data to Uptrace.
31 | [sinks.uptrace]
32 | type = "http"
33 | inputs = ["parse_logs", "apache_common_logs", "apache_error_logs", "json_logs"]
34 | encoding.codec = "json"
35 | framing.method = "newline_delimited"
36 | compression = "gzip"
37 | uri = "http://uptrace:14318/api/v1/vector/logs"
38 | #uri = "https://api.uptrace.dev/api/v1/vector/logs"
39 | headers.uptrace-dsn = "http://project2_secret_token@localhost:14317/2"
40 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 |
3 | services:
4 | clickhouse:
5 | image: clickhouse/clickhouse-server:22.7
6 | restart: on-failure
7 | environment:
8 | CLICKHOUSE_DB: uptrace
9 | healthcheck:
10 | test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
11 | interval: 1s
12 | timeout: 1s
13 | retries: 30
14 | volumes:
15 | - ch_data:/var/lib/clickhouse
16 | ports:
17 | - "8123:8123"
18 | - "9000:9000"
19 |
20 | uptrace:
21 | image: "uptrace/uptrace:1.2.0"
22 | #image: 'uptrace/uptrace-dev:latest'
23 | restart: on-failure
24 | volumes:
25 | - uptrace_data:/var/lib/uptrace
26 | - ./uptrace.yml:/etc/uptrace/uptrace.yml
27 | #environment:
28 | # - DEBUG=2
29 | ports:
30 | - "14317:14317"
31 | - "14318:14318"
32 | depends_on:
33 | clickhouse:
34 | condition: service_healthy
35 |
36 | otel-collector:
37 | image: otel/opentelemetry-collector-contrib:0.58.0
38 | restart: on-failure
39 | volumes:
40 | - ./config/otel-collector.yaml:/etc/otelcol-contrib/config.yaml
41 | ports:
42 | - "4317:4317"
43 | - "4318:4318"
44 |
45 | vector:
46 | image: timberio/vector:0.24.X-alpine
47 | volumes:
48 | - ./config/vector.toml:/etc/vector/vector.toml:ro
49 |
50 | alertmanager:
51 | image: prom/alertmanager:v0.24.0
52 | restart: on-failure
53 | volumes:
54 | - ./config/alertmanager.yml:/etc/alertmanager/config.yml
55 | - alertmanager_data:/alertmanager
56 | ports:
57 | - 9093:9093
58 | command:
59 | - "--config.file=/etc/alertmanager/config.yml"
60 | - "--storage.path=/alertmanager"
61 |
62 | mailhog:
63 | image: mailhog/mailhog:v1.0.1
64 | restart: on-failure
65 | ports:
66 | - "8025:8025"
67 |
68 | redis-server:
69 | image: redis
70 | ports:
71 | - "6379:6379"
72 | redis-cli:
73 | image: redis
74 |
75 | volumes:
76 | uptrace_data:
77 | driver: local
78 | ch_data:
79 | driver: local
80 | alertmanager_data:
81 | driver: local
82 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/image/redis-py-trace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/examples/opentelemetry/image/redis-py-trace.png
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import time
4 |
5 | import redis
6 | import uptrace
7 | from opentelemetry import trace
8 | from opentelemetry.instrumentation.redis import RedisInstrumentor
9 |
10 | tracer = trace.get_tracer("app_or_package_name", "1.0.0")
11 |
12 |
13 | def main():
14 | uptrace.configure_opentelemetry(
15 | dsn="http://project2_secret_token@localhost:14317/2",
16 | service_name="myservice",
17 | service_version="1.0.0",
18 | )
19 | RedisInstrumentor().instrument()
20 |
21 | client = redis.StrictRedis(host="localhost", port=6379)
22 |
23 | span = handle_request(client)
24 | print("trace:", uptrace.trace_url(span))
25 |
26 | for i in range(10000):
27 | handle_request(client)
28 | time.sleep(1)
29 |
30 |
31 | def handle_request(client):
32 | with tracer.start_as_current_span(
33 | "handle-request", kind=trace.SpanKind.CLIENT
34 | ) as span:
35 | client.get("my-key")
36 | client.set("hello", "world")
37 | client.mset(
38 | {
39 | "employee_name": "Adam Adams",
40 | "employee_age": 30,
41 | "position": "Software Engineer",
42 | }
43 | )
44 |
45 | pipe = client.pipeline()
46 | pipe.set("foo", 5)
47 | pipe.set("bar", 18.5)
48 | pipe.set("blee", "hello world!")
49 | pipe.execute()
50 |
51 | return span
52 |
53 |
54 | if __name__ == "__main__":
55 | main()
56 |
--------------------------------------------------------------------------------
/docs/examples/opentelemetry/requirements.txt:
--------------------------------------------------------------------------------
1 | redis==4.3.4
2 | uptrace==1.14.0
3 | opentelemetry-instrumentation-redis==0.35b0
4 |
--------------------------------------------------------------------------------
/docs/exceptions.rst:
--------------------------------------------------------------------------------
1 | .. _exceptions-label:
2 |
3 | Exceptions
4 | ##########
5 |
6 | .. automodule:: redis.exceptions
7 | :members:
--------------------------------------------------------------------------------
/docs/genindex.rst:
--------------------------------------------------------------------------------
1 | Module Index
2 | ============
--------------------------------------------------------------------------------
/docs/images/opentelemetry/distributed-tracing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/images/opentelemetry/distributed-tracing.png
--------------------------------------------------------------------------------
/docs/images/opentelemetry/redis-metrics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/images/opentelemetry/redis-metrics.png
--------------------------------------------------------------------------------
/docs/images/opentelemetry/redis-py-trace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/images/opentelemetry/redis-py-trace.png
--------------------------------------------------------------------------------
/docs/images/opentelemetry/tree-of-spans.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/docs/images/opentelemetry/tree-of-spans.png
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. redis-py documentation master file, created by
2 | sphinx-quickstart on Thu Jul 28 13:55:57 2011.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | redis-py - Python Client for Redis
7 | ====================================
8 |
9 | Getting Started
10 | ****************
11 |
12 | `redis-py `_ requires a running Redis server, and Python 3.7+. See the `Redis
13 | quickstart `_ for Redis installation instructions.
14 |
15 | redis-py can be installed using pip via ``pip install redis``.
16 |
17 |
18 | Quickly connecting to redis
19 | ***************************
20 |
21 | There are two quick ways to connect to Redis.
22 |
23 | **Assuming you run Redis on localhost:6379 (the default)**
24 |
25 | .. code-block:: python
26 |
27 | import redis
28 | r = redis.Redis()
29 | r.ping()
30 |
31 | **Running redis on foo.bar.com, port 12345**
32 |
33 | .. code-block:: python
34 |
35 | import redis
36 | r = redis.Redis(host='foo.bar.com', port=12345)
37 | r.ping()
38 |
39 | **Another example with foo.bar.com, port 12345**
40 |
41 | .. code-block:: python
42 |
43 | import redis
44 | r = redis.from_url('redis://foo.bar.com:12345')
45 | r.ping()
46 |
47 | After that, you probably want to `run redis commands `_.
48 |
49 | .. toctree::
50 | :hidden:
51 |
52 | genindex
53 |
54 | Redis Command Functions
55 | ***********************
56 | .. toctree::
57 | :maxdepth: 2
58 |
59 | commands
60 | redismodules
61 |
62 | Module Documentation
63 | ********************
64 | .. toctree::
65 | :maxdepth: 1
66 |
67 | connections
68 | clustering
69 | exceptions
70 | backoff
71 | lock
72 | retry
73 | lua_scripting
74 | opentelemetry
75 | resp3_features
76 | advanced_features
77 | examples
78 |
79 | Contributing
80 | *************
81 |
82 | - `How to contribute `_
83 | - `Issue Tracker `_
84 | - `Source Code `_
85 | - `Release History `_
86 |
87 | License
88 | *******
89 |
90 | This project is licensed under the `MIT license `_.
91 |
--------------------------------------------------------------------------------
/docs/lock.rst:
--------------------------------------------------------------------------------
1 | Lock
2 | #########
3 |
4 | .. automodule:: redis.lock
5 | :members:
--------------------------------------------------------------------------------
/docs/redismodules.rst:
--------------------------------------------------------------------------------
1 | Redis Modules Commands
2 | ######################
3 |
4 | Accessing redis module commands requires the installation of the supported `Redis module `_. For a quick start with redis modules, try the `Redismod docker `_.
5 |
6 |
7 | RedisBloom Commands
8 | *******************
9 |
10 | These are the commands for interacting with the `RedisBloom module `_. Below is a brief example, as well as documentation on the commands themselves.
11 |
12 | **Create and add to a bloom filter**
13 |
14 | .. code-block:: python
15 |
16 | import redis
17 | r = redis.Redis()
18 | r.bf().create("bloom", 0.01, 1000)
19 | r.bf().add("bloom", "foo")
20 |
21 | **Create and add to a cuckoo filter**
22 |
23 | .. code-block:: python
24 |
25 | import redis
26 | r = redis.Redis()
27 | r.cf().create("cuckoo", 1000)
28 | r.cf().add("cuckoo", "filter")
29 |
30 | **Create Count-Min Sketch and get information**
31 |
32 | .. code-block:: python
33 |
34 | import redis
35 | r = redis.Redis()
36 | r.cms().initbydim("dim", 1000, 5)
37 | r.cms().incrby("dim", ["foo"], [5])
38 | r.cms().info("dim")
39 |
40 | **Create a topk list, and access the results**
41 |
42 | .. code-block:: python
43 |
44 | import redis
45 | r = redis.Redis()
46 | r.topk().reserve("mytopk", 3, 50, 4, 0.9)
47 | r.topk().info("mytopk")
48 |
49 | .. automodule:: redis.commands.bf.commands
50 | :members: BFCommands, CFCommands, CMSCommands, TOPKCommands
51 |
52 | ------
53 |
54 | RedisJSON Commands
55 | ******************
56 |
57 | These are the commands for interacting with the `RedisJSON module `_. Below is a brief example, as well as documentation on the commands themselves.
58 |
59 | **Create a json object**
60 |
61 | .. code-block:: python
62 |
63 | import redis
64 | r = redis.Redis()
65 | r.json().set("mykey", ".", {"hello": "world", "i am": ["a", "json", "object!"]})
66 |
67 | Examples of how to combine search and json can be found `here `_.
68 |
69 | .. automodule:: redis.commands.json.commands
70 | :members: JSONCommands
71 |
72 | -----
73 |
74 | RediSearch Commands
75 | *******************
76 |
77 | These are the commands for interacting with the `RediSearch module `_. Below is a brief example, as well as documentation on the commands themselves. In the example
78 | below, an index named *my_index* is being created. When an index name is not specified, an index named *idx* is created.
79 |
80 | **Create a search index, and display its information**
81 |
82 | .. code-block:: python
83 |
84 | import redis
85 | from redis.commands.search.field import TextField
86 |
87 | r = redis.Redis()
88 | index_name = "my_index"
89 | schema = (
90 | TextField("play", weight=5.0),
91 | TextField("ball"),
92 | )
93 | r.ft(index_name).create_index(schema)
94 | print(r.ft(index_name).info())
95 |
96 |
97 | .. automodule:: redis.commands.search.commands
98 | :members: SearchCommands
99 |
100 | -----
101 |
102 | RedisTimeSeries Commands
103 | ************************
104 |
105 | These are the commands for interacting with the `RedisTimeSeries module <https://redis.io/docs/latest/develop/data-types/timeseries/>`_. Below is a brief example, as well as documentation on the commands themselves.
106 |
107 |
108 | **Create a timeseries object with 5 second retention**
109 |
110 | .. code-block:: python
111 |
112 | import redis
113 | r = redis.Redis()
114 | r.ts().create(2, retention_msecs=5000)
115 |
116 | .. automodule:: redis.commands.timeseries.commands
117 | :members: TimeSeriesCommands
118 |
119 |
120 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx>=5.0,<7.0
2 | docutils<0.18
3 | nbsphinx
4 | sphinx_gallery
5 | ipython
6 | sphinx-autodoc-typehints
7 | furo
8 | pandoc
9 |
--------------------------------------------------------------------------------
/docs/retry.rst:
--------------------------------------------------------------------------------
1 | Retry Helpers
2 | #############
3 |
4 | .. automodule:: redis.retry
5 | :members:
6 |
7 |
8 | Retry in Redis Standalone
9 | **************************
10 |
11 | >>> from redis.backoff import ExponentialBackoff
12 | >>> from redis.retry import Retry
13 | >>> from redis.client import Redis
14 | >>> from redis.exceptions import (
15 | >>> BusyLoadingError,
16 | >>> RedisError,
17 | >>> )
18 | >>>
19 | >>> # Run 3 retries with exponential backoff strategy
20 | >>> retry = Retry(ExponentialBackoff(), 3)
21 | >>> # Redis client with retries on custom errors in addition to the errors
22 | >>> # that are already retried by default
23 | >>> r = Redis(host='localhost', port=6379, retry=retry, retry_on_error=[BusyLoadingError, RedisError])
24 |
25 | As you can see from the example above, Redis client supports 2 parameters to configure the retry behaviour:
26 |
27 | * ``retry``: :class:`~.Retry` instance with a :ref:`backoff-label` strategy and the max number of retries
28 | * The :class:`~.Retry` instance has a default set of :ref:`exceptions-label` to retry on,
29 | which can be overridden by passing a tuple with :ref:`exceptions-label` to the ``supported_errors`` parameter.
30 | * ``retry_on_error``: list of additional :ref:`exceptions-label` to retry on
31 |
32 |
33 | If no ``retry`` is provided, a default one is created with :class:`~.ExponentialWithJitterBackoff` as backoff strategy
34 | and 3 retries.
35 |
36 |
37 | Retry in Redis Cluster
38 | **************************
39 |
40 | >>> from redis.backoff import ExponentialBackoff
41 | >>> from redis.retry import Retry
42 | >>> from redis.cluster import RedisCluster
43 | >>>
44 | >>> # Run 3 retries with exponential backoff strategy
45 | >>> retry = Retry(ExponentialBackoff(), 3)
46 | >>> # Redis Cluster client with retries
47 | >>> rc = RedisCluster(host='localhost', port=6379, retry=retry)
48 |
49 | Retry behaviour in Redis Cluster is a little bit different from Standalone:
50 |
51 | * ``retry``: :class:`~.Retry` instance with a :ref:`backoff-label` strategy and the max number of retries, default value is ``Retry(ExponentialWithJitterBackoff(base=1, cap=10), cluster_error_retry_attempts)``
52 | * ``cluster_error_retry_attempts``: number of times to retry before raising an error when :class:`~.TimeoutError`, :class:`~.ConnectionError`, :class:`~.ClusterDownError` or :class:`~.SlotNotCoveredError` are encountered, default value is ``3``
53 | * This argument is deprecated - it is used to initialize the number of retries for the retry object,
54 | only in the case when the ``retry`` object is not provided.
55 | When the ``retry`` argument is provided, the ``cluster_error_retry_attempts`` argument is ignored!
56 |
57 | * The retry object is not yet fully utilized in the cluster client.
58 | The retry object is used only to determine the number of retries for the cluster level calls.
59 |
60 | Let's consider the following example:
61 |
62 | >>> from redis.backoff import ExponentialBackoff
63 | >>> from redis.retry import Retry
64 | >>> from redis.cluster import RedisCluster
65 | >>>
66 | >>> rc = RedisCluster(host='localhost', port=6379, retry=Retry(ExponentialBackoff(), 6))
67 | >>> rc.set('foo', 'bar')
68 |
69 | #. the client library calculates the hash slot for key 'foo'.
70 | #. given the hash slot, it then determines which node to connect to, in order to execute the command.
71 | #. during the connection, a :class:`~.ConnectionError` is raised.
72 | #. because we set ``retry=Retry(ExponentialBackoff(), 6)``, the cluster client starts a cluster update, removes the failed node from the startup nodes, and re-initializes the cluster.
73 | #. the cluster client retries the command until it either succeeds or the max number of retries is reached.
--------------------------------------------------------------------------------
/doctests/README.md:
--------------------------------------------------------------------------------
1 | # Command examples for redis.io
2 |
3 | ## How to add an example
4 |
5 | Create regular python file in the current folder with meaningful name. It makes sense prefix example files with
6 | command category (e.g. string, set, list, hash, etc) to make navigation in the folder easier. Files ending in *.py*
7 | are automatically run by the test suite.
8 |
9 | ### Special markup
10 |
11 | See https://github.com/redis-stack/redis-stack-website#readme for more details.
12 |
13 | ## How to test examples
14 |
15 | Examples are standalone python scripts, committed to the *doctests* directory. These scripts assume that the
16 | ```doctests/requirements.txt``` and ```dev_requirements.txt``` from this repository have been installed, as per below.
17 |
18 | ```bash
19 | pip install -r dev_requirements.txt
20 | pip uninstall -y redis # uninstall Redis package installed via redis-entraid
21 | pip install -r doctests/requirements.txt
22 | ```
23 |
24 | Note - the CI process runs linters against the examples. Assuming
25 | the requirements above have been installed you can run ```ruff check yourfile.py``` and ```ruff format yourfile.py```
26 | locally to validate the linting, prior to CI.
27 |
28 | Just include necessary assertions in the example file and run
29 | ```bash
30 | sh doctests/run_examples.sh
31 | ```
32 | to test all examples in the current folder.
33 |
--------------------------------------------------------------------------------
/doctests/cmds_cnxmgmt.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_cnxmgmt
# HIDE_START
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# STEP_START auth1
# REMOVE_START
r.config_set("requirepass", "temp_pass")
# REMOVE_END
res1 = r.auth(password="temp_pass")
print(res1)  # >>> True

res2 = r.auth(password="temp_pass", username="default")
print(res2)  # >>> True

# REMOVE_START
# Use identity comparison for the True singleton (ruff/PEP 8 E712).
assert res1 is True
assert res2 is True
r.config_set("requirepass", "")
# REMOVE_END
# STEP_END

# STEP_START auth2
# REMOVE_START
r.acl_setuser(
    "test-user", enabled=True, passwords=["+strong_password"], commands=["+acl"]
)
# REMOVE_END
res = r.auth(username="test-user", password="strong_password")
print(res)  # >>> True

# REMOVE_START
assert res is True
r.acl_deluser("test-user")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_hash.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_hash
# HIDE_START
import redis

r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
# HIDE_END

# STEP_START hset
res1 = r.hset("myhash", "field1", "Hello")
print(res1)
# >>> 1

res2 = r.hget("myhash", "field1")
print(res2)
# >>> Hello

res3 = r.hset("myhash", mapping={"field2": "Hi", "field3": "World"})
print(res3)
# >>> 2

res4 = r.hget("myhash", "field2")
print(res4)
# >>> Hi

res5 = r.hget("myhash", "field3")
print(res5)
# >>> World

res6 = r.hgetall("myhash")
print(res6)
# >>> { "field1": "Hello", "field2": "Hi", "field3": "World" }

# REMOVE_START
assert res1 == 1
assert res2 == "Hello"
assert res3 == 2
assert res4 == "Hi"
assert res5 == "World"
assert res6 == {"field1": "Hello", "field2": "Hi", "field3": "World"}
r.delete("myhash")
# REMOVE_END
# STEP_END

# STEP_START hget
res7 = r.hset("myhash", "field1", "foo")
print(res7)
# >>> 1

res8 = r.hget("myhash", "field1")
print(res8)
# >>> foo

res9 = r.hget("myhash", "field2")
print(res9)
# >>> None

# REMOVE_START
assert res7 == 1
assert res8 == "foo"
# Identity comparison for the None singleton (ruff/PEP 8 E711).
assert res9 is None
r.delete("myhash")
# REMOVE_END
# STEP_END

# STEP_START hgetall
res10 = r.hset("myhash", mapping={"field1": "Hello", "field2": "World"})

res11 = r.hgetall("myhash")
print(res11)  # >>> { "field1": "Hello", "field2": "World" }

# REMOVE_START
assert res11 == {"field1": "Hello", "field2": "World"}
r.delete("myhash")
# REMOVE_END
# STEP_END

# STEP_START hvals
res10 = r.hset("myhash", mapping={"field1": "Hello", "field2": "World"})

res11 = r.hvals("myhash")
print(res11)  # >>> [ "Hello", "World" ]

# REMOVE_START
assert res11 == ["Hello", "World"]
r.delete("myhash")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_list.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_list
# HIDE_START
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# STEP_START lpush
res1 = r.lpush("mylist", "world")
print(res1)  # >>> 1

res2 = r.lpush("mylist", "hello")
print(res2)  # >>> 2

res3 = r.lrange("mylist", 0, -1)
print(res3)  # >>> [ "hello", "world" ]

# REMOVE_START
assert res3 == ["hello", "world"]
r.delete("mylist")
# REMOVE_END
# STEP_END

# STEP_START lrange
res4 = r.rpush("mylist", "one")
print(res4)  # >>> 1

res5 = r.rpush("mylist", "two")
print(res5)  # >>> 2

res6 = r.rpush("mylist", "three")
print(res6)  # >>> 3

res7 = r.lrange("mylist", 0, 0)
print(res7)  # >>> [ 'one' ]

res8 = r.lrange("mylist", -3, 2)
print(res8)  # >>> [ 'one', 'two', 'three' ]

res9 = r.lrange("mylist", -100, 100)
print(res9)  # >>> [ 'one', 'two', 'three' ]

res10 = r.lrange("mylist", 5, 10)
print(res10)  # >>> []

# REMOVE_START
assert res7 == ["one"]
assert res8 == ["one", "two", "three"]
assert res9 == ["one", "two", "three"]
assert res10 == []
r.delete("mylist")
# REMOVE_END
# STEP_END

# STEP_START llen
res11 = r.lpush("mylist", "World")
print(res11)  # >>> 1

res12 = r.lpush("mylist", "Hello")
print(res12)  # >>> 2

res13 = r.llen("mylist")
print(res13)  # >>> 2

# REMOVE_START
assert res13 == 2
r.delete("mylist")
# REMOVE_END
# STEP_END

# STEP_START rpush
res14 = r.rpush("mylist", "hello")
print(res14)  # >>> 1

res15 = r.rpush("mylist", "world")
print(res15)  # >>> 2

res16 = r.lrange("mylist", 0, -1)
print(res16)  # >>> [ "hello", "world" ]

# REMOVE_START
assert res16 == ["hello", "world"]
r.delete("mylist")
# REMOVE_END
# STEP_END

# STEP_START lpop
res17 = r.rpush("mylist", *["one", "two", "three", "four", "five"])
print(res17)  # >>> 5

res18 = r.lpop("mylist")
print(res18)  # >>> "one"

res19 = r.lpop("mylist", 2)
print(res19)  # >>> ['two', 'three']

res17 = r.lrange("mylist", 0, -1)
print(res17)  # >>> [ "four", "five" ]

# REMOVE_START
assert res17 == ["four", "five"]
r.delete("mylist")
# REMOVE_END
# STEP_END

# STEP_START rpop
res18 = r.rpush("mylist", *["one", "two", "three", "four", "five"])
print(res18)  # >>> 5

res19 = r.rpop("mylist")
print(res19)  # >>> "five"

res20 = r.rpop("mylist", 2)
print(res20)  # >>> ['four', 'three']

res21 = r.lrange("mylist", 0, -1)
print(res21)  # >>> [ "one", "two" ]

# REMOVE_START
assert res21 == ["one", "two"]
r.delete("mylist")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_servermgmt.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_servermgmt
# HIDE_START
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# STEP_START flushall
# REMOVE_START
r.set("foo", "1")
r.set("bar", "2")
r.set("baz", "3")
# REMOVE_END
res1 = r.flushall(asynchronous=False)
print(res1)  # >>> True

res2 = r.keys()
print(res2)  # >>> []

# REMOVE_START
# Identity comparison for the True singleton (ruff/PEP 8 E712).
assert res1 is True
assert res2 == []
# REMOVE_END
# STEP_END

# STEP_START info
res3 = r.info()
print(res3)
# >>> {'redis_version': '7.4.0', 'redis_git_sha1': 'c9d29f6a',...}
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_set.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_set
# HIDE_START
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# STEP_START sadd
res1 = r.sadd("myset", "Hello", "World")
print(res1)  # >>> 2

res2 = r.sadd("myset", "World")
print(res2)  # >>> 0

res3 = r.smembers("myset")
print(res3)  # >>> {'Hello', 'World'}

# REMOVE_START
assert res3 == {"Hello", "World"}
r.delete("myset")
# REMOVE_END
# STEP_END

# STEP_START smembers
res4 = r.sadd("myset", "Hello", "World")
print(res4)  # >>> 2

res5 = r.smembers("myset")
print(res5)  # >>> {'Hello', 'World'}

# REMOVE_START
assert res5 == {"Hello", "World"}
r.delete("myset")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_sorted_set.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_sorted_set
# HIDE_START
import redis

r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
# HIDE_END

# STEP_START zadd
res = r.zadd("myzset", {"one": 1})
print(res)
# >>> 1
# REMOVE_START
assert res == 1
# REMOVE_END

res = r.zadd("myzset", {"uno": 1})
print(res)
# >>> 1
# REMOVE_START
assert res == 1
# REMOVE_END

res = r.zadd("myzset", {"two": 2, "three": 3})
print(res)
# >>> 2
# REMOVE_START
assert res == 2
# REMOVE_END

res = r.zrange("myzset", 0, -1, withscores=True)
# Print the result like every other step; the ">>>" comment documents output.
print(res)
# >>> [('one', 1.0), ('uno', 1.0), ('two', 2.0), ('three', 3.0)]
# REMOVE_START
assert res == [('one', 1.0), ('uno', 1.0), ('two', 2.0), ('three', 3.0)]
# REMOVE_END

# REMOVE_START
r.delete("myzset")
# REMOVE_END
# STEP_END

# STEP_START zrange1
res = r.zadd("myzset", {"one": 1, "two": 2, "three": 3})
print(res)
# >>> 3

res = r.zrange("myzset", 0, -1)
print(res)
# >>> ['one', 'two', 'three']
# REMOVE_START
assert res == ['one', 'two', 'three']
# REMOVE_END

res = r.zrange("myzset", 2, 3)
print(res)
# >>> ['three']
# REMOVE_START
assert res == ['three']
# REMOVE_END

res = r.zrange("myzset", -2, -1)
print(res)
# >>> ['two', 'three']
# REMOVE_START
assert res == ['two', 'three']
r.delete("myzset")
# REMOVE_END
# STEP_END

# STEP_START zrange2
res = r.zadd("myzset", {"one": 1, "two": 2, "three": 3})
res = r.zrange("myzset", 0, 1, withscores=True)
print(res)
# >>> [('one', 1.0), ('two', 2.0)]
# REMOVE_START
assert res == [('one', 1.0), ('two', 2.0)]
r.delete("myzset")
# REMOVE_END
# STEP_END

# STEP_START zrange3
res = r.zadd("myzset", {"one": 1, "two": 2, "three": 3})
res = r.zrange("myzset", 2, 3, byscore=True, offset=1, num=1)
print(res)
# >>> ['three']
# REMOVE_START
assert res == ['three']
r.delete("myzset")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/cmds_string.py:
--------------------------------------------------------------------------------
# EXAMPLE: cmds_string
# HIDE_START
import redis

r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
# HIDE_END

# STEP_START incr
res = r.set("mykey", "10")
print(res)
# >>> True
# REMOVE_START
# The SET result is documented as True above, so assert it too.
assert res is True
# REMOVE_END
res = r.incr("mykey")
print(res)
# >>> 11
# REMOVE_START
assert res == 11
r.delete("mykey")
# REMOVE_END
# STEP_END
--------------------------------------------------------------------------------
/doctests/dt_bitfield.py:
--------------------------------------------------------------------------------
# EXAMPLE: bitfield_tutorial
# HIDE_START
"""
Code samples for Bitfield doc pages:
https://redis.io/docs/latest/develop/data-types/bitfields/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
r.delete("bike:1:stats")
# REMOVE_END

# STEP_START bf
bf = r.bitfield("bike:1:stats")
res1 = bf.set("u32", "#0", 1000).execute()
print(res1)  # >>> [0]

res2 = bf.incrby("u32", "#0", -50).incrby("u32", "#1", 1).execute()
print(res2)  # >>> [950, 1]

res3 = bf.incrby("u32", "#0", 500).incrby("u32", "#1", 1).execute()
print(res3)  # >>> [1450, 2]

res4 = bf.get("u32", "#0").get("u32", "#1").execute()
print(res4)  # >>> [1450, 2]
# STEP_END

# REMOVE_START
# Assert every documented result, not only the first and last.
assert res1 == [0]
assert res2 == [950, 1]
assert res3 == [1450, 2]
assert res4 == [1450, 2]
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_bitmap.py:
--------------------------------------------------------------------------------
# EXAMPLE: bitmap_tutorial
# HIDE_START
"""
Code samples for Bitmap doc pages:
https://redis.io/docs/latest/develop/data-types/bitmaps/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
r.delete("pings:2024-01-01-00:00")
# REMOVE_END

# STEP_START ping
res1 = r.setbit("pings:2024-01-01-00:00", 123, 1)
print(res1)  # >>> 0

res2 = r.getbit("pings:2024-01-01-00:00", 123)
print(res2)  # >>> 1

res3 = r.getbit("pings:2024-01-01-00:00", 456)
print(res3)  # >>> 0
# STEP_END

# REMOVE_START
# Assert every documented result, not only res1.
assert res1 == 0
assert res2 == 1
assert res3 == 0
# REMOVE_END

# STEP_START bitcount
# HIDE_START
r.setbit("pings:2024-01-01-00:00", 123, 1)
# HIDE_END
res4 = r.bitcount("pings:2024-01-01-00:00")
print(res4)  # >>> 1
# STEP_END
# REMOVE_START
assert res4 == 1
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_bloom.py:
--------------------------------------------------------------------------------
# EXAMPLE: bf_tutorial
# HIDE_START
"""
Code samples for Bloom filter doc pages:
https://redis.io/docs/latest/develop/data-types/probabilistic/bloom-filter/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
# BF.RESERVE fails if the key already exists, so clean up from prior runs
# (the sibling cuckoo example does the same).
r.delete("bikes:models")
# REMOVE_END

# STEP_START bloom
res1 = r.bf().reserve("bikes:models", 0.01, 1000)
print(res1)  # >>> True

res2 = r.bf().add("bikes:models", "Smoky Mountain Striker")
print(res2)  # >>> True

res3 = r.bf().exists("bikes:models", "Smoky Mountain Striker")
print(res3)  # >>> True

res4 = r.bf().madd(
    "bikes:models",
    "Rocky Mountain Racer",
    "Cloudy City Cruiser",
    "Windy City Wippet",
)
print(res4)  # >>> [True, True, True]

res5 = r.bf().mexists(
    "bikes:models",
    "Rocky Mountain Racer",
    "Cloudy City Cruiser",
    "Windy City Wippet",
)
print(res5)  # >>> [True, True, True]
# STEP_END

# REMOVE_START
assert res1 is True
# True == 1, so these hold whether the client returns bools or ints.
assert res2 == True  # noqa: E712
assert res3 == True  # noqa: E712
assert res4 == [True, True, True]
assert res5 == [True, True, True]
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_cms.py:
--------------------------------------------------------------------------------
# EXAMPLE: cms_tutorial
# HIDE_START
"""
Code samples for Count-min sketch doc pages:
https://redis.io/docs/latest/develop/data-types/probabilistic/count-min-sketch/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END
# REMOVE_START
r.delete("bikes:profit")
# REMOVE_END

# STEP_START cms
res1 = r.cms().initbyprob("bikes:profit", 0.001, 0.002)
print(res1)  # >>> True

res2 = r.cms().incrby("bikes:profit", ["Smoky Mountain Striker"], [100])
print(res2)  # >>> [100]

res3 = r.cms().incrby(
    "bikes:profit", ["Rocky Mountain Racer", "Cloudy City Cruiser"], [200, 150]
)
print(res3)  # >>> [200, 150]

res4 = r.cms().query("bikes:profit", "Smoky Mountain Striker")
print(res4)  # >>> [100]

res5 = r.cms().info("bikes:profit")
print(res5.width, res5.depth, res5.count)  # >>> 2000 9 450
# STEP_END

# REMOVE_START
# This example had no assertions at all; pin the documented results so the
# test suite actually verifies them (as every sibling example does).
assert res1 is True
assert res2 == [100]
assert res3 == [200, 150]
assert res4 == [100]
assert res5.count == 450
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_cuckoo.py:
--------------------------------------------------------------------------------
# EXAMPLE: cuckoo_tutorial
# HIDE_START
"""
Code samples for Cuckoo filter doc pages:
https://redis.io/docs/latest/develop/data-types/probabilistic/cuckoo-filter/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
r.delete("bikes:models")
# REMOVE_END

# STEP_START cuckoo
res1 = r.cf().reserve("bikes:models", 1000000)
print(res1)  # >>> True

res2 = r.cf().add("bikes:models", "Smoky Mountain Striker")
print(res2)  # >>> 1

res3 = r.cf().exists("bikes:models", "Smoky Mountain Striker")
print(res3)  # >>> 1

res4 = r.cf().exists("bikes:models", "Terrible Bike Name")
print(res4)  # >>> 0

res5 = r.cf().delete("bikes:models", "Smoky Mountain Striker")
print(res5)  # >>> 1
# STEP_END

# REMOVE_START
# Assert every documented result, not only res1 and res5.
assert res1 is True
assert res2 == 1
assert res3 == 1
assert res4 == 0
assert res5 == 1
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_geo.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: geo_tutorial
2 | # HIDE_START
3 | """
4 | Code samples for Geospatial doc pages:
5 | https://redis.io/docs/latest/develop/data-types/geospatial/
6 | """
7 | import redis
8 |
9 | r = redis.Redis(decode_responses=True)
10 | # HIDE_END
11 | # REMOVE_START
12 | r.delete("bikes:rentable")
13 | # REMOVE_END
14 |
15 | # STEP_START geoadd
16 | res1 = r.geoadd("bikes:rentable", [-122.27652, 37.805186, "station:1"])
17 | print(res1) # >>> 1
18 |
19 | res2 = r.geoadd("bikes:rentable", [-122.2674626, 37.8062344, "station:2"])
20 | print(res2) # >>> 1
21 |
22 | res3 = r.geoadd("bikes:rentable", [-122.2469854, 37.8104049, "station:3"])
23 | print(res3) # >>> 1
24 | # STEP_END
25 |
26 | # REMOVE_START
27 | assert res1 == 1
28 | assert res2 == 1
29 | assert res3 == 1
30 | # REMOVE_END
31 |
32 | # STEP_START geosearch
33 | res4 = r.geosearch(
34 | "bikes:rentable",
35 | longitude=-122.27652,
36 | latitude=37.805186,
37 | radius=5,
38 | unit="km",
39 | )
40 | print(res4) # >>> ['station:1', 'station:2', 'station:3']
41 | # STEP_END
42 |
43 | # REMOVE_START
44 | assert res4 == ["station:1", "station:2", "station:3"]
45 | # REMOVE_END
46 |
--------------------------------------------------------------------------------
/doctests/dt_hash.py:
--------------------------------------------------------------------------------
# EXAMPLE: hash_tutorial
# HIDE_START
"""
Code samples for Hash doc pages:
https://redis.io/docs/latest/develop/data-types/hashes/
"""
import redis

r = redis.Redis(decode_responses=True)
# HIDE_END
# NOTE(review): "bike:1" and "bike:1:stats" are not deleted before use; the
# expected values assume a fresh database — verify when re-running against a
# shared instance.
# STEP_START set_get_all
res1 = r.hset(
    "bike:1",
    mapping={
        "model": "Deimos",
        "brand": "Ergonom",
        "type": "Enduro bikes",
        "price": 4972,
    },
)
print(res1)
# >>> 4

res2 = r.hget("bike:1", "model")
print(res2)
# >>> 'Deimos'

res3 = r.hget("bike:1", "price")
print(res3)
# >>> '4972'

res4 = r.hgetall("bike:1")
print(res4)
# >>> {'model': 'Deimos', 'brand': 'Ergonom', 'type': 'Enduro bikes', 'price': '4972'}

# STEP_END

# REMOVE_START
assert res1 == 4
assert res2 == "Deimos"
assert res3 == "4972"
assert res4 == {
    "model": "Deimos",
    "brand": "Ergonom",
    "type": "Enduro bikes",
    "price": "4972",
}
# REMOVE_END

# STEP_START hmget
res5 = r.hmget("bike:1", ["model", "price"])
print(res5)
# >>> ['Deimos', '4972']
# STEP_END

# REMOVE_START
assert res5 == ["Deimos", "4972"]
# REMOVE_END

# STEP_START hincrby
res6 = r.hincrby("bike:1", "price", 100)
print(res6)
# >>> 5072
res7 = r.hincrby("bike:1", "price", -100)
print(res7)
# >>> 4972
# STEP_END

# REMOVE_START
assert res6 == 5072
assert res7 == 4972
# REMOVE_END


# STEP_START incrby_get_mget
res11 = r.hincrby("bike:1:stats", "rides", 1)
print(res11)
# >>> 1
res12 = r.hincrby("bike:1:stats", "rides", 1)
print(res12)
# >>> 2
res13 = r.hincrby("bike:1:stats", "rides", 1)
print(res13)
# >>> 3
res14 = r.hincrby("bike:1:stats", "crashes", 1)
print(res14)
# >>> 1
res15 = r.hincrby("bike:1:stats", "owners", 1)
print(res15)
# >>> 1
res16 = r.hget("bike:1:stats", "rides")
print(res16)
# >>> 3
res17 = r.hmget("bike:1:stats", ["crashes", "owners"])
print(res17)
# >>> ['1', '1']
# STEP_END

# REMOVE_START
assert res11 == 1
assert res12 == 2
assert res13 == 3
assert res14 == 1
assert res15 == 1
assert res16 == "3"
assert res17 == ["1", "1"]
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_hll.py:
--------------------------------------------------------------------------------
# EXAMPLE: hll_tutorial
# HIDE_START
"""
Code samples for HyperLogLog doc pages:
https://redis.io/docs/latest/develop/data-types/probabilistic/hyperloglogs/
"""

import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
r.delete("bikes", "commuter_bikes", "all_bikes")
# REMOVE_END

# STEP_START pfadd
res1 = r.pfadd("bikes", "Hyperion", "Deimos", "Phoebe", "Quaoar")
print(res1)  # >>> 1

res2 = r.pfcount("bikes")
print(res2)  # >>> 4

res3 = r.pfadd("commuter_bikes", "Salacia", "Mimas", "Quaoar")
print(res3)  # >>> 1

res4 = r.pfmerge("all_bikes", "bikes", "commuter_bikes")
print(res4)  # >>> True

res5 = r.pfcount("all_bikes")
print(res5)  # >>> 6
# STEP_END

# REMOVE_START
# HLL counts are exact at these tiny cardinalities, so the documented
# values can be asserted.
assert res2 == 4
assert res4 is True
assert res5 == 6
# REMOVE_END
--------------------------------------------------------------------------------
/doctests/dt_ss.py:
--------------------------------------------------------------------------------
# EXAMPLE: ss_tutorial
# HIDE_START
"""
Code samples for Sorted set doc pages:
https://redis.io/docs/latest/develop/data-types/sorted-sets/
"""

import redis

r = redis.Redis(decode_responses=True)
# HIDE_END

# REMOVE_START
r.delete("racer_scores")
# REMOVE_END

# "racer_scores" was deleted above, so every step below builds on a known
# state; the steps are sequential and depend on earlier mutations.

# STEP_START zadd
res1 = r.zadd("racer_scores", {"Norem": 10})
print(res1) # >>> 1

res2 = r.zadd("racer_scores", {"Castilla": 12})
print(res2) # >>> 1

res3 = r.zadd(
    "racer_scores",
    {"Sam-Bodden": 8, "Royce": 10, "Ford": 6, "Prickett": 14, "Castilla": 12},
)
print(res3) # >>> 4
# STEP_END

# REMOVE_START
assert r.zcard("racer_scores") == 6
# REMOVE_END

# STEP_START zrange
res4 = r.zrange("racer_scores", 0, -1)
print(res4) # >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce', 'Castilla', 'Prickett']

res5 = r.zrevrange("racer_scores", 0, -1)
print(res5) # >>> ['Prickett', 'Castilla', 'Royce', 'Norem', 'Sam-Bodden', 'Ford']
# STEP_END

# STEP_START zrange_withscores
res6 = r.zrange("racer_scores", 0, -1, withscores=True)
print(
    res6
)
# >>> [
#       ('Ford', 6.0), ('Sam-Bodden', 8.0), ('Norem', 10.0), ('Royce', 10.0),
#       ('Castilla', 12.0), ('Prickett', 14.0)
# ]
# STEP_END

# STEP_START zrangebyscore
res7 = r.zrangebyscore("racer_scores", "-inf", 10)
print(res7) # >>> ['Ford', 'Sam-Bodden', 'Norem', 'Royce']
# STEP_END

# STEP_START zremrangebyscore
res8 = r.zrem("racer_scores", "Castilla")
print(res8) # >>> 1

res9 = r.zremrangebyscore("racer_scores", "-inf", 9)
print(res9) # >>> 2

res10 = r.zrange("racer_scores", 0, -1)
print(res10) # >>> ['Norem', 'Royce', 'Prickett']
# STEP_END

# REMOVE_START
assert r.zcard("racer_scores") == 3
# REMOVE_END

# STEP_START zrank
res11 = r.zrank("racer_scores", "Norem")
print(res11) # >>> 0

res12 = r.zrevrank("racer_scores", "Norem")
print(res12) # >>> 2
# STEP_END

# STEP_START zadd_lex
res13 = r.zadd(
    "racer_scores",
    {
        "Norem": 0,
        "Sam-Bodden": 0,
        "Royce": 0,
        "Ford": 0,
        "Prickett": 0,
        "Castilla": 0,
    },
)
print(res13) # >>> 3

res14 = r.zrange("racer_scores", 0, -1)
print(res14) # >>> ['Castilla', 'Ford', 'Norem', 'Prickett', 'Royce', 'Sam-Bodden']

res15 = r.zrangebylex("racer_scores", "[A", "[L")
print(res15) # >>> ['Castilla', 'Ford']
# STEP_END

# STEP_START leaderboard
res16 = r.zadd("racer_scores", {"Wood": 100})
print(res16) # >>> 1

res17 = r.zadd("racer_scores", {"Henshaw": 100})
print(res17) # >>> 1

res18 = r.zadd("racer_scores", {"Henshaw": 150})
print(res18) # >>> 0

res19 = r.zincrby("racer_scores", 50, "Wood")
print(res19) # >>> 150.0

res20 = r.zincrby("racer_scores", 50, "Henshaw")
print(res20) # >>> 200.0
# STEP_END
--------------------------------------------------------------------------------
/doctests/dt_string.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: set_tutorial
2 | # HIDE_START
3 | """
4 | Code samples for String doc pages:
5 | https://redis.io/docs/latest/develop/data-types/strings/
6 | """
7 |
8 | import redis
9 |
10 | r = redis.Redis(decode_responses=True)
11 | # HIDE_END
12 |
13 | # STEP_START set_get
14 | res1 = r.set("bike:1", "Deimos")
15 | print(res1) # >>> True
16 | res2 = r.get("bike:1")
17 | print(res2) # >>> Deimos
18 | # STEP_END
19 |
20 | # REMOVE_START
21 | assert res1
22 | assert res2 == "Deimos"
23 | # REMOVE_END
24 |
25 | # STEP_START setnx_xx
26 | res3 = r.set("bike:1", "bike", nx=True)
27 | print(res3) # >>> None
28 | print(r.get("bike:1")) # >>> Deimos
29 | res4 = r.set("bike:1", "bike", xx=True)
30 | print(res4) # >>> True
31 | # STEP_END
32 |
33 | # REMOVE_START
34 | assert res3 is None
35 | assert res4
36 | # REMOVE_END
37 |
38 | # STEP_START mset
39 | res5 = r.mset({"bike:1": "Deimos", "bike:2": "Ares", "bike:3": "Vanth"})
40 | print(res5) # >>> True
41 | res6 = r.mget(["bike:1", "bike:2", "bike:3"])
42 | print(res6) # >>> ['Deimos', 'Ares', 'Vanth']
43 | # STEP_END
44 |
45 | # REMOVE_START
46 | assert res5
47 | assert res6 == ["Deimos", "Ares", "Vanth"]
48 | # REMOVE_END
49 |
50 | # STEP_START incr
51 | r.set("total_crashes", 0)
52 | res7 = r.incr("total_crashes")
53 | print(res7) # >>> 1
54 | res8 = r.incrby("total_crashes", 10)
55 | print(res8) # >>> 11
56 | # STEP_END
57 |
58 | # REMOVE_START
59 | assert res7 == 1
60 | assert res8 == 11
61 | # REMOVE_END
62 |
--------------------------------------------------------------------------------
/doctests/dt_tdigest.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: tdigest_tutorial
2 | # HIDE_START
3 | """
4 | Code samples for t-digest pages:
5 | https://redis.io/docs/latest/develop/data-types/probabilistic/t-digest/
6 | """
7 |
8 | import redis
9 |
10 | r = redis.Redis(decode_responses=True)
11 | # HIDE_END
12 |
13 | # REMOVE_START
14 | r.delete("racer_ages")
15 | r.delete("bikes:sales")
16 | # REMOVE_END
17 |
18 | # STEP_START tdig_start
19 | res1 = r.tdigest().create("bikes:sales", 100)  # second argument is the sketch compression — presumably 100 centroids; verify against TDIGEST.CREATE
20 | print(res1) # >>> True
21 |
22 | res2 = r.tdigest().add("bikes:sales", [21])
23 | print(res2) # >>> OK
24 |
25 | res3 = r.tdigest().add("bikes:sales", [150, 95, 75, 34])
26 | print(res3) # >>> OK
27 | # STEP_END
28 |
29 | # REMOVE_START
30 | assert res1 is True
31 | # REMOVE_END
32 |
33 | # STEP_START tdig_cdf
34 | res4 = r.tdigest().create("racer_ages")
35 | print(res4) # >>> True
36 |
37 | res5 = r.tdigest().add(
38 | "racer_ages",
39 | [
40 | 45.88,
41 | 44.2,
42 | 58.03,
43 | 19.76,
44 | 39.84,
45 | 69.28,
46 | 50.97,
47 | 25.41,
48 | 19.27,
49 | 85.71,
50 | 42.63,
51 | ],
52 | )
53 | print(res5) # >>> OK
54 |
55 | res6 = r.tdigest().rank("racer_ages", 50)
56 | print(res6) # >>> [7]
57 |
58 | res7 = r.tdigest().rank("racer_ages", 50, 40)
59 | print(res7) # >>> [7, 4]
60 | # STEP_END
61 |
62 | # STEP_START tdig_quant
63 | res8 = r.tdigest().quantile("racer_ages", 0.5)
64 | print(res8) # >>> [44.2]
65 |
66 | res9 = r.tdigest().byrank("racer_ages", 4)
67 | print(res9) # >>> [42.63]
68 | # STEP_END
69 |
70 | # STEP_START tdig_min
71 | res10 = r.tdigest().min("racer_ages")
72 | print(res10) # >>> 19.27
73 |
74 | res11 = r.tdigest().max("racer_ages")
75 | print(res11) # >>> 85.71
76 | # STEP_END
77 |
78 | # STEP_START tdig_reset
79 | res12 = r.tdigest().reset("racer_ages")
80 | print(res12) # >>> OK
81 | # STEP_END
82 |
--------------------------------------------------------------------------------
/doctests/dt_topk.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: topk_tutorial
2 | # HIDE_START
3 | """
4 | Code samples for Top-K pages:
5 | https://redis.io/docs/latest/develop/data-types/probabilistic/top-k/
6 | """
7 |
8 | import redis
9 |
10 | r = redis.Redis(decode_responses=True)
11 | # HIDE_END
12 |
13 | # REMOVE_START
14 | r.delete("bikes:keywords")
15 | # REMOVE_END
16 |
17 | # STEP_START topk
18 | res1 = r.topk().reserve("bikes:keywords", 5, 2000, 7, 0.925)
19 | print(res1) # >>> True
20 |
21 | res2 = r.topk().add(
22 | "bikes:keywords",
23 | "store",
24 | "seat",
25 | "handlebars",
26 | "handles",
27 | "pedals",
28 | "tires",
29 | "store",
30 | "seat",
31 | )
32 | print(res2) # >>> [None, None, None, None, None, 'handlebars', None, None]
33 |
34 | res3 = r.topk().list("bikes:keywords")
35 | print(res3) # >>> ['store', 'seat', 'pedals', 'tires', 'handles']
36 |
37 | res4 = r.topk().query("bikes:keywords", "store", "handlebars")
38 | print(res4) # >>> [1, 0]
39 | # STEP_END
40 |
--------------------------------------------------------------------------------
/doctests/query_agg.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_agg
2 | # HIDE_START
3 | import json
4 | import redis
5 | from redis.commands.json.path import Path
6 | from redis.commands.search import Search
7 | from redis.commands.search.aggregation import AggregateRequest
8 | from redis.commands.search.field import NumericField, TagField
9 | from redis.commands.search.index_definition import IndexDefinition, IndexType
10 | import redis.commands.search.reducers as reducers
11 |
12 | r = redis.Redis(decode_responses=True)
13 |
14 | # create index
15 | schema = (
16 | TagField("$.condition", as_name="condition"),
17 | NumericField("$.price", as_name="price"),
18 | )
19 |
20 | index = r.ft("idx:bicycle")
21 | index.create_index(
22 | schema,
23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
24 | )
25 |
26 | # load data
27 | with open("data/query_em.json") as f:
28 | bicycles = json.load(f)
29 |
30 | pipeline = r.pipeline(transaction=False)
31 | for bid, bicycle in enumerate(bicycles):
32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
33 | pipeline.execute()
34 | # HIDE_END
35 |
36 | # STEP_START agg1
37 | search = Search(r, index_name="idx:bicycle")
38 | aggregate_request = AggregateRequest(query='@condition:{new}') \
39 | .load('__key', 'price') \
40 | .apply(discounted='@price - (@price * 0.1)')
41 | res = search.aggregate(aggregate_request)
42 | print(len(res.rows)) # >>> 5
43 | print(res.rows) # >>> [['__key', 'bicycle:0', ...
44 | #[['__key', 'bicycle:0', 'price', '270', 'discounted', '243'],
45 | # ['__key', 'bicycle:5', 'price', '810', 'discounted', '729'],
46 | # ['__key', 'bicycle:6', 'price', '2300', 'discounted', '2070'],
47 | # ['__key', 'bicycle:7', 'price', '430', 'discounted', '387'],
48 | # ['__key', 'bicycle:8', 'price', '1200', 'discounted', '1080']]
49 | # REMOVE_START
50 | assert len(res.rows) == 5
51 | # REMOVE_END
52 | # STEP_END
53 |
54 | # STEP_START agg2
55 | search = Search(r, index_name="idx:bicycle")
56 | aggregate_request = AggregateRequest(query='*') \
57 | .load('price') \
58 | .apply(price_category='@price<1000') \
59 | .group_by('@condition', reducers.sum('@price_category').alias('num_affordable'))
60 | res = search.aggregate(aggregate_request)
61 | print(len(res.rows)) # >>> 3
62 | print(res.rows) # >>>
63 | #[['condition', 'refurbished', 'num_affordable', '1'],
64 | # ['condition', 'used', 'num_affordable', '1'],
65 | # ['condition', 'new', 'num_affordable', '3']]
66 | # REMOVE_START
67 | assert len(res.rows) == 3
68 | # REMOVE_END
69 | # STEP_END
70 |
71 | # STEP_START agg3
72 | search = Search(r, index_name="idx:bicycle")
73 | aggregate_request = AggregateRequest(query='*') \
74 | .apply(type="'bicycle'") \
75 | .group_by('@type', reducers.count().alias('num_total'))
76 | res = search.aggregate(aggregate_request)
77 | print(len(res.rows)) # >>> 1
78 | print(res.rows) # >>> [['type', 'bicycle', 'num_total', '10']]
79 | # REMOVE_START
80 | assert len(res.rows) == 1
81 | # REMOVE_END
82 | # STEP_END
83 |
84 | # STEP_START agg4
85 | search = Search(r, index_name="idx:bicycle")
86 | aggregate_request = AggregateRequest(query='*') \
87 | .load('__key') \
88 | .group_by('@condition', reducers.tolist('__key').alias('bicycles'))
89 | res = search.aggregate(aggregate_request)
90 | print(len(res.rows)) # >>> 3
91 | print(res.rows) # >>>
92 | #[['condition', 'refurbished', 'bicycles', ['bicycle:9']],
93 | # ['condition', 'used', 'bicycles', ['bicycle:1', 'bicycle:2', 'bicycle:3', 'bicycle:4']],
94 | # ['condition', 'new', 'bicycles', ['bicycle:5', 'bicycle:6', 'bicycle:7', 'bicycle:0', 'bicycle:8']]]
95 | # REMOVE_START
96 | assert len(res.rows) == 3
97 | # REMOVE_END
98 | # STEP_END
99 |
100 | # REMOVE_START
101 | # destroy index and data
102 | r.ft("idx:bicycle").dropindex(delete_documents=True)
103 | # REMOVE_END
104 |
--------------------------------------------------------------------------------
/doctests/query_combined.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_combined
2 | # HIDE_START
3 | import json
4 | import numpy as np
5 | import redis
6 | import warnings
7 | from redis.commands.json.path import Path
8 | from redis.commands.search.field import NumericField, TagField, TextField, VectorField
9 | from redis.commands.search.index_definition import IndexDefinition, IndexType
10 | from redis.commands.search.query import Query
11 | from sentence_transformers import SentenceTransformer
12 |
13 |
14 | def embed_text(model, text):
15 | return np.array(model.encode(text)).astype(np.float32).tobytes()
16 |
17 | warnings.filterwarnings("ignore", category=FutureWarning, message=r".*clean_up_tokenization_spaces.*")
18 | model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
19 | query = "Bike for small kids"
20 | query_vector = embed_text(model, query)
21 |
22 | r = redis.Redis(decode_responses=True)
23 |
24 | # create index
25 | schema = (
26 | TextField("$.description", no_stem=True, as_name="model"),
27 | TagField("$.condition", as_name="condition"),
28 | NumericField("$.price", as_name="price"),
29 | VectorField(
30 | "$.description_embeddings",
31 | "FLAT",
32 | {
33 | "TYPE": "FLOAT32",
34 | "DIM": 384,
35 | "DISTANCE_METRIC": "COSINE",
36 | },
37 | as_name="vector",
38 | ),
39 | )
40 |
41 | index = r.ft("idx:bicycle")
42 | index.create_index(
43 | schema,
44 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
45 | )
46 |
47 | # load data
48 | with open("data/query_vector.json") as f:
49 | bicycles = json.load(f)
50 |
51 | pipeline = r.pipeline(transaction=False)
52 | for bid, bicycle in enumerate(bicycles):
53 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
54 | pipeline.execute()
55 | # HIDE_END
56 |
57 | # STEP_START combined1
58 | q = Query("@price:[500 1000] @condition:{new}")
59 | res = index.search(q)
60 | print(res.total) # >>> 1
61 | # REMOVE_START
62 | assert res.total == 1
63 | # REMOVE_END
64 | # STEP_END
65 |
66 | # STEP_START combined2
67 | q = Query("kids @price:[500 1000] @condition:{used}")
68 | res = index.search(q)
69 | print(res.total) # >>> 1
70 | # REMOVE_START
71 | assert res.total == 1
72 | # REMOVE_END
73 | # STEP_END
74 |
75 | # STEP_START combined3
76 | q = Query("(kids | small) @condition:{used}")
77 | res = index.search(q)
78 | print(res.total) # >>> 2
79 | # REMOVE_START
80 | assert res.total == 2
81 | # REMOVE_END
82 | # STEP_END
83 |
84 | # STEP_START combined4
85 | q = Query("@description:(kids | small) @condition:{used}")
86 | res = index.search(q)
87 | print(res.total) # >>> 0
88 | # REMOVE_START
89 | assert res.total == 0
90 | # REMOVE_END
91 | # STEP_END
92 |
93 | # STEP_START combined5
94 | q = Query("@description:(kids | small) @condition:{new | used}")
95 | res = index.search(q)
96 | print(res.total) # >>> 0
97 | # REMOVE_START
98 | assert res.total == 0
99 | # REMOVE_END
100 | # STEP_END
101 |
102 | # STEP_START combined6
103 | q = Query("@price:[500 1000] -@condition:{new}")
104 | res = index.search(q)
105 | print(res.total) # >>> 2
106 | # REMOVE_START
107 | assert res.total == 2
108 | # REMOVE_END
109 | # STEP_END
110 |
111 | # STEP_START combined7
112 | q = Query("(@price:[500 1000] -@condition:{new})=>[KNN 3 @vector $query_vector]").dialect(2)
113 | # supply the precomputed text embedding for the $query_vector parameter
114 | res = index.search(q,{ 'query_vector': query_vector })
115 | print(res.total) # >>> 2
116 | # REMOVE_START
117 | assert res.total == 2
118 | # REMOVE_END
119 | # STEP_END
120 |
121 | # REMOVE_START
122 | # destroy index and data
123 | r.ft("idx:bicycle").dropindex(delete_documents=True)
124 | # REMOVE_END
125 |
--------------------------------------------------------------------------------
/doctests/query_em.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_em
2 | # HIDE_START
3 | import json
4 | import redis
5 | from redis.commands.json.path import Path
6 | from redis.commands.search.field import TextField, NumericField, TagField
7 | from redis.commands.search.index_definition import IndexDefinition, IndexType
8 | from redis.commands.search.query import NumericFilter, Query
9 |
10 | r = redis.Redis(decode_responses=True)
11 |
12 | # create index
13 | schema = (
14 | TextField("$.description", as_name="description"),
15 | NumericField("$.price", as_name="price"),
16 | TagField("$.condition", as_name="condition"),
17 | )
18 |
19 | index = r.ft("idx:bicycle")
20 | index.create_index(
21 | schema,
22 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
23 | )
24 |
25 | # load data
26 | with open("data/query_em.json") as f:
27 | bicycles = json.load(f)
28 |
29 | pipeline = r.pipeline(transaction=False)
30 | for bid, bicycle in enumerate(bicycles):
31 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
32 | pipeline.execute()
33 | # HIDE_END
34 |
35 | # STEP_START em1
36 | res = index.search(Query("@price:[270 270]"))
37 | print(res.total)
38 | # >>> 1
39 | # REMOVE_START
40 | assert res.total == 1
41 | # REMOVE_END
42 |
43 | try:
44 | res = index.search(Query("@price:[270]")) # not yet supported in redis-py
45 | print(res.total)
46 | # >>> 1
47 | assert res.total == 1
48 | except:
49 | print("'@price:[270]' syntax not yet supported.")
50 |
51 | try:
52 | res = index.search(Query("@price==270")) # not yet supported in redis-py
53 | print(res.total)
54 | # >>> 1
55 | assert res.total == 1
56 | except:
57 | print("'@price==270' syntax not yet supported.")
58 |
59 | query = Query("*").add_filter(NumericFilter("price", 270, 270))
60 | res = index.search(query)
61 | print(res.total)
62 | # >>> 1
63 | # REMOVE_START
64 | assert res.total == 1
65 | # REMOVE_END
66 | # STEP_END
67 |
68 | # STEP_START em2
69 | res = index.search(Query("@condition:{new}"))
70 | print(res.total)
71 | # >>> 5
72 | # REMOVE_START
73 | assert res.total == 5
74 | # REMOVE_END
75 | # STEP_END
76 |
77 | # STEP_START em3
78 | schema = (
79 | TagField("$.email", as_name="email")
80 | )
81 |
82 | idx_email = r.ft("idx:email")
83 | idx_email.create_index(
84 | schema,
85 | definition=IndexDefinition(prefix=["key:"], index_type=IndexType.JSON),
86 | )
87 | r.json().set('key:1', Path.root_path(), '{"email": "test@redis.com"}')
88 |
89 | try:
90 | res = idx_email.search(Query("test@redis.com").dialect(2))
91 | print(res)
92 | except:
93 | print("'test@redis.com' syntax not yet supported.")
94 | # REMOVE_START
95 | r.ft("idx:email").dropindex(delete_documents=True)
96 | # REMOVE_END
97 | # STEP_END
98 |
99 | # STEP_START em4
100 | res = index.search(Query("@description:\"rough terrain\""))
101 | print(res.total)
102 | # >>> 1 (Result{1 total, docs: [Document {'id': 'bicycle:8'...)
103 | # REMOVE_START
104 | assert res.total == 1
105 | # REMOVE_END
106 | # STEP_END
107 |
108 | # REMOVE_START
109 | # destroy index and data
110 | r.ft("idx:bicycle").dropindex(delete_documents=True)
111 | # REMOVE_END
112 |
--------------------------------------------------------------------------------
/doctests/query_ft.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_ft
2 | # HIDE_START
3 | import json
4 | import sys
5 | import redis
6 | from redis.commands.json.path import Path
7 | from redis.commands.search.field import TextField, NumericField, TagField
8 | from redis.commands.search.index_definition import IndexDefinition, IndexType
9 | from redis.commands.search.query import NumericFilter, Query
10 |
11 | r = redis.Redis(decode_responses=True)
12 |
13 | # create index
14 | schema = (
15 | TextField("$.brand", as_name="brand"),
16 | TextField("$.model", as_name="model"),
17 | TextField("$.description", as_name="description"),
18 | )
19 |
20 | index = r.ft("idx:bicycle")
21 | index.create_index(
22 | schema,
23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
24 | )
25 |
26 | # load data
27 | with open("data/query_em.json") as f:
28 | bicycles = json.load(f)
29 |
30 | pipeline = r.pipeline(transaction=False)
31 | for bid, bicycle in enumerate(bicycles):
32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
33 | pipeline.execute()
34 | # HIDE_END
35 |
36 | # STEP_START ft1
37 | res = index.search(Query("@description: kids"))
38 | print(res.total)
39 | # >>> 2
40 | # REMOVE_START
41 | assert res.total == 2
42 | # REMOVE_END
43 | # STEP_END
44 |
45 | # STEP_START ft2
46 | res = index.search(Query("@model: ka*"))
47 | print(res.total)
48 | # >>> 1
49 | # REMOVE_START
50 | assert res.total == 1
51 | # REMOVE_END
52 | # STEP_END
53 |
54 | # STEP_START ft3
55 | res = index.search(Query("@brand: *bikes"))
56 | print(res.total)
57 | # >>> 2
58 | # REMOVE_START
59 | assert res.total == 2
60 | # REMOVE_END
61 | # STEP_END
62 |
63 | # STEP_START ft4
64 | res = index.search(Query("%optamized%"))  # %term% is a fuzzy match; the misspelling is deliberate and matches "optimized"
65 | print(res)
66 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:3', 'payload': None, 'json': '{"pickup_zone":"POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))","store_location":"-80.1918,25.7617","brand":"Eva","model":"Eva 291","price":3400,"description":"The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It’s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!","condition":"used"}'}]}
67 | # REMOVE_START
68 | assert res.total == 1
69 | # REMOVE_END
70 | # STEP_END
71 |
72 | # STEP_START ft5
73 | res = index.search(Query("%%optamised%%"))
74 | print(res)
75 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:3', 'payload': None, 'json': '{"pickup_zone":"POLYGON((-80.2433 25.8067, -80.1333 25.8067, -80.1333 25.6967, -80.2433 25.6967, -80.2433 25.8067))","store_location":"-80.1918,25.7617","brand":"Eva","model":"Eva 291","price":3400,"description":"The sister company to Nord, Eva launched in 2005 as the first and only women-dedicated bicycle brand. Designed by women for women, allEva bikes are optimized for the feminine physique using analytics from a body metrics database. If you like 29ers, try the Eva 291. It’s a brand new bike for 2022.. This full-suspension, cross-country ride has been designed for velocity. The 291 has 100mm of front and rear travel, a superlight aluminum frame and fast-rolling 29-inch wheels. Yippee!","condition":"used"}'}]}
76 | # REMOVE_START
77 | assert res.total == 1
78 | # REMOVE_END
79 | # STEP_END
80 |
81 | # REMOVE_START
82 | # destroy index and data
83 | r.ft("idx:bicycle").dropindex(delete_documents=True)
84 | # REMOVE_END
85 |
--------------------------------------------------------------------------------
/doctests/query_geo.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_geo
2 | # HIDE_START
3 | import json
4 | import sys
5 | import redis
6 | from redis.commands.json.path import Path
7 | from redis.commands.search.field import GeoField, GeoShapeField
8 | from redis.commands.search.index_definition import IndexDefinition, IndexType
9 | from redis.commands.search.query import Query
10 |
11 | r = redis.Redis(decode_responses=True)
12 |
13 | # create index
14 | schema = (
15 | GeoField("$.store_location", as_name="store_location"),
16 | GeoShapeField("$.pickup_zone", coord_system=GeoShapeField.FLAT, as_name="pickup_zone")
17 | )
18 |
19 | index = r.ft("idx:bicycle")
20 | index.create_index(
21 | schema,
22 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
23 | )
24 |
25 | # load data
26 | with open("data/query_em.json") as f:
27 | bicycles = json.load(f)
28 |
29 | pipeline = r.pipeline(transaction=False)
30 | for bid, bicycle in enumerate(bicycles):
31 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
32 | pipeline.execute()
33 | # HIDE_END
34 |
35 | # STEP_START geo1
36 | params_dict = {"lon": -0.1778, "lat": 51.5524, "radius": 20, "units": "mi"}
37 | q = Query("@store_location:[$lon $lat $radius $units]").dialect(2)
38 | res = index.search(q, query_params=params_dict)
39 | print(res)
40 | # >>> Result{1 total, docs: [Document {'id': 'bicycle:5', ...
41 | # REMOVE_START
42 | assert res.total == 1
43 | # REMOVE_END
44 | # STEP_END
45 |
46 | # STEP_START geo2
47 | params_dict = {"bike": "POINT(-0.1278 51.5074)"}
48 | q = Query("@pickup_zone:[CONTAINS $bike]").dialect(3)
49 | res = index.search(q, query_params=params_dict)
50 | print(res.total) # >>> 1
51 | # REMOVE_START
52 | assert res.total == 1
53 | # REMOVE_END
54 | # STEP_END
55 |
56 | # STEP_START geo3
57 | params_dict = {"europe": "POLYGON((-25 35, 40 35, 40 70, -25 70, -25 35))"}
58 | q = Query("@pickup_zone:[WITHIN $europe]").dialect(3)
59 | res = index.search(q, query_params=params_dict)
60 | print(res.total) # >>> 5
61 | # REMOVE_START
62 | assert res.total == 5
63 | # REMOVE_END
64 | # STEP_END
65 |
66 | # REMOVE_START
67 | # destroy index and data
68 | r.ft("idx:bicycle").dropindex(delete_documents=True)
69 | # REMOVE_END
70 |
--------------------------------------------------------------------------------
/doctests/query_range.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: query_range
2 | # HIDE_START
3 | import json
4 | import sys
5 | import redis
6 | from redis.commands.json.path import Path
7 | from redis.commands.search.field import TextField, NumericField, TagField
8 | from redis.commands.search.index_definition import IndexDefinition, IndexType
9 | from redis.commands.search.query import NumericFilter, Query
10 |
11 | r = redis.Redis(decode_responses=True)
12 |
13 | # create index
14 | schema = (
15 | TextField("$.description", as_name="description"),
16 | NumericField("$.price", as_name="price"),
17 | TagField("$.condition", as_name="condition"),
18 | )
19 |
20 | index = r.ft("idx:bicycle")
21 | index.create_index(
22 | schema,
23 | definition=IndexDefinition(prefix=["bicycle:"], index_type=IndexType.JSON),
24 | )
25 |
26 | # load data
27 | with open("data/query_em.json") as f:
28 | bicycles = json.load(f)
29 |
30 | pipeline = r.pipeline(transaction=False)
31 | for bid, bicycle in enumerate(bicycles):
32 | pipeline.json().set(f'bicycle:{bid}', Path.root_path(), bicycle)
33 | pipeline.execute()
34 | # HIDE_END
35 |
36 | # STEP_START range1
37 | res = index.search(Query("@price:[500 1000]"))
38 | print(res.total)
39 | # >>> 3
40 | # REMOVE_START
41 | assert res.total == 3
42 | # REMOVE_END
43 | # STEP_END
44 |
45 | # STEP_START range2
46 | query = Query("*").add_filter(NumericFilter("price", 500, 1000))
47 | res = index.search(query)
48 | print(res.total)
49 | # >>> 3
50 | # REMOVE_START
51 | assert res.total == 3
52 | # REMOVE_END
53 | # STEP_END
54 |
55 | # STEP_START range3
56 | query = Query("*").add_filter(NumericFilter("price", "(1000", "+inf"))  # "(1000" is an exclusive lower bound: price > 1000
57 | res = index.search(query)
58 | print(res.total)
59 | # >>> 5
60 | # REMOVE_START
61 | assert res.total == 5
62 | # REMOVE_END
63 | # STEP_END
64 |
65 | # STEP_START range4
66 | query = Query('@price:[-inf 2000]').sort_by('price').paging(0, 5)
67 | res = index.search(query)
68 | print(res.total)
69 | print(res)
70 | # >>> Result{7 total, docs: [Document {'id': 'bicycle:0', ... }, Document {'id': 'bicycle:7', ... }, Document {'id': 'bicycle:5', ... }, ...]
71 | # REMOVE_START
72 | assert res.total == 7
73 | # REMOVE_END
74 | # STEP_END
75 |
76 | # REMOVE_START
77 | # destroy index and data
78 | r.ft("idx:bicycle").dropindex(delete_documents=True)
79 | # REMOVE_END
80 |
--------------------------------------------------------------------------------
/doctests/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | pandas
3 | requests
4 | sentence_transformers
5 | tabulate
6 | redis  # install latest stable version
7 |
--------------------------------------------------------------------------------
/doctests/run_examples.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 |
4 | # Run every doctest example against a local Redis, flushing the DB between runs.
5 | # Usage: run_examples.sh [path-to-examples]; defaults to this script's directory.
6 | basepath=`readlink -f "$1"`
7 | if [ $? -ne 0 ]; then
8 |     basepath=`readlink -f $(dirname "$0")`
9 |     # Only announce the fallback when no usable path was supplied.
10 |     echo "No path specified, using ${basepath}"
11 | fi
12 |
13 | set -e
14 | cd "${basepath}"
15 | for i in "${basepath}"/*.py; do
16 |     redis-cli flushdb
17 |     python "$i"
18 | done
19 |
--------------------------------------------------------------------------------
/doctests/string_set_get.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: set_and_get
2 | # HIDE_START
3 | """
4 | Code samples for data structure store quickstart pages:
5 | https://redis.io/docs/latest/develop/get-started/data-store/
6 | """
7 |
8 | import redis
9 |
10 | r = redis.Redis(host="localhost", port=6379, db=0, decode_responses=True)
11 | # HIDE_END
12 |
13 | res = r.set("bike:1", "Process 134")
14 | print(res)
15 | # >>> True
16 | # REMOVE_START
17 | assert res
18 | # REMOVE_END
19 |
20 | res = r.get("bike:1")
21 | print(res)
22 | # >>> "Process 134"
23 | # REMOVE_START
24 | assert res == "Process 134"
25 | # REMOVE_END
26 |
--------------------------------------------------------------------------------
/doctests/trans_pipe.py:
--------------------------------------------------------------------------------
1 | # EXAMPLE: pipe_trans_tutorial
2 | # HIDE_START
3 | """
4 | Code samples for vector database quickstart pages:
5 | https://redis.io/docs/latest/develop/get-started/vector-database/
6 | """
7 | # HIDE_END
8 | import redis
9 |
10 | # STEP_START basic_pipe
11 | r = redis.Redis(decode_responses=True)
12 | # REMOVE_START
13 | for i in range(5):
14 | r.delete(f"seat:{i}")
15 |
16 | r.delete("shellpath")
17 | # REMOVE_END
18 |
19 | pipe = r.pipeline()
20 |
21 | for i in range(5):
22 | pipe.set(f"seat:{i}", f"#{i}")
23 |
24 | set_5_result = pipe.execute()
25 | print(set_5_result) # >>> [True, True, True, True, True]
26 |
27 | pipe = r.pipeline()
28 |
29 | # "Chain" pipeline commands together.
30 | get_3_result = pipe.get("seat:0").get("seat:3").get("seat:4").execute()
31 | print(get_3_result) # >>> ['#0', '#3', '#4']
32 | # STEP_END
33 | # REMOVE_START
34 | assert set_5_result == [True, True, True, True, True]
35 | assert get_3_result == ['#0', '#3', '#4']
36 | # REMOVE_END
37 |
38 | # STEP_START trans_watch
39 | r.set("shellpath", "/usr/syscmds/")
40 |
41 | with r.pipeline() as pipe:
42 | # Repeat until successful.
43 | while True:
44 | try:
45 | # Watch the key we are about to change.
46 | pipe.watch("shellpath")
47 |
48 | # The pipeline executes commands directly (instead of
49 | # buffering them) from immediately after the `watch()`
50 | # call until we begin the transaction.
51 | current_path = pipe.get("shellpath")
52 | new_path = current_path + ":/usr/mycmds/"
53 |
54 | # Start the transaction, which will enable buffering
55 | # again for the remaining commands.
56 | pipe.multi()
57 |
58 | pipe.set("shellpath", new_path)
59 |
60 | pipe.execute()
61 |
62 | # The transaction succeeded, so break out of the loop.
63 | break
64 | except redis.WatchError:
65 | # The transaction failed, so continue with the next attempt.
66 | continue
67 |
68 | get_path_result = r.get("shellpath")
69 | print(get_path_result) # >>> '/usr/syscmds/:/usr/mycmds/'
70 | # STEP_END
71 | # REMOVE_START
72 | assert get_path_result == '/usr/syscmds/:/usr/mycmds/'
73 | r.delete("shellpath")
74 | # REMOVE_END
75 |
76 | # STEP_START watch_conv_method
77 | r.set("shellpath", "/usr/syscmds/")
78 |
79 |
80 | def watched_sequence(pipe):
81 | current_path = pipe.get("shellpath")
82 | new_path = current_path + ":/usr/mycmds/"
83 |
84 | pipe.multi()
85 |
86 | pipe.set("shellpath", new_path)
87 |
88 |
89 | trans_result = r.transaction(watched_sequence, "shellpath")
90 | print(trans_result) # >>> True
91 |
92 | get_path_result = r.get("shellpath")
93 | print(get_path_result) # >>> '/usr/syscmds/:/usr/mycmds/'
94 | # REMOVE_START
95 | assert trans_result
96 | assert get_path_result == '/usr/syscmds/:/usr/mycmds/'
97 | # REMOVE_END
98 | # STEP_END
99 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 |
5 | [project]
6 | name = "redis"
7 | dynamic = ["version"]
8 | description = "Python client for Redis database and key-value store"
9 | readme = "README.md"
10 | license = "MIT"
11 | requires-python = ">=3.9"
12 | authors = [{ name = "Redis Inc.", email = "oss@redis.com" }]
13 | keywords = ["Redis", "database", "key-value-store"]
14 | classifiers = [
15 | "Development Status :: 5 - Production/Stable",
16 | "Environment :: Console",
17 | "Intended Audience :: Developers",
18 | "License :: OSI Approved :: MIT License",
19 | "Operating System :: OS Independent",
20 | "Programming Language :: Python",
21 | "Programming Language :: Python :: 3",
22 | "Programming Language :: Python :: 3 :: Only",
23 | "Programming Language :: Python :: 3.9",
24 | "Programming Language :: Python :: 3.10",
25 | "Programming Language :: Python :: 3.11",
26 | "Programming Language :: Python :: 3.12",
27 | "Programming Language :: Python :: 3.13",
28 | "Programming Language :: Python :: Implementation :: CPython",
29 | "Programming Language :: Python :: Implementation :: PyPy",
30 | ]
31 | dependencies = ['async-timeout>=4.0.3; python_full_version<"3.11.3"']
32 |
33 | [project.optional-dependencies]
34 | hiredis = [
35 | "hiredis>=3.2.0",
36 | ]
37 | ocsp = [
38 | "cryptography>=36.0.1",
39 | "pyopenssl>=20.0.1",
40 | "requests>=2.31.0",
41 | ]
42 | jwt = [
43 | "PyJWT>=2.9.0",
44 | ]
45 |
46 | [project.urls]
47 | Changes = "https://github.com/redis/redis-py/releases"
48 | Code = "https://github.com/redis/redis-py"
49 | Documentation = "https://redis.readthedocs.io/en/latest/"
50 | Homepage = "https://github.com/redis/redis-py"
51 | "Issue tracker" = "https://github.com/redis/redis-py/issues"
52 |
53 | [tool.hatch.version]
54 | path = "redis/__init__.py"
55 |
56 | [tool.hatch.build.targets.sdist]
57 | include = ["/redis", "/tests", "dev_requirements.txt"]
58 |
59 | [tool.hatch.build.targets.wheel]
60 | include = ["/redis"]
61 |
62 | [tool.pytest.ini_options]
63 | addopts = "-s"
64 | markers = [
65 | "redismod: run only the redis module tests",
66 | "pipeline: pipeline tests",
67 | "onlycluster: marks tests to be run only with cluster mode redis",
68 | "onlynoncluster: marks tests to be run only with standalone redis",
69 | "ssl: marker for only the ssl tests",
70 | "asyncio: marker for async tests",
71 | "replica: replica tests",
72 | "experimental: run only experimental tests",
73 | "cp_integration: credential provider integration tests",
74 | ]
75 | asyncio_default_fixture_loop_scope = "function"
76 | asyncio_mode = "auto"
77 | timeout = 30
78 | filterwarnings = [
79 | "always",
80 | # Ignore a coverage warning when COVERAGE_CORE=sysmon for Pythons < 3.12.
81 | "ignore:sys.monitoring isn't available:coverage.exceptions.CoverageWarning",
82 | ]
83 |
84 | [tool.ruff]
85 | target-version = "py39"
86 | line-length = 88
87 | exclude = [
88 | "*.egg-info",
89 | "*.pyc",
90 | ".git",
91 | ".venv*",
92 | "build",
93 | "dist",
94 | "docker",
95 | "docs/*",
96 | "doctests/*",
97 | "tasks.py",
98 | "venv*",
99 | "whitelist.py",
100 | ]
101 |
102 | [tool.ruff.lint]
103 | ignore = [
104 | "E501", # line too long (taken care of with ruff format)
105 | "E741", # ambiguous variable name
106 | "N818", # Errors should have Error suffix
107 | ]
108 |
109 | select = ["E", "F", "FLY", "I", "N", "W"]
110 |
111 | [tool.ruff.lint.per-file-ignores]
112 | "redis/commands/bf/*" = [
113 | # the `bf` module uses star imports, so this is required there.
114 | "F405", # name may be undefined, or defined from star imports
115 | ]
116 | "redis/commands/{bf,timeseries,json,search}/*" = ["N"]
117 | "tests/*" = [
118 | "I", # TODO: could be enabled, plenty of changes
119 | "N801", # class name should use CapWords convention
120 | "N803", # argument name should be lowercase
121 | "N802", # function name should be lowercase
122 | "N806", # variable name should be lowercase
123 | ]
124 |
--------------------------------------------------------------------------------
/redis/__init__.py:
--------------------------------------------------------------------------------
1 | from redis import asyncio # noqa
2 | from redis.backoff import default_backoff
3 | from redis.client import Redis, StrictRedis
4 | from redis.cluster import RedisCluster
5 | from redis.connection import (
6 | BlockingConnectionPool,
7 | Connection,
8 | ConnectionPool,
9 | SSLConnection,
10 | UnixDomainSocketConnection,
11 | )
12 | from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider
13 | from redis.exceptions import (
14 | AuthenticationError,
15 | AuthenticationWrongNumberOfArgsError,
16 | BusyLoadingError,
17 | ChildDeadlockedError,
18 | ConnectionError,
19 | CrossSlotTransactionError,
20 | DataError,
21 | InvalidPipelineStack,
22 | InvalidResponse,
23 | OutOfMemoryError,
24 | PubSubError,
25 | ReadOnlyError,
26 | RedisClusterException,
27 | RedisError,
28 | ResponseError,
29 | TimeoutError,
30 | WatchError,
31 | )
32 | from redis.sentinel import (
33 | Sentinel,
34 | SentinelConnectionPool,
35 | SentinelManagedConnection,
36 | SentinelManagedSSLConnection,
37 | )
38 | from redis.utils import from_url
39 |
40 |
def int_or_str(value):
    """Return ``int(value)`` when the conversion succeeds, else the value as-is."""
    try:
        converted = int(value)
    except ValueError:
        return value
    return converted
46 |
47 |
48 | __version__ = "6.2.0"
49 | VERSION = tuple(map(int_or_str, __version__.split(".")))
50 |
51 |
52 | __all__ = [
53 | "AuthenticationError",
54 | "AuthenticationWrongNumberOfArgsError",
55 | "BlockingConnectionPool",
56 | "BusyLoadingError",
57 | "ChildDeadlockedError",
58 | "Connection",
59 | "ConnectionError",
60 | "ConnectionPool",
61 | "CredentialProvider",
62 | "CrossSlotTransactionError",
63 | "DataError",
64 | "from_url",
65 | "default_backoff",
66 | "InvalidPipelineStack",
67 | "InvalidResponse",
68 | "OutOfMemoryError",
69 | "PubSubError",
70 | "ReadOnlyError",
71 | "Redis",
72 | "RedisCluster",
73 | "RedisClusterException",
74 | "RedisError",
75 | "ResponseError",
76 | "Sentinel",
77 | "SentinelConnectionPool",
78 | "SentinelManagedConnection",
79 | "SentinelManagedSSLConnection",
80 | "SSLConnection",
81 | "UsernamePasswordCredentialProvider",
82 | "StrictRedis",
83 | "TimeoutError",
84 | "UnixDomainSocketConnection",
85 | "WatchError",
86 | ]
87 |
--------------------------------------------------------------------------------
/redis/_parsers/__init__.py:
--------------------------------------------------------------------------------
1 | from .base import (
2 | AsyncPushNotificationsParser,
3 | BaseParser,
4 | PushNotificationsParser,
5 | _AsyncRESPBase,
6 | )
7 | from .commands import AsyncCommandsParser, CommandsParser
8 | from .encoders import Encoder
9 | from .hiredis import _AsyncHiredisParser, _HiredisParser
10 | from .resp2 import _AsyncRESP2Parser, _RESP2Parser
11 | from .resp3 import _AsyncRESP3Parser, _RESP3Parser
12 |
13 | __all__ = [
14 | "AsyncCommandsParser",
15 | "_AsyncHiredisParser",
16 | "_AsyncRESPBase",
17 | "_AsyncRESP2Parser",
18 | "_AsyncRESP3Parser",
19 | "AsyncPushNotificationsParser",
20 | "CommandsParser",
21 | "Encoder",
22 | "BaseParser",
23 | "_HiredisParser",
24 | "_RESP2Parser",
25 | "_RESP3Parser",
26 | "PushNotificationsParser",
27 | ]
28 |
--------------------------------------------------------------------------------
/redis/_parsers/encoders.py:
--------------------------------------------------------------------------------
1 | from ..exceptions import DataError
2 |
3 |
class Encoder:
    """Translate values to bytes for the wire and bytes back to strings."""

    __slots__ = "encoding", "encoding_errors", "decode_responses"

    def __init__(self, encoding, encoding_errors, decode_responses):
        # Character set and error policy used for both directions.
        self.encoding = encoding
        self.encoding_errors = encoding_errors
        # When True, decode() converts bytes responses back to str.
        self.decode_responses = decode_responses

    def encode(self, value):
        """Return a bytes-like representation of ``value``.

        bytes/memoryview pass through untouched; int/float are serialized
        via repr(); str is encoded with the configured charset. Anything
        else (including bool) raises DataError.
        """
        if isinstance(value, (bytes, memoryview)):
            return value
        # bool must be rejected explicitly: it is an int subclass and would
        # otherwise be serialized as b"True"/b"False" via repr().
        if isinstance(value, bool):
            raise DataError(
                "Invalid input of type: 'bool'. Convert to a "
                "bytes, string, int or float first."
            )
        if isinstance(value, (int, float)):
            return repr(value).encode()
        if not isinstance(value, str):
            typename = type(value).__name__
            raise DataError(
                f"Invalid input of type: '{typename}'. "
                f"Convert to a bytes, string, int or float first."
            )
        return value.encode(self.encoding, self.encoding_errors)

    def decode(self, value, force=False):
        """Decode a bytes-like response to str when configured (or forced)."""
        if not (self.decode_responses or force):
            return value
        if isinstance(value, memoryview):
            value = value.tobytes()
        if isinstance(value, bytes):
            value = value.decode(self.encoding, self.encoding_errors)
        return value
45 |
--------------------------------------------------------------------------------
/redis/asyncio/__init__.py:
--------------------------------------------------------------------------------
1 | from redis.asyncio.client import Redis, StrictRedis
2 | from redis.asyncio.cluster import RedisCluster
3 | from redis.asyncio.connection import (
4 | BlockingConnectionPool,
5 | Connection,
6 | ConnectionPool,
7 | SSLConnection,
8 | UnixDomainSocketConnection,
9 | )
10 | from redis.asyncio.sentinel import (
11 | Sentinel,
12 | SentinelConnectionPool,
13 | SentinelManagedConnection,
14 | SentinelManagedSSLConnection,
15 | )
16 | from redis.asyncio.utils import from_url
17 | from redis.backoff import default_backoff
18 | from redis.exceptions import (
19 | AuthenticationError,
20 | AuthenticationWrongNumberOfArgsError,
21 | BusyLoadingError,
22 | ChildDeadlockedError,
23 | ConnectionError,
24 | DataError,
25 | InvalidResponse,
26 | OutOfMemoryError,
27 | PubSubError,
28 | ReadOnlyError,
29 | RedisError,
30 | ResponseError,
31 | TimeoutError,
32 | WatchError,
33 | )
34 |
35 | __all__ = [
36 | "AuthenticationError",
37 | "AuthenticationWrongNumberOfArgsError",
38 | "BlockingConnectionPool",
39 | "BusyLoadingError",
40 | "ChildDeadlockedError",
41 | "Connection",
42 | "ConnectionError",
43 | "ConnectionPool",
44 | "DataError",
45 | "from_url",
46 | "default_backoff",
47 | "InvalidResponse",
48 | "PubSubError",
49 | "OutOfMemoryError",
50 | "ReadOnlyError",
51 | "Redis",
52 | "RedisCluster",
53 | "RedisError",
54 | "ResponseError",
55 | "Sentinel",
56 | "SentinelConnectionPool",
57 | "SentinelManagedConnection",
58 | "SentinelManagedSSLConnection",
59 | "SSLConnection",
60 | "StrictRedis",
61 | "TimeoutError",
62 | "UnixDomainSocketConnection",
63 | "WatchError",
64 | ]
65 |
--------------------------------------------------------------------------------
/redis/asyncio/retry.py:
--------------------------------------------------------------------------------
1 | from asyncio import sleep
2 | from typing import TYPE_CHECKING, Any, Awaitable, Callable, Tuple, Type, TypeVar
3 |
4 | from redis.exceptions import ConnectionError, RedisError, TimeoutError
5 |
6 | if TYPE_CHECKING:
7 | from redis.backoff import AbstractBackoff
8 |
9 |
10 | T = TypeVar("T")
11 |
12 |
class Retry:
    """Await an operation repeatedly until it succeeds or retries run out."""

    __slots__ = "_backoff", "_retries", "_supported_errors"

    def __init__(
        self,
        backoff: "AbstractBackoff",
        retries: int,
        supported_errors: Tuple[Type[RedisError], ...] = (
            ConnectionError,
            TimeoutError,
        ),
    ):
        """
        Create a retry helper.

        `backoff` computes the wait between attempts, `retries` caps the
        number of re-attempts (a negative value retries forever), and
        `supported_errors` lists the exception types that trigger a retry.
        """
        self._backoff = backoff
        self._retries = retries
        self._supported_errors = supported_errors

    def update_supported_errors(self, specified_errors: list):
        """Merge `specified_errors` into the retryable exception types."""
        combined = set(self._supported_errors) | set(specified_errors)
        self._supported_errors = tuple(combined)

    def get_retries(self) -> int:
        """Return the configured maximum number of retries."""
        return self._retries

    def update_retries(self, value: int) -> None:
        """Replace the configured maximum number of retries."""
        self._retries = value

    async def call_with_retry(
        self, do: Callable[[], Awaitable[T]], fail: Callable[[RedisError], Any]
    ) -> T:
        """
        Await `do()` until it returns, retrying on supported errors.

        `fail` is awaited with the error after every failed attempt; once
        the attempt count exceeds the retry budget the last error is
        re-raised. The wait between attempts comes from the backoff object.
        """
        self._backoff.reset()
        attempt = 0
        while True:
            try:
                return await do()
            except self._supported_errors as error:
                attempt += 1
                # The failure handler always runs, even on the final attempt.
                await fail(error)
                if 0 <= self._retries < attempt:
                    raise error
                delay = self._backoff.compute(attempt)
                if delay > 0:
                    await sleep(delay)
80 |
--------------------------------------------------------------------------------
/redis/asyncio/utils.py:
--------------------------------------------------------------------------------
1 | from typing import TYPE_CHECKING
2 |
3 | if TYPE_CHECKING:
4 | from redis.asyncio.client import Pipeline, Redis
5 |
6 |
def from_url(url, **kwargs):
    """
    Returns an active Redis client generated from the given database URL.

    Will attempt to extract the database id from the path url fragment, if
    none is provided.

    All keyword arguments are forwarded to ``Redis.from_url``.
    """
    # Imported lazily to avoid a circular import with redis.asyncio.client.
    from redis.asyncio.client import Redis

    return Redis.from_url(url, **kwargs)
17 |
18 |
class pipeline:  # noqa: N801
    """Async context manager that executes a pipeline on exit.

    NOTE(review): ``__aexit__`` ignores its exception arguments, so the
    queued commands are executed even when the body raised — confirm this
    is intended.
    """

    def __init__(self, redis_obj: "Redis"):
        # Buffer commands on a fresh pipeline created from the given client.
        self.p: "Pipeline" = redis_obj.pipeline()

    async def __aenter__(self) -> "Pipeline":
        return self.p

    async def __aexit__(self, exc_type, exc_value, traceback):
        # Flush the queued commands, then drop the reference.
        await self.p.execute()
        del self.p
29 |
--------------------------------------------------------------------------------
/redis/auth/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/redis/auth/__init__.py
--------------------------------------------------------------------------------
/redis/auth/err.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable
2 |
3 |
class RequestTokenErr(Exception):
    """
    Represents an exception during token request.
    """

    # No __init__ override needed: Exception.__init__ already stores *args.
11 |
12 |
class InvalidTokenSchemaErr(Exception):
    """
    Represents an exception related to invalid token schema.
    """

    def __init__(self, missing_fields: Iterable[str] = ()):
        # An immutable tuple default avoids the shared-mutable-default
        # pitfall of the previous `= []`.
        super().__init__(
            "Unexpected token schema. Following fields are missing: "
            + ", ".join(missing_fields)
        )
23 |
24 |
class TokenRenewalErr(Exception):
    """
    Represents an exception during token renewal process.
    """

    # No __init__ override needed: Exception.__init__ already stores *args.
32 |
--------------------------------------------------------------------------------
/redis/auth/idp.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 | from redis.auth.token import TokenInterface
4 |
5 | """
6 | This interface is the facade of an identity provider
7 | """
8 |
9 |
class IdentityProviderInterface(ABC):
    """
    Facade of an identity provider.

    Implementations receive a token from the identity provider; receiving
    a token only works when being authenticated.
    """

    @abstractmethod
    def request_token(self, force_refresh=False) -> TokenInterface:
        # NOTE(review): force_refresh presumably makes implementations bypass
        # any cached token — confirm against concrete providers.
        pass
19 |
20 |
class IdentityProviderConfigInterface(ABC):
    """
    Configuration class that provides a configured identity provider.
    """

    @abstractmethod
    def get_provider(self) -> IdentityProviderInterface:
        # Implementations build and return a ready-to-use provider instance.
        pass
29 |
--------------------------------------------------------------------------------
/redis/auth/token.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from datetime import datetime, timezone
3 |
4 | from redis.auth.err import InvalidTokenSchemaErr
5 |
6 |
class TokenInterface(ABC):
    """Abstract contract for authentication tokens.

    Implementations expose expiry information, claim lookup and the raw
    token value; the concrete classes below work in epoch milliseconds.
    """

    @abstractmethod
    def is_expired(self) -> bool:
        # True once the token's lifetime has elapsed.
        pass

    @abstractmethod
    def ttl(self) -> float:
        # Remaining time to live (milliseconds in the concrete classes).
        pass

    @abstractmethod
    def try_get(self, key: str) -> str:
        # Look up a single claim; concrete classes return None when absent.
        pass

    @abstractmethod
    def get_value(self) -> str:
        # The raw token string.
        pass

    @abstractmethod
    def get_expires_at_ms(self) -> float:
        # Expiry moment as Unix-epoch milliseconds.
        pass

    @abstractmethod
    def get_received_at_ms(self) -> float:
        # Receipt moment as Unix-epoch milliseconds.
        pass
31 |
32 |
class TokenResponse:
    """Wrap a token and expose its validity window in milliseconds."""

    def __init__(self, token: TokenInterface):
        self._token = token

    def get_token(self) -> TokenInterface:
        """Return the wrapped token."""
        return self._token

    def get_ttl_ms(self) -> float:
        """Return expiry time minus receipt time, in milliseconds."""
        token = self._token
        return token.get_expires_at_ms() - token.get_received_at_ms()
42 |
43 |
class SimpleToken(TokenInterface):
    """Token backed by plain values supplied at construction time."""

    def __init__(
        self, value: str, expires_at_ms: float, received_at_ms: float, claims: dict
    ) -> None:
        self.value = value
        self.expires_at = expires_at_ms
        self.received_at = received_at_ms
        self.claims = claims

    def ttl(self) -> float:
        """Milliseconds until expiry; -1 marks a token that never expires."""
        if self.expires_at == -1:
            return -1
        now_ms = datetime.now(timezone.utc).timestamp() * 1000
        return self.expires_at - now_ms

    def is_expired(self) -> bool:
        """A non-expiring token (-1) is never considered expired."""
        return False if self.expires_at == -1 else self.ttl() <= 0

    def try_get(self, key: str) -> str:
        """Return the claim stored under ``key``, or None when absent."""
        return self.claims.get(key)

    def get_value(self) -> str:
        """Return the raw token value."""
        return self.value

    def get_expires_at_ms(self) -> float:
        """Return the expiry moment in epoch milliseconds."""
        return self.expires_at

    def get_received_at_ms(self) -> float:
        """Return the receipt moment in epoch milliseconds."""
        return self.received_at
76 |
77 |
class JWToken(TokenInterface):
    """Token backed by a JWT string, decoded (without verification) via PyJWT."""

    # Claims that must be present in the decoded payload.
    REQUIRED_FIELDS = {"exp"}

    def __init__(self, token: str):
        # PyJWT is an optional dependency (the "jwt" extra); import lazily.
        try:
            import jwt
        except ImportError as ie:
            raise ImportError(
                f"The PyJWT library is required for {self.__class__.__name__}.",
            ) from ie
        self._value = token
        # NOTE(review): the signature is deliberately NOT verified here — the
        # payload is only inspected for expiry/claims, so this must not be
        # treated as proof of authenticity.
        self._decoded = jwt.decode(
            self._value,
            options={"verify_signature": False},
            algorithms=[jwt.get_unverified_header(self._value).get("alg")],
        )
        self._validate_token()

    def is_expired(self) -> bool:
        # An "exp" of -1 marks a non-expiring token.
        exp = self._decoded["exp"]
        if exp == -1:
            return False

        # "exp" is in seconds since epoch; compare in milliseconds.
        return (
            self._decoded["exp"] * 1000 <= datetime.now(timezone.utc).timestamp() * 1000
        )

    def ttl(self) -> float:
        # Remaining lifetime in milliseconds; -1 means "never expires".
        exp = self._decoded["exp"]
        if exp == -1:
            return -1

        return (
            self._decoded["exp"] * 1000 - datetime.now(timezone.utc).timestamp() * 1000
        )

    def try_get(self, key: str) -> str:
        # Returns None when the claim is absent from the payload.
        return self._decoded.get(key)

    def get_value(self) -> str:
        # The raw, encoded JWT string.
        return self._value

    def get_expires_at_ms(self) -> float:
        # "exp" is seconds since epoch; convert to milliseconds.
        return float(self._decoded["exp"] * 1000)

    def get_received_at_ms(self) -> float:
        # NOTE(review): receipt time is re-read from the clock on every call
        # rather than captured at construction — confirm this is intended.
        return datetime.now(timezone.utc).timestamp() * 1000

    def _validate_token(self):
        # Raise when any required claim is missing from the payload.
        actual_fields = {x for x in self._decoded.keys()}

        if len(self.REQUIRED_FIELDS - actual_fields) != 0:
            raise InvalidTokenSchemaErr(self.REQUIRED_FIELDS - actual_fields)
131 |
--------------------------------------------------------------------------------
/redis/commands/__init__.py:
--------------------------------------------------------------------------------
1 | from .cluster import READ_COMMANDS, AsyncRedisClusterCommands, RedisClusterCommands
2 | from .core import AsyncCoreCommands, CoreCommands
3 | from .helpers import list_or_args
4 | from .redismodules import AsyncRedisModuleCommands, RedisModuleCommands
5 | from .sentinel import AsyncSentinelCommands, SentinelCommands
6 |
7 | __all__ = [
8 | "AsyncCoreCommands",
9 | "AsyncRedisClusterCommands",
10 | "AsyncRedisModuleCommands",
11 | "AsyncSentinelCommands",
12 | "CoreCommands",
13 | "READ_COMMANDS",
14 | "RedisClusterCommands",
15 | "RedisModuleCommands",
16 | "SentinelCommands",
17 | "list_or_args",
18 | ]
19 |
--------------------------------------------------------------------------------
/redis/commands/bf/info.py:
--------------------------------------------------------------------------------
1 | from ..helpers import nativestr
2 |
3 |
class BFInfo:
    """Parsed reply of the ``BF.INFO`` command."""

    capacity = None
    size = None
    filterNum = None
    insertedNum = None
    expansionRate = None

    def __init__(self, args):
        """Build the info object from a flat name/value reply list."""
        data = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.capacity = data["Capacity"]
        self.size = data["Size"]
        self.filterNum = data["Number of filters"]
        self.insertedNum = data["Number of items inserted"]
        self.expansionRate = data["Expansion rate"]

    def get(self, item):
        """Like ``self[item]`` but returns None for unknown fields."""
        return getattr(self, item, None)

    def __getitem__(self, item):
        return getattr(self, item)
27 |
28 |
class CFInfo:
    """Parsed reply of the ``CF.INFO`` command."""

    size = None
    bucketNum = None
    filterNum = None
    insertedNum = None
    deletedNum = None
    bucketSize = None
    expansionRate = None
    maxIteration = None

    def __init__(self, args):
        """Build the info object from a flat name/value reply list."""
        data = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.size = data["Size"]
        self.bucketNum = data["Number of buckets"]
        self.filterNum = data["Number of filters"]
        self.insertedNum = data["Number of items inserted"]
        self.deletedNum = data["Number of items deleted"]
        self.bucketSize = data["Bucket size"]
        self.expansionRate = data["Expansion rate"]
        self.maxIteration = data["Max iterations"]

    def get(self, item):
        """Like ``self[item]`` but returns None for unknown fields."""
        return getattr(self, item, None)

    def __getitem__(self, item):
        return getattr(self, item)
58 |
59 |
class CMSInfo:
    """Parsed reply of the ``CMS.INFO`` command."""

    width = None
    depth = None
    count = None

    def __init__(self, args):
        """Build the info object from a flat name/value reply list."""
        response = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.width = response["width"]
        self.depth = response["depth"]
        self.count = response["count"]

    def get(self, item):
        """Return the named field, or None when it does not exist.

        Added for consistency with BFInfo/CFInfo/TDigestInfo, which all
        expose the same safe accessor.
        """
        try:
            return self.__getitem__(item)
        except AttributeError:
            return None

    def __getitem__(self, item):
        return getattr(self, item)
73 |
74 |
class TopKInfo:
    """Parsed reply of the ``TOPK.INFO`` command."""

    k = None
    width = None
    depth = None
    decay = None

    def __init__(self, args):
        """Build the info object from a flat name/value reply list."""
        response = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.k = response["k"]
        self.width = response["width"]
        self.depth = response["depth"]
        self.decay = response["decay"]

    def get(self, item):
        """Return the named field, or None when it does not exist.

        Added for consistency with BFInfo/CFInfo/TDigestInfo, which all
        expose the same safe accessor.
        """
        try:
            return self.__getitem__(item)
        except AttributeError:
            return None

    def __getitem__(self, item):
        return getattr(self, item)
90 |
91 |
class TDigestInfo:
    """Parsed reply of the ``TDIGEST.INFO`` command."""

    compression = None
    capacity = None
    merged_nodes = None
    unmerged_nodes = None
    merged_weight = None
    unmerged_weight = None
    total_compressions = None
    memory_usage = None

    def __init__(self, args):
        """Build the info object from a flat name/value reply list."""
        data = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.compression = data["Compression"]
        self.capacity = data["Capacity"]
        self.merged_nodes = data["Merged nodes"]
        self.unmerged_nodes = data["Unmerged nodes"]
        self.merged_weight = data["Merged weight"]
        self.unmerged_weight = data["Unmerged weight"]
        self.total_compressions = data["Total compressions"]
        self.memory_usage = data["Memory usage"]

    def get(self, item):
        """Like ``self[item]`` but returns None for unknown fields."""
        return getattr(self, item, None)

    def __getitem__(self, item):
        return getattr(self, item)
121 |
--------------------------------------------------------------------------------
/redis/commands/helpers.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import random
3 | import string
4 | from typing import List, Tuple
5 |
6 | import redis
7 | from redis.typing import KeysT, KeyT
8 |
9 |
def list_or_args(keys: KeysT, args: Tuple[KeyT, ...]) -> List[KeyT]:
    """Combine ``keys`` and ``args`` into a single flat list of keys.

    A bare (non-iterable) key is wrapped in a list; str/bytes are iterable
    but represent a single key, so they are wrapped too.
    """
    try:
        iter(keys)
    except TypeError:
        combined = [keys]
    else:
        # str/bytes iterate per character — treat them as one key.
        combined = [keys] if isinstance(keys, (bytes, str)) else list(keys)
    if args:
        combined.extend(args)
    return combined
25 |
26 |
def nativestr(x):
    """Decode bytes to str (utf-8, lossy); map the literal "null" to None."""
    value = x.decode("utf-8", "replace") if isinstance(x, bytes) else x
    if value == "null":
        return None
    return value
33 |
34 |
def delist(x):
    """Given a list of binaries, return the stringified version."""
    if x is None:
        return None
    return list(map(nativestr, x))
40 |
41 |
def parse_to_list(response):
    """Optimistically parse the response to a list.

    Each item is coerced in order of preference: int, then float, otherwise
    kept as its decoded string form. ``None`` items (and items nativestr
    cannot handle) stay ``None``. The special values "infinity",
    "-infinity" and "nan" are deliberately kept as strings instead of being
    converted to floats.
    """
    res = []

    special_values = {"infinity", "nan", "-infinity"}

    if response is None:
        return res

    for item in response:
        if item is None:
            res.append(None)
            continue
        try:
            item_str = nativestr(item)
        except TypeError:
            # Not a str/bytes-like value; treat as unparseable.
            res.append(None)
            continue

        if isinstance(item_str, str) and item_str.lower() in special_values:
            res.append(item_str)  # Keep as string
        else:
            try:
                # int()/float() are applied to the original item, which may
                # be str, bytes or already numeric.
                res.append(int(item))
            except ValueError:
                try:
                    res.append(float(item))
                except ValueError:
                    res.append(item_str)

    return res
73 |
74 |
def parse_list_to_dict(response):
    """Fold a flat ``[name, value, ...]`` reply into a dict.

    Nested lists are collected under the "Child iterators" key; scalar
    values are coerced to float when possible, otherwise kept as-is.
    """
    res = {}
    for i in range(0, len(response), 2):
        if isinstance(response[i], list):
            # setdefault guards the first nested list in an even position:
            # the previous code assumed the "Child iterators" key already
            # existed and raised KeyError otherwise.
            res.setdefault("Child iterators", []).append(
                parse_list_to_dict(response[i])
            )
            try:
                if isinstance(response[i + 1], list):
                    res.setdefault("Child iterators", []).append(
                        parse_list_to_dict(response[i + 1])
                    )
            except IndexError:
                pass
        elif isinstance(response[i + 1], list):
            res["Child iterators"] = [parse_list_to_dict(response[i + 1])]
        else:
            try:
                res[response[i]] = float(response[i + 1])
            except (TypeError, ValueError):
                res[response[i]] = response[i + 1]
    return res
93 |
94 |
def random_string(length=10):
    """Return a random string of ``length`` lowercase ASCII letters."""
    letters = (random.choice(string.ascii_lowercase) for _ in range(length))  # nosec
    return "".join(letters)
102 |
103 |
def decode_dict_keys(obj):
    """Return a copy of ``obj`` whose bytes keys are decoded as utf-8."""
    newobj = copy.copy(obj)
    for key in obj.keys():
        if isinstance(key, bytes):
            newobj[key.decode("utf-8")] = newobj.pop(key)
    return newobj
112 |
113 |
def get_protocol_version(client):
    """Return the RESP protocol version configured on ``client``, if known."""
    if isinstance(client, (redis.Redis, redis.asyncio.Redis)):
        return client.connection_pool.connection_kwargs.get("protocol")
    if isinstance(client, redis.cluster.AbstractRedisCluster):
        return client.nodes_manager.connection_kwargs.get("protocol")
    # Unknown client type: implicitly returns None.
119 |
--------------------------------------------------------------------------------
/redis/commands/json/_util.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List, Union
2 |
3 | JsonType = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
4 |
--------------------------------------------------------------------------------
/redis/commands/json/decoders.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import re
3 |
4 | from ..helpers import nativestr
5 |
6 |
def bulk_of_jsons(d):
    """Return a callback that maps decoder ``d`` over non-None bulk items."""

    def _decode_all(bulk):
        # Deserialize in place so the original list object is returned.
        for pos, raw in enumerate(bulk):
            if raw is not None:
                bulk[pos] = d(raw)
        return bulk

    return _decode_all
19 |
20 |
def decode_dict_keys(obj):
    """Decode the keys of the given dictionary with utf-8."""
    decoded = copy.copy(obj)
    for key in list(obj):
        if isinstance(key, bytes):
            value = decoded.pop(key)
            decoded[key.decode("utf-8")] = value
    return decoded
29 |
30 |
def unstring(obj):
    """
    Attempt to parse string to native integer formats.
    One can't simply call int/float in a try/catch because there is a
    semantic difference between (for example) 15.0 and 15.

    The decimal point is now escaped in the float pattern: the previous
    ``^\\d+.\\d+$`` let ``.`` match any character, so inputs such as "1x5"
    matched and then crashed float() with a ValueError.
    """
    floatreg = "^\\d+\\.\\d+$"
    match = re.findall(floatreg, obj)
    if match != []:
        return float(match[0])

    intreg = "^\\d+$"
    match = re.findall(intreg, obj)
    if match != []:
        return int(match[0])
    return obj
47 |
48 |
def decode_list(b):
    """
    Given a non-deserializable object, make a best effort to
    return a useful set of results.
    """
    if isinstance(b, list):
        return [nativestr(item) for item in b]
    if isinstance(b, bytes):
        return unstring(nativestr(b))
    if isinstance(b, str):
        return unstring(b)
    return b
61 |
--------------------------------------------------------------------------------
/redis/commands/json/path.py:
--------------------------------------------------------------------------------
class Path:
    """This class represents a path in a JSON value."""

    # Default string form; replaced per instance by __init__.
    strPath = ""

    @staticmethod
    def root_path():
        """Return the root path's string representation."""
        return "."

    def __init__(self, path):
        """Make a new path based on the string representation in `path`."""
        self.strPath = path

    def __repr__(self):
        # The repr IS the raw path string, e.g. "$.store.book".
        return self.strPath
17 |
--------------------------------------------------------------------------------
/redis/commands/redismodules.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from json import JSONDecoder, JSONEncoder
4 | from typing import TYPE_CHECKING
5 |
6 | if TYPE_CHECKING:
7 | from .bf import BFBloom, CFBloom, CMSBloom, TDigestBloom, TOPKBloom
8 | from .json import JSON
9 | from .search import AsyncSearch, Search
10 | from .timeseries import TimeSeries
11 | from .vectorset import VectorSet
12 |
13 |
class RedisModuleCommands:
    """This class contains the wrapper functions to bring supported redis
    modules into the command namespace.

    Each accessor imports its module lazily and returns a namespace object
    bound to this client.
    """

    def json(self, encoder=JSONEncoder(), decoder=JSONDecoder()) -> JSON:
        """Access the json namespace, providing support for redis json."""
        from .json import JSON

        return JSON(client=self, encoder=encoder, decoder=decoder)

    def ft(self, index_name="idx") -> Search:
        """Access the search namespace, providing support for redis search."""
        from .search import Search

        return Search(client=self, index_name=index_name)

    def ts(self) -> TimeSeries:
        """Access the timeseries namespace, providing support for
        redis timeseries data.
        """
        from .timeseries import TimeSeries

        return TimeSeries(client=self)

    def bf(self) -> BFBloom:
        """Access the BF (bloom filter) namespace."""
        from .bf import BFBloom

        return BFBloom(client=self)

    def cf(self) -> CFBloom:
        """Access the CF (cuckoo filter) namespace."""
        from .bf import CFBloom

        return CFBloom(client=self)

    def cms(self) -> CMSBloom:
        """Access the CMS (count-min sketch) namespace."""
        from .bf import CMSBloom

        return CMSBloom(client=self)

    def topk(self) -> TOPKBloom:
        """Access the TOPK namespace."""
        from .bf import TOPKBloom

        return TOPKBloom(client=self)

    def tdigest(self) -> TDigestBloom:
        """Access the TDIGEST namespace."""
        from .bf import TDigestBloom

        return TDigestBloom(client=self)

    def vset(self) -> VectorSet:
        """Access the VectorSet commands namespace."""
        from .vectorset import VectorSet

        return VectorSet(client=self)
92 |
93 |
class AsyncRedisModuleCommands(RedisModuleCommands):
    """Variant of the module-command mixin that returns async namespaces."""

    def ft(self, index_name="idx") -> AsyncSearch:
        """Access the search namespace, providing support for redis search."""
        from .search import AsyncSearch

        return AsyncSearch(client=self, index_name=index_name)
102 |
--------------------------------------------------------------------------------
/redis/commands/search/_util.py:
--------------------------------------------------------------------------------
def to_string(s, encoding: str = "utf-8"):
    """Decode bytes to str; return any other value (including str) unchanged."""
    if isinstance(s, bytes):
        return s.decode(encoding, "ignore")
    return s  # Not a string we care about
8 |
--------------------------------------------------------------------------------
/redis/commands/search/dialect.py:
--------------------------------------------------------------------------------
1 | # Value for the default dialect to be used as a part of
2 | # Search or Aggregate query.
3 | DEFAULT_DIALECT = 2
4 |
--------------------------------------------------------------------------------
/redis/commands/search/document.py:
--------------------------------------------------------------------------------
class Document:
    """
    Represents a single document in a result set
    """

    def __init__(self, id, payload=None, **fields):
        self.id = id
        self.payload = payload
        # Expose every result field as an attribute on the document.
        for name, value in fields.items():
            setattr(self, name, value)

    def __repr__(self):
        return f"Document {self.__dict__}"

    def __getitem__(self, item):
        # Dict-style access delegates to attribute lookup.
        return getattr(self, item)
18 |
--------------------------------------------------------------------------------
/redis/commands/search/index_definition.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
class IndexType(Enum):
    """Enum of the currently supported index types."""

    HASH = 1
    JSON = 2


class IndexDefinition:
    """IndexDefinition is used to define an index definition for automatic
    indexing on Hash or Json update.

    The constructor builds the ``FT.CREATE`` definition arguments into
    ``self.args`` in the order the server expects them.
    """

    def __init__(
        self,
        prefix=None,
        filter=None,
        language_field=None,
        language=None,
        score_field=None,
        score=1.0,
        payload_field=None,
        index_type=None,
    ):
        """
        - prefix: iterable of key prefixes to index (defaults to none).
          Note: the previous signature used a mutable default (``[]``);
          ``None`` is now normalized to an empty list, preserving behavior.
        - filter: server-side filter expression
        - language_field / language: stemming configuration
        - score_field / score: document score configuration (score defaults
          to 1.0 and is always emitted unless explicitly set to None)
        - payload_field: field holding a binary payload
        - index_type: an IndexType member, or None to use the server default

        Raises RuntimeError if ``index_type`` is neither None nor a member
        of IndexType.
        """
        self.args = []
        self._append_index_type(index_type)
        # Normalize None to an empty prefix list (avoids a shared mutable
        # default argument).
        self._append_prefix(prefix if prefix is not None else [])
        self._append_filter(filter)
        self._append_language(language_field, language)
        self._append_score(score_field, score)
        self._append_payload(payload_field)

    def _append_index_type(self, index_type):
        """Append `ON HASH` or `ON JSON` according to the enum."""
        if index_type is IndexType.HASH:
            self.args.extend(["ON", "HASH"])
        elif index_type is IndexType.JSON:
            self.args.extend(["ON", "JSON"])
        elif index_type is not None:
            raise RuntimeError(f"index_type must be one of {list(IndexType)}")

    def _append_prefix(self, prefix):
        """Append PREFIX followed by the count and each prefix value."""
        if len(prefix) > 0:
            self.args.append("PREFIX")
            self.args.append(len(prefix))
            for p in prefix:
                self.args.append(p)

    def _append_filter(self, filter):
        """Append FILTER."""
        if filter is not None:
            self.args.append("FILTER")
            self.args.append(filter)

    def _append_language(self, language_field, language):
        """Append LANGUAGE_FIELD and LANGUAGE."""
        if language_field is not None:
            self.args.append("LANGUAGE_FIELD")
            self.args.append(language_field)
        if language is not None:
            self.args.append("LANGUAGE")
            self.args.append(language)

    def _append_score(self, score_field, score):
        """Append SCORE_FIELD and SCORE."""
        if score_field is not None:
            self.args.append("SCORE_FIELD")
            self.args.append(score_field)
        if score is not None:
            self.args.append("SCORE")
            self.args.append(score)

    def _append_payload(self, payload_field):
        """Append PAYLOAD_FIELD."""
        if payload_field is not None:
            self.args.append("PAYLOAD_FIELD")
            self.args.append(payload_field)
80 |
--------------------------------------------------------------------------------
/redis/commands/search/profile_information.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 |
class ProfileInformation:
    """
    Read-only wrapper around an FT.PROFILE response payload.
    """

    def __init__(self, info: Any) -> None:
        # Keep the raw response untouched; expose it via the read-only
        # ``info`` property.
        self._info: Any = info

    @property
    def info(self) -> Any:
        return self._info
15 |
--------------------------------------------------------------------------------
/redis/commands/search/result.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from ._util import to_string
4 | from .document import Document
5 |
6 |
class Result:
    """
    Represents the result of a search query, and has an array of Document
    objects
    """

    def __init__(
        self,
        res,
        hascontent,
        duration=0,
        has_payload=False,
        with_scores=False,
        field_encodings: Optional[dict] = None,
    ):
        """
        - res: raw search reply: ``[total, id1, (score1), (payload1),
          (fields1), id2, ...]`` — the optional entries depend on the flags
        - hascontent: whether each hit carries a field/value array
        - duration: the execution time of the query
        - has_payload: whether the query has payloads
        - with_scores: whether the query has scores
        - field_encodings: a dictionary of field encodings if any is provided
        """

        # First reply element is always the total number of matches.
        self.total = res[0]
        self.duration = duration
        self.docs = []

        # Each hit spans `step` consecutive reply entries: the id plus one
        # entry per enabled component (content / payload / score).
        step = 1
        if hascontent:
            step = step + 1
        if has_payload:
            step = step + 1
        if with_scores:
            step = step + 1

        # Offset of the payload relative to the id: the score (when present)
        # sits immediately after the id and pushes the payload one further.
        offset = 2 if with_scores else 1

        for i in range(1, len(res), step):
            id = to_string(res[i])
            payload = to_string(res[i + offset]) if has_payload else None
            # Fields come after the payload when one is present.
            fields_offset = offset + 1 if has_payload else offset
            score = float(res[i + 1]) if with_scores else None

            fields = {}
            if hascontent and res[i + fields_offset] is not None:
                # The field array is flat: [name, value, name, value, ...]
                keys = map(to_string, res[i + fields_offset][::2])
                values = res[i + fields_offset][1::2]

                for key, value in zip(keys, values):
                    if field_encodings is None or key not in field_encodings:
                        fields[key] = to_string(value)
                        continue

                    encoding = field_encodings[key]

                    # If the encoding is None, we don't need to decode the value
                    if encoding is None:
                        fields[key] = value
                    else:
                        fields[key] = to_string(value, encoding=encoding)

            # Drop a field literally named "id" so it cannot clobber the
            # Document's own id attribute.
            try:
                del fields["id"]
            except KeyError:
                pass

            # JSON hits come back under the "$" root path; expose them under
            # the friendlier "json" key instead.
            try:
                fields["json"] = fields["$"]
                del fields["$"]
            except KeyError:
                pass

            doc = (
                Document(id, score=score, payload=payload, **fields)
                if with_scores
                else Document(id, payload=payload, **fields)
            )
            self.docs.append(doc)

    def __repr__(self) -> str:
        return f"Result{{{self.total} total, docs: {self.docs}}}"
88 |
--------------------------------------------------------------------------------
/redis/commands/search/suggestion.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from ._util import to_string
4 |
5 |
class Suggestion:
    """
    A single autocomplete suggestion, as sent to or returned from the
    autocomplete server.
    """

    def __init__(
        self, string: str, score: float = 1.0, payload: Optional[str] = None
    ) -> None:
        self.score = score
        # Normalize bytes replies to str; non-strings pass through as-is.
        self.string = to_string(string)
        self.payload = to_string(payload)

    def __repr__(self) -> str:
        return self.string
21 |
22 |
class SuggestionParser:
    """
    Internal helper that walks the flat array returned by the `SUGGET`
    command. Depending on the WITHSCORES / WITHPAYLOADS flags, each
    suggestion spans 1, 2, or 3 consecutive entries of the raw reply.
    """

    def __init__(self, with_scores: bool, with_payloads: bool, ret) -> None:
        self.with_scores = with_scores
        self.with_payloads = with_payloads

        # sugsize is the stride between suggestions; the *idx members are
        # the offsets of score/payload relative to the suggestion string.
        if with_scores and with_payloads:
            self.sugsize = 3
            self._scoreidx = 1
            self._payloadidx = 2
        elif with_scores:
            self.sugsize = 2
            self._scoreidx = 1
        elif with_payloads:
            self.sugsize = 2
            self._payloadidx = 1
        else:
            self.sugsize = 1
            self._scoreidx = -1

        self._sugs = ret

    def __iter__(self):
        for start in range(0, len(self._sugs), self.sugsize):
            text = self._sugs[start]
            if self.with_scores:
                score = float(self._sugs[start + self._scoreidx])
            else:
                score = 1.0
            if self.with_payloads:
                payload = self._sugs[start + self._payloadidx]
            else:
                payload = None
            yield Suggestion(text, score, payload)
56 |
--------------------------------------------------------------------------------
/redis/commands/timeseries/__init__.py:
--------------------------------------------------------------------------------
1 | import redis
2 | from redis._parsers.helpers import bool_ok
3 |
4 | from ..helpers import get_protocol_version, parse_to_list
5 | from .commands import (
6 | ALTER_CMD,
7 | CREATE_CMD,
8 | CREATERULE_CMD,
9 | DEL_CMD,
10 | DELETERULE_CMD,
11 | GET_CMD,
12 | INFO_CMD,
13 | MGET_CMD,
14 | MRANGE_CMD,
15 | MREVRANGE_CMD,
16 | QUERYINDEX_CMD,
17 | RANGE_CMD,
18 | REVRANGE_CMD,
19 | TimeSeriesCommands,
20 | )
21 | from .info import TSInfo
22 | from .utils import parse_get, parse_m_get, parse_m_range, parse_range
23 |
24 |
class TimeSeries(TimeSeriesCommands):
    """
    This class subclasses redis-py's `Redis` and implements RedisTimeSeries's
    commands (prefixed with "ts").
    The client allows to interact with RedisTimeSeries and use all of its
    functionality.
    """

    def __init__(self, client=None, **kwargs):
        """Create a new RedisTimeSeries client.

        Registers the module's response callbacks on ``client``; which
        parsing callbacks are applied depends on the negotiated RESP
        protocol version.
        """
        # Set the module commands' callbacks
        self._MODULE_CALLBACKS = {
            ALTER_CMD: bool_ok,
            CREATE_CMD: bool_ok,
            CREATERULE_CMD: bool_ok,
            DELETERULE_CMD: bool_ok,
        }

        # RESP2 replies need explicit client-side parsing; no extra
        # callbacks are registered for RESP3.
        _RESP2_MODULE_CALLBACKS = {
            DEL_CMD: int,
            GET_CMD: parse_get,
            INFO_CMD: TSInfo,
            MGET_CMD: parse_m_get,
            MRANGE_CMD: parse_m_range,
            MREVRANGE_CMD: parse_m_range,
            RANGE_CMD: parse_range,
            REVRANGE_CMD: parse_range,
            QUERYINDEX_CMD: parse_to_list,
        }
        _RESP3_MODULE_CALLBACKS = {}

        self.client = client
        # Delegate command execution to the wrapped client.
        self.execute_command = client.execute_command

        if get_protocol_version(self.client) in ["3", 3]:
            self._MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS)
        else:
            self._MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS)

        for k, v in self._MODULE_CALLBACKS.items():
            self.client.set_response_callback(k, v)

    def pipeline(self, transaction=True, shard_hint=None):
        """Creates a pipeline for the TimeSeries module, that can be used
        for executing only TimeSeries commands and core commands.

        Usage example:

            r = redis.Redis()
            pipe = r.ts().pipeline()
            for i in range(100):
                pipe.add("with_pipeline", i, 1.1 * i)
            pipe.execute()

        """
        if isinstance(self.client, redis.RedisCluster):
            # Mirror the wrapped cluster client's configuration so the
            # pipeline routes and retries commands the same way.
            p = ClusterPipeline(
                nodes_manager=self.client.nodes_manager,
                commands_parser=self.client.commands_parser,
                startup_nodes=self.client.nodes_manager.startup_nodes,
                result_callbacks=self.client.result_callbacks,
                cluster_response_callbacks=self.client.cluster_response_callbacks,
                cluster_error_retry_attempts=self.client.retry.get_retries(),
                read_from_replicas=self.client.read_from_replicas,
                reinitialize_steps=self.client.reinitialize_steps,
                lock=self.client._lock,
            )

        else:
            p = Pipeline(
                connection_pool=self.client.connection_pool,
                response_callbacks=self._MODULE_CALLBACKS,
                transaction=transaction,
                shard_hint=shard_hint,
            )
        return p
101 |
102 |
class ClusterPipeline(TimeSeriesCommands, redis.cluster.ClusterPipeline):
    """Cluster pipeline that mixes TimeSeries commands into redis-py's
    cluster pipeline."""
105 |
106 |
class Pipeline(TimeSeriesCommands, redis.client.Pipeline):
    """Standalone pipeline that mixes TimeSeries commands into redis-py's
    pipeline."""
109 |
--------------------------------------------------------------------------------
/redis/commands/timeseries/info.py:
--------------------------------------------------------------------------------
1 | from ..helpers import nativestr
2 | from .utils import list_to_dict
3 |
4 |
class TSInfo:
    """
    Hold information and statistics on the time-series.
    Can be created using ``tsinfo`` command
    https://redis.io/docs/latest/commands/ts.info/
    """

    # Class-level fallbacks; __init__ overwrites most of them per instance.
    rules = []
    labels = []
    # NOTE(review): __init__ assigns `self.source_key` (snake_case), so this
    # camelCase attribute only ever serves as the None fallback — confirm
    # whether any caller still reads `sourceKey` before removing it.
    sourceKey = None
    chunk_count = None
    memory_usage = None
    total_samples = None
    retention_msecs = None
    # NOTE(review): __init__ sets `last_timestamp` / `first_timestamp`;
    # these `*_time_stamp` spellings remain None-only fallbacks — verify.
    last_time_stamp = None
    first_time_stamp = None

    max_samples_per_chunk = None
    chunk_size = None
    duplicate_policy = None

    def __init__(self, args):
        """
        Hold information and statistics on the time-series.

        The supported params that can be passed as args:

        rules:
            A list of compaction rules of the time series.
        sourceKey:
            Key name for source time series in case the current series
            is a target of a rule.
        chunkCount:
            Number of Memory Chunks used for the time series.
        memoryUsage:
            Total number of bytes allocated for the time series.
        totalSamples:
            Total number of samples in the time series.
        labels:
            A list of label-value pairs that represent the metadata
            labels of the time series.
        retentionTime:
            Retention time, in milliseconds, for the time series.
        lastTimestamp:
            Last timestamp present in the time series.
        firstTimestamp:
            First timestamp present in the time series.
        maxSamplesPerChunk:
            Deprecated.
        chunkSize:
            Amount of memory, in bytes, allocated for data.
        duplicatePolicy:
            Policy that will define handling of duplicate samples.

        Can read more about on
        https://redis.io/docs/latest/develop/data-types/timeseries/configuration/#duplicate_policy
        """
        # TS.INFO returns a flat [name, value, ...] array; fold it into a dict.
        response = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.rules = response.get("rules")
        self.source_key = response.get("sourceKey")
        self.chunk_count = response.get("chunkCount")
        self.memory_usage = response.get("memoryUsage")
        self.total_samples = response.get("totalSamples")
        self.labels = list_to_dict(response.get("labels"))
        self.retention_msecs = response.get("retentionTime")
        self.last_timestamp = response.get("lastTimestamp")
        self.first_timestamp = response.get("firstTimestamp")
        if "maxSamplesPerChunk" in response:
            self.max_samples_per_chunk = response["maxSamplesPerChunk"]
            self.chunk_size = (
                self.max_samples_per_chunk * 16
            )  # backward compatible changes
        if "chunkSize" in response:
            self.chunk_size = response["chunkSize"]
        if "duplicatePolicy" in response:
            self.duplicate_policy = response["duplicatePolicy"]
            # Replies may deliver the policy as bytes; normalize to str.
            if isinstance(self.duplicate_policy, bytes):
                self.duplicate_policy = self.duplicate_policy.decode()

    def get(self, item):
        """Return the attribute named ``item``, or None when absent."""
        try:
            return self.__getitem__(item)
        except AttributeError:
            return None

    def __getitem__(self, item):
        # Dict-style access is a thin alias for attribute access.
        return getattr(self, item)
92 |
--------------------------------------------------------------------------------
/redis/commands/timeseries/utils.py:
--------------------------------------------------------------------------------
1 | from ..helpers import nativestr
2 |
3 |
def list_to_dict(aList):
    """Convert a list of ``[key, value]`` pairs into a dict of native strings."""
    return {nativestr(pair[0]): nativestr(pair[1]) for pair in aList}
6 |
7 |
def parse_range(response, **kwargs):
    """Parse range response. Used by TS.RANGE and TS.REVRANGE.

    Each sample arrives as ``[timestamp, value]``; normalize it to a
    ``(timestamp, float(value))`` tuple.
    """
    return [(sample[0], float(sample[1])) for sample in response]
11 |
12 |
def parse_m_range(response):
    """Parse multi range response. Used by TS.MRANGE and TS.MREVRANGE.

    Each item is ``[key, labels, samples]``; the result is sorted by key.
    """
    parsed = [
        {nativestr(item[0]): [list_to_dict(item[1]), parse_range(item[2])]}
        for item in response
    ]
    return sorted(parsed, key=lambda d: list(d.keys()))
19 |
20 |
def parse_get(response):
    """Parse get response. Used by TS.GET.

    Returns ``None`` for an empty reply, otherwise ``(timestamp, value)``.
    """
    if response:
        return int(response[0]), float(response[1])
    return None
26 |
27 |
def parse_m_get(response):
    """Parse multi get response. Used by TS.MGET.

    Each item is ``[key, labels, sample]`` where ``sample`` is either empty
    or ``[timestamp, value]``; the result is sorted by key.
    """
    res = []
    for item in response:
        key = nativestr(item[0])
        labels = list_to_dict(item[1])
        sample = item[2]
        if sample:
            res.append({key: [labels, int(sample[0]), float(sample[1])]})
        else:
            # No sample for this key: keep placeholders for ts/value.
            res.append({key: [labels, None, None]})
    return sorted(res, key=lambda d: list(d.keys()))
45 |
--------------------------------------------------------------------------------
/redis/commands/vectorset/__init__.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from redis._parsers.helpers import pairs_to_dict
4 | from redis.commands.vectorset.utils import (
5 | parse_vemb_result,
6 | parse_vlinks_result,
7 | parse_vsim_result,
8 | )
9 |
10 | from ..helpers import get_protocol_version
11 | from .commands import (
12 | VEMB_CMD,
13 | VGETATTR_CMD,
14 | VINFO_CMD,
15 | VLINKS_CMD,
16 | VSIM_CMD,
17 | VectorSetCommands,
18 | )
19 |
20 |
class VectorSet(VectorSetCommands):
    def __init__(self, client, **kwargs):
        """Create a new VectorSet client.

        Registers the module's response callbacks on ``client``; which
        callbacks apply depends on the negotiated RESP protocol version.
        """
        # Set the module commands' callbacks
        self._MODULE_CALLBACKS = {
            VEMB_CMD: parse_vemb_result,
            # NOTE(review): the `r and ... or None` idiom also maps any
            # falsy parsed value (e.g. an empty JSON object) to None —
            # presumably intentional; confirm before "simplifying".
            VGETATTR_CMD: lambda r: r and json.loads(r) or None,
        }

        # RESP2 replies need client-side parsing; no extra callbacks are
        # registered for RESP3.
        self._RESP2_MODULE_CALLBACKS = {
            VINFO_CMD: lambda r: r and pairs_to_dict(r) or None,
            VSIM_CMD: parse_vsim_result,
            VLINKS_CMD: parse_vlinks_result,
        }
        self._RESP3_MODULE_CALLBACKS = {}

        self.client = client
        # Delegate command execution to the wrapped client.
        self.execute_command = client.execute_command

        if get_protocol_version(self.client) in ["3", 3]:
            self._MODULE_CALLBACKS.update(self._RESP3_MODULE_CALLBACKS)
        else:
            self._MODULE_CALLBACKS.update(self._RESP2_MODULE_CALLBACKS)

        for k, v in self._MODULE_CALLBACKS.items():
            self.client.set_response_callback(k, v)
47 |
--------------------------------------------------------------------------------
/redis/commands/vectorset/utils.py:
--------------------------------------------------------------------------------
1 | from redis._parsers.helpers import pairs_to_dict
2 | from redis.commands.vectorset.commands import CallbacksOptions
3 |
4 |
def parse_vemb_result(response, **options):
    """
    Handle VEMB result since the command can returning different result
    structures depending on input options and on quantization type of the vector set.

    Parsing VEMB result into:
    - List[Union[bytes, Union[int, float]]]
    - Dict[str, Union[bytes, str, float]]
    """
    if response is None:
        return response

    if options.get(CallbacksOptions.RAW.value):
        # RAW form: [quantization, raw-blob, l2-norm, (range)]
        quantization = response[0]
        if options.get(CallbacksOptions.ALLOW_DECODING.value):
            quantization = quantization.decode("utf-8")
        result = {
            "quantization": quantization,
            "raw": response[1],
            "l2": float(response[2]),
        }
        if len(response) > 3:
            result["range"] = float(response[3])
        return result

    # RESP3 already delivers typed values; pass the reply through untouched.
    if options.get(CallbacksOptions.RESP3.value):
        return response

    # RESP2: numbers arrive as strings — prefer int, fall back to float.
    parsed = []
    for raw in response:
        try:
            parsed.append(int(raw))
        except ValueError:
            # if the value is not an integer, it should be a float
            parsed.append(float(raw))
    return parsed
42 |
43 |
def parse_vlinks_result(response, **options):
    """
    Handle VLINKS result since the command can be returning different result
    structures depending on input options.
    Parsing VLINKS result into:
    - List[List[str]]
    - List[Dict[str, Number]]
    """
    if response is None:
        return response

    if not options.get(CallbacksOptions.WITHSCORES.value):
        # Without scores: one plain list of elements per level.
        return response

    # With scores each level is a flat [elem, score, ...] array; build
    # one {elem: float(score)} dict per level.
    return [
        {elem: float(score) for elem, score in pairs_to_dict(level_item).items()}
        for level_item in response
    ]
70 |
71 |
def parse_vsim_result(response, **options):
    """
    Handle VSIM result since the command can be returning different result
    structures depending on input options.
    Parsing VSIM result into:
    - List[List[str]]
    - List[Dict[str, Number]]
    """
    if response is None:
        return response

    if not options.get(CallbacksOptions.WITHSCORES.value):
        # Without scores: plain list of similar elements.
        return response

    # With scores the reply is a flat [elem, score, ...] array; turn it
    # into a {elem: float(score)} dict.
    return {
        elem: float(score) for elem, score in pairs_to_dict(response).items()
    }
95 |
--------------------------------------------------------------------------------
/redis/crc.py:
--------------------------------------------------------------------------------
1 | from binascii import crc_hqx
2 |
3 | from redis.typing import EncodedT
4 |
5 | # Redis Cluster's key space is divided into 16384 slots.
6 | # For more information see: https://github.com/redis/redis/issues/2576
7 | REDIS_CLUSTER_HASH_SLOTS = 16384
8 |
9 | __all__ = ["key_slot", "REDIS_CLUSTER_HASH_SLOTS"]
10 |
11 |
def key_slot(key: EncodedT, bucket: int = REDIS_CLUSTER_HASH_SLOTS) -> int:
    """Calculate the cluster hash slot for ``key``.

    Implements the hash-tag rule: when the key contains a non-empty
    ``{...}`` section, only that section is hashed.
    See Keys distribution model in https://redis.io/topics/cluster-spec
    :param key - bytes
    :param bucket - int
    """
    open_brace = key.find(b"{")
    if open_brace != -1:
        close_brace = key.find(b"}", open_brace + 1)
        # Only a non-empty tag ("{x}") narrows the hashed portion; "{}"
        # and an unmatched "{" hash the whole key.
        if close_brace > open_brace + 1:
            key = key[open_brace + 1 : close_brace]
    return crc_hqx(key, 0) % bucket
24 |
--------------------------------------------------------------------------------
/redis/credentials.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from abc import ABC, abstractmethod
3 | from typing import Any, Callable, Optional, Tuple, Union
4 |
5 | logger = logging.getLogger(__name__)
6 |
7 |
class CredentialProvider:
    """
    Credentials Provider.

    Implementations return either ``(password,)`` or ``(username, password)``.
    """

    def get_credentials(self) -> Union[Tuple[str], Tuple[str, str]]:
        raise NotImplementedError("get_credentials must be implemented")

    async def get_credentials_async(self) -> Union[Tuple[str], Tuple[str, str]]:
        # Default async implementation just delegates to the sync one.
        # Fixed typo in the warning text ("compatability" -> "compatibility").
        logger.warning(
            "This method is added for backward compatibility. "
            "Please override it in your implementation."
        )
        return self.get_credentials()


class StreamingCredentialProvider(CredentialProvider, ABC):
    """
    Credential provider that streams credentials in the background.
    """

    @abstractmethod
    def on_next(self, callback: Callable[[Any], None]):
        """
        Specifies the callback that should be invoked
        when the next credentials will be retrieved.

        :param callback: Callback invoked with the freshly retrieved credentials.
        :return:
        """
        pass

    @abstractmethod
    def on_error(self, callback: Callable[[Exception], None]):
        """Register a callback invoked when credential retrieval fails."""
        pass

    @abstractmethod
    def is_streaming(self) -> bool:
        """Return True if this provider streams credential updates."""
        pass


class UsernamePasswordCredentialProvider(CredentialProvider):
    """
    Simple implementation of CredentialProvider that just wraps static
    username and password.
    """

    def __init__(self, username: Optional[str] = None, password: Optional[str] = None):
        self.username = username or ""
        self.password = password or ""

    def get_credentials(self):
        # AUTH with a username needs both values; password-only AUTH uses
        # the single-element form.
        if self.username:
            return self.username, self.password
        return (self.password,)

    async def get_credentials_async(self) -> Union[Tuple[str], Tuple[str, str]]:
        return self.get_credentials()
66 |
--------------------------------------------------------------------------------
/redis/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/redis/py.typed
--------------------------------------------------------------------------------
/redis/retry.py:
--------------------------------------------------------------------------------
1 | import socket
2 | from time import sleep
3 | from typing import TYPE_CHECKING, Any, Callable, Iterable, Tuple, Type, TypeVar
4 |
5 | from redis.exceptions import ConnectionError, TimeoutError
6 |
7 | T = TypeVar("T")
8 |
9 | if TYPE_CHECKING:
10 | from redis.backoff import AbstractBackoff
11 |
12 |
class Retry:
    """Retry a specific number of times after a failure"""

    def __init__(
        self,
        backoff: "AbstractBackoff",
        retries: int,
        supported_errors: Tuple[Type[Exception], ...] = (
            ConnectionError,
            TimeoutError,
            socket.timeout,
        ),
    ):
        """
        Initialize a `Retry` object with a `Backoff` object
        that retries a maximum of `retries` times.
        `retries` can be negative to retry forever.
        You can specify the types of supported errors which trigger
        a retry with the `supported_errors` parameter.
        """
        self._backoff = backoff
        self._retries = retries
        self._supported_errors = supported_errors

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Retry):
            return NotImplemented
        # Error-type order is irrelevant, so compare as sets.
        return (
            self._backoff == other._backoff
            and self._retries == other._retries
            and set(self._supported_errors) == set(other._supported_errors)
        )

    def __hash__(self) -> int:
        return hash((self._backoff, self._retries, frozenset(self._supported_errors)))

    def update_supported_errors(
        self, specified_errors: Iterable[Type[Exception]]
    ) -> None:
        """
        Updates the supported errors with the specified error types
        """
        merged = set(self._supported_errors) | set(specified_errors)
        self._supported_errors = tuple(merged)

    def get_retries(self) -> int:
        """
        Get the number of retries.
        """
        return self._retries

    def update_retries(self, value: int) -> None:
        """
        Set the number of retries.
        """
        self._retries = value

    def call_with_retry(
        self,
        do: Callable[[], T],
        fail: Callable[[Exception], Any],
    ) -> T:
        """
        Execute an operation that might fail and returns its result, or
        raise the exception that was thrown depending on the `Backoff` object.
        `do`: the operation to call. Expects no argument.
        `fail`: the failure handler, expects the last error that was thrown
        """
        self._backoff.reset()
        attempt_failures = 0
        while True:
            try:
                return do()
            except self._supported_errors as error:
                attempt_failures += 1
                fail(error)
                # A negative retry budget means "retry forever".
                if 0 <= self._retries < attempt_failures:
                    raise error
                delay = self._backoff.compute(attempt_failures)
                if delay > 0:
                    sleep(delay)
96 |
--------------------------------------------------------------------------------
/redis/typing.py:
--------------------------------------------------------------------------------
1 | # from __future__ import annotations
2 |
3 | from datetime import datetime, timedelta
4 | from typing import (
5 | TYPE_CHECKING,
6 | Any,
7 | Awaitable,
8 | Iterable,
9 | Mapping,
10 | Protocol,
11 | Type,
12 | TypeVar,
13 | Union,
14 | )
15 |
16 | if TYPE_CHECKING:
17 | from redis._parsers import Encoder
18 |
19 |
# Basic numeric and wire-format building blocks.
Number = Union[int, float]
EncodedT = Union[bytes, bytearray, memoryview]
DecodedT = Union[str, int, float]
EncodableT = Union[EncodedT, DecodedT]
# Expiry arguments: absolute point in time vs. relative duration.
AbsExpiryT = Union[int, datetime]
ExpiryT = Union[int, timedelta]
ZScoreBoundT = Union[float, str]  # str allows for the [ or ( prefix
BitfieldOffsetT = Union[int, str]  # str allows for #x syntax
_StringLikeT = Union[bytes, str, memoryview]
KeyT = _StringLikeT  # Main redis key space
PatternT = _StringLikeT  # Patterns matched against keys, fields etc
FieldT = EncodableT  # Fields within hash tables, streams and geo commands
KeysT = Union[KeyT, Iterable[KeyT]]
ResponseT = Union[Awaitable[Any], Any]
ChannelT = _StringLikeT  # Pub/sub channel name
GroupT = _StringLikeT  # Consumer group
ConsumerT = _StringLikeT  # Consumer name
StreamIdT = Union[int, _StringLikeT]
ScriptTextT = _StringLikeT  # Lua script source
TimeoutSecT = Union[int, float, _StringLikeT]
# Mapping is not covariant in the key type, which prevents
# Mapping[_StringLikeT, X] from accepting arguments of type Dict[str, X]. Using
# a TypeVar instead of a Union allows mappings with any of the permitted types
# to be passed. Care is needed if there is more than one such mapping in a
# type signature because they will all be required to be the same key type.
AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview)
AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview)
AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview)

# Maps a server error prefix either directly to an exception class or to a
# nested mapping for more specific error messages.
ExceptionMappingT = Mapping[str, Union[Type[Exception], Mapping[str, Type[Exception]]]]


class CommandsProtocol(Protocol):
    """Structural type for any object able to execute Redis commands."""

    def execute_command(self, *args, **options) -> ResponseT: ...


class ClusterCommandsProtocol(CommandsProtocol):
    """Command executor that additionally exposes an encoder, as cluster
    clients do."""

    encoder: "Encoder"
58 |
--------------------------------------------------------------------------------
/tasks.py:
--------------------------------------------------------------------------------
1 | # https://github.com/pyinvoke/invoke/issues/833
2 | import inspect
3 | import os
4 | import shutil
5 |
6 | from invoke import run, task
7 |
8 | if not hasattr(inspect, "getargspec"):
9 | inspect.getargspec = inspect.getfullargspec
10 |
11 |
@task
def devenv(c, endpoints="all"):
    """Brings up the test environment, by wrapping docker compose."""
    # Start from a clean slate so stale containers/binaries don't linger.
    clean(c)
    run(f"docker compose --profile {endpoints} up -d --build")
18 |
19 |
@task
def build_docs(c):
    """Generates the sphinx documentation."""
    # Docs dependencies are kept separate from dev_requirements.txt.
    run("pip install -r docs/requirements.txt")
    run("make -C docs html")
25 |
26 |
@task
def linters(c):
    """Run code linters"""
    run("ruff check tests redis")
    run("ruff format --check --diff tests redis")
    # vulture flags dead code; whitelist.py holds known false positives.
    run("vulture redis whitelist.py --min-confidence 80")
33 |
34 |
@task
def all_tests(c):
    """Run all linters, and tests in redis-py."""
    linters(c)
    tests(c)
40 |
41 |
@task
def tests(c, uvloop=False, protocol=2, profile=False):
    """Run the redis-py test suite against the current python."""
    print("Starting Redis tests")
    # Run both flavours with the same flags: standalone first, then cluster.
    standalone_tests(c, uvloop=uvloop, protocol=protocol, profile=profile)
    cluster_tests(c, uvloop=uvloop, protocol=protocol, profile=profile)
48 |
49 |
@task
def standalone_tests(
    c, uvloop=False, protocol=2, profile=False, redis_mod_url=None, extra_markers=""
):
    """Run tests against a standalone redis instance

    - uvloop: run the asyncio tests on the uvloop event loop
    - protocol: RESP protocol version to test against (2 or 3)
    - profile: enable pytest profiling
    - redis_mod_url: optional URL of a redis instance with modules loaded
    - extra_markers: extra pytest marker expression, ANDed with the default
    """
    profile_arg = "--profile" if profile else ""
    redis_mod_url = f"--redis-mod-url={redis_mod_url}" if redis_mod_url else ""
    extra_markers = f" and {extra_markers}" if extra_markers else ""

    # The uvloop variant differs only in the coverage/junit file names and
    # the extra --uvloop flag.
    if uvloop:
        run(
            f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}_uvloop.xml -m 'not onlycluster{extra_markers}' --uvloop --junit-xml=standalone-resp{protocol}-uvloop-results.xml"
        )
    else:
        run(
            f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}.xml -m 'not onlycluster{extra_markers}' --junit-xml=standalone-resp{protocol}-results.xml"
        )
67 |
68 |
@task
def cluster_tests(c, uvloop=False, protocol=2, profile=False):
    """Run tests against a redis cluster

    - uvloop: run the asyncio tests on the uvloop event loop
    - protocol: RESP protocol version to test against (2 or 3)
    - profile: enable pytest profiling
    """
    profile_arg = "--profile" if profile else ""
    # Ports match the cluster services defined in docker-compose.yml.
    cluster_url = "redis://localhost:16379/0"
    cluster_tls_url = "rediss://localhost:27379/0"
    if uvloop:
        run(
            f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}_uvloop.xml -m 'not onlynoncluster and not redismod' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-uvloop-results.xml --uvloop"
        )
    else:
        run(
            f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}.xml -m 'not onlynoncluster and not redismod' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-results.xml"
        )
83 |
84 |
@task
def clean(c):
    """Stop all dockers, and clean up the built binaries, if generated."""
    # Remove build artifacts first, then tear down the compose services.
    for artifact_dir in ("build", "dist"):
        if os.path.isdir(artifact_dir):
            shutil.rmtree(artifact_dir)
    run("docker compose --profile all rm -s -f")
93 |
94 |
@task
def package(c):
    """Create the python packages"""
    # Builds sdist + wheel via the `build` module (PEP 517 front end).
    run("python -m build .")
99 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/tests/__init__.py
--------------------------------------------------------------------------------
/tests/mocks.py:
--------------------------------------------------------------------------------
1 | # Various mocks for testing
2 |
3 |
class MockSocket:
    """
    A fake readable socket serving a fixed byte string, optionally raising
    TestError on every Nth read when ``interrupt_every`` is non-zero.
    """

    class TestError(BaseException):
        pass

    def __init__(self, data, interrupt_every=0):
        self.data = data
        self.counter = 0
        self.pos = 0
        self.interrupt_every = interrupt_every

    def tick(self):
        # Count this access; fire TestError on every Nth call when enabled.
        self.counter += 1
        if self.interrupt_every and self.counter % self.interrupt_every == 0:
            raise self.TestError()

    def recv(self, bufsize):
        self.tick()
        # Serve at most 5 bytes per call to exercise short reads.
        chunk = self.data[self.pos : self.pos + min(bufsize, 5)]
        self.pos += len(chunk)
        return chunk

    def recv_into(self, buffer, nbytes=0, flags=0):
        self.tick()
        # nbytes == 0 means "fill the buffer"; reads are capped at 5 bytes.
        limit = min(nbytes or len(buffer), 5)
        chunk = self.data[self.pos : self.pos + limit]
        self.pos += len(chunk)
        buffer[: len(chunk)] = chunk
        return len(chunk)
42 |
--------------------------------------------------------------------------------
/tests/ssl_utils.py:
--------------------------------------------------------------------------------
1 | import enum
2 | import os
3 | from collections import namedtuple
4 |
5 | CLIENT_CERT_NAME = "client.crt"
6 | CLIENT_KEY_NAME = "client.key"
7 | SERVER_CERT_NAME = "redis.crt"
8 | SERVER_KEY_NAME = "redis.key"
9 | CA_CERT_NAME = "ca.crt"
10 |
11 |
class CertificateType(str, enum.Enum):
    # Selects which cert/key pair get_tls_certificates returns.
    client = "client"
    server = "server"


# (certfile, keyfile, ca_certfile) absolute paths returned to callers.
TLSFiles = namedtuple("TLSFiles", ["certfile", "keyfile", "ca_certfile"])
18 |
19 |
def get_tls_certificates(
    subdir: str = "standalone",
    cert_type: CertificateType = CertificateType.client,
) -> TLSFiles:
    """
    Return the TLS certificate, key, and CA certificate paths for tests.

    Certificates are looked up under ``dockers/<subdir>/tls`` relative to the
    repository root, falling back one directory higher (the layout used by the
    github actions package-validation job).

    :param subdir: docker configuration subdirectory holding the tls files.
    :param cert_type: whether to return the client or the server pair.
    :raises OSError: when no certificate directory can be found.
    :raises ValueError: when ``cert_type`` is not a known CertificateType.
    """
    root = os.path.join(os.path.dirname(__file__), "..")
    cert_subdir = ("dockers", subdir, "tls")
    cert_dir = os.path.abspath(os.path.join(root, *cert_subdir))
    if not os.path.isdir(cert_dir):  # github actions package validation case
        cert_dir = os.path.abspath(os.path.join(root, "..", *cert_subdir))
        if not os.path.isdir(cert_dir):
            raise OSError(f"No SSL certificates found. They should be in {cert_dir}")

    if cert_type == CertificateType.client:
        return TLSFiles(
            os.path.join(cert_dir, CLIENT_CERT_NAME),
            os.path.join(cert_dir, CLIENT_KEY_NAME),
            os.path.join(cert_dir, CA_CERT_NAME),
        )
    elif cert_type == CertificateType.server:
        return TLSFiles(
            os.path.join(cert_dir, SERVER_CERT_NAME),
            os.path.join(cert_dir, SERVER_KEY_NAME),
            os.path.join(cert_dir, CA_CERT_NAME),
        )
    # Previously an unrecognized cert_type fell through and silently
    # returned None; fail loudly instead.
    raise ValueError(f"Unknown certificate type: {cert_type}")
44 |
--------------------------------------------------------------------------------
/tests/test_asyncio/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/tests/test_asyncio/__init__.py
--------------------------------------------------------------------------------
/tests/test_asyncio/compat.py:
--------------------------------------------------------------------------------
import asyncio
from unittest import mock

# Probe for mock.AsyncMock (added to the stdlib in Python 3.8).
try:
    mock.AsyncMock
except AttributeError:
    # NOTE(review): this fallback re-imports the same stdlib module, so it
    # does not actually provide AsyncMock — presumably a leftover from a
    # third-party-mock era; confirm before relying on it.
    from unittest import mock

# contextlib.aclosing was added in Python 3.10; provide a backport otherwise.
try:
    from contextlib import aclosing
except ImportError:
    import contextlib

    @contextlib.asynccontextmanager
    async def aclosing(thing):
        # Yield the resource and guarantee its aclose() runs on exit.
        try:
            yield thing
        finally:
            await thing.aclose()


def create_task(coroutine):
    # Schedule the coroutine on the running event loop.
    return asyncio.create_task(coroutine)
24 |
--------------------------------------------------------------------------------
/tests/test_asyncio/mocks.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | # Helper Mocking classes for the tests.
4 |
5 |
class MockStream:
    """
    A class simulating an asyncio input buffer, optionally raising a
    special exception every other read.
    """

    class TestError(BaseException):
        pass

    def __init__(self, data, interrupt_every=0):
        # data: the byte string this stream serves.
        # interrupt_every: raise TestError on every Nth read; 0 disables.
        self.data = data
        self.counter = 0
        self.pos = 0
        self.interrupt_every = interrupt_every

    def tick(self):
        """Count a read and raise TestError on every Nth one when enabled."""
        self.counter += 1
        if not self.interrupt_every:
            return
        if (self.counter % self.interrupt_every) == 0:
            raise self.TestError()

    async def read(self, want):
        """Read up to ``want`` bytes, truncated to at most 5 per call."""
        self.tick()
        if want is None or want < 0:
            # "read whatever is available": serve a small chunk.
            want = 5
        else:
            # Never return more than requested; the old code clobbered
            # `want` with 5, over-delivering for small reads.
            want = min(5, want)
        result = self.data[self.pos : self.pos + want]
        self.pos += len(result)
        return result

    async def readline(self):
        """Read through the next newline, or the remainder if there is none."""
        self.tick()
        find = self.data.find(b"\n", self.pos)
        if find >= 0:
            result = self.data[self.pos : find + 1]
        else:
            result = self.data[self.pos :]
        self.pos += len(result)
        return result

    async def readexactly(self, length):
        """Read exactly ``length`` bytes or raise IncompleteReadError."""
        self.tick()
        result = self.data[self.pos : self.pos + length]
        if len(result) < length:
            # Report the expected byte count (the old code passed None).
            raise asyncio.IncompleteReadError(result, length)
        self.pos += len(result)
        return result
52 |
--------------------------------------------------------------------------------
/tests/test_asyncio/test_monitor.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from tests.conftest import skip_if_redis_enterprise, skip_ifnot_redis_enterprise
3 |
4 | from .conftest import wait_for_command
5 |
6 |
@pytest.mark.onlynoncluster
class TestMonitor:
    """Exercises the asyncio client's r.monitor() context manager."""

    async def test_wait_command_not_found(self, r):
        """Make sure the wait_for_command func works when command is not found"""
        async with r.monitor() as m:
            response = await wait_for_command(r, m, "nothing")
            assert response is None

    async def test_response_values(self, r):
        """A parsed MONITOR entry exposes time/db/client_* /command fields."""
        db = r.connection_pool.connection_kwargs.get("db", 0)
        async with r.monitor() as m:
            await r.ping()
            response = await wait_for_command(r, m, "PING")
            assert isinstance(response["time"], float)
            assert response["db"] == db
            assert response["client_type"] in ("tcp", "unix")
            assert isinstance(response["client_address"], str)
            assert isinstance(response["client_port"], str)
            assert response["command"] == "PING"

    async def test_command_with_quoted_key(self, r):
        """A key containing a double quote survives MONITOR formatting."""
        async with r.monitor() as m:
            await r.get('foo"bar')
            response = await wait_for_command(r, m, 'GET foo"bar')
            assert response["command"] == 'GET foo"bar'

    async def test_command_with_binary_data(self, r):
        """A non-ASCII byte is rendered as a \\xNN escape in the entry."""
        async with r.monitor() as m:
            byte_string = b"foo\x92"
            await r.get(byte_string)
            response = await wait_for_command(r, m, "GET foo\\x92")
            assert response["command"] == "GET foo\\x92"

    async def test_command_with_escaped_data(self, r):
        """A literal backslash in the key appears doubled in the entry."""
        async with r.monitor() as m:
            byte_string = b"foo\\x92"
            await r.get(byte_string)
            response = await wait_for_command(r, m, "GET foo\\\\x92")
            assert response["command"] == "GET foo\\\\x92"

    @skip_if_redis_enterprise()
    async def test_lua_script(self, r):
        """A command issued from a Lua script reports a 'lua' client."""
        async with r.monitor() as m:
            script = 'return redis.call("GET", "foo")'
            assert await r.eval(script, 0) is None
            response = await wait_for_command(r, m, "GET foo")
            assert response["command"] == "GET foo"
            assert response["client_type"] == "lua"
            assert response["client_address"] == "lua"
            assert response["client_port"] == ""

    @skip_ifnot_redis_enterprise()
    async def test_lua_script_in_enterprise(self, r):
        """On Redis Enterprise the Lua-issued GET is not surfaced by MONITOR."""
        async with r.monitor() as m:
            script = 'return redis.call("GET", "foo")'
            assert await r.eval(script, 0) is None
            response = await wait_for_command(r, m, "GET foo")
            assert response is None
65 |
--------------------------------------------------------------------------------
/tests/test_asyncio/test_sentinel_managed_connection.py:
--------------------------------------------------------------------------------
1 | import socket
2 |
3 | import pytest
4 | from redis.asyncio.retry import Retry
5 | from redis.asyncio.sentinel import SentinelManagedConnection
6 | from redis.backoff import NoBackoff
7 |
8 | from .compat import mock
9 |
10 | pytestmark = pytest.mark.asyncio
11 |
12 |
async def test_connect_retry_on_timeout_error(connect_args):
    """Test that the _connect function is retried in case of a timeout"""
    # Pool mock that always resolves the master to the configured host/port.
    connection_pool = mock.AsyncMock()
    connection_pool.get_master_address = mock.AsyncMock(
        return_value=(connect_args["host"], connect_args["port"])
    )
    conn = SentinelManagedConnection(
        retry_on_timeout=True,
        retry=Retry(NoBackoff(), 3),  # up to 3 retries with no delay between
        connection_pool=connection_pool,
    )
    # Wrap the real _connect so we can count calls while failing the first two.
    origin_connect = conn._connect
    conn._connect = mock.AsyncMock()

    async def mock_connect():
        # connect only on the last retry
        if conn._connect.call_count <= 2:
            raise socket.timeout
        else:
            return await origin_connect()

    conn._connect.side_effect = mock_connect
    await conn.connect()
    assert conn._connect.call_count == 3
    await conn.disconnect()
38 |
--------------------------------------------------------------------------------
/tests/test_asyncio/test_ssl.py:
--------------------------------------------------------------------------------
1 | from urllib.parse import urlparse
2 | import pytest
3 | import pytest_asyncio
4 | import redis.asyncio as redis
5 |
6 | # Skip test or not based on cryptography installation
7 | try:
8 | import cryptography # noqa
9 |
10 | skip_if_cryptography = pytest.mark.skipif(False, reason="")
11 | skip_if_nocryptography = pytest.mark.skipif(False, reason="")
12 | except ImportError:
13 | skip_if_cryptography = pytest.mark.skipif(True, reason="cryptography not installed")
14 | skip_if_nocryptography = pytest.mark.skipif(
15 | True, reason="cryptography not installed"
16 | )
17 |
18 |
@pytest.mark.ssl
class TestSSL:
    """Tests for SSL connections in asyncio."""

    @pytest_asyncio.fixture()
    async def _get_client(self, request):
        # Build an SSL client from the --redis-ssl-url host:port and make
        # sure it is closed after the test.
        ssl_url = request.config.option.redis_ssl_url
        p = urlparse(ssl_url)[1].split(":")
        client = redis.Redis(host=p[0], port=p[1], ssl=True)
        yield client
        await client.aclose()

    async def test_ssl_with_invalid_cert(self, _get_client):
        """Test SSL connection with invalid certificate."""
        # NOTE(review): the body is empty — this only exercises the fixture's
        # connect/close cycle; confirm whether an assertion was intended.
        pass

    async def test_cert_reqs_none_with_check_hostname(self, request):
        """Test that when ssl_cert_reqs=none is used with ssl_check_hostname=True,
        the connection is created successfully with check_hostname internally set to False"""
        ssl_url = request.config.option.redis_ssl_url
        parsed_url = urlparse(ssl_url)
        r = redis.Redis(
            host=parsed_url.hostname,
            port=parsed_url.port,
            ssl=True,
            ssl_cert_reqs="none",
            # Check that ssl_check_hostname is ignored, when ssl_cert_reqs=none
            ssl_check_hostname=True,
        )
        try:
            # Connection should be successful
            assert await r.ping()
            # check_hostname should have been automatically set to False
            assert r.connection_pool.connection_class == redis.SSLConnection
            conn = r.connection_pool.make_connection()
            assert conn.check_hostname is False
        finally:
            await r.aclose()
57 |
--------------------------------------------------------------------------------
/tests/test_asyncio/test_utils.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import redis
3 |
4 |
async def redis_server_time(client: redis.Redis):
    """Return the server's clock (via client.time()) as a local datetime."""
    seconds, fraction = await client.time()
    return datetime.fromtimestamp(float(f"{seconds}.{fraction}"))
9 |
--------------------------------------------------------------------------------
/tests/test_asyncio/testdata/will_play_text.csv.bz2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/tests/test_asyncio/testdata/will_play_text.csv.bz2
--------------------------------------------------------------------------------
/tests/test_auth/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/tests/test_auth/__init__.py
--------------------------------------------------------------------------------
/tests/test_auth/test_token.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timezone
2 |
3 | import pytest
4 | from redis.auth.err import InvalidTokenSchemaErr
5 | from redis.auth.token import JWToken, SimpleToken
6 |
7 |
class TestToken:
    """Unit tests for SimpleToken and JWToken expiry/value accessors."""

    def test_simple_token(self):
        # Token expiring ~1 second from now.
        token = SimpleToken(
            "value",
            (datetime.now(timezone.utc).timestamp() * 1000) + 1000,
            (datetime.now(timezone.utc).timestamp() * 1000),
            {"key": "value"},
        )

        assert token.ttl() == pytest.approx(1000, 10)
        assert token.is_expired() is False
        assert token.try_get("key") == "value"
        assert token.get_value() == "value"
        # NOTE(review): the token was created with +1000 ms expiry but this
        # expectation uses +100; it only passes because of the loose approx
        # tolerance — looks like a typo, confirm.
        assert token.get_expires_at_ms() == pytest.approx(
            (datetime.now(timezone.utc).timestamp() * 1000) + 100, 10
        )
        assert token.get_received_at_ms() == pytest.approx(
            (datetime.now(timezone.utc).timestamp() * 1000), 10
        )

        # With an expiry of -1 the token reports ttl() == -1 and not expired.
        token = SimpleToken(
            "value",
            -1,
            (datetime.now(timezone.utc).timestamp() * 1000),
            {"key": "value"},
        )

        assert token.ttl() == -1
        assert token.is_expired() is False
        assert token.get_expires_at_ms() == -1

    def test_jwt_token(self):
        # Skip the whole test when PyJWT is not installed.
        jwt = pytest.importorskip("jwt")

        token = {
            "exp": datetime.now(timezone.utc).timestamp() + 100,
            "iat": datetime.now(timezone.utc).timestamp(),
            "key": "value",
        }
        encoded = jwt.encode(token, "secret", algorithm="HS256")
        jwt_token = JWToken(encoded)

        assert jwt_token.ttl() == pytest.approx(100000, 10)
        assert jwt_token.is_expired() is False
        assert jwt_token.try_get("key") == "value"
        assert jwt_token.get_value() == encoded
        assert jwt_token.get_expires_at_ms() == pytest.approx(
            (datetime.now(timezone.utc).timestamp() * 1000) + 100000, 10
        )
        assert jwt_token.get_received_at_ms() == pytest.approx(
            (datetime.now(timezone.utc).timestamp() * 1000), 10
        )

        # An "exp" of -1 second is reported as -1000 ms.
        token = {
            "exp": -1,
            "iat": datetime.now(timezone.utc).timestamp(),
            "key": "value",
        }
        encoded = jwt.encode(token, "secret", algorithm="HS256")
        jwt_token = JWToken(encoded)

        assert jwt_token.ttl() == -1
        assert jwt_token.is_expired() is False
        assert jwt_token.get_expires_at_ms() == -1000

        # A JWT missing the required schema fields is rejected.
        with pytest.raises(InvalidTokenSchemaErr):
            token = {"key": "value"}
            encoded = jwt.encode(token, "secret", algorithm="HS256")
            JWToken(encoded)
77 |
--------------------------------------------------------------------------------
/tests/test_backoff.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import Mock
2 |
3 | import pytest
4 |
5 | from redis.backoff import ExponentialWithJitterBackoff
6 |
7 |
def test_exponential_with_jitter_backoff(monkeypatch: pytest.MonkeyPatch) -> None:
    """Each computed delay is min(cap, random() * base * 2**failures)."""
    rng = Mock(side_effect=[0.25, 0.5, 0.75, 1.0, 0.9])
    monkeypatch.setattr("random.random", rng)

    backoff = ExponentialWithJitterBackoff(cap=5, base=1)

    # Expected delays for failures 0..4 with the stubbed random values.
    expected = [0.25, 1.0, 3.0, 5.0, 5.0]
    for failures, want in enumerate(expected):
        assert backoff.compute(failures) == want
19 |
--------------------------------------------------------------------------------
/tests/test_command_parser.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from redis._parsers import CommandsParser
3 |
4 | from .conftest import (
5 | assert_resp_response,
6 | skip_if_redis_enterprise,
7 | skip_if_server_version_lt,
8 | )
9 |
10 |
class TestCommandsParser:
    """Tests key extraction by CommandsParser for various command shapes."""

    def test_init_commands(self, r):
        """The parser loads a command table containing known commands."""
        commands_parser = CommandsParser(r)
        assert commands_parser.commands is not None
        assert "get" in commands_parser.commands

    def test_get_keys_predetermined_key_location(self, r):
        """Keys at fixed argument positions are extracted directly."""
        commands_parser = CommandsParser(r)
        args1 = ["GET", "foo"]
        args2 = ["OBJECT", "encoding", "foo"]
        args3 = ["MGET", "foo", "bar", "foobar"]
        assert commands_parser.get_keys(r, *args1) == ["foo"]
        assert commands_parser.get_keys(r, *args2) == ["foo"]
        assert commands_parser.get_keys(r, *args3) == ["foo", "bar", "foobar"]

    @pytest.mark.filterwarnings("ignore:ResponseError")
    @skip_if_redis_enterprise()
    def test_get_moveable_keys(self, r):
        """Commands whose key positions are not fixed (EVAL, XREAD, MIGRATE,
        ...) still yield the correct key sets."""
        commands_parser = CommandsParser(r)
        args1 = [
            "EVAL",
            "return {KEYS[1],KEYS[2],ARGV[1],ARGV[2]}",
            2,
            "key1",
            "key2",
            "first",
            "second",
        ]
        args2 = ["XREAD", "COUNT", 2, b"STREAMS", "mystream", "writers", 0, 0]
        args3 = ["ZUNIONSTORE", "out", 2, "zset1", "zset2", "WEIGHTS", 2, 3]
        args4 = ["GEORADIUS", "Sicily", 15, 37, 200, "km", "WITHCOORD", b"STORE", "out"]
        args5 = ["MEMORY USAGE", "foo"]
        args6 = [
            "MIGRATE",
            "192.168.1.34",
            6379,
            "",
            0,
            5000,
            b"KEYS",
            "key1",
            "key2",
            "key3",
        ]
        args7 = ["MIGRATE", "192.168.1.34", 6379, "key1", 0, 5000]

        assert_resp_response(
            r,
            sorted(commands_parser.get_keys(r, *args1)),
            ["key1", "key2"],
            [b"key1", b"key2"],
        )
        assert_resp_response(
            r,
            sorted(commands_parser.get_keys(r, *args2)),
            ["mystream", "writers"],
            [b"mystream", b"writers"],
        )
        assert_resp_response(
            r,
            sorted(commands_parser.get_keys(r, *args3)),
            ["out", "zset1", "zset2"],
            [b"out", b"zset1", b"zset2"],
        )
        assert_resp_response(
            r,
            sorted(commands_parser.get_keys(r, *args4)),
            ["Sicily", "out"],
            [b"Sicily", b"out"],
        )
        assert sorted(commands_parser.get_keys(r, *args5)) in [["foo"], [b"foo"]]
        assert_resp_response(
            r,
            sorted(commands_parser.get_keys(r, *args6)),
            ["key1", "key2", "key3"],
            [b"key1", b"key2", b"key3"],
        )
        assert_resp_response(
            r, sorted(commands_parser.get_keys(r, *args7)), ["key1"], [b"key1"]
        )

    # A bug in redis<7.0 causes this to fail: https://github.com/redis/redis/issues/9493
    @skip_if_server_version_lt("7.0.0")
    def test_get_eval_keys_with_0_keys(self, r):
        """EVAL with numkeys=0 yields no keys even with trailing args."""
        commands_parser = CommandsParser(r)
        args = ["EVAL", "return {ARGV[1],ARGV[2]}", 0, "key1", "key2"]
        assert commands_parser.get_keys(r, *args) == []

    def test_get_pubsub_keys(self, r):
        """Pub/sub commands report their channel arguments as keys."""
        commands_parser = CommandsParser(r)
        args1 = ["PUBLISH", "foo", "bar"]
        args2 = ["PUBSUB NUMSUB", "foo1", "foo2", "foo3"]
        args3 = ["PUBSUB channels", "*"]
        args4 = ["SUBSCRIBE", "foo1", "foo2", "foo3"]
        assert commands_parser.get_keys(r, *args1) == ["foo"]
        assert commands_parser.get_keys(r, *args2) == ["foo1", "foo2", "foo3"]
        assert commands_parser.get_keys(r, *args3) == ["*"]
        assert commands_parser.get_keys(r, *args4) == ["foo1", "foo2", "foo3"]
109 |
--------------------------------------------------------------------------------
/tests/test_encoding.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import redis
3 |
4 | from .conftest import _get_client
5 |
6 |
class TestEncoding:
    """Round-trips unicode data through clients with and without decoding."""

    @pytest.fixture()
    def r(self, request):
        # Client that decodes responses to str.
        return _get_client(redis.Redis, request=request, decode_responses=True)

    @pytest.fixture()
    def r_no_decode(self, request):
        # Client that returns raw bytes.
        return _get_client(redis.Redis, request=request, decode_responses=False)

    def test_simple_encoding(self, r_no_decode):
        """UTF-8 encoded values come back as the same bytes."""
        unicode_string = chr(3456) + "abcd" + chr(3421)
        r_no_decode["unicode-string"] = unicode_string.encode("utf-8")
        cached_val = r_no_decode["unicode-string"]
        assert isinstance(cached_val, bytes)
        assert unicode_string == cached_val.decode("utf-8")

    def test_simple_encoding_and_decoding(self, r):
        """str values survive an encode/decode round trip."""
        unicode_string = chr(3456) + "abcd" + chr(3421)
        r["unicode-string"] = unicode_string
        cached_val = r["unicode-string"]
        assert isinstance(cached_val, str)
        assert unicode_string == cached_val

    def test_memoryview_encoding(self, r_no_decode):
        """memoryview inputs are accepted and stored as their bytes."""
        unicode_string = chr(3456) + "abcd" + chr(3421)
        unicode_string_view = memoryview(unicode_string.encode("utf-8"))
        r_no_decode["unicode-string-memoryview"] = unicode_string_view
        cached_val = r_no_decode["unicode-string-memoryview"]
        # The cached value won't be a memoryview because it's a copy from Redis
        assert isinstance(cached_val, bytes)
        assert unicode_string == cached_val.decode("utf-8")

    def test_memoryview_encoding_and_decoding(self, r):
        """memoryview inputs decode back to the original str."""
        unicode_string = chr(3456) + "abcd" + chr(3421)
        unicode_string_view = memoryview(unicode_string.encode("utf-8"))
        r["unicode-string-memoryview"] = unicode_string_view
        cached_val = r["unicode-string-memoryview"]
        assert isinstance(cached_val, str)
        assert unicode_string == cached_val

    def test_list_encoding(self, r):
        """List elements are decoded individually by lrange."""
        unicode_string = chr(3456) + "abcd" + chr(3421)
        result = [unicode_string, unicode_string, unicode_string]
        r.rpush("a", *result)
        assert r.lrange("a", 0, -1) == result
52 |
53 |
class TestEncodingErrors:
    """Verifies the encoding_errors option controls invalid-byte handling."""

    def test_ignore(self, request):
        """encoding_errors='ignore' drops undecodable bytes."""
        r = _get_client(
            redis.Redis,
            request=request,
            decode_responses=True,
            encoding_errors="ignore",
        )
        r.set("a", b"foo\xff")
        assert r.get("a") == "foo"

    def test_replace(self, request):
        """encoding_errors='replace' substitutes U+FFFD for bad bytes."""
        r = _get_client(
            redis.Redis,
            request=request,
            decode_responses=True,
            encoding_errors="replace",
        )
        r.set("a", b"foo\xff")
        assert r.get("a") == "foo\ufffd"
74 |
75 |
class TestCommandsAreNotEncoded:
    """Smoke test: commands work with an explicit encoding configured."""

    @pytest.fixture()
    def r(self, request):
        return _get_client(redis.Redis, request=request, encoding="utf-8")

    def test_basic_command(self, r):
        """A plain SET succeeds under the utf-8 encoding setting."""
        r.set("hello", "world")
83 |
84 |
class TestInvalidUserInput:
    """Non-encodable value types must raise DataError rather than coerce."""

    def test_boolean_fails(self, r):
        with pytest.raises(redis.DataError):
            r.set("a", True)

    def test_none_fails(self, r):
        with pytest.raises(redis.DataError):
            r.set("a", None)

    def test_user_type_fails(self, r):
        # Arbitrary objects are rejected even when they define __str__.
        class Foo:
            def __str__(self):
                return "Foo"

        with pytest.raises(redis.DataError):
            r.set("a", Foo())
101 |
--------------------------------------------------------------------------------
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | import string
2 |
3 | from redis.commands.helpers import (
4 | delist,
5 | list_or_args,
6 | nativestr,
7 | parse_to_list,
8 | random_string,
9 | )
10 |
11 |
def test_list_or_args():
    """list_or_args concatenates a list of keys with the extra args."""
    k = ["hello, world"]
    a = ["some", "argument", "list"]
    assert list_or_args(k, a) == k + a

    # A single (byte)string key is wrapped into a one-element list.
    for i in ["banana", b"banana"]:
        assert list_or_args(i, a) == [i] + a


def test_parse_to_list():
    """parse_to_list decodes bytes and coerces numeric strings."""
    assert parse_to_list(None) == []
    r = ["hello", b"my name", "45", "555.55", "is simon!", None]
    assert parse_to_list(r) == ["hello", "my name", 45, 555.55, "is simon!", None]


def test_nativestr():
    """nativestr decodes bytes to str; the literal "null" maps to None."""
    assert nativestr("teststr") == "teststr"
    assert nativestr(b"teststr") == "teststr"
    assert nativestr("null") is None


def test_delist():
    """delist decodes every bytes element; None passes through."""
    assert delist(None) is None
    assert delist([b"hello", "world", b"banana"]) == ["hello", "world", "banana"]


def test_random_string():
    """random_string yields lowercase ASCII of the requested length."""
    assert len(random_string()) == 10
    assert len(random_string(15)) == 15
    for a in random_string():
        assert a in string.ascii_lowercase
43 |
--------------------------------------------------------------------------------
/tests/test_monitor.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from .conftest import (
4 | skip_if_redis_enterprise,
5 | skip_ifnot_redis_enterprise,
6 | wait_for_command,
7 | )
8 |
9 |
@pytest.mark.onlynoncluster
class TestMonitor:
    """Exercises the sync client's r.monitor() context manager."""

    def test_wait_command_not_found(self, r):
        "Make sure the wait_for_command func works when command is not found"
        with r.monitor() as m:
            response = wait_for_command(r, m, "nothing")
            assert response is None

    def test_response_values(self, r):
        """A parsed MONITOR entry exposes time/db/client_* /command fields."""
        db = r.connection_pool.connection_kwargs.get("db", 0)
        with r.monitor() as m:
            r.ping()
            response = wait_for_command(r, m, "PING")
            assert isinstance(response["time"], float)
            assert response["db"] == db
            assert response["client_type"] in ("tcp", "unix")
            assert isinstance(response["client_address"], str)
            assert isinstance(response["client_port"], str)
            assert response["command"] == "PING"

    def test_command_with_quoted_key(self, r):
        """A key containing a double quote survives MONITOR formatting."""
        with r.monitor() as m:
            r.get('foo"bar')
            response = wait_for_command(r, m, 'GET foo"bar')
            assert response["command"] == 'GET foo"bar'

    def test_command_with_binary_data(self, r):
        """A non-ASCII byte is rendered as a \\xNN escape in the entry."""
        with r.monitor() as m:
            byte_string = b"foo\x92"
            r.get(byte_string)
            response = wait_for_command(r, m, "GET foo\\x92")
            assert response["command"] == "GET foo\\x92"

    def test_command_with_escaped_data(self, r):
        """A literal backslash in the key appears doubled in the entry."""
        with r.monitor() as m:
            byte_string = b"foo\\x92"
            r.get(byte_string)
            response = wait_for_command(r, m, "GET foo\\\\x92")
            assert response["command"] == "GET foo\\\\x92"

    @skip_if_redis_enterprise()
    def test_lua_script(self, r):
        """A command issued from a Lua script reports a 'lua' client."""
        with r.monitor() as m:
            script = 'return redis.call("GET", "foo")'
            assert r.eval(script, 0) is None
            response = wait_for_command(r, m, "GET foo")
            assert response["command"] == "GET foo"
            assert response["client_type"] == "lua"
            assert response["client_address"] == "lua"
            assert response["client_port"] == ""

    @skip_ifnot_redis_enterprise()
    def test_lua_script_in_enterprise(self, r):
        """On Redis Enterprise the Lua-issued GET is not surfaced by MONITOR."""
        with r.monitor() as m:
            script = 'return redis.call("GET", "foo")'
            assert r.eval(script, 0) is None
            response = wait_for_command(r, m, "GET foo")
            assert response is None
68 |
--------------------------------------------------------------------------------
/tests/test_parsers/test_helpers.py:
--------------------------------------------------------------------------------
1 | from redis._parsers.helpers import parse_info
2 |
3 |
def test_parse_info():
    """parse_info splits INFO output into typed sections and key=value dicts."""
    info_output = """
# Modules
module:name=search,ver=999999,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]

# search_fields_statistics
search_fields_text:Text=3
search_fields_tag:Tag=2,Sortable=1

# search_version
search_version:99.99.99
search_redis_version:7.2.2 - oss

# search_runtime_configurations
search_query_timeout_ms:500
"""
    info = parse_info(info_output)

    # module lines accumulate into a list of per-module dicts
    assert isinstance(info["modules"], list)
    assert isinstance(info["modules"][0], dict)
    assert info["modules"][0]["name"] == "search"

    # comma-separated key=value pairs become nested dicts with int coercion
    assert isinstance(info["search_fields_text"], dict)
    assert info["search_fields_text"]["Text"] == 3

    assert isinstance(info["search_fields_tag"], dict)
    assert info["search_fields_tag"]["Tag"] == 2
    assert info["search_fields_tag"]["Sortable"] == 1

    # scalar values stay as strings / ints
    assert info["search_version"] == "99.99.99"
    assert info["search_redis_version"] == "7.2.2 - oss"
    assert info["search_query_timeout_ms"] == 500


def test_parse_info_list():
    """Comma-separated values without '=' parse to a list, dropping empties."""
    info_output = """
list_one:a,
list_two:a b,,c,10,1.1
"""
    info = parse_info(info_output)

    assert isinstance(info["list_one"], list)
    assert info["list_one"] == ["a"]

    # numeric items are coerced; empty items are skipped
    assert isinstance(info["list_two"], list)
    assert info["list_two"] == ["a b", "c", 10, 1.1]


def test_parse_info_list_dict_mixed():
    """A line mixing bare items and key=value pairs parses to a dict where
    bare items map to True."""
    info_output = """
list_one:a,b=1
list_two:a b=foo,,c,d=bar,e,
"""
    info = parse_info(info_output)

    assert isinstance(info["list_one"], dict)
    assert info["list_one"] == {"a": True, "b": 1}

    assert isinstance(info["list_two"], dict)
    assert info["list_two"] == {"a b": "foo", "c": True, "d": "bar", "e": True}
64 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import pytest
3 | from redis.utils import compare_versions
4 |
5 |
# NOTE: per the table below, compare_versions returns -1 when version1 is the
# NEWER of the two, 1 when it is older, and 0 when they are equal.
@pytest.mark.parametrize(
    "version1,version2,expected_res",
    [
        ("1.0.0", "0.9.0", -1),
        ("1.0.0", "1.0.0", 0),
        ("0.9.0", "1.0.0", 1),
        ("1.09.0", "1.9.0", 0),
        ("1.090.0", "1.9.0", -1),
        ("1", "0.9.0", -1),
        ("1", "1.0.0", 0),
    ],
    ids=[
        "version1 > version2",
        "version1 == version2",
        "version1 < version2",
        "version1 == version2 - different minor format",
        "version1 > version2 - different minor format",
        "version1 > version2 - major version only",
        "version1 == version2 - major version only",
    ],
)
def test_compare_versions(version1, version2, expected_res):
    assert compare_versions(version1, version2) == expected_res
29 |
30 |
def redis_server_time(client):
    """Return the server's clock (via client.time()) as a local datetime."""
    seconds, fraction = client.time()
    return datetime.fromtimestamp(float(f"{seconds}.{fraction}"))
35 |
--------------------------------------------------------------------------------
/tests/testdata/will_play_text.csv.bz2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis/redis-py/6246cbade4fa9ae879455b498ef8fef5250619b1/tests/testdata/will_play_text.csv.bz2
--------------------------------------------------------------------------------
/whitelist.py:
--------------------------------------------------------------------------------
1 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1045)
2 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1045)
3 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1045)
4 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1211)
5 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1211)
6 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1211)
7 | exc_type # unused variable (/data/repos/redis/redis-py/redis/client.py:1589)
8 | exc_value # unused variable (/data/repos/redis/redis-py/redis/client.py:1589)
9 | traceback # unused variable (/data/repos/redis/redis-py/redis/client.py:1589)
10 | exc_type # unused variable (/data/repos/redis/redis-py/redis/lock.py:156)
11 | exc_value # unused variable (/data/repos/redis/redis-py/redis/lock.py:156)
12 | traceback # unused variable (/data/repos/redis/redis-py/redis/lock.py:156)
13 | exc_type # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26)
14 | exc_value # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26)
15 | traceback # unused variable (/data/repos/redis/redis-py/redis/asyncio/utils.py:26)
16 | AsyncConnectionPool # unused import (//data/repos/redis/redis-py/redis/typing.py:9)
17 | AsyncRedis # unused import (//data/repos/redis/redis-py/redis/commands/core.py:49)
18 | TargetNodesT # unused import (//data/repos/redis/redis-py/redis/commands/cluster.py:46)
19 |
--------------------------------------------------------------------------------