├── .bumpversion.cfg
├── .deepsource.toml
├── .github
│   ├── release-drafter-config.yml
│   └── workflows
│       ├── check-pypi.yml
│       ├── integration.yml
│       ├── linters.yml
│       ├── publish-pypi.yml
│       └── release-drafter.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.rst
├── codecov.yml
├── docs
│   ├── Makefile
│   ├── api.rst
│   ├── conf.py
│   ├── images
│   │   └── logo.png
│   ├── index.rst
│   ├── make.bat
│   └── readme.rst
├── pyproject.toml
├── redisai
│   ├── __init__.py
│   ├── client.py
│   ├── command_builder.py
│   ├── dag.py
│   ├── pipeline.py
│   ├── postprocessor.py
│   └── utils.py
├── test
│   ├── test.py
│   └── testdata
│       ├── boston.onnx
│       ├── dog.jpg
│       ├── findsquare.onnx
│       ├── graph.pb
│       ├── mnist_model_quant.tflite
│       ├── one.raw
│       ├── pt-minimal.pt
│       ├── resnet50.pb
│       └── script.txt
└── tox.ini

/.bumpversion.cfg:
--------------------------------------------------------------------------------
[bumpversion]
current_version = 1.0.2
commit = True
tag = False
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\.(?P<release>[a-z]+)(?P<build>\d+))?
serialize =
	{major}.{minor}.{patch}

[bumpversion:file:setup.py]
search = version="{current_version}"
replace = version="{new_version}"

[bumpversion:file:redisai/__init__.py]
search = __version__ = "{current_version}"
replace = __version__ = "{new_version}"

[bumpversion:file:docs/conf.py]
search = release = "{current_version}"
replace = release = "{new_version}"

--------------------------------------------------------------------------------
/.deepsource.toml:
--------------------------------------------------------------------------------
version = 1

[[analyzers]]
name = "python"
enabled = true

[analyzers.meta]
runtime_version = "3.x.x"

--------------------------------------------------------------------------------
/.github/release-drafter-config.yml:
--------------------------------------------------------------------------------
name-template: 'Version $NEXT_PATCH_VERSION'
tag-template: 'v$NEXT_PATCH_VERSION'
categories:
  - title: 'Features'
    labels:
      - 'feature'
      - 'enhancement'
  - title: 'Bug Fixes'
    labels:
      - 'fix'
      - 'bugfix'
      - 'bug'
  - title: 'Maintenance'
    label: 'chore'
change-template: '- $TITLE (#$NUMBER)'
exclude-labels:
  - 'skip-changelog'
template: |
  ## Changes

  $CHANGES

--------------------------------------------------------------------------------
/.github/workflows/check-pypi.yml:
--------------------------------------------------------------------------------
name: Check if required secrets are set to publish to Pypi

on:
  push:
    branches:
      - 'master'
      - 'main'
      - '[0-9].[0-9]'

jobs:
  checksecret:
    name: check if PYPI_TOKEN and TESTPYPI_TOKEN are set in github secrets
    runs-on: ubuntu-latest
    steps:
      - name: Check PYPI_TOKEN
        env:
          PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
        run: |
          if ${{ env.PYPI_TOKEN == '' }} ; then
            echo "PYPI_TOKEN secret is not set"
            exit 1
          fi
      - name: Check TESTPYPI_TOKEN
        env:
          TESTPYPI_TOKEN: ${{ secrets.TESTPYPI_TOKEN }}
        run: |
          if ${{ env.TESTPYPI_TOKEN == '' }} ; then
            echo "TESTPYPI_TOKEN secret is not set"
            exit 1
          fi

--------------------------------------------------------------------------------
/.github/workflows/integration.yml:
--------------------------------------------------------------------------------
---

on:
  push:
  schedule:
    - cron: "5 1 * * Sun-Thu"

name: tests

jobs:
  build-and-test:

    services:
      redisai:
        image: redislabs/redisai:edge-cpu-bionic
        ports:
          - 6379:6379
    runs-on: ubuntu-latest

    strategy:
      max-parallel: 10
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{matrix.python-version}}

      # pending: https://github.com/tkukushkin/tox-poetry/pull/16
      - name: install base dependencies
        run: |
          pip install -q tox==3.27.0 poetry tox-poetry
      - name: cache
        uses: actions/cache@v3
        with:
          path: |
            .tox
          key: redisai-${{matrix.python-version}}
      - name: build the package
        run: |
          poetry build
      - name: test
        run: |
          tox -e tests

--------------------------------------------------------------------------------
/.github/workflows/linters.yml:
--------------------------------------------------------------------------------
---

on:
  pull_request:
    paths:
      - 'redisai/**'
      - 'pyproject.toml'

name: lint

env:
  python_version: 3.9

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{env.python_version}}
      - name: lint
        run: |
          pip install -q tox poetry
          tox -e linters

--------------------------------------------------------------------------------
/.github/workflows/publish-pypi.yml:
--------------------------------------------------------------------------------
name: Publish Pypi
on:
  release:
    types: [ published ]

jobs:
  pytest:
    name: Publish to PyPi
    runs-on: ubuntu-latest
    env:
      ACTIONS_ALLOW_UNSECURE_COMMANDS: true
    steps:
      - uses: actions/checkout@master

      - name: get version from tag
        id: get_version
        run: |
          realversion="${GITHUB_REF/refs\/tags\//}"
          realversion="${realversion//v/}"
          echo "::set-output name=VERSION::$realversion"

      - name: Set the version for publishing
        uses: ciiiii/toml-editor@1.0.0
        with:
          file: "pyproject.toml"
          key: "tool.poetry.version"
          value: "${{ steps.get_version.outputs.VERSION }}"

      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8

      - name: install poetry
        run: |
          pip install poetry

      - name: Cache Poetry virtualenv
        uses: actions/cache@v1
        id: cache
        with:
          path: ~/.virtualenvs
          key: poetry-${{ hashFiles('**/poetry.lock') }}
          restore-keys: |
            poetry-${{ hashFiles('**/poetry.lock') }}

      - name: Set Poetry config
        run: |
          poetry config virtualenvs.in-project false
          poetry config virtualenvs.path ~/.virtualenvs

      - name: Install Dependencies
        run: poetry install
        if: steps.cache.outputs.cache-hit != 'true'

      - name: Publish to PyPI
        if: github.event_name == 'release'
        run: |
          poetry publish -u __token__ -p ${{ secrets.PYPI_TOKEN }} --build

--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - master

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      # Drafts your next Release notes as Pull Requests are merged into "master"
      - uses: release-drafter/release-drafter@v5
        with:
          # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
          config-name: release-drafter-config.yml
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.project
.pydevproject
*.pyc
.venv/
venv/
redisai.egg-info
.idea
.mypy_cache/
build/
dist/
docs/_build/
.DS_Store
.vscode

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
ARG OSNICK=bionic
ARG TARGET=cpu

FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as builder

RUN apt update && apt install -y python3 python3-pip
ADD . /build
WORKDIR /build
RUN pip3 install poetry
RUN poetry config virtualenvs.create false
RUN poetry build

### clean docker stage
FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as runner

RUN apt update && apt install -y python3 python3-pip
RUN rm -rf /var/cache/apt/

COPY --from=builder /build/dist/redisai*.tar.gz /tmp/
RUN pip3 install /tmp/redisai*.tar.gz

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
BSD 3-Clause License

Copyright (c) 2019, RedisAI
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
==========
redisai-py
==========

.. image:: https://img.shields.io/github/license/RedisAI/redisai-py.svg
   :target: https://github.com/RedisAI/redisai-py

.. image:: https://badge.fury.io/py/redisai.svg
   :target: https://badge.fury.io/py/redisai

.. image:: https://github.com/RedisAI/redisai-py/actions/workflows/integration.yml/badge.svg
   :target: https://github.com/RedisAI/redisai-py/actions/workflows/integration.yml

.. image:: https://img.shields.io/github/release/RedisAI/redisai-py.svg
   :target: https://github.com/RedisAI/redisai-py/releases/latest

.. image:: https://codecov.io/gh/RedisAI/redisai-py/branch/master/graph/badge.svg
   :target: https://codecov.io/gh/RedisAI/redisai-py

.. image:: https://readthedocs.org/projects/redisai-py/badge/?version=latest
   :target: https://redisai-py.readthedocs.io/en/latest/?badge=latest

.. image:: https://img.shields.io/badge/Forum-RedisAI-blue
   :target: https://forum.redis.com/c/modules/redisai

.. image:: https://img.shields.io/discord/697882427875393627?style=flat-square
   :target: https://discord.gg/rTQm7UZ

.. image:: https://snyk.io/test/github/RedisAI/redisai-py/badge.svg?targetFile=pyproject.toml
   :target: https://snyk.io/test/github/RedisAI/redisai-py?targetFile=pyproject.toml

redisai-py is the Python client for RedisAI. Check out the
`documentation <https://redisai-py.readthedocs.io/en/latest/>`_ for API details and examples.

Installation
------------

1. Install Redis 5.0 or above
2. Install `RedisAI <https://redisai.io>`_
3. Install the Python client

.. code-block:: bash

    $ pip install redisai


4. Install the serialization/deserialization utility (optional)

.. code-block:: bash

    $ pip install ml2rt

Development
-----------

1. Assuming you have virtualenv installed, create a virtualenv to manage your Python dependencies, and activate it:
   ``virtualenv -v venv; source venv/bin/activate``
2. Install `pypoetry <https://python-poetry.org/>`_ to manage your dependencies:
   ``pip install poetry``
3. Install dependencies:
   ``poetry install --no-root``

`tox <https://tox.readthedocs.io/en/latest/>`_ runs all tests as its default target. Running *tox* by itself will run the unit tests. Ensure you have a running Redis server, with the RedisAI module loaded.

**Contributing**

Prior to submitting a pull request, please ensure you've built and installed poetry as above. Then:

1. Run the linter:
   ``tox -e linters``
2. Run the unit tests. This assumes you have a Redis server running, with the `RedisAI module <https://redisai.io>`_ already loaded. If you don't, you may want to install a `docker build <https://hub.docker.com/r/redislabs/redisai/tags>`_:
   ``tox -e tests``

The `RedisAI example repo <https://github.com/RedisAI/redisai-examples>`_ shows a few examples
built with redisai-py under the ``python_client`` folder. Also, check out
`ml2rt <https://pypi.org/project/ml2rt/>`_ for convenient functions that can help in
converting models (sparkml, sklearn, xgboost to ONNX), serializing models to disk,
loading them back into redisai-py, and so on.
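
Example
-------

A minimal end-to-end sketch of the client API. It assumes a local Redis server with the
RedisAI module loaded, and a TensorFlow frozen graph (such as ``test/testdata/graph.pb``
in this repo) whose input nodes are named ``a`` and ``b`` and whose output node is named
``mul``; adjust the node names for your own model:

.. code-block:: python

    import numpy as np
    from redisai import Client

    con = Client(host='localhost', port=6379)

    # Store the two input tensors under the keys 'a' and 'b'
    con.tensorset('a', np.array([2.0, 3.0], dtype=np.float32))
    con.tensorset('b', np.array([4.0, 5.0], dtype=np.float32))

    # Load a serialized model from disk and store it under the key 'm'
    model = open('test/testdata/graph.pb', 'rb').read()
    con.modelstore('m', 'tf', 'cpu', model, inputs=['a', 'b'], outputs=['mul'])

    # Run the model on the stored tensors and fetch the output as a numpy array
    con.modelexecute('m', inputs=['a', 'b'], outputs=['out'])
    print(con.tensorget('out'))  # e.g. array([ 8., 15.], dtype=float32) if the graph multiplies its inputs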
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
coverage:
  status:
    project:
      default:
        target: 90
        threshold: 0%
    patch:
      default:
        target: 90
        threshold: 0%

--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
.. _ref-api:

API Documentation
=================
This page hosts the documentation of all user-facing APIs. The only entry point users
should be using is the ``Client`` class. It exposes all the RedisAI commands as class methods.


Client Class
------------
.. autoclass:: redisai.Client
   :members:

--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
project = "redisai-py"
copyright = "2020, Redis"
author = "Redis"
release = "1.0.2"
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.extlinks",
    "sphinx.ext.napoleon",
    "sphinx.ext.todo",
    "sphinx.ext.intersphinx",
    "sphinx_rtd_theme",
    'sphinx_search.extension',  # search tools
]
pygments_style = "sphinx"
autoapi_type = 'python'
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
html_theme = "sphinx_rtd_theme"
html_logo = 'images/logo.png'
html_use_smartypants = True
html_last_updated_fmt = "%b %d, %Y"
html_split_index = False
html_static_path = ['_static']
html_sidebars = {
    "**": ["searchbox.html", "globaltoc.html", "sourcelink.html"],
}
html_short_title = "%s-%s" % (project, release)

napoleon_use_ivar = True
napoleon_use_rtype = True
napoleon_use_param = True
napoleon_include_init_with_doc = True

add_module_names = False
doctest_test_doctest_blocks = None
autoclass_content = "class"

--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/docs/images/logo.png

--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
Welcome to redisai-py's documentation!
======================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:

   readme
   api


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
.. include:: ../README.rst

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[tool.poetry]
name = "redisai"
version = "1.3.0"
description = "RedisAI Python Client"
authors = ["Redis <oss@redis.com>"]
license = "BSD-3-Clause"
readme = "README.rst"

packages = [
    { include = 'redisai' },
]

classifiers = [
    'Topic :: Database',
    'Programming Language :: Python',
    'Intended Audience :: Developers',
    'Programming Language :: Python :: 3.8',
    'Programming Language :: Python :: 3.9',
    'Programming Language :: Python :: 3.10',
    'Programming Language :: Python :: 3.11',
    'License :: OSI Approved :: BSD License',
    'Development Status :: 5 - Production/Stable'
]

[tool.poetry.dependencies]
python = ">=3.8,<=4.0.0"
redis = "^4.1.4"
hiredis = ">=0.20"
numpy = ">=1.19.5"
six = ">=1.10.0"
Deprecated = "^1.2.12"
pytest = "^7.2.1"

[tool.poetry.dev-dependencies]
codecov = "^2.1.11"
flake8 = "<6.0.0"
ml2rt = "^0.2.0"
tox = ">=3.23.1,<=4.0.0"
tox-poetry = "^0.3.0"
Sphinx = "^4.1.2"
sphinx-rtd-theme = "^0.5.2"
readthedocs-sphinx-search = "^0.1.0"
sphinx-autoapi = "^1.8.3"
toml = "^0.10.2"
bandit = "^1.7.0"
pylint = "^2.8.2"
vulture = "^2.3"
scikit-image = "^0.19.3"

[tool.poetry.urls]
"Project URL" = "https://redisai.io"
Repository = "https://github.com/RedisAI/redisai-py"
Documentation = "https://redisai.readthedocs.io"
Homepage = "https://oss.redis.com/redisai/"
Tracker = "https://github.com/RedisAI/redisai-py/issues"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
"poetry.core.masonry.api" 60 | -------------------------------------------------------------------------------- /redisai/__init__.py: -------------------------------------------------------------------------------- 1 | from .client import Client # noqa 2 | import pkg_resources 3 | 4 | __version__ = pkg_resources.get_distribution('redisai').version 5 | -------------------------------------------------------------------------------- /redisai/client.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from functools import wraps 3 | from typing import AnyStr, ByteString, List, Sequence, Union 4 | 5 | import numpy as np 6 | from deprecated import deprecated 7 | from redis import StrictRedis 8 | 9 | from redisai import command_builder as builder 10 | from redisai.dag import Dag 11 | from redisai.pipeline import Pipeline 12 | from redisai.postprocessor import Processor 13 | 14 | processor = Processor() 15 | 16 | 17 | class Client(StrictRedis): 18 | """ 19 | Redis client build specifically for the RedisAI module. It takes all the necessary 20 | parameters to establish the connection and an optional ``debug`` parameter on 21 | initialization 22 | 23 | Parameters 24 | ---------- 25 | 26 | debug : bool 27 | If debug mode is ON, then each command that is sent to the server is 28 | printed to the terminal 29 | enable_postprocess : bool 30 | Flag to enable post processing. If enabled, all the bytestring-ed returns 31 | are converted to python strings recursively and key value pairs will be converted 32 | to dictionaries. Also note that, this flag doesn't work with pipeline() function 33 | since pipeline function could have native redis commands (along with RedisAI 34 | commands) 35 | 36 | Example 37 | ------- 38 | >>> from redisai import Client 39 | >>> con = Client(host='localhost', port=6379) 40 | """ 41 | 42 | REDISAI_COMMANDS_RESPONSE_CALLBACKS = {} 43 | 44 | def __init__(self, debug=False, enable_postprocess=True, *args, **kwargs): 45 | super().__init__(*args, **kwargs) 46 | if debug: 47 | self.execute_command = enable_debug(super().execute_command) 48 | self.enable_postprocess = enable_postprocess 49 | 50 | def pipeline(self, transaction: bool = True, shard_hint: bool = None) -> "Pipeline": 51 | """ 52 | It follows the same pipeline implementation of native redis client but enables it 53 | to access redisai operation as well. This function is experimental in the 54 | current release. 55 | 56 | Example 57 | ------- 58 | >>> pipe = con.pipeline(transaction=False) 59 | >>> pipe = pipe.set('nativeKey', 1) 60 | >>> pipe = pipe.tensorset('redisaiKey', np.array([1, 2])) 61 | >>> pipe.execute() 62 | [True, b'OK'] 63 | """ 64 | return Pipeline( 65 | self.enable_postprocess, 66 | self.connection_pool, 67 | self.response_callbacks, 68 | transaction=transaction, 69 | shard_hint=shard_hint, 70 | ) 71 | 72 | def dag( 73 | self, 74 | load: Sequence = None, 75 | persist: Sequence = None, 76 | routing: AnyStr = None, 77 | timeout: int = None, 78 | readonly: bool = False 79 | ) -> "Dag": 80 | """ 81 | It returns a DAG object on which other DAG-allowed operations can be called. For 82 | more details about DAG in RedisAI, refer to the RedisAI documentation. 
    """

    REDISAI_COMMANDS_RESPONSE_CALLBACKS = {}

    def __init__(self, debug=False, enable_postprocess=True, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if debug:
            self.execute_command = enable_debug(super().execute_command)
        self.enable_postprocess = enable_postprocess

    def pipeline(self, transaction: bool = True, shard_hint: bool = None) -> "Pipeline":
        """
        It follows the same pipeline implementation of the native redis client but
        enables it to access redisai operations as well. This function is experimental
        in the current release.

        Example
        -------
        >>> pipe = con.pipeline(transaction=False)
        >>> pipe = pipe.set('nativeKey', 1)
        >>> pipe = pipe.tensorset('redisaiKey', np.array([1, 2]))
        >>> pipe.execute()
        [True, b'OK']
        """
        return Pipeline(
            self.enable_postprocess,
            self.connection_pool,
            self.response_callbacks,
            transaction=transaction,
            shard_hint=shard_hint,
        )

    def dag(
        self,
        load: Sequence = None,
        persist: Sequence = None,
        routing: AnyStr = None,
        timeout: int = None,
        readonly: bool = False
    ) -> "Dag":
        """
        It returns a DAG object on which other DAG-allowed operations can be called. For
        more details about DAG in RedisAI, refer to the RedisAI documentation.

        Parameters
        ----------
        load : Union[AnyStr, List[AnyStr]]
            Load the list of given values from the keyspace to DAG scope
        persist : Union[AnyStr, List[AnyStr]]
            For each tensor key in the given list, write its values to the keyspace from
            DAG scope after the DAG execution is finished.
        routing : AnyStr
            Denotes a key to be used in the DAG or a tag that will assist in routing the dag
            execution command to the right shard. Redis will verify that all potential key
            accesses are done within the target shard.
        timeout : int
            The maximum number of milliseconds that may pass before the request is processed
            (meaning that the result will not be computed after that time and TIMEDOUT
            is returned in that case)
        readonly : bool
            If True, it triggers AI.DAGRUN_RO, the read-only DAG, which cannot write (PERSIST) to
            the keyspace. But since it can't write, it can execute on replicas


        Returns
        -------
        Any
            Dag object which holds other operations permitted inside DAG as attributes

        Example
        -------
        >>> con.tensorset('tensor', ...)
        'OK'
        >>> con.modelstore('model', ...)
        'OK'
        >>> dag = con.dag(load=['tensor'], persist=['output'])
        >>> dag.tensorset('another', ...)
        >>> dag.modelrun('model', inputs=['tensor', 'another'], outputs=['output'])
        >>> output = dag.tensorget('output').run()
        >>> # You can even chain the operations
        >>> result = dag.tensorset(**akwargs).modelrun(**bkwargs).tensorget(**ckwargs).run()
        """
        return Dag(load, persist, routing, timeout, self.execute_command, readonly)

    def loadbackend(self, identifier: AnyStr, path: AnyStr) -> str:
        """
        RedisAI by default won't load any backends. The user can either explicitly
        load the backend by using this function or let RedisAI load the required
        backend from the default path on-demand.

        Parameters
        ----------
        identifier : str
            Representing which backend. Allowed values - TF, TFLITE, TORCH & ONNX
        path: str
            Path to the shared object of the backend

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.loadbackend('TORCH', '/path/to/the/backend/redisai_torch.so')
        'OK'
        """
        args = builder.loadbackend(identifier, path)
        res = self.execute_command(args)
        return res if not self.enable_postprocess else processor.loadbackend(res)

    def config(self, name: str, value: Union[str, int, None] = None) -> str:
        """
        Get/Set configuration item. Currently available configurations are: BACKENDSPATH and MODEL_CHUNK_SIZE.
        For more details, see: https://oss.redis.com/redisai/master/commands/#aiconfig.
        If value is given, the configuration under name will be overridden.

        Parameters
        ----------
        name: str
            RedisAI config item to retrieve/override (BACKENDSPATH / MODEL_CHUNK_SIZE).
        value: Union[str, int]
            Value to set the config item with (if given).

        Returns
        -------
        The current configuration value if value is None,
        'OK' if value was given and the configuration override succeeded,
        raise an exception otherwise


        Example
        -------
        >>> con.config('MODEL_CHUNK_SIZE', 128 * 1024)
        'OK'
        >>> con.config('BACKENDSPATH', '/my/backends/path')
        'OK'
        >>> con.config('BACKENDSPATH')
        '/my/backends/path'
        >>> con.config('MODEL_CHUNK_SIZE')
        '131072'
        """
        args = builder.config(name, value)
        res = self.execute_command(args)
        return res if not self.enable_postprocess or not isinstance(res, bytes) else processor.config(res)

    def modelstore(
        self,
        key: AnyStr,
        backend: str,
        device: str,
        data: ByteString,
        batch: int = None,
        minbatch: int = None,
        minbatchtimeout: int = None,
        tag: AnyStr = None,
        inputs: Union[AnyStr, List[AnyStr]] = None,
        outputs: Union[AnyStr, List[AnyStr]] = None,
    ) -> str:
        """
        Set the model on the provided key.

        Parameters
        ----------
        key : AnyStr
            Key name
        backend : str
            Backend name. Allowed backends are TF, TORCH, TFLITE, ONNX
        device : str
            Device name. Allowed devices are CPU and GPU. If multiple GPUs are available,
            it can be specified using the format GPU:<n>. For example: GPU:0
        data : bytes
            Model graph read as bytes string
        batch : int
            Number of batches for doing auto-batching
        minbatch : int
            Minimum number of samples required in a batch for model execution
        minbatchtimeout : int
            The maximum number of milliseconds for which the engine will not trigger an execution
            if the number of samples is lower than minbatch (after minbatchtimeout is passed,
            the execution will start even if minbatch has not been reached)
        tag : AnyStr
            Any string that will be saved in RedisAI as tag for the model
        inputs : Union[AnyStr, List[AnyStr]]
            Input node(s) in the graph. Required only for TensorFlow graphs
        outputs : Union[AnyStr, List[AnyStr]]
            Output node(s) in the graph. Required only for TensorFlow graphs

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> # Torch model
        >>> model_path = os.path.join('path/to/TorchScriptModel.pt')
        >>> model = open(model_path, 'rb').read()
        >>> con.modelstore("model", 'torch', 'cpu', model, tag='v1.0')
        'OK'
        >>> # Tensorflow model
        >>> model_path = os.path.join('/path/to/tf_frozen_graph.pb')
        >>> model = open(model_path, 'rb').read()
        >>> con.modelstore('m', 'tf', 'cpu', model,
        ...                inputs=['a', 'b'], outputs=['mul'], tag='v1.0')
        'OK'
        """
        chunk_size = self.config('MODEL_CHUNK_SIZE')
        args = builder.modelstore(
            key,
            backend,
            device,
            data,
            batch,
            minbatch,
            minbatchtimeout,
            tag,
            inputs,
            outputs,
            chunk_size=chunk_size
        )
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelstore(res)

    @deprecated(version="1.2.0", reason="Use modelstore instead")
    def modelset(
        self,
        key: AnyStr,
        backend: str,
        device: str,
        data: ByteString,
        batch: int = None,
        minbatch: int = None,
        tag: AnyStr = None,
        inputs: Union[AnyStr, List[AnyStr]] = None,
        outputs: Union[AnyStr, List[AnyStr]] = None,
    ) -> str:
        """
        Set the model on the provided key.

        Parameters
        ----------
        key : AnyStr
            Key name
        backend : str
            Backend name. Allowed backends are TF, TORCH, TFLITE, ONNX
        device : str
            Device name. Allowed devices are CPU and GPU. If multiple GPUs are available,
            it can be specified using the format GPU:<n>. For example: GPU:0
        data : bytes
            Model graph read as bytes string
        batch : int
            Number of batches for doing auto-batching
        minbatch : int
            Minimum number of samples required in a batch for model execution
        tag : AnyStr
            Any string that will be saved in RedisAI as tag for the model
        inputs : Union[AnyStr, List[AnyStr]]
            Input node(s) in the graph. Required only for TensorFlow graphs
        outputs : Union[AnyStr, List[AnyStr]]
            Output node(s) in the graph. Required only for TensorFlow graphs

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> # Torch model
        >>> model_path = os.path.join('path/to/TorchScriptModel.pt')
        >>> model = open(model_path, 'rb').read()
        >>> con.modelset("model", 'torch', 'cpu', model, tag='v1.0')
        'OK'
        >>> # Tensorflow model
        >>> model_path = os.path.join('/path/to/tf_frozen_graph.pb')
        >>> model = open(model_path, 'rb').read()
        >>> con.modelset('m', 'tf', 'cpu', model,
        ...              inputs=['a', 'b'], outputs=['mul'], tag='v1.0')
        'OK'
        """
        args = builder.modelset(
            key, backend, device, data, batch, minbatch, tag, inputs, outputs
        )
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelset(res)

    def modelget(self, key: AnyStr, meta_only=False) -> dict:
        """
        Fetch the model details and the model blob back from RedisAI

        Parameters
        ----------
        key : AnyStr
            Model key in RedisAI
        meta_only : bool
            If True, only the meta data will be fetched, not the model blob

        Returns
        -------
        dict
            A dictionary of model details such as device, backend etc. The model
            blob will be available at the key 'blob'

        Example
        -------
        >>> con.modelget('model', meta_only=True)
        {'backend': 'TF', 'device': 'cpu', 'tag': 'v1.0'}
        """
        args = builder.modelget(key, meta_only)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelget(res)

    def modeldel(self, key: AnyStr) -> str:
        """
        Delete the model from the RedisAI server

        Parameters
        ----------
        key : AnyStr
            Key of the model to be deleted

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.modeldel('model')
        'OK'
        """
        args = builder.modeldel(key)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modeldel(res)

    def modelexecute(
        self,
        key: AnyStr,
        inputs: Union[AnyStr, List[AnyStr]],
        outputs: Union[AnyStr, List[AnyStr]],
        timeout: int = None,
    ) -> str:
        """
        Run the model using input(s) which are already in the scope and are associated
        with some keys. Modelexecute also needs the output key name(s) to store the output
        from the model. The number of outputs from the model and the number of keys
        provided here must be the same. Otherwise, RedisAI throws an error

        Parameters
        ----------
        key : str
            Model key to run
        inputs : Union[AnyStr, List[AnyStr]]
            Tensor(s) which is already saved in RedisAI using a tensorset call. These
            tensors will be used as the inputs for the modelexecute
        outputs : Union[AnyStr, List[AnyStr]]
            Keys on which the outputs are to be saved. If those keys exist already,
            modelexecute will overwrite them with new values
        timeout : int
            The maximum number of milliseconds that may pass before the request is processed
            (meaning that the result will not be computed after that time and TIMEDOUT
            is returned in that case)

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.modelstore('m', 'tf', 'cpu', model_pb,
        ...                inputs=['a', 'b'], outputs=['mul'], tag='v1.0')
        'OK'
        >>> con.tensorset('a', (2, 3), dtype='float')
        'OK'
        >>> con.tensorset('b', (2, 3), dtype='float')
        'OK'
        >>> con.modelexecute('m', ['a', 'b'], ['c'])
        'OK'
        """
        args = builder.modelexecute(key, inputs, outputs, timeout)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelexecute(res)

    @deprecated(version="1.2.0", reason="Use modelexecute instead")
    def modelrun(
        self,
        key: AnyStr,
        inputs: Union[AnyStr, List[AnyStr]],
        outputs: Union[AnyStr, List[AnyStr]],
    ) -> str:
        """
        Run the model using input(s) which are already in the scope and are associated
        with some keys. Modelrun also needs the output key name(s) to store the output
        from the model. The number of outputs from the model and the number of keys
        provided here must be the same. Otherwise, RedisAI throws an error

        Parameters
        ----------
        key : str
            Model key to run
        inputs : Union[AnyStr, List[AnyStr]]
            Tensor(s) which is already saved in RedisAI using a tensorset call. These
            tensors will be used as the input for the modelrun
        outputs : Union[AnyStr, List[AnyStr]]
            Keys on which the outputs are to be saved. If those keys exist already, modelrun
            will overwrite them with new values

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.modelstore('m', 'tf', 'cpu', model_pb,
        ...                inputs=['a', 'b'], outputs=['mul'], tag='v1.0')
        'OK'
        >>> con.tensorset('a', (2, 3), dtype='float')
        'OK'
        >>> con.tensorset('b', (2, 3), dtype='float')
        'OK'
        >>> con.modelrun('m', ['a', 'b'], ['c'])
        'OK'
        """
        args = builder.modelrun(key, inputs, outputs)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelrun(res)

    def modelscan(self) -> List[List[AnyStr]]:
        """
        Returns the list of all the models in the RedisAI server. The modelscan API is
        currently experimental and might be removed or changed in the future without
        warning

        Returns
        -------
        List[List[AnyStr]]
            List of list of models and tags for each model if they existed

        Example
        -------
        >>> con.modelscan()
        [['pt_model', ''], ['m', 'v1.2']]
        """
        warnings.warn(
            "Experimental: Model List API is experimental and might change "
            "in the future without any notice",
            UserWarning,
        )
        args = builder.modelscan()
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.modelscan(res)

    def tensorset(
        self,
        key: AnyStr,
        tensor: Union[np.ndarray, list, tuple],
        shape: Sequence[int] = None,
        dtype: str = None,
    ) -> str:
        """
        Set the tensor to a key in RedisAI

        Parameters
        ----------
        key : AnyStr
            The name of the tensor
        tensor : Union[np.ndarray, list, tuple]
            A `np.ndarray` object or Python list or tuple
        shape : Sequence[int]
            Shape of the tensor. Required if `tensor` is a list or tuple
        dtype : str
            Data type of the tensor. Required if `tensor` is a list or tuple

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.tensorset('a', (2, 3), dtype='float')
        'OK'
        >>> input_array = np.array([2, 3], dtype=np.float32)
        >>> con.tensorset('x', input_array)
        'OK'
        """
        args = builder.tensorset(key, tensor, shape, dtype)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.tensorset(res)

    def tensorget(
        self,
        key: AnyStr,
        as_numpy: bool = True,
        as_numpy_mutable: bool = False,
        meta_only: bool = False,
    ) -> Union[dict, np.ndarray]:
        """
        Retrieve the value of a tensor from the server. By default it returns the numpy
        array, but this can be controlled using the `as_numpy` and `meta_only` arguments.

        Parameters
        ----------
        key : AnyStr
            The name of the tensor
        as_numpy : bool
            If True, returns a numpy.ndarray. Returns the value as a list and the
            metadata in a dictionary if False. This flag also decides how to fetch
            the value from the RedisAI server, which also has performance implications
        as_numpy_mutable : bool
            If True, returns a mutable numpy.ndarray object by copying the tensor data.
            Otherwise (as long as as_numpy=True) the returned numpy.ndarray will use the
            original tensor buffer and will be read-only
        meta_only : bool
            If True, the value is not retrieved, only the shape and the type

        Returns
        -------
        Union[dict, np.ndarray]
            Returns a dictionary of data or a numpy array. Default is numpy array

        Example
        -------
        >>> con.tensorget('x')
        array([2, 3, 4])
        >>> con.tensorget('x', as_numpy=False)
        {'values': [2, 3, 4], 'dtype': 'INT64', 'shape': [3]}
        >>> con.tensorget('x', meta_only=True)
        {'dtype': 'INT64', 'shape': [3]}
        """
        args = builder.tensorget(key, as_numpy, meta_only)
        res = self.execute_command(*args)
        return (
            res
            if not self.enable_postprocess
            else processor.tensorget(res, as_numpy, as_numpy_mutable, meta_only)
        )

    def scriptstore(
        self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None
    ) -> str:
        """
        Set the script to RedisAI. The difference from scriptset is that in scriptstore
        you must specify entry points within your script. These functions must have a specific
        signature: 'def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])'.
        RedisAI uses the TorchScript engine to execute the script, so the script should
        have only TorchScript-supported constructs. That being said, it's important to
        mention that using a redisai script to do post-processing or pre-processing for a
        Tensorflow (or any other backend) model is completely valid. For more details about
        TorchScript and supported ops, check out the TorchScript documentation.

        Parameters
        ----------
        key : AnyStr
            Script key at the server
        device : str
            Device name. Allowed devices are CPU and GPU. If multiple GPUs are available,
            it can be specified using the format GPU:<n>. For example: GPU:0
        script : str
            Script itself, as a Python string
        entry_points : Union[str, Sequence[str]]
            A list of functions in the script that may serve as entry points for its
            execution. Each entry point must have the following signature:
            def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])
            Note that the script may contain additional helper functions that don't
            have to follow this signature.
        tag : AnyStr
            Any string that will be saved in RedisAI as tag for the script

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Note
        ----
        Even though ``script`` is pure Python code, it's a subset of the Python language and not
        all Python operations are supported. For more details, check out the TorchScript
        documentation. It's also important to note that the script is executed on a high
        performance C++ runtime instead of the Python interpreter.
        And hence ``script`` should
        not have any import statements (a common mistake people make all the time)

        Example
        -------
        >>> script = r'''
        >>> def bar(tensors: List[Tensor], keys: List[str], args: List[str]):
        >>>     a = tensors[0]
        >>>     b = tensors[1]
        >>>     return a + b
        >>> '''
        >>> con.scriptstore('ket', 'cpu', script, 'bar')
        'OK'
        """
        args = builder.scriptstore(key, device, script, entry_points, tag)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptstore(res)

    @deprecated(version="1.2.0", reason="Use scriptstore instead")
    def scriptset(
        self, key: AnyStr, device: str, script: str, tag: AnyStr = None
    ) -> str:
        """
        Set the script to RedisAI. Action similar to modelset. RedisAI uses the TorchScript
        engine to execute the script, so the script should have only TorchScript-supported
        constructs. That being said, it's important to mention that using a redisai script
        to do post-processing or pre-processing for a Tensorflow (or any other backend)
        model is completely valid. For more details about TorchScript and supported ops,
        check out the TorchScript documentation.

        Parameters
        ----------
        key : AnyStr
            Script key at the server
        device : str
            Device name. Allowed devices are CPU and GPU. If multiple GPUs are available,
            it can be specified using the format GPU:<n>. For example: GPU:0
        script : str
            Script itself, as a Python string
        tag : AnyStr
            Any string that will be saved in RedisAI as tag for the script

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Note
        ----
        Even though ``script`` is pure Python code, it's a subset of the Python language and not
        all Python operations are supported. For more details, check out the TorchScript
        documentation. It's also important to note that the script is executed on a high
        performance C++ runtime instead of the Python interpreter. And hence ``script`` should
        not have any import statements (a common mistake people make all the time)

        Example
        -------
        >>> script = open(scriptpath).read()
        >>> con.scriptset('ket', 'cpu', script)
        'OK'
        """
        args = builder.scriptset(key, device, script, tag)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptset(res)

    def scriptget(self, key: AnyStr, meta_only=False) -> dict:
        """
        Get the saved script from RedisAI.
        Operation similar to modelget

        Parameters
        ----------
        key : AnyStr
            Key of the script
        meta_only : bool
            If True, only the meta data will be fetched, not the script itself

        Returns
        -------
        dict
            Dictionary of script details which includes the script at the key ``source``

        Example
        -------
        >>> con.scriptget('ket', meta_only=True)
        {'device': 'cpu'}
        """
        args = builder.scriptget(key, meta_only)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptget(res)

    def scriptdel(self, key: AnyStr) -> str:
        """
        Delete the script from the RedisAI server

        Parameters
        ----------
        key : AnyStr
            Script key to be deleted

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.scriptdel('ket')
        'OK'
        """
        args = builder.scriptdel(key)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptdel(res)

    @deprecated(version="1.2.0", reason="Use scriptexecute instead")
    def scriptrun(
        self,
        key: AnyStr,
        function: str,
        inputs: Union[AnyStr, Sequence[AnyStr]],
        outputs: Union[AnyStr, Sequence[AnyStr]],
    ) -> str:
        """
        Run an already set script. Similar to modelrun

        Parameters
        ----------
        key : AnyStr
            Script key
        function : str
            Name of the function in the ``script``
        inputs : Union[AnyStr, List[AnyStr]]
            Tensor(s) which is already saved in RedisAI using a tensorset call. These
            tensors will be used as the input for the scriptrun
        outputs : Union[AnyStr, List[AnyStr]]
            Keys on which the outputs are to be saved. If those keys exist already, scriptrun
            will overwrite them with new values

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.scriptrun('ket', 'bar', inputs=['a', 'b'], outputs=['c'])
        'OK'
        """
        args = builder.scriptrun(key, function, inputs, outputs)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptrun(res)

    def scriptexecute(
        self,
        key: AnyStr,
        function: str,
        keys: Union[AnyStr, Sequence[AnyStr]] = None,
        inputs: Union[AnyStr, Sequence[AnyStr]] = None,
        args: Union[AnyStr, Sequence[AnyStr]] = None,
        outputs: Union[AnyStr, Sequence[AnyStr]] = None,
        timeout: int = None,
    ) -> str:
        """
        Run an already set script. Similar to modelexecute.
        Must specify keys or inputs.

        Parameters
        ----------
        key : AnyStr
            Script key
        function : str
            Name of the function in the ``script``
        keys : Union[AnyStr, Sequence[AnyStr]]
            Denotes the list of Redis key names that the script will access
            during its execution, for both read and/or write operations.
        inputs : Union[AnyStr, Sequence[AnyStr]]
            Denotes the input tensors list.
        args : Union[AnyStr, Sequence[AnyStr]]
            Denotes the list of additional arguments that a user can send to the
            script. All args are sent as strings, but can be cast to other types
            supported by TorchScript, such as int or float.
        outputs : Union[AnyStr, List[AnyStr]]
            Denotes the output tensors keys' list. If those keys exist already,
            scriptexecute will overwrite them with new values.
        timeout : int
            The maximum number of milliseconds that may pass before the request is processed
            (meaning that the result will not be computed after that time and TIMEDOUT
            is returned in that case).

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.scriptexecute('myscript', 'bar', inputs=['a', 'b'], outputs=['c'])
        'OK'
        >>> con.scriptexecute('myscript{tag}', 'addn',
        ...                   inputs=['mytensor1{tag}', 'mytensor2{tag}', 'mytensor3{tag}'],
        ...                   args=['5.0'],
        ...                   outputs=['result{tag}'])
        'OK'
        """
        args = builder.scriptexecute(key, function, keys, inputs, args, outputs, timeout)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptexecute(res)

    def scriptscan(self) -> List[List[AnyStr]]:
        """
        Returns the list of all the scripts in the RedisAI server. The scriptscan API is
        currently experimental and might be removed or changed in the future without warning

        Returns
        -------
        List[List[AnyStr]]
            List of list of scripts and tags for each script if they existed

        Example
        -------
        >>> con.scriptscan()
        [['ket1', 'v1.0'], ['ket2', '']]
        """
        warnings.warn(
            "Experimental: Script List API is experimental and might change "
            "in the future without any notice",
            UserWarning,
        )
        args = builder.scriptscan()
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.scriptscan(res)

    def infoget(self, key: AnyStr) -> dict:
        """
        Get information such as
        - How long since the model has been running
        - How many samples have been processed
        - How many calls handled
        - How many errors raised
        - etc.

        Parameters
        ----------
        key : AnyStr
            Model key

        Returns
        -------
        dict
            Dictionary of model run details

        Example
        -------
        >>> con.infoget('m')
        {'key': 'm', 'type': 'MODEL', 'backend': 'TF', 'device': 'cpu', 'tag': '',
        'duration': 0, 'samples': 0, 'calls': 0, 'errors': 0}
        """
        args = builder.infoget(key)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.infoget(res)

    def inforeset(self, key: AnyStr) -> str:
        """
        Reset the run information about the model

        Parameters
        ----------
        key : AnyStr
            Model key

        Returns
        -------
        str
            'OK' if success, raise an exception otherwise

        Example
        -------
        >>> con.inforeset('m')
        'OK'
        """
        args = builder.inforeset(key)
        res = self.execute_command(*args)
        return res if not self.enable_postprocess else processor.inforeset(res)


def enable_debug(f):
    @wraps(f)
    def wrapper(*args):
        print(*args)
        return f(*args)

    return wrapper

--------------------------------------------------------------------------------
/redisai/command_builder.py:
--------------------------------------------------------------------------------
from typing import AnyStr, ByteString, List, Sequence, Union

import numpy as np

from . import utils

# TODO: mypy check


def loadbackend(identifier: AnyStr, path: AnyStr) -> Sequence:
    return f'AI.CONFIG LOADBACKEND {identifier} {path}'


def config(name: str, value: Union[str, int, None] = None) -> Sequence:
    if value is not None:
        return f'AI.CONFIG {name} {value}'
    return f'AI.CONFIG GET {name}'


def modelstore(
    name: AnyStr,
    backend: str,
    device: str,
    data: ByteString,
    batch: int,
    minbatch: int,
    minbatchtimeout: int,
    tag: AnyStr,
    inputs: Union[AnyStr, List[AnyStr]],
    outputs: Union[AnyStr, List[AnyStr]],
    chunk_size: int = 500 * 1024 * 1024
) -> Sequence:
    if name is None:
        raise ValueError("Model name was not given")

    # device format should be: "CPU | GPU [:<n>]"
    device_type = device.split(":")[0]
    if device_type.upper() not in utils.allowed_devices:
        raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
    if backend.upper() not in utils.allowed_backends:
        raise ValueError(f"Backend not allowed. Use any from {utils.allowed_backends}")
    args = ["AI.MODELSTORE", name, backend, device]

    if tag is not None:
        args += ["TAG", tag]
    if batch is not None:
        args += ["BATCHSIZE", batch]
    if minbatch is not None:
        if batch is None:
            raise ValueError("Minbatch is not allowed without batch")
        args += ["MINBATCHSIZE", minbatch]
    if minbatchtimeout is not None:
        if minbatch is None:
            raise ValueError("Minbatchtimeout is not allowed without minbatch")
        args += ["MINBATCHTIMEOUT", minbatchtimeout]

    if backend.upper() == "TF":
        if not all((inputs, outputs)):
            raise ValueError(
                "Require keyword arguments inputs and outputs for TF models"
            )
        args += [
            "INPUTS",
            len(inputs) if isinstance(inputs, List) else 1,
            *utils.listify(inputs),
        ]
        args += [
            "OUTPUTS",
            len(outputs) if isinstance(outputs, List) else 1,
            *utils.listify(outputs),
        ]
    elif inputs is not None or outputs is not None:
        raise ValueError(
            "Inputs and outputs keywords should not be specified for this backend"
        )
    data_chunks = [data[i: i + chunk_size] for i in range(0, len(data), chunk_size)]
    args += ["BLOB", *data_chunks]
    return args


def modelset(
    name: AnyStr,
    backend: str,
    device: str,
    data: ByteString,
    batch: int,
    minbatch: int,
    tag: AnyStr,
    inputs: Union[AnyStr, List[AnyStr]],
    outputs: Union[AnyStr, List[AnyStr]],
) -> Sequence:
    if device.upper() not in utils.allowed_devices:
        raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
    if backend.upper() not in utils.allowed_backends:
        raise ValueError(f"Backend not allowed. Use any from {utils.allowed_backends}")
    args = ["AI.MODELSET", name, backend, device]

    if tag is not None:
        args += ["TAG", tag]
    if batch is not None:
        args += ["BATCHSIZE", batch]
    if minbatch is not None:
        if batch is None:
            raise ValueError("Minbatch is not allowed without batch")
        args += ["MINBATCHSIZE", minbatch]

    if backend.upper() == "TF":
        if not (all((inputs, outputs))):
            raise ValueError("Require keyword arguments input and output for TF models")
        args += ["INPUTS", *utils.listify(inputs)]
        args += ["OUTPUTS", *utils.listify(outputs)]
    chunk_size = 500 * 1024 * 1024
    data_chunks = [data[i: i + chunk_size] for i in range(0, len(data), chunk_size)]
    # TODO: need a test case for this
    args += ["BLOB", *data_chunks]
    return args


def modelget(name: AnyStr, meta_only=False) -> Sequence:
    args = ["AI.MODELGET", name, "META"]
    if not meta_only:
        args.append("BLOB")
    return args


def modeldel(name: AnyStr) -> Sequence:
    return "AI.MODELDEL", name


def modelexecute(
    name: AnyStr,
    inputs: Union[AnyStr, List[AnyStr]],
    outputs: Union[AnyStr, List[AnyStr]],
    timeout: int,
) -> Sequence:
    if name is None or inputs is None or outputs is None:
        raise ValueError("Missing required arguments for model execute command")
    args = [
        "AI.MODELEXECUTE",
        name,
        "INPUTS",
        len(utils.listify(inputs)),
        *utils.listify(inputs),
        "OUTPUTS",
        len(utils.listify(outputs)),
        *utils.listify(outputs),
    ]
    if timeout is not None:
        args += ["TIMEOUT", timeout]
    return args


def modelrun(
154 |     name: AnyStr,
155 |     inputs: Union[AnyStr, List[AnyStr]],
156 |     outputs: Union[AnyStr, List[AnyStr]],
157 | ) -> Sequence:
158 |     args = (
159 |         "AI.MODELRUN",
160 |         name,
161 |         "INPUTS",
162 |         *utils.listify(inputs),
163 |         "OUTPUTS",
164 |         *utils.listify(outputs),
165 |     )
166 |     return args
167 | 
168 | 
169 | def modelscan() -> Sequence:
170 |     return ("AI._MODELSCAN",)
171 | 
172 | 
173 | def tensorset(
174 |     key: AnyStr,
175 |     tensor: Union[np.ndarray, list, tuple],
176 |     shape: Sequence[int] = None,
177 |     dtype: str = None,
178 | ) -> Sequence:
179 |     if np and isinstance(tensor, np.ndarray):
180 |         dtype, shape, blob = utils.numpy2blob(tensor)
181 |         args = ["AI.TENSORSET", key, dtype, *shape, "BLOB", blob]
182 |     elif isinstance(tensor, (list, tuple)):
183 |         try:
184 |             # NumPy 'str' dtypes have many different names, depending on the maximal string length in the tensor,
185 |             # but they all share the 'num' attribute. This is a way to check whether a dtype is a kind of string.
186 |             if np.dtype(dtype).num == np.dtype("str").num:
187 |                 dtype = utils.dtype_dict["str"]
188 |             else:
189 |                 dtype = utils.dtype_dict[dtype.lower()]
190 |         except KeyError:
191 |             raise TypeError(
192 |                 f"``{dtype}`` is not supported by RedisAI. Currently "
193 |                 f"supported types are {list(utils.dtype_dict.keys())}"
194 |             )
195 |         except AttributeError:
196 |             raise TypeError(
197 |                 "tensorset() missing argument 'dtype' or value of 'dtype' is None"
198 |             )
199 |         if shape is None:
200 |             shape = (len(tensor),)
201 |         args = ["AI.TENSORSET", key, dtype, *shape, "VALUES", *tensor]
202 |     else:
203 |         raise TypeError(
204 |             f"``tensor`` argument must be a numpy array, a list or a "
205 |             f"tuple, but got {type(tensor)}"
206 |         )
207 |     return args
208 | 
209 | 
210 | def tensorget(key: AnyStr, as_numpy: bool = True, meta_only: bool = False) -> Sequence:
211 |     args = ["AI.TENSORGET", key, "META"]
212 |     if not meta_only:
213 |         if as_numpy is True:
214 |             args.append("BLOB")
215 |         else:
216 |             args.append("VALUES")
217 |     return args
218 | 
219 | 
220 | def scriptstore(
221 |     name: AnyStr,
222 |     device: str,
223 |     script: str,
224 |     entry_points: Union[str, Sequence[str]],
225 |     tag: AnyStr = None
226 | ) -> Sequence:
227 |     if device.upper() not in utils.allowed_devices:
228 |         raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}")
229 |     if name is None or script is None or entry_points is None:
230 |         raise ValueError("Missing required arguments for script store command")
231 |     args = ["AI.SCRIPTSTORE", name, device]
232 |     if tag:
233 |         args += ["TAG", tag]
234 |     args += ["ENTRY_POINTS", len(utils.listify(entry_points)), *utils.listify(entry_points)]
235 |     args.append("SOURCE")
236 |     args.append(script)
237 |     return args
238 | 
239 | 
240 | def scriptset(name: AnyStr, device: str, script: str, tag: AnyStr = None) -> Sequence:
241 |     if device.upper() not in utils.allowed_devices:
242 |         raise ValueError(f"Device not allowed. 
Use any from {utils.allowed_devices}") 243 | args = ["AI.SCRIPTSET", name, device] 244 | if tag: 245 | args += ["TAG", tag] 246 | args.append("SOURCE") 247 | args.append(script) 248 | return args 249 | 250 | 251 | def scriptget(name: AnyStr, meta_only=False) -> Sequence: 252 | args = ["AI.SCRIPTGET", name, "META"] 253 | if not meta_only: 254 | args.append("SOURCE") 255 | return args 256 | 257 | 258 | def scriptdel(name: AnyStr) -> Sequence: 259 | return "AI.SCRIPTDEL", name 260 | 261 | 262 | def scriptrun( 263 | name: AnyStr, 264 | function: str, 265 | inputs: Union[AnyStr, Sequence[AnyStr]], 266 | outputs: Union[AnyStr, Sequence[AnyStr]], 267 | ) -> Sequence: 268 | if name is None or function is None: 269 | raise ValueError("Missing required arguments for script run command") 270 | args = ( 271 | "AI.SCRIPTRUN", 272 | name, 273 | function, 274 | "INPUTS", 275 | *utils.listify(inputs), 276 | "OUTPUTS", 277 | *utils.listify(outputs), 278 | ) 279 | return args 280 | 281 | 282 | def scriptexecute( 283 | name: AnyStr, 284 | function: str, 285 | keys: Union[AnyStr, Sequence[AnyStr]], 286 | inputs: Union[AnyStr, Sequence[AnyStr]], 287 | input_args: Union[AnyStr, Sequence[AnyStr]], 288 | outputs: Union[AnyStr, Sequence[AnyStr]], 289 | timeout: int, 290 | ) -> Sequence: 291 | if name is None or function is None or (keys is None and inputs is None): 292 | raise ValueError("Missing required arguments for script execute command") 293 | args = ["AI.SCRIPTEXECUTE", name, function] 294 | 295 | if keys is not None: 296 | args += ["KEYS", len(utils.listify(keys)), *utils.listify(keys)] 297 | if inputs is not None: 298 | args += ["INPUTS", len(utils.listify(inputs)), *utils.listify(inputs)] 299 | if input_args is not None: 300 | args += ["ARGS", len(utils.listify(input_args)), *utils.listify(input_args)] 301 | if outputs is not None: 302 | args += ["OUTPUTS", len(utils.listify(outputs)), *utils.listify(outputs)] 303 | if timeout is not None: 304 | args += ["TIMEOUT", timeout] 305 | 306 | return args 307 | 308 | 309 | def scriptscan() -> Sequence: 310 | return ("AI._SCRIPTSCAN",) 311 | 312 | 313 | def infoget(key: AnyStr) -> Sequence: 314 | return "AI.INFO", key 315 | 316 | 317 | def inforeset(key: AnyStr) -> Sequence: 318 | return "AI.INFO", key, "RESETSTAT" 319 | -------------------------------------------------------------------------------- /redisai/dag.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import Any, AnyStr, List, Sequence, Union 3 | 4 | import numpy as np 5 | 6 | from redisai import command_builder as builder 7 | from redisai.postprocessor import Processor 8 | from deprecated import deprecated 9 | import warnings 10 | 11 | processor = Processor() 12 | 13 | 14 | class Dag: 15 | def __init__(self, load, persist, routing, timeout, executor, readonly=False): 16 | self.result_processors = [] 17 | self.enable_postprocess = True 18 | self.deprecatedDagrunMode = load is None and persist is None and routing is None 19 | self.readonly = readonly 20 | self.executor = executor 21 | 22 | if readonly and persist: 23 | raise RuntimeError( 24 | "READONLY requests cannot write (duh!) 
and should not " 25 | "have PERSISTing values" 26 | ) 27 | 28 | if self.deprecatedDagrunMode: 29 | # Throw warning about using deprecated dagrun 30 | warnings.warn("Creating Dag without any of LOAD, PERSIST and ROUTING arguments" 31 | "is allowed only in deprecated AI.DAGRUN or AI.DAGRUN_RO commands", DeprecationWarning) 32 | # Use dagrun 33 | if readonly: 34 | self.commands = ["AI.DAGRUN_RO"] 35 | else: 36 | self.commands = ["AI.DAGRUN"] 37 | else: 38 | # Use dagexecute 39 | if readonly: 40 | self.commands = ["AI.DAGEXECUTE_RO"] 41 | else: 42 | self.commands = ["AI.DAGEXECUTE"] 43 | if load is not None: 44 | if not isinstance(load, (list, tuple)): 45 | self.commands += ["LOAD", 1, load] 46 | else: 47 | self.commands += ["LOAD", len(load), *load] 48 | if persist is not None: 49 | if not isinstance(persist, (list, tuple)): 50 | self.commands += ["PERSIST", 1, persist] 51 | else: 52 | self.commands += ["PERSIST", len(persist), *persist] 53 | if routing is not None: 54 | self.commands += ["ROUTING", routing] 55 | if timeout is not None: 56 | self.commands += ["TIMEOUT", timeout] 57 | 58 | self.commands.append("|>") 59 | 60 | def tensorset( 61 | self, 62 | key: AnyStr, 63 | tensor: Union[np.ndarray, list, tuple], 64 | shape: Sequence[int] = None, 65 | dtype: str = None, 66 | ) -> Any: 67 | args = builder.tensorset(key, tensor, shape, dtype) 68 | self.commands.extend(args) 69 | self.commands.append("|>") 70 | self.result_processors.append(bytes.decode) 71 | return self 72 | 73 | def tensorget( 74 | self, 75 | key: AnyStr, 76 | as_numpy: bool = True, 77 | as_numpy_mutable: bool = False, 78 | meta_only: bool = False, 79 | ) -> Any: 80 | args = builder.tensorget(key, as_numpy, as_numpy_mutable) 81 | self.commands.extend(args) 82 | self.commands.append("|>") 83 | self.result_processors.append( 84 | partial( 85 | processor.tensorget, 86 | as_numpy=as_numpy, 87 | as_numpy_mutable=as_numpy_mutable, 88 | meta_only=meta_only, 89 | ) 90 | ) 91 | return self 92 | 93 | @deprecated(version="1.2.0", reason="Use modelexecute instead") 94 | def modelrun( 95 | self, 96 | key: AnyStr, 97 | inputs: Union[AnyStr, List[AnyStr]], 98 | outputs: Union[AnyStr, List[AnyStr]], 99 | ) -> Any: 100 | if self.deprecatedDagrunMode: 101 | args = builder.modelrun(key, inputs, outputs) 102 | self.commands.extend(args) 103 | self.commands.append("|>") 104 | self.result_processors.append(bytes.decode) 105 | return self 106 | else: 107 | return self.modelexecute(key, inputs, outputs) 108 | 109 | def modelexecute( 110 | self, 111 | key: AnyStr, 112 | inputs: Union[AnyStr, List[AnyStr]], 113 | outputs: Union[AnyStr, List[AnyStr]], 114 | ) -> Any: 115 | if self.deprecatedDagrunMode: 116 | raise RuntimeError( 117 | "You are using deprecated version of DAG, that does not supports MODELEXECUTE." 
118 | "The new version requires giving at least one of LOAD, PERSIST and ROUTING" 119 | "arguments when constructing the Dag" 120 | ) 121 | args = builder.modelexecute(key, inputs, outputs, None) 122 | self.commands.extend(args) 123 | self.commands.append("|>") 124 | self.result_processors.append(bytes.decode) 125 | return self 126 | 127 | def scriptexecute( 128 | self, 129 | key: AnyStr, 130 | function: str, 131 | keys: Union[AnyStr, Sequence[AnyStr]] = None, 132 | inputs: Union[AnyStr, Sequence[AnyStr]] = None, 133 | args: Union[AnyStr, Sequence[AnyStr]] = None, 134 | outputs: Union[AnyStr, List[AnyStr]] = None, 135 | ) -> Any: 136 | if self.readonly: 137 | raise RuntimeError( 138 | "AI.SCRIPTEXECUTE cannot be used in readonly mode" 139 | ) 140 | if self.deprecatedDagrunMode: 141 | raise RuntimeError( 142 | "You are using deprecated version of DAG, that does not supports SCRIPTEXECUTE." 143 | "The new version requires giving at least one of LOAD, PERSIST and ROUTING" 144 | "arguments when constructing the Dag" 145 | ) 146 | args = builder.scriptexecute(key, function, keys, inputs, args, outputs, None) 147 | self.commands.extend(args) 148 | self.commands.append("|>") 149 | self.result_processors.append(bytes.decode) 150 | return self 151 | 152 | @deprecated(version="1.2.0", reason="Use execute instead") 153 | def run(self): 154 | return self.execute() 155 | 156 | def execute(self): 157 | commands = self.commands[:-1] # removing the last "|>" 158 | results = self.executor(*commands) 159 | if self.enable_postprocess: 160 | out = [] 161 | for res, fn in zip(results, self.result_processors): 162 | out.append(fn(res)) 163 | else: 164 | out = results 165 | return out 166 | -------------------------------------------------------------------------------- /redisai/pipeline.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from typing import AnyStr, Sequence, Union 3 | 4 | import numpy as np 5 | import redis 6 | 7 | from redisai import command_builder as builder 8 | from redisai.postprocessor import Processor 9 | 10 | processor = Processor() 11 | 12 | 13 | class Pipeline(redis.client.Pipeline): 14 | def __init__(self, enable_postprocess, *args, **kwargs): 15 | self.enable_postprocess = enable_postprocess 16 | self.tensorget_processors = [] 17 | self.tensorset_processors = [] 18 | super().__init__(*args, **kwargs) 19 | 20 | def tensorget(self, key, as_numpy=True, as_numpy_mutable=False, meta_only=False): 21 | self.tensorget_processors.append( 22 | partial( 23 | processor.tensorget, 24 | as_numpy=as_numpy, 25 | as_numpy_mutable=as_numpy_mutable, 26 | meta_only=meta_only, 27 | ) 28 | ) 29 | args = builder.tensorget(key, as_numpy, meta_only) 30 | return self.execute_command(*args) 31 | 32 | def tensorset( 33 | self, 34 | key: AnyStr, 35 | tensor: Union[np.ndarray, list, tuple], 36 | shape: Sequence[int] = None, 37 | dtype: str = None, 38 | ) -> str: 39 | args = builder.tensorset(key, tensor, shape, dtype) 40 | return self.execute_command(*args) 41 | 42 | def _execute_transaction(self, *args, **kwargs): 43 | res = super()._execute_transaction(*args, **kwargs) 44 | for i in range(len(res)): 45 | # tensorget will have minimum 4 values if meta_only = True 46 | if isinstance(res[i], list) and len(res[i]) >= 4: 47 | res[i] = self.tensorget_processors.pop(0)(res[i]) 48 | return res 49 | 50 | def _execute_pipeline(self, *args, **kwargs): 51 | res = super()._execute_pipeline(*args, **kwargs) 52 | for i in range(len(res)): 53 | # tensorget 
will have minimum 4 values if meta_only = True 54 | if isinstance(res[i], list) and len(res[i]) >= 4: 55 | res[i] = self.tensorget_processors.pop(0)(res[i]) 56 | return res 57 | -------------------------------------------------------------------------------- /redisai/postprocessor.py: -------------------------------------------------------------------------------- 1 | from . import utils 2 | 3 | 4 | def decoder(val): 5 | return val.decode() 6 | 7 | 8 | class Processor: 9 | @staticmethod 10 | def modelget(res): 11 | resdict = utils.list2dict(res) 12 | utils.recursive_bytetransform(resdict["inputs"], lambda x: x.decode()) 13 | utils.recursive_bytetransform(resdict["outputs"], lambda x: x.decode()) 14 | return resdict 15 | 16 | @staticmethod 17 | def modelscan(res): 18 | return utils.recursive_bytetransform(res, lambda x: x.decode()) 19 | 20 | @staticmethod 21 | def tensorget(res, as_numpy, as_numpy_mutable, meta_only): 22 | """Process the tensorget output. 23 | 24 | If ``as_numpy`` is True, it'll be converted to a numpy array. The required 25 | information such as datatype and shape must be in ``rai_result`` itself. 26 | """ 27 | rai_result = utils.list2dict(res) 28 | if meta_only is True: 29 | return rai_result 30 | if as_numpy_mutable is True: 31 | return utils.blob2numpy( 32 | rai_result["blob"], 33 | rai_result["shape"], 34 | rai_result["dtype"], 35 | mutable=True, 36 | ) 37 | if as_numpy is True: 38 | return utils.blob2numpy( 39 | rai_result["blob"], 40 | rai_result["shape"], 41 | rai_result["dtype"], 42 | mutable=False, 43 | ) 44 | 45 | if rai_result["dtype"] == "STRING": 46 | def target(b): 47 | return b.decode() 48 | else: 49 | target = float if rai_result["dtype"] in ("FLOAT", "DOUBLE") else int 50 | utils.recursive_bytetransform(rai_result["values"], target) 51 | return rai_result 52 | 53 | @staticmethod 54 | def scriptget(res): 55 | return utils.list2dict(res) 56 | 57 | @staticmethod 58 | def scriptscan(res): 59 | return utils.recursive_bytetransform(res, lambda x: x.decode()) 60 | 61 | @staticmethod 62 | def infoget(res): 63 | return utils.list2dict(res) 64 | 65 | 66 | # These functions are only doing decoding on the output from redis 67 | decoder = staticmethod(decoder) 68 | decoding_functions = ( 69 | "config", 70 | "inforeset", 71 | "loadbackend", 72 | "modeldel", 73 | "modelexecute", 74 | "modelrun", 75 | "modelset", 76 | "modelstore", 77 | "scriptdel", 78 | "scriptexecute", 79 | "scriptrun", 80 | "scriptset", 81 | "scriptstore", 82 | "tensorset", 83 | ) 84 | for fn in decoding_functions: 85 | setattr(Processor, fn, decoder) 86 | -------------------------------------------------------------------------------- /redisai/utils.py: -------------------------------------------------------------------------------- 1 | from typing import AnyStr, ByteString, Callable, List, Sequence, Union 2 | import numpy as np 3 | 4 | dtype_dict = { 5 | "float": "FLOAT", 6 | "double": "DOUBLE", 7 | "float32": "FLOAT", 8 | "float64": "DOUBLE", 9 | "int8": "INT8", 10 | "int16": "INT16", 11 | "int32": "INT32", 12 | "int64": "INT64", 13 | "uint8": "UINT8", 14 | "uint16": "UINT16", 15 | "uint32": "UINT32", 16 | "uint64": "UINT64", 17 | "bool": "BOOL", 18 | "str": "STRING", 19 | } 20 | 21 | allowed_devices = {"CPU", "GPU"} 22 | allowed_backends = {"TF", "TFLITE", "TORCH", "ONNX"} 23 | 24 | 25 | def numpy2blob(tensor: np.ndarray) -> tuple: 26 | """Convert the numpy input from user to `Tensor`.""" 27 | try: 28 | if tensor.dtype.num == np.dtype("str").num: 29 | dtype = dtype_dict["str"] 30 | blob = 
"".join([string + "\0" for string in tensor.flat]) 31 | else: 32 | dtype = dtype_dict[str(tensor.dtype)] 33 | blob = tensor.tobytes() 34 | except KeyError: 35 | raise TypeError(f"RedisAI doesn't support tensors of type {tensor.dtype}") 36 | shape = tensor.shape 37 | return dtype, shape, blob 38 | 39 | 40 | def blob2numpy( 41 | value: ByteString, shape: Union[list, tuple], dtype: str, mutable: bool 42 | ) -> np.ndarray: 43 | """Convert `BLOB` result from RedisAI to `np.ndarray`.""" 44 | mm = {"FLOAT": "float32", "DOUBLE": "float64"} 45 | dtype = mm.get(dtype, dtype.lower()) 46 | if dtype == 'string': 47 | a = np.array(value.decode().split('\0')[:-1], dtype='str') 48 | elif mutable: 49 | a = np.fromstring(value, dtype=dtype) 50 | else: 51 | a = np.frombuffer(value, dtype=dtype) 52 | return a.reshape(shape) 53 | 54 | 55 | def list2dict(lst): 56 | """Convert the list from RedisAI to a dict.""" 57 | if len(lst) % 2 != 0: 58 | raise RuntimeError("Can't unpack the list: {}".format(lst)) 59 | out = {} 60 | for i in range(0, len(lst), 2): 61 | key = lst[i].decode().lower() 62 | val = lst[i + 1] 63 | if key != "blob" and isinstance(val, bytes): 64 | val = val.decode() 65 | out[key] = val 66 | return out 67 | 68 | 69 | def recursive_bytetransform(arr: List[AnyStr], target: Callable) -> list: 70 | """ 71 | Recurse value, replacing each element of b'' with the appropriate element. 72 | 73 | Function returns the same array after inplace operation which updates `arr` 74 | """ 75 | for ix in range(len(arr)): 76 | obj = arr[ix] 77 | if isinstance(obj, list): 78 | recursive_bytetransform(obj, target) 79 | else: 80 | arr[ix] = target(obj) 81 | return arr 82 | 83 | 84 | def listify(inp: Union[str, Sequence[str]]) -> Sequence[str]: 85 | """Wrap the ``inp`` with a list if it's not a list already.""" 86 | return (inp,) if not isinstance(inp, (list, tuple)) else inp 87 | -------------------------------------------------------------------------------- /test/test.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import sys 3 | import warnings 4 | 5 | from io import StringIO 6 | from unittest import TestCase 7 | from skimage.io import imread 8 | from skimage.transform import resize 9 | 10 | import numpy as np 11 | from ml2rt import load_model 12 | from redis.exceptions import ResponseError 13 | 14 | from redisai import Client 15 | 16 | 17 | DEBUG = False 18 | tf_graph = "graph.pb" 19 | torch_graph = "pt-minimal.pt" 20 | dog_img = "dog.jpg" 21 | 22 | 23 | class Capturing(list): 24 | def __enter__(self): 25 | self._stdout = sys.stdout 26 | sys.stdout = self._stringio = StringIO() 27 | return self 28 | 29 | def __exit__(self, *args): 30 | self.extend(self._stringio.getvalue().splitlines()) 31 | del self._stringio # free up some memory 32 | sys.stdout = self._stdout 33 | 34 | 35 | MODEL_DIR = os.path.dirname(os.path.abspath(__file__)) + "/testdata" 36 | TENSOR_DIR = MODEL_DIR 37 | script_old = r""" 38 | def bar(a, b): 39 | return a + b 40 | 41 | def bar_variadic(a, args : List[Tensor]): 42 | return args[0] + args[1] 43 | """ 44 | 45 | script = r""" 46 | def bar(tensors: List[Tensor], keys: List[str], args: List[str]): 47 | a = tensors[0] 48 | b = tensors[1] 49 | return a + b 50 | 51 | def bar_variadic(tensors: List[Tensor], keys: List[str], args: List[str]): 52 | a = tensors[0] 53 | l = tensors[1:] 54 | return a + l[0] 55 | """ 56 | 57 | script_with_redis_commands = r""" 58 | def redis_string_int_to_tensor(redis_value: Any): 59 | return 
torch.tensor(int(str(redis_value))) 60 | 61 | def int_set_get(tensors: List[Tensor], keys: List[str], args: List[str]): 62 | key = keys[0] 63 | value = int(args[0]) 64 | redis.execute("SET", key, str(value)) 65 | res = redis.execute("GET", key) 66 | return redis_string_int_to_tensor(res) 67 | 68 | def func(tensors: List[Tensor], keys: List[str], args: List[str]): 69 | redis.execute("SET", keys[0], args[0]) 70 | a = torch.stack(tensors).sum() 71 | b = redis_string_int_to_tensor(redis.execute("GET", keys[0])) 72 | redis.execute("DEL", keys[0]) 73 | return b + a 74 | """ 75 | 76 | data_processing_script = r""" 77 | def pre_process_3ch(tensors: List[Tensor], keys: List[str], args: List[str]): 78 | return tensors[0].float().div(255).unsqueeze(0) 79 | 80 | def post_process(tensors: List[Tensor], keys: List[str], args: List[str]): 81 | # tf model has 1001 classes, hence negative 1 82 | return tensors[0].max(1)[1] - 1 83 | """ 84 | 85 | 86 | class RedisAITestBase(TestCase): 87 | def setUp(self): 88 | super().setUp() 89 | RedisAITestBase.get_client().flushall() 90 | 91 | @staticmethod 92 | def get_client(debug=DEBUG): 93 | return Client(debug) 94 | 95 | 96 | class ClientTestCase(RedisAITestBase): 97 | def test_set_non_numpy_tensor(self): 98 | con = RedisAITestBase.get_client() 99 | con.tensorset("x", (2, 3, 4, 5), dtype="float") 100 | result = con.tensorget("x", as_numpy=False) 101 | self.assertEqual([2, 3, 4, 5], result["values"]) 102 | self.assertEqual([4], result["shape"]) 103 | 104 | con.tensorset("x", (2, 3, 4, 5), dtype="float64") 105 | result = con.tensorget("x", as_numpy=False) 106 | self.assertEqual([2, 3, 4, 5], result["values"]) 107 | self.assertEqual([4], result["shape"]) 108 | self.assertEqual("DOUBLE", result["dtype"]) 109 | 110 | con.tensorset("x", (2, 3, 4, 5), dtype="int16", shape=(2, 2)) 111 | result = con.tensorget("x", as_numpy=False) 112 | self.assertEqual([2, 3, 4, 5], result["values"]) 113 | self.assertEqual([2, 2], result["shape"]) 114 | 115 | con.tensorset("x", (1, 1, 0, 0), dtype="bool", shape=(2, 2)) 116 | result = con.tensorget("x", as_numpy=False) 117 | self.assertEqual([True, True, False, False], result["values"]) 118 | self.assertEqual([2, 2], result["shape"]) 119 | self.assertEqual("BOOL", result["dtype"]) 120 | 121 | con.tensorset("x", (12, 'a', 'G', 'four'), dtype="str", shape=(2, 2)) 122 | result = con.tensorget("x", as_numpy=False) 123 | self.assertEqual(['12', 'a', 'G', 'four'], result["values"]) 124 | self.assertEqual([2, 2], result["shape"]) 125 | self.assertEqual("STRING", result["dtype"]) 126 | 127 | with self.assertRaises(TypeError): 128 | con.tensorset("x", (2, 3, 4, 5), dtype="wrongtype", shape=(2, 2)) 129 | con.tensorset("x", (2, 3, 4, 5), dtype="int8", shape=(2, 2)) 130 | result = con.tensorget("x", as_numpy=False) 131 | self.assertEqual("INT8", result["dtype"]) 132 | self.assertEqual([2, 3, 4, 5], result["values"]) 133 | self.assertEqual([2, 2], result["shape"]) 134 | self.assertIn("values", result) 135 | 136 | with self.assertRaises(TypeError): 137 | con.tensorset("x") 138 | con.tensorset(1) 139 | 140 | def test_tensorget_meta(self): 141 | con = RedisAITestBase.get_client() 142 | con.tensorset("x", (2, 3, 4, 5), dtype="float") 143 | result = con.tensorget("x", meta_only=True) 144 | self.assertNotIn("values", result) 145 | self.assertEqual([4], result["shape"]) 146 | 147 | def test_numpy_tensor(self): 148 | con = RedisAITestBase.get_client() 149 | 150 | input_array = np.array([2, 3], dtype=np.float32) 151 | con.tensorset("x", input_array) 152 | values 
= con.tensorget("x") 153 | self.assertEqual(values.dtype, np.float32) 154 | 155 | input_array = np.array([2, 3], dtype=np.float64) 156 | con.tensorset("x", input_array) 157 | values = con.tensorget("x") 158 | self.assertEqual(values.dtype, np.float64) 159 | 160 | input_array = np.array([True, False]) 161 | con.tensorset("x", input_array) 162 | values = con.tensorget("x") 163 | self.assertEqual(values.dtype, "bool") 164 | self.assertTrue(np.array_equal(values, [True, False])) 165 | 166 | input_array = np.array(["a", "bb", "⚓⚓⚓", "d♻d♻"]).reshape((2, 2)) 167 | con.tensorset("x", input_array) 168 | values = con.tensorget("x") 169 | self.assertEqual(values.dtype.num, np.dtype("str").num) 170 | self.assertTrue(np.array_equal(values, [['a', 'bb'], ["⚓⚓⚓", "d♻d♻"]])) 171 | 172 | input_array = np.array([2, 3]) 173 | con.tensorset("x", input_array) 174 | values = con.tensorget("x") 175 | 176 | self.assertTrue(np.allclose([2, 3], values)) 177 | self.assertEqual(values.dtype, np.int64) 178 | self.assertEqual(values.shape, (2,)) 179 | self.assertTrue((np.allclose(values, input_array))) 180 | ret = con.tensorset("x", values) 181 | self.assertEqual(ret, "OK") 182 | 183 | # By default tensorget returns immutable, unless as_numpy_mutable is set as True 184 | ret = con.tensorget("x") 185 | self.assertRaises(ValueError, np.put, ret, 0, 1) 186 | ret = con.tensorget("x", as_numpy_mutable=True) 187 | np.put(ret, 0, 1) 188 | self.assertEqual(ret[0], 1) 189 | 190 | # AI.MODELSET is deprecated by AI.MODELSTORE. 191 | def test_deprecated_modelset(self): 192 | model_path = os.path.join(MODEL_DIR, "graph.pb") 193 | model_pb = load_model(model_path) 194 | con = RedisAITestBase.get_client() 195 | with self.assertRaises(ValueError): 196 | con.modelset( 197 | "m", 198 | "tf", 199 | "wrongdevice", 200 | model_pb, 201 | inputs=["a", "b"], 202 | outputs=["mul"], 203 | tag="v1.0", 204 | ) 205 | with self.assertRaises(ValueError): 206 | con.modelset( 207 | "m", 208 | "wrongbackend", 209 | "cpu", 210 | model_pb, 211 | inputs=["a", "b"], 212 | outputs=["mul"], 213 | tag="v1.0", 214 | ) 215 | con.modelset( 216 | "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.0" 217 | ) 218 | model = con.modelget("m", meta_only=True) 219 | self.assertEqual( 220 | model, 221 | { 222 | "backend": "TF", 223 | "batchsize": 0, 224 | "device": "cpu", 225 | "inputs": ["a", "b"], 226 | "minbatchsize": 0, 227 | "minbatchtimeout": 0, 228 | "outputs": ["mul"], 229 | "tag": "v1.0", 230 | }, 231 | ) 232 | 233 | def test_modelstore_errors(self): 234 | model_path = os.path.join(MODEL_DIR, "graph.pb") 235 | model_pb = load_model(model_path) 236 | con = RedisAITestBase.get_client() 237 | 238 | with self.assertRaises(ValueError) as e: 239 | con.modelstore( 240 | None, 241 | "TF", 242 | "CPU", 243 | model_pb, 244 | inputs=["a", "b"], 245 | outputs=["mul"] 246 | ) 247 | self.assertEqual(str(e.exception), "Model name was not given") 248 | 249 | with self.assertRaises(ValueError) as e: 250 | con.modelstore( 251 | "m", 252 | "tf", 253 | "wrongdevice", 254 | model_pb, 255 | inputs=["a", "b"], 256 | outputs=["mul"], 257 | tag="v1.0", 258 | ) 259 | self.assertTrue(str(e.exception).startswith("Device not allowed")) 260 | with self.assertRaises(ValueError) as e: 261 | con.modelstore( 262 | "m", 263 | "wrongbackend", 264 | "cpu", 265 | model_pb, 266 | inputs=["a", "b"], 267 | outputs=["mul"], 268 | tag="v1.0", 269 | ) 270 | self.assertTrue(str(e.exception).startswith("Backend not allowed")) 271 | with self.assertRaises(ValueError) as e: 272 | 
con.modelstore(
273 |                 "m",
274 |                 "tf",
275 |                 "cpu",
276 |                 model_pb,
277 |                 inputs=["a", "b"],
278 |                 outputs=["mul"],
279 |                 tag="v1.0",
280 |                 minbatch=2,
281 |             )
282 |         self.assertEqual(str(e.exception),
283 |                          "Minbatch is not allowed without batch")
284 |         with self.assertRaises(ValueError) as e:
285 |             con.modelstore(
286 |                 "m",
287 |                 "tf",
288 |                 "cpu",
289 |                 model_pb,
290 |                 inputs=["a", "b"],
291 |                 outputs=["mul"],
292 |                 tag="v1.0",
293 |                 batch=4,
294 |                 minbatchtimeout=1000,
295 |             )
296 |         self.assertEqual(
297 |             str(e.exception), "Minbatchtimeout is not allowed without minbatch"
298 |         )
299 |         with self.assertRaises(ValueError) as e:
300 |             con.modelstore("m", "tf", "cpu", model_pb, tag="v1.0")
301 |         self.assertEqual(
302 |             str(e.exception),
303 |             "Require keyword arguments inputs and outputs for TF models",
304 |         )
305 |         with self.assertRaises(ValueError) as e:
306 |             con.modelstore(
307 |                 "m",
308 |                 "torch",
309 |                 "cpu",
310 |                 model_pb,
311 |                 inputs=["a", "b"],
312 |                 outputs=["mul"],
313 |                 tag="v1.0",
314 |             )
315 |         self.assertEqual(
316 |             str(e.exception),
317 |             "Inputs and outputs keywords should not be specified for this backend",
318 |         )
319 | 
320 |     def test_modelget_meta(self):
321 |         model_path = os.path.join(MODEL_DIR, tf_graph)
322 |         model_pb = load_model(model_path)
323 |         con = RedisAITestBase.get_client()
324 |         con.modelstore(
325 |             "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.0"
326 |         )
327 |         model = con.modelget("m", meta_only=True)
328 |         self.assertEqual(
329 |             model,
330 |             {
331 |                 "backend": "TF",
332 |                 "batchsize": 0,
333 |                 "device": "cpu",
334 |                 "inputs": ["a", "b"],
335 |                 "minbatchsize": 0,
336 |                 "minbatchtimeout": 0,
337 |                 "outputs": ["mul"],
338 |                 "tag": "v1.0",
339 |             },
340 |         )
341 | 
342 |     def test_modelexecute_non_list_input_output(self):
343 |         model_path = os.path.join(MODEL_DIR, "graph.pb")
344 |         model_pb = load_model(model_path)
345 |         con = RedisAITestBase.get_client()
346 |         con.modelstore(
347 |             "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.7"
348 |         )
349 |         con.tensorset("a", (2, 3), dtype="float")
350 |         con.tensorset("b", (2, 3), dtype="float")
351 |         ret = con.modelexecute("m", ["a", "b"], "out")
352 |         self.assertEqual(ret, "OK")
353 | 
354 |     def test_non_ascii_char(self):
355 |         nonascii = "ĉ"
356 |         model_path = os.path.join(MODEL_DIR, tf_graph)
357 |         model_pb = load_model(model_path)
358 |         con = RedisAITestBase.get_client()
359 |         con.modelstore(
360 |             "m" + nonascii,
361 |             "tf",
362 |             "cpu",
363 |             model_pb,
364 |             inputs=["a", "b"],
365 |             outputs=["mul"],
366 |             tag="v1.0",
367 |         )
368 |         con.tensorset("a" + nonascii, (2, 3), dtype="float")
369 |         con.tensorset("b", (2, 3), dtype="float")
370 |         con.modelexecute(
371 |             "m" + nonascii, ["a" + nonascii, "b"], ["c" + nonascii])
372 |         tensor = con.tensorget("c" + nonascii)
373 |         self.assertTrue((np.allclose(tensor, [4.0, 9.0])))
374 | 
375 |     def test_device_with_id(self):
376 |         model_path = os.path.join(MODEL_DIR, tf_graph)
377 |         model_pb = load_model(model_path)
378 |         con = RedisAITestBase.get_client()
379 |         ret = con.modelstore(
380 |             "m",
381 |             "tf",
382 |             "cpu:1",
383 |             model_pb,
384 |             inputs=["a", "b"],
385 |             outputs=["mul"],
386 |             tag="v1.0",
387 |         )
388 |         self.assertEqual('OK', ret)
389 | 
390 |     def test_run_tf_model(self):
391 |         model_path = os.path.join(MODEL_DIR, tf_graph)
392 |         bad_model_path = os.path.join(MODEL_DIR, torch_graph)
393 | 
394 |         model_pb = load_model(model_path)
395 |         wrong_model_pb = load_model(bad_model_path)
396 | 
397 |         con = 
RedisAITestBase.get_client()
398 |         con.modelstore(
399 |             "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.0"
400 |         )
401 |         con.modeldel("m")
402 |         self.assertRaises(ResponseError, con.modelget, "m")
403 |         con.modelstore(
404 |             "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs="mul", tag="v1.0"
405 |         )
406 | 
407 |         # Required arguments are None
408 |         with self.assertRaises(ValueError) as e:
409 |             con.modelexecute(
410 |                 "m",
411 |                 inputs=None,
412 |                 outputs=None
413 |             )
414 |         self.assertEqual(str(e.exception), "Missing required arguments for model execute command")
415 | 
416 |         # wrong model
417 |         with self.assertRaises(ResponseError) as e:
418 |             con.modelstore(
419 |                 "m", "tf", "cpu", wrong_model_pb, inputs=["a", "b"], outputs=["mul"]
420 |             )
421 |         self.assertEqual(str(e.exception), "Invalid GraphDef")
422 | 
423 |         con.tensorset("a", (2, 3), dtype="float")
424 |         con.tensorset("b", (2, 3), dtype="float")
425 |         con.modelexecute("m", ["a", "b"], ["c"])
426 |         tensor = con.tensorget("c")
427 |         self.assertTrue(np.allclose([4, 9], tensor))
428 |         model_det = con.modelget("m")
429 |         self.assertTrue(model_det["backend"] == "TF")
430 |         self.assertTrue(
431 |             model_det["device"] == "cpu"
432 |         )  # TODO: RedisAI returns the device name in lowercase
433 |         self.assertTrue(model_det["tag"] == "v1.0")
434 |         con.modeldel("m")
435 |         self.assertRaises(ResponseError, con.modelget, "m")
436 | 
437 |     # AI.SCRIPTRUN is deprecated by AI.SCRIPTEXECUTE
438 |     # and AI.SCRIPTSET is deprecated by AI.SCRIPTSTORE
439 |     def test_deprecated_scriptset_and_scriptrun(self):
440 |         con = RedisAITestBase.get_client()
441 |         self.assertRaises(ResponseError, con.scriptset, "scr", "cpu", "return 1")
442 |         con.scriptset("scr", "cpu", script_old)
443 |         con.tensorset("a", (2, 3), dtype="float")
444 |         con.tensorset("b", (2, 3), dtype="float")
445 | 
446 |         # test bar(a, b)
447 |         con.scriptrun("scr", "bar", inputs=["a", "b"], outputs=["c"])
448 |         tensor = con.tensorget("c", as_numpy=False)
449 |         self.assertEqual([4, 6], tensor["values"])
450 | 
451 |         # test bar_variadic(a, args : List[Tensor])
452 |         con.scriptrun("scr", "bar_variadic", inputs=["a", "$", "b", "b"], outputs=["c"])
453 |         tensor = con.tensorget("c", as_numpy=False)
454 |         self.assertEqual([4, 6], tensor["values"])
455 | 
456 |     def test_scriptstore(self):
457 |         con = RedisAITestBase.get_client()
458 |         # try with bad arguments:
459 |         with self.assertRaises(ValueError) as e:
460 |             con.scriptstore("test", "cpu", script, entry_points=None)
461 |         self.assertEqual(str(e.exception), "Missing required arguments for script store command")
462 |         self.assertRaises(ValueError, con.scriptstore, "test", "cpu", script=None, entry_points="bar")
463 |         with self.assertRaises(ResponseError) as e:
464 |             con.scriptstore("test", "cpu", "return 1", "f")
465 |         self.assertEqual(str(e.exception),
466 |                          "expected def but found 'return' here: File \"\", line 1 return 1 ~~~~~~ <--- HERE ")
467 | 
468 |     def test_scripts_execute(self):
469 |         con = RedisAITestBase.get_client()
470 |         # try with bad arguments:
471 |         with self.assertRaises(ValueError) as e:
472 |             con.scriptexecute("test", function=None, keys=None, inputs=None)
473 |         self.assertEqual(str(e.exception), "Missing required arguments for script execute command")
474 |         with self.assertRaises(ResponseError) as e:
475 |             con.scriptexecute("test", "bar", inputs=["a"], outputs=["c"])
476 |         self.assertEqual(str(e.exception), "script key is empty")
477 | 
478 |         con.scriptstore("test", "cpu", script, "bar")
479 |         con.tensorset("a", (2, 3), dtype="float")
480 
| con.tensorset("b", (2, 3), dtype="float") 481 | con.scriptexecute("test", "bar", inputs=["a", "b"], outputs=["c"]) 482 | tensor = con.tensorget("c", as_numpy=False) 483 | self.assertEqual([4, 6], tensor["values"]) 484 | script_det = con.scriptget("test") 485 | self.assertTrue(script_det["device"] == "cpu") 486 | self.assertTrue(script_det["source"] == script) 487 | script_det = con.scriptget("test", meta_only=True) 488 | self.assertTrue(script_det["device"] == "cpu") 489 | self.assertNotIn("source", script_det) 490 | # delete the script 491 | con.scriptdel("test") 492 | self.assertRaises(ResponseError, con.scriptget, "test") 493 | 494 | # store new script 495 | con.scriptstore("myscript{1}", "cpu", script, ["bar", "bar_variadic"], "version1") 496 | con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") 497 | con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") 498 | con.scriptexecute("myscript{1}", "bar", inputs=["a{1}", "b{1}"], outputs=["c{1}"]) 499 | values = con.tensorget("c{1}", as_numpy=False) 500 | self.assertTrue(np.allclose(values["values"], [4.0, 6.0, 4.0, 6.0])) 501 | 502 | con.tensorset("b1{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") 503 | con.scriptexecute("myscript{1}", 'bar_variadic', 504 | inputs=["a{1}", "b1{1}", "b{1}"], 505 | outputs=["c{1}"]) 506 | 507 | values = con.tensorget("c{1}", as_numpy=False)['values'] 508 | self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) 509 | 510 | def test_scripts_redis_commands(self): 511 | con = RedisAITestBase.get_client() 512 | con.scriptstore("myscript{1}", "cpu", script_with_redis_commands, ["int_set_get", "func"]) 513 | con.scriptexecute("myscript{1}", "int_set_get", keys=["x{1}", "{1}"], args=["3"], outputs=["y{1}"]) 514 | values = con.tensorget("y{1}", as_numpy=False) 515 | self.assertTrue(np.allclose(values["values"], [3])) 516 | 517 | con.tensorset("mytensor1{1}", [40], dtype="float") 518 | con.tensorset("mytensor2{1}", [10], dtype="float") 519 | con.tensorset("mytensor3{1}", [1], dtype="float") 520 | con.scriptexecute("myscript{1}", "func", 521 | keys=["key{1}"], 522 | inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], 523 | args=["3"], 524 | outputs=["my_output{1}"]) 525 | values = con.tensorget("my_output{1}", as_numpy=False) 526 | self.assertTrue(np.allclose(values["values"], [54])) 527 | self.assertIsNone(con.get("key{1}")) 528 | 529 | def test_run_onnxml_model(self): 530 | mlmodel_path = os.path.join(MODEL_DIR, "boston.onnx") 531 | onnxml_model = load_model(mlmodel_path) 532 | con = RedisAITestBase.get_client() 533 | con.modelstore("onnx_model", "onnx", "cpu", onnxml_model) 534 | tensor = np.ones((1, 13)).astype(np.float32) 535 | con.tensorset("input", tensor) 536 | con.modelexecute("onnx_model", ["input"], ["output"]) 537 | # tests `convert_to_num` 538 | outtensor = con.tensorget("output", as_numpy=False) 539 | self.assertEqual(int(float(outtensor["values"][0])), 24) 540 | 541 | def test_run_onnxdl_model(self): 542 | # A PyTorch model that finds the square 543 | dlmodel_path = os.path.join(MODEL_DIR, "findsquare.onnx") 544 | onnxdl_model = load_model(dlmodel_path) 545 | con = RedisAITestBase.get_client() 546 | con.modelstore("onnx_model", "onnx", "cpu", onnxdl_model) 547 | tensor = np.array((2,)).astype(np.float32) 548 | con.tensorset("input", tensor) 549 | con.modelexecute("onnx_model", ["input"], ["output"]) 550 | outtensor = con.tensorget("output") 551 | self.assertTrue(np.allclose(outtensor, [4.0])) 552 | 553 | def test_run_pytorch_model(self): 554 | model_path = os.path.join(MODEL_DIR, 
torch_graph) 555 | ptmodel = load_model(model_path) 556 | con = RedisAITestBase.get_client() 557 | con.modelstore("pt_model", "torch", "cpu", ptmodel, tag="v1.0") 558 | con.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 559 | con.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 560 | con.modelexecute("pt_model", ["a", "b"], ["output"]) 561 | output = con.tensorget("output", as_numpy=False) 562 | self.assertTrue(np.allclose(output["values"], [4, 6, 4, 6])) 563 | 564 | def test_run_tflite_model(self): 565 | model_path = os.path.join(MODEL_DIR, "mnist_model_quant.tflite") 566 | tflmodel = load_model(model_path) 567 | con = RedisAITestBase.get_client() 568 | con.modelstore("tfl_model", "tflite", "cpu", tflmodel) 569 | 570 | input_path = os.path.join(TENSOR_DIR, "one.raw") 571 | with open(input_path, 'rb') as f: 572 | img = np.frombuffer(f.read(), dtype=np.float32) 573 | con.tensorset("img", img) 574 | con.modelexecute("tfl_model", ["img"], ["output1", "output2"]) 575 | output = con.tensorget("output1") 576 | self.assertEqual(output, [1]) 577 | 578 | # AI.MODELRUN is deprecated by AI.MODELEXECUTE 579 | def test_deprecated_modelrun(self): 580 | model_path = os.path.join(MODEL_DIR, "graph.pb") 581 | model_pb = load_model(model_path) 582 | 583 | con = RedisAITestBase.get_client() 584 | con.modelstore( 585 | "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.0" 586 | ) 587 | 588 | con.tensorset("a", (2, 3), dtype="float") 589 | con.tensorset("b", (2, 3), dtype="float") 590 | con.modelrun("m", ["a", "b"], ["c"]) 591 | tensor = con.tensorget("c") 592 | self.assertTrue(np.allclose([4, 9], tensor)) 593 | 594 | def test_info(self): 595 | model_path = os.path.join(MODEL_DIR, tf_graph) 596 | model_pb = load_model(model_path) 597 | con = RedisAITestBase.get_client() 598 | con.modelstore("m", "tf", "cpu", model_pb, 599 | inputs=["a", "b"], outputs=["mul"]) 600 | first_info = con.infoget("m") 601 | expected = { 602 | "key": "m", 603 | "type": "MODEL", 604 | "backend": "TF", 605 | "device": "cpu", 606 | "tag": "", 607 | "duration": 0, 608 | "samples": 0, 609 | "calls": 0, 610 | "errors": 0, 611 | } 612 | self.assertEqual(first_info, expected) 613 | con.tensorset("a", (2, 3), dtype="float") 614 | con.tensorset("b", (2, 3), dtype="float") 615 | con.modelexecute("m", ["a", "b"], ["c"]) 616 | con.modelexecute("m", ["a", "b"], ["c"]) 617 | second_info = con.infoget("m") 618 | self.assertEqual(second_info["calls"], 2) # 2 model runs 619 | con.inforeset("m") 620 | third_info = con.infoget("m") 621 | # before modelrun and after reset 622 | self.assertEqual(first_info, third_info) 623 | 624 | def test_model_scan(self): 625 | model_path = os.path.join(MODEL_DIR, tf_graph) 626 | model_pb = load_model(model_path) 627 | con = RedisAITestBase.get_client() 628 | con.modelstore( 629 | "m", "tf", "cpu", model_pb, inputs=["a", "b"], outputs=["mul"], tag="v1.2" 630 | ) 631 | model_path = os.path.join(MODEL_DIR, "pt-minimal.pt") 632 | ptmodel = load_model(model_path) 633 | con = RedisAITestBase.get_client() 634 | # TODO: RedisAI modelscan issue 635 | con.modelstore("pt_model", "torch", "cpu", ptmodel) 636 | mlist = con.modelscan() 637 | self.assertEqual(mlist, [["pt_model", ""], ["m", "v1.2"]]) 638 | 639 | def test_script_scan(self): 640 | con = RedisAITestBase.get_client() 641 | con.scriptset("ket1", "cpu", script, tag="v1.0") 642 | con.scriptset("ket2", "cpu", script) 643 | slist = con.scriptscan() 644 | self.assertEqual(slist, [["ket1", "v1.0"], ["ket2", ""]]) 645 | 646 | def 
test_debug(self):
647 |         con = RedisAITestBase.get_client(debug=True)
648 |         with Capturing() as output:
649 |             con.tensorset("x", (2, 3, 4, 5), dtype="float")
650 |         self.assertEqual(["AI.TENSORSET x FLOAT 4 VALUES 2 3 4 5"], output)
651 | 
652 |     def test_config(self):
653 |         con = RedisAITestBase.get_client()
654 |         model_path = os.path.join(MODEL_DIR, torch_graph)
655 |         pt_model = load_model(model_path)
656 |         self.assertEqual(con.modelstore("pt_model", "torch", "cpu", pt_model), 'OK')
657 | 
658 |         # Get the default configs.
659 |         self.assertEqual(int(con.config('MODEL_CHUNK_SIZE')), 511 * 1024 * 1024)
660 |         default_path = con.config('BACKENDSPATH')
661 | 
662 |         # Set different model chunk size, and verify that it returns properly from "modelget".
663 |         con.config('MODEL_CHUNK_SIZE', len(pt_model) // 3)
664 |         self.assertEqual(int(con.config('MODEL_CHUNK_SIZE')), len(pt_model) // 3)
665 |         chunks = con.modelget("pt_model")['blob']
666 |         self.assertEqual(len(chunks), 4)  # Since pt_model is of size 1352 bytes, expect 4 chunks.
667 |         flat_chunks = b"".join(list(chunks))
668 |         self.assertEqual(pt_model, flat_chunks)
669 |         con.config('MODEL_CHUNK_SIZE', 511 * 1024 * 1024)  # restore default
670 | 
671 |         # Set different backendspath (and restore the default one).
672 |         con.config('BACKENDSPATH', 'my/backends/path')
673 |         self.assertEqual(con.config('BACKENDSPATH'), 'my/backends/path')
674 |         con.config('BACKENDSPATH', default_path)
675 | 
676 |         # Test for errors - set and get non-existing configs.
677 |         with self.assertRaises(ResponseError) as e:
678 |             con.config("non-existing", "val")
679 |         self.assertEqual(str(e.exception), "unsupported subcommand")
680 | 
681 |         with self.assertRaises(ResponseError) as e:
682 |             con.config("MODEL_CHUNK_SIZE", "not-a-number")
683 |         self.assertEqual(str(e.exception), "MODEL_CHUNK_SIZE: invalid chunk size")
684 | 
685 |         self.assertEqual(con.config("non-existing"), None)
686 | 
687 | 
688 | def load_image():
689 |     image_filename = os.path.join(MODEL_DIR, dog_img)
690 |     img_height, img_width = 224, 224
691 | 
692 |     img = imread(image_filename)
693 |     img = resize(img, (img_height, img_width), mode='constant', anti_aliasing=True)
694 |     img = img.astype(np.uint8)
695 |     return img
696 | 
697 | 
698 | class DagTestCase(RedisAITestBase):
699 |     def setUp(self):
700 |         super().setUp()
701 |         con = RedisAITestBase.get_client()
702 |         model_path = os.path.join(MODEL_DIR, torch_graph)
703 |         ptmodel = load_model(model_path)
704 |         con.modelstore("pt_model", "torch", "cpu", ptmodel, tag="v7.0")
705 | 
706 |     def test_deprecated_dagrun(self):
707 |         con = RedisAITestBase.get_client()
708 | 
709 |         # test the warning of using dagrun
710 |         with warnings.catch_warnings(record=True) as w:
711 |             warnings.simplefilter("default")
712 |             dag = con.dag()
713 |         self.assertTrue(issubclass(w[-1].category, DeprecationWarning))
714 |         self.assertEqual(str(w[-1].message),
715 |                          "Creating Dag without any of LOAD, PERSIST and ROUTING arguments"
716 |                          "is allowed only in deprecated AI.DAGRUN or AI.DAGRUN_RO commands")
717 | 
718 |         # test that dagrun and modelrun haven't been broken
719 |         dag.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float")
720 |         dag.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float")
721 |         # can't use modelexecute or scriptexecute when using DAGRUN
722 |         with self.assertRaises(RuntimeError) as e:
723 |             dag.modelexecute("pt_model", ["a", "b"], ["output"])
724 |         self.assertEqual(str(e.exception),
725 |                          "You are using deprecated version of DAG, that does not supports MODELEXECUTE." 
726 | "The new version requires giving at least one of LOAD, PERSIST and ROUTING" 727 | "arguments when constructing the Dag") 728 | with self.assertRaises(RuntimeError) as e: 729 | dag.scriptexecute("myscript{1}", "bar", inputs=["a{1}", "b{1}"], outputs=["c{1}"]) 730 | self.assertEqual(str(e.exception), 731 | "You are using deprecated version of DAG, that does not supports SCRIPTEXECUTE." 732 | "The new version requires giving at least one of LOAD, PERSIST and ROUTING" 733 | "arguments when constructing the Dag") 734 | dag.modelrun("pt_model", ["a", "b"], ["output"]) 735 | dag.tensorget("output") 736 | result = dag.run() 737 | expected = [ 738 | "OK", 739 | "OK", 740 | "OK", 741 | np.array([[4.0, 6.0], [4.0, 6.0]], dtype=np.float32), 742 | ] 743 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 744 | self.assertEqual(expected, result) 745 | 746 | def test_deprecated_modelrun_and_run(self): 747 | # use modelrun&run method but perform modelexecute&dagexecute behind the scene 748 | con = RedisAITestBase.get_client() 749 | 750 | con.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 751 | con.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 752 | dag = con.dag(load=["a", "b"], persist="output") 753 | dag.modelrun("pt_model", ["a", "b"], ["output"]) 754 | dag.tensorget("output") 755 | result = dag.run() 756 | expected = ["OK", np.array([[4.0, 6.0], [4.0, 6.0]], dtype=np.float32)] 757 | result_outside_dag = con.tensorget("output") 758 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 759 | result = dag.run() 760 | self.assertTrue(np.allclose(result_outside_dag, result.pop())) 761 | self.assertEqual(expected, result) 762 | 763 | def test_dagexecute_with_scriptexecute_redis_commands(self): 764 | con = RedisAITestBase.get_client() 765 | con.scriptstore("myscript{1}", "cpu", script_with_redis_commands, "func") 766 | dag = con.dag(persist='my_output{1}', routing='{1}') 767 | dag.tensorset("mytensor1{1}", [40], dtype="float") 768 | dag.tensorset("mytensor2{1}", [10], dtype="float") 769 | dag.tensorset("mytensor3{1}", [1], dtype="float") 770 | dag.scriptexecute("myscript{1}", "func", 771 | keys=["key{1}"], 772 | inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], 773 | args=["3"], 774 | outputs=["my_output{1}"]) 775 | dag.execute() 776 | values = con.tensorget("my_output{1}", as_numpy=False) 777 | self.assertTrue(np.allclose(values["values"], [54])) 778 | 779 | def test_dagexecute_modelexecute_with_scriptexecute(self): 780 | con = RedisAITestBase.get_client() 781 | script_name = 'imagenet_script:{1}' 782 | model_name = 'imagenet_model:{1}' 783 | 784 | img = load_image() 785 | model_path = os.path.join(MODEL_DIR, "resnet50.pb") 786 | model = load_model(model_path) 787 | con.scriptstore(script_name, 'cpu', data_processing_script, entry_points=['post_process', 'pre_process_3ch']) 788 | con.modelstore(model_name, 'TF', 'cpu', model, inputs='images', outputs='output') 789 | 790 | dag = con.dag(persist='output:{1}') 791 | dag.tensorset('image:{1}', tensor=img, shape=(img.shape[1], img.shape[0]), dtype='UINT8') 792 | dag.scriptexecute(script_name, 'pre_process_3ch', inputs='image:{1}', outputs='temp_key1') 793 | dag.modelexecute(model_name, inputs='temp_key1', outputs='temp_key2') 794 | dag.scriptexecute(script_name, 'post_process', inputs='temp_key2', outputs='output:{1}') 795 | ret = dag.execute() 796 | self.assertEqual(['OK', 'OK', 'OK', 'OK'], ret) 797 | 798 | def test_dagexecute_with_load(self): 799 | con = RedisAITestBase.get_client() 800 | con.tensorset("a", 
[2, 3, 2, 3], shape=(2, 2), dtype="float") 801 | 802 | dag = con.dag(load="a") 803 | dag.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 804 | dag.modelexecute("pt_model", ["a", "b"], ["output"]) 805 | dag.tensorget("output") 806 | result = dag.execute() 807 | expected = ["OK", "OK", np.array( 808 | [[4.0, 6.0], [4.0, 6.0]], dtype=np.float32)] 809 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 810 | self.assertEqual(expected, result) 811 | self.assertRaises(ResponseError, con.tensorget, "b") 812 | 813 | def test_dagexecute_with_persist(self): 814 | con = RedisAITestBase.get_client() 815 | 816 | with self.assertRaises(ResponseError): 817 | dag = con.dag(persist="wrongkey") 818 | dag.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float").execute() 819 | 820 | dag = con.dag(persist=["b"]) 821 | dag.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 822 | dag.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 823 | dag.tensorget("b") 824 | result = dag.execute() 825 | b = con.tensorget("b") 826 | self.assertTrue(np.allclose(b, result[-1])) 827 | self.assertEqual(b.dtype, np.float32) 828 | self.assertEqual(len(result), 3) 829 | 830 | def test_dagexecute_calling_on_return(self): 831 | con = RedisAITestBase.get_client() 832 | con.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 833 | result = ( 834 | con.dag(load="a") 835 | .tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 836 | .modelexecute("pt_model", ["a", "b"], ["output"]) 837 | .tensorget("output") 838 | .execute() 839 | ) 840 | expected = ["OK", "OK", np.array( 841 | [[4.0, 6.0], [4.0, 6.0]], dtype=np.float32)] 842 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 843 | self.assertEqual(expected, result) 844 | 845 | def test_dagexecute_without_load_and_persist(self): 846 | con = RedisAITestBase.get_client() 847 | dag = con.dag(load="wrongkey") 848 | with self.assertRaises(ResponseError) as e: 849 | dag.tensorget("wrongkey").execute() 850 | self.assertEqual(str(e.exception), "tensor key is empty or in a different shard") 851 | 852 | dag = con.dag(persist="output") 853 | dag.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 854 | dag.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 855 | dag.modelexecute("pt_model", ["a", "b"], ["output"]) 856 | dag.tensorget("output") 857 | result = dag.execute() 858 | expected = [ 859 | "OK", 860 | "OK", 861 | "OK", 862 | np.array([[4.0, 6.0], [4.0, 6.0]], dtype=np.float32), 863 | ] 864 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 865 | self.assertEqual(expected, result) 866 | 867 | def test_dagexecute_with_load_and_persist(self): 868 | con = RedisAITestBase.get_client() 869 | con.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 870 | con.tensorset("b", [2, 3, 2, 3], shape=(2, 2), dtype="float") 871 | dag = con.dag(load=["a", "b"], persist="output") 872 | dag.modelexecute("pt_model", ["a", "b"], ["output"]) 873 | dag.tensorget("output") 874 | result = dag.execute() 875 | expected = ["OK", np.array([[4.0, 6.0], [4.0, 6.0]], dtype=np.float32)] 876 | result_outside_dag = con.tensorget("output") 877 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 878 | result = dag.execute() 879 | self.assertTrue(np.allclose(result_outside_dag, result.pop())) 880 | self.assertEqual(expected, result) 881 | 882 | def test_dagexecuteRO(self): 883 | con = RedisAITestBase.get_client() 884 | con.tensorset("a", [2, 3, 2, 3], shape=(2, 2), dtype="float") 885 | con.tensorset("b", [2, 3, 2, 3], shape=(2, 2), 
dtype="float") 886 | with self.assertRaises(RuntimeError): 887 | con.dag(load=["a", "b"], persist="output", readonly=True) 888 | dag = con.dag(load=["a", "b"], readonly=True) 889 | 890 | with self.assertRaises(RuntimeError) as e: 891 | dag.scriptexecute("myscript{1}", "bar", inputs=["a{1}", "b{1}"], outputs=["c{1}"]) 892 | self.assertEqual(str(e.exception), "AI.SCRIPTEXECUTE cannot be used in readonly mode") 893 | 894 | dag.modelexecute("pt_model", ["a", "b"], ["output"]) 895 | dag.tensorget("output") 896 | result = dag.execute() 897 | expected = ["OK", np.array([[4.0, 6.0], [4.0, 6.0]], dtype=np.float32)] 898 | self.assertTrue(np.allclose(expected.pop(), result.pop())) 899 | 900 | 901 | class PipelineTest(RedisAITestBase): 902 | def test_pipeline_non_transaction(self): 903 | con = RedisAITestBase.get_client() 904 | arr = np.array([[2.0, 3.0], [2.0, 3.0]], dtype=np.float32) 905 | pipe = con.pipeline(transaction=False) 906 | pipe = pipe.tensorset("a", arr).set("native", 1) 907 | pipe = pipe.tensorget("a", as_numpy=False) 908 | pipe = pipe.tensorget("a", as_numpy=True).tensorget( 909 | "a", meta_only=True) 910 | result = pipe.execute() 911 | expected = [ 912 | b"OK", 913 | True, 914 | {"dtype": "FLOAT", "shape": [2, 2], 915 | "values": [2.0, 3.0, 2.0, 3.0]}, 916 | arr, 917 | {"dtype": "FLOAT", "shape": [2, 2]}, 918 | ] 919 | for res, exp in zip(result, expected): 920 | if isinstance(res, np.ndarray): 921 | self.assertTrue(np.allclose(exp, res)) 922 | else: 923 | self.assertEqual(res, exp) 924 | 925 | def test_pipeline_transaction(self): 926 | con = RedisAITestBase.get_client() 927 | arr = np.array([[2.0, 3.0], [2.0, 3.0]], dtype=np.float32) 928 | pipe = con.pipeline(transaction=True) 929 | pipe = pipe.tensorset("a", arr).set("native", 1) 930 | pipe = pipe.tensorget("a", as_numpy=False) 931 | pipe = pipe.tensorget("a", as_numpy=True).tensorget( 932 | "a", meta_only=True) 933 | result = pipe.execute() 934 | expected = [ 935 | b"OK", 936 | True, 937 | {"dtype": "FLOAT", "shape": [2, 2], 938 | "values": [2.0, 3.0, 2.0, 3.0]}, 939 | arr, 940 | {"dtype": "FLOAT", "shape": [2, 2]}, 941 | ] 942 | for res, exp in zip(result, expected): 943 | if isinstance(res, np.ndarray): 944 | self.assertTrue(np.allclose(exp, res)) 945 | else: 946 | self.assertEqual(res, exp) 947 | -------------------------------------------------------------------------------- /test/testdata/boston.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/boston.onnx -------------------------------------------------------------------------------- /test/testdata/dog.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/dog.jpg -------------------------------------------------------------------------------- /test/testdata/findsquare.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/findsquare.onnx -------------------------------------------------------------------------------- /test/testdata/graph.pb: -------------------------------------------------------------------------------- 1 | 2 | , 3 | a Placeholder* 4 | dtype0* 5 | shape: 6 | , 7 | b Placeholder* 8 | dtype0* 9 | shape: 10 |  11 | mulMulab* 12 | T0 13 |  14 | 
cIdentitymul* 15 | T0 -------------------------------------------------------------------------------- /test/testdata/mnist_model_quant.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/mnist_model_quant.tflite -------------------------------------------------------------------------------- /test/testdata/one.raw: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/one.raw -------------------------------------------------------------------------------- /test/testdata/pt-minimal.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/pt-minimal.pt -------------------------------------------------------------------------------- /test/testdata/resnet50.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RedisAI/redisai-py/c4b71db2c4c8bedba138e15394af57252e07c094/test/testdata/resnet50.pb -------------------------------------------------------------------------------- /test/testdata/script.txt: -------------------------------------------------------------------------------- 1 | def bar(a, b): 2 | return a + b 3 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | skipsdist = true 3 | envlist = linters,tests 4 | 5 | [flake8] 6 | max-complexity = 10 7 | ignore = E501,C901 8 | srcdir = ./redisai 9 | exclude =.git,.tox,dist,doc,*/__pycache__/*,venv,.venv 10 | 11 | [testenv:tests] 12 | whitelist_externals = find 13 | commands_pre = 14 | pip install --upgrade pip 15 | commands = 16 | poetry install --no-root --only dev 17 | pytest test/test.py 18 | 19 | [testenv:linters] 20 | allowlist_externals = 21 | poetry 22 | commands = 23 | poetry install --no-root --only dev 24 | flake8 --show-source 25 | vulture redisai --min-confidence 80 26 | bandit redisai/** 27 | 28 | [testenv:docs] 29 | commands = make html 30 | --------------------------------------------------------------------------------
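
Example usage — a minimal end-to-end sketch of the client API exercised in test/test.py above. It assumes a RedisAI server listening on localhost:6379 (as in the integration workflow) and uses the bundled TensorFlow graph test/testdata/graph.pb, which multiplies its two inputs; the key names ("a", "b", "m", "mul_out") are illustrative only.

    import numpy as np
    from ml2rt import load_model
    from redisai import Client

    con = Client()  # assumed default connection: localhost:6379 with the RedisAI module loaded

    # Store two input tensors: one from a numpy array, one from a plain tuple.
    con.tensorset("a", np.array([2.0, 3.0], dtype=np.float32))
    con.tensorset("b", (2, 3), dtype="float")

    # Store the TF graph and run it; AI.MODELSTORE requires INPUTS/OUTPUTS for TF models.
    model = load_model("test/testdata/graph.pb")
    con.modelstore("m", "tf", "cpu", model, inputs=["a", "b"], outputs=["mul"], tag="v1.0")
    con.modelexecute("m", inputs=["a", "b"], outputs=["mul_out"])
    print(con.tensorget("mul_out"))  # array([4., 9.], dtype=float32)

    # The same pipeline as a single AI.DAGEXECUTE round trip.
    out = (
        con.dag(load=["a", "b"], persist="mul_out")
        .modelexecute("m", ["a", "b"], ["mul_out"])
        .tensorget("mul_out")
        .execute()
    )
    print(out[-1])  # array([4., 9.], dtype=float32)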