├── .github └── workflows │ ├── check_label.yml │ ├── deploy_release.yaml │ ├── doxygen.yml │ ├── pull-request-links.yaml │ └── run_test.yaml ├── .gitignore ├── .readthedocs.yaml ├── CHANGELOG.md ├── LICENSES └── Apache-2.0.txt ├── README.md ├── codecov.yml ├── docs ├── Makefile ├── build.sh ├── how-to-update-interface.md ├── make.bat ├── requirements.txt └── source │ ├── _autosummary │ └── nebula3.gclient.rst │ ├── conf.py │ ├── index.rst │ ├── modules.rst │ ├── nebula3.gclient.net.rst │ ├── nebula3.gclient.rst │ └── nebula3.rst ├── doxygen-config ├── example ├── FormatResp.py ├── GraphClientMultiThreadExample.py ├── GraphClientSimpleExample.py ├── GraphVis.py ├── Params.py ├── ScanVertexEdgeExample.py ├── SessionPoolExample.py ├── apache_echarts.html ├── data.json └── get_started.ipynb ├── nebula3 ├── Config.py ├── Exception.py ├── __init__.py ├── common │ ├── __init__.py │ ├── constants.py │ └── ttypes.py ├── data │ ├── DataObject.py │ ├── ResultSet.py │ └── __init__.py ├── fbthrift │ ├── TMultiplexedProcessor.py │ ├── TSCons.py │ ├── Thrift.py │ ├── __init__.py │ ├── protocol │ │ ├── TBinaryProtocol.py │ │ ├── TCompactProtocol.py │ │ ├── THeaderProtocol.py │ │ ├── TJSONProtocol.py │ │ ├── TMultiplexedProtocol.py │ │ ├── TProtocol.py │ │ ├── TProtocolDecorator.py │ │ ├── TSimpleJSONProtocol.py │ │ ├── __init__.py │ │ └── exceptions.py │ ├── server │ │ ├── TAsyncioServer.py │ │ ├── TCppServer.py │ │ ├── TServer.py │ │ └── __init__.py │ ├── transport │ │ ├── THeaderTransport.py │ │ ├── THttp2Client.py │ │ ├── THttpClient.py │ │ ├── TSSLSocket.py │ │ ├── TSSLSocketOverHttpTunnel.py │ │ ├── TSocket.py │ │ ├── TSocketOverHttpTunnel.py │ │ ├── TSocketTest.py │ │ ├── TTransport.py │ │ └── __init__.py │ └── util │ │ ├── BytesStrIO.py │ │ ├── Decorators.py │ │ ├── Recursive.py │ │ ├── Serializer.py │ │ ├── TCppServerTestManager.py │ │ ├── TCppServerTestManagerTest.py │ │ ├── TValidator.py │ │ ├── __init__.py │ │ ├── async_common.py │ │ ├── asyncio.py │ │ ├── converter.py │ │ ├── fuzzer.py │ │ ├── inspect.py │ │ ├── randomizer.py │ │ ├── remote.py │ │ └── type_inspect.py ├── gclient │ ├── __init__.py │ └── net │ │ ├── AuthResult.py │ │ ├── Connection.py │ │ ├── ConnectionPool.py │ │ ├── Session.py │ │ ├── SessionPool.py │ │ ├── __init__.py │ │ └── base.py ├── graph │ ├── GraphService-fuzzer │ ├── GraphService-remote │ ├── GraphService.py │ ├── __init__.py │ ├── constants.py │ └── ttypes.py ├── logger.py ├── mclient │ └── __init__.py ├── meta │ ├── MetaService-fuzzer │ ├── MetaService-remote │ ├── MetaService.py │ ├── __init__.py │ ├── constants.py │ └── ttypes.py ├── sclient │ ├── BaseResult.py │ ├── GraphStorageClient.py │ ├── ScanResult.py │ ├── __init__.py │ └── net │ │ └── __init__.py └── storage │ ├── GraphStorageService-fuzzer │ ├── GraphStorageService-remote │ ├── GraphStorageService.py │ ├── StorageAdminService-fuzzer │ ├── StorageAdminService-remote │ ├── StorageAdminService.py │ ├── __init__.py │ ├── constants.py │ └── ttypes.py ├── pdm.lock ├── pyproject.toml ├── setup.py └── tests ├── .env ├── conftest.py ├── docker-compose-ssl.yaml ├── docker-compose.yaml ├── secrets ├── client.cnf ├── client.crt ├── client.csr ├── client.key ├── root.cnf ├── root.crt ├── root.csr ├── root.key ├── root.srl ├── run.sh ├── server.cnf ├── server.crt ├── server.csr └── server.key ├── test_connection.py ├── test_data_from_server.py ├── test_data_type.py ├── test_graph_storage_client.py ├── test_meta_cache.py ├── test_parameter.py ├── test_pool.py ├── test_session.py ├── test_session_pool.py ├── 
test_ssl_connection.py └── test_ssl_pool.py /.github/workflows/check_label.yml: -------------------------------------------------------------------------------- 1 | name: Auto label 2 | 3 | on: 4 | issues: 5 | types: 6 | - reopened 7 | - opened 8 | - labeled 9 | - unlabeled 10 | - closed 11 | 12 | env: 13 | GH_PAT: ${{ secrets.GITHUB_TOKEN }} 14 | EVENT: ${{ toJSON(github.event)}} 15 | EVENT_NAME: ${{ github.event_name}} 16 | 17 | jobs: 18 | sync: 19 | name: auto label 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: HarrisChu/auto_label@v1 23 | -------------------------------------------------------------------------------- /.github/workflows/deploy_release.yaml: -------------------------------------------------------------------------------- 1 | name: Upload package to pypi 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | jobs: 9 | ci: 10 | runs-on: ubuntu-22.04 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | - uses: pdm-project/setup-pdm@v3 15 | with: 16 | python-version: '3.x' 17 | cache: true 18 | - name: Install dependencies 19 | run: pdm install 20 | - name: Test with pytest 21 | run: | 22 | docker compose -f tests/docker-compose.yaml up -d 23 | sleep 20 24 | pdm test 25 | - name: Test SSL connection with pytest 26 | run: | 27 | enable_ssl=true docker compose -f tests/docker-compose-ssl.yaml up -d 28 | sleep 20 29 | pdm test-ssl 30 | 31 | - name: Build and publish 32 | env: 33 | PDM_PUBLISH_USERNAME: ${{ secrets.PYPI_NAME }} 34 | PDM_PUBLISH_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 35 | run: pdm publish 36 | -------------------------------------------------------------------------------- /.github/workflows/doxygen.yml: -------------------------------------------------------------------------------- 1 | name: Generate API reference via Doxygen and push to GitHub Pages 2 | env: 3 | # Specify the doc version to which the API reference belongs 4 | doc_version: 3.6.0 5 | on: 6 | push: 7 | branches: 8 | # Remember to update the branch name when you create a new branch 9 | - master 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 0 # fetch all commits/branches for gitversion 20 | 21 | - name: Extract branch name 22 | run: echo "BRANCH_NAME=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_ENV 23 | 24 | - name: Install Doxygen 25 | run: | 26 | sudo apt-get update 27 | sudo apt-get install -y doxygen graphviz 28 | 29 | # Generate HTML files 30 | - name: Generate Documentation 31 | run: | 32 | echo "OUTPUT_DIRECTORY=$BRANCH_NAME" >> doxygen-config 33 | doxygen doxygen-config 34 | 35 | # Deploy the generated HTML files to the gh-pages branch 36 | - name: Deploy to gh-pages 37 | uses: JamesIves/github-pages-deploy-action@v4 38 | with: 39 | folder: ${{ env.BRANCH_NAME }}/html 40 | target-folder: ${{ env.BRANCH_NAME }} 41 | 42 | # - name: show gh-pages branch 43 | # run: | 44 | # git branch 45 | # git checkout . 
46 | # git checkout gh-pages 47 | 48 | # # Compresses HTML files into a tar.gz file 49 | # - name: compress api reference 50 | # run: | 51 | # tar -zcvf $BRANCH_NAME.tar.gz $BRANCH_NAME 52 | 53 | # - name: transfer api reference 54 | # uses: appleboy/scp-action@master 55 | # with: 56 | # host: 20.163.77.63 57 | # username: azureuser 58 | # password: ${{ secrets.ENSITE_PASSWORD }} 59 | # port: 404 60 | # source: $BRANCH_NAME.tar.gz 61 | # # Return error if the target doc version does not already exist 62 | # target: /var/www/ent-docs/${{ env.doc_version }}/ 63 | 64 | # - name: uncompress ap reference 65 | # uses: appleboy/ssh-action@master 66 | # with: 67 | # host: 20.163.77.63 68 | # username: azureuser 69 | # password: ${{ secrets.ENSITE_PASSWORD }} 70 | # port: 404 71 | # script: | 72 | # mkdir -p /var/www/ent-docs/${{ env.doc_version}}/api/python/ 73 | # tar -zxf /var/www/ent-docs/${{ env.doc_version}}/$BRANCH_NAME.tar.gz -C /var/www/ent-docs/${{ env.doc_version}}/api/python/ -------------------------------------------------------------------------------- /.github/workflows/pull-request-links.yaml: -------------------------------------------------------------------------------- 1 | # .github/workflows/pull-request-links.yaml 2 | 3 | name: readthedocs/actions 4 | on: 5 | pull_request_target: 6 | types: 7 | - opened 8 | # Execute this action only on PRs that touch 9 | # documentation files. 10 | paths: 11 | - "docs/**" 12 | 13 | permissions: 14 | pull-requests: write 15 | 16 | jobs: 17 | pull-request-links: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: readthedocs/actions/preview@v1 21 | with: 22 | project-slug: "nebulagraph-python" 23 | -------------------------------------------------------------------------------- /.github/workflows/run_test.yaml: -------------------------------------------------------------------------------- 1 | name: Test ci 2 | 3 | on: 4 | push: 5 | branches: [master,'release-**'] 6 | paths-ignore: 7 | - '**.md' 8 | - 'docs/**' 9 | - '.gitignore' 10 | - 'LICENSE' 11 | pull_request: 12 | branches: [master,'release-**'] 13 | paths-ignore: 14 | - '**.md' 15 | - 'docs/**' 16 | - '.gitignore' 17 | - 'LICENSE' 18 | schedule: 19 | - cron: "0 6 * * *" 20 | 21 | jobs: 22 | ci-pip-install-from-source: 23 | # This is to verify the setup.py as a mitigation for remain python 3.6.2+ capability 24 | runs-on: ubuntu-20.04 25 | strategy: 26 | max-parallel: 2 27 | matrix: 28 | python-version: [3.6, 3.7] 29 | steps: 30 | - uses: actions/checkout@v3 31 | - name: Set up Python ${{ matrix.python-version }} 32 | uses: actions/setup-python@v4 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | - name: Install nebulagraph-python from source and test dependencies 36 | run: | 37 | python -m pip install --upgrade pip 38 | # remove pyproject.toml to avoid pdm install 39 | rm pyproject.toml 40 | pip install . 
41 | pip install pip-tools pytest 42 | - name: Test with pytest 43 | run: | 44 | docker compose -f docker-compose.yaml up -d 45 | sleep 20 46 | pytest -s -v -k "not SSL" 47 | working-directory: tests 48 | 49 | 50 | build-lint-test: 51 | runs-on: ubuntu-22.04 52 | strategy: 53 | max-parallel: 2 54 | matrix: 55 | python-version: [3.7, 3.8, 3.9, '3.10', 3.11] 56 | 57 | steps: 58 | - name: Maximize runner space 59 | uses: easimon/maximize-build-space@master 60 | with: 61 | root-reserve-mb: 1024 62 | remove-dotnet: 'true' 63 | remove-android: 'true' 64 | remove-haskell: 'true' 65 | 66 | - uses: actions/checkout@v3 67 | - name: Set up Python ${{ matrix.python-version }} 68 | uses: pdm-project/setup-pdm@v3 69 | with: 70 | python-version: ${{ matrix.python-version }} 71 | cache: true 72 | 73 | - name: Install dependencies 74 | run: | 75 | python -m pip install --upgrade pip 76 | pip install . 77 | pdm install -G:dev 78 | pdm install -G:test 79 | - name: lint 80 | run: pdm fmt-check 81 | - name: Test with pytest 82 | run: | 83 | docker compose -f tests/docker-compose.yaml up -d 84 | sleep 20 85 | pdm test 86 | - name: Test SSL connection with pytest 87 | run: | 88 | enable_ssl=true docker compose -f tests/docker-compose-ssl.yaml up -d 89 | sleep 20 90 | pdm test-ssl 91 | - name: Upload Coverage to Codecov 92 | uses: codecov/codecov-action@v3 93 | with: 94 | files: coverage.xml 95 | 96 | example-test: 97 | runs-on: ubuntu-latest 98 | strategy: 99 | matrix: 100 | python-version: [3.11, 3.12] 101 | steps: 102 | - uses: actions/checkout@v3 103 | - name: Set up Python ${{ matrix.python-version }} 104 | uses: pdm-project/setup-pdm@v3 105 | with: 106 | python-version: ${{ matrix.python-version }} 107 | cache: true 108 | - name: Install dependencies 109 | run: | 110 | pip install . 111 | pip install prettytable pandas 112 | - name: Setup containers 113 | run: | 114 | docker compose -f tests/docker-compose.yaml up -d 115 | sleep 20 116 | - name: Test example 117 | run: | 118 | for f in example/*.py; do python3 "$f"; done 119 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | *.DS_Store 93 | 94 | # Spyder project settings 95 | .spyderproject 96 | .spyproject 97 | 98 | # Rope project settings 99 | .ropeproject 100 | 101 | # mkdocs documentation 102 | /site 103 | 104 | # mypy 105 | .mypy_cache/ 106 | .pyc 107 | 108 | # thrift file 109 | nebula/graph.thrift 110 | 111 | # ide 112 | .idea/ 113 | .vscode/ 114 | 115 | # CI data 116 | tests/data 117 | tests/logs 118 | .pdm-python 119 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | version: 2 6 | 7 | build: 8 | os: ubuntu-22.04 9 | tools: 10 | python: "3.11" 11 | 12 | # Build documentation in the "docs/" directory with Sphinx 13 | sphinx: 14 | configuration: docs/source/conf.py 15 | 16 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 17 | python: 18 | install: 19 | - requirements: docs/requirements.txt 20 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## v2.5.0(2021-08-17) 2 | Compatible with the v2.5.0 version of nebula-graph 3 | 4 | - feature 5 | - add TimeWrapper/DateTimeWrapper type to get timezone obtained from the server to calculate the local time 6 | - Session supports reconnecting to different graph services 7 | - bugfix 8 | - fix the interface values of Relationship and modify the interface propertys to properties https://github.com/vesoft-inc/nebula-python/pull/113 9 | - fix get offline host info from list_hosts https://github.com/vesoft-inc/nebula-python/pull/104 10 | - fix fbthrift timeout bug https://github.com/vesoft-inc/nebula-python/pull/126 11 | - incompatible 12 | - the ErrorCode define is changed, all ErrorCode is defined in nebula3.common.ttypes.ErrorCode 13 | 14 | ## v2.0.0(2021-03-23) 15 | Compatible with the v2.0.0 version of nebula-graph 16 | 17 | - New features 18 | - Support to use with nebula-graph2.0 19 | 20 | ## v2.0.0rc1(2021-01-06) 21 | Compatible with the v2.0.0-RC1 version of nebula-graph 22 | 23 | - New features 24 | - Support to scan vertexes and edges 25 | - Support more data type function 26 | 27 | ## v2.0.0-1(2020-11-30) 28 | Compatible with the v2.0.0-beta version of nebula-graph 29 | 30 | - New features 31 | - Support to use with nebula-graph2.0 32 | 
-------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "nebula3/common" 3 | - "nebula3/fbthrift" 4 | - "nebula3/meta" 5 | - "nebula3/storage" 6 | - "nebula3/graph" 7 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/build.sh: -------------------------------------------------------------------------------- 1 | pip install --user sphinx furo 2 | sphinx-apidoc -f -o source ../nebula3 \ 3 | ../nebula3/common/* \ 4 | ../nebula3/data/* \ 5 | ../nebula3/fbthrift/* \ 6 | ../nebula3/graph/* \ 7 | ../nebula3/mclient/* \ 8 | ../nebula3/meta/* \ 9 | ../nebula3/sclient/* \ 10 | ../nebula3/storage/* 11 | make clean 12 | make html 13 | -------------------------------------------------------------------------------- /docs/how-to-update-interface.md: -------------------------------------------------------------------------------- 1 | ## Update RPC interface 2 | 3 | NebulaGraph 3 uses thrift to define the RPC interface. The interface files are in the `src/interface` directory of the NebulaGraph repository. The interface files are used to generate the RPC interface code in different languages. 4 | How to update generated files when the thrift file changes in the repository `https://github.com/vesoft-inc/nebula`: 5 | 6 | - Download the thrift binary from OSS, which was built on Fedora30: 7 | 8 | ```bash 9 | wget https://oss-cdn.nebula-graph.com.cn/fbthrift_bin/thrift1 10 | ``` 11 | 12 | - Utilize the `thrift1` binary along with the thrift files located at `https://github.com/vesoft-inc/nebula/tree/master/src/interface` to generate the interface files. 13 | 14 | ```bash 15 | ./thrift1 --strict --allow-neg-enum-vals --gen "py" -o . common.thrift 16 | ./thrift1 --strict --allow-neg-enum-vals --gen "py" -o . graph.thrift 17 | ./thrift1 --strict --allow-neg-enum-vals --gen "py" -o . meta.thrift 18 | ./thrift1 --strict --allow-neg-enum-vals --gen "py" -o . storage.thrift 19 | ``` -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==7.2.6 2 | furo==2023.9.10 3 | six==1.16.0 4 | pytz==2023.3.post1 5 | -------------------------------------------------------------------------------- /docs/source/_autosummary/nebula3.gclient.rst: -------------------------------------------------------------------------------- 1 | nebula3.gclient 2 | =============== 3 | 4 | .. automodule:: nebula3.gclient 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | import os 6 | import sys 7 | 8 | sys.path.insert(0, os.path.abspath('../../')) 9 | 10 | # -- Project information ----------------------------------------------------- 11 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 12 | 13 | project = 'NebulaGraph Python' 14 | copyright = '2023, Nicole' 15 | author = 'Nicole' 16 | release = 'v3' 17 | 18 | # -- General configuration --------------------------------------------------- 19 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 20 | 21 | extensions = [ 22 | 'sphinx.ext.autodoc', 23 | 'sphinx.ext.autosummary' 24 | ] 25 | 26 | templates_path = ['_templates'] 27 | exclude_patterns = [] 28 | 29 | # -- Options for HTML output ------------------------------------------------- 30 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 31 | 32 | html_theme = 'furo' 33 | html_static_path = ['_static'] 34 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. nebula-python-doc documentation master file, created by 2 | sphinx-quickstart on Tue Sep 19 14:47:29 2023. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to NebulaGraph Python! 7 | ============================================= 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | .. 
autosummary:: 15 | :toctree: _autosummary 16 | 17 | nebula3.gclient 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`modindex` 24 | * :ref:`search` 25 | -------------------------------------------------------------------------------- /docs/source/modules.rst: -------------------------------------------------------------------------------- 1 | nebula3 2 | ======= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | nebula3 8 | -------------------------------------------------------------------------------- /docs/source/nebula3.gclient.net.rst: -------------------------------------------------------------------------------- 1 | nebula3.gclient.net package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | nebula3.gclient.net.AuthResult module 8 | ------------------------------------- 9 | 10 | .. automodule:: nebula3.gclient.net.AuthResult 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | nebula3.gclient.net.Connection module 16 | ------------------------------------- 17 | 18 | .. automodule:: nebula3.gclient.net.Connection 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | nebula3.gclient.net.ConnectionPool module 24 | ----------------------------------------- 25 | 26 | .. automodule:: nebula3.gclient.net.ConnectionPool 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | nebula3.gclient.net.Session module 32 | ---------------------------------- 33 | 34 | .. automodule:: nebula3.gclient.net.Session 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | nebula3.gclient.net.SessionPool module 40 | -------------------------------------- 41 | 42 | .. automodule:: nebula3.gclient.net.SessionPool 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | Module contents 48 | --------------- 49 | 50 | .. automodule:: nebula3.gclient.net 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /docs/source/nebula3.gclient.rst: -------------------------------------------------------------------------------- 1 | nebula3.gclient package 2 | ======================= 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | nebula3.gclient.net 11 | 12 | Module contents 13 | --------------- 14 | 15 | .. automodule:: nebula3.gclient 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | -------------------------------------------------------------------------------- /docs/source/nebula3.rst: -------------------------------------------------------------------------------- 1 | nebula3 package 2 | =============== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | nebula3.gclient 11 | 12 | Submodules 13 | ---------- 14 | 15 | nebula3.Config module 16 | --------------------- 17 | 18 | .. automodule:: nebula3.Config 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | nebula3.Exception module 24 | ------------------------ 25 | 26 | .. automodule:: nebula3.Exception 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | nebula3.logger module 32 | --------------------- 33 | 34 | .. automodule:: nebula3.logger 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. 
automodule:: nebula3 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /example/FormatResp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | 8 | 9 | from typing import Dict 10 | 11 | import pandas as pd 12 | import prettytable 13 | 14 | from nebula3.data.DataObject import Value, ValueWrapper 15 | from nebula3.data.ResultSet import ResultSet 16 | 17 | 18 | ################################ 19 | # Method 0 (Recommended) # 20 | # nebula3-python>=3.6.0 # 21 | ################################ 22 | def result_to_df_buildin(result: ResultSet) -> pd.DataFrame: 23 | """ 24 | build list for each column, and transform to dataframe 25 | """ 26 | assert result.is_succeeded() 27 | return result.as_data_frame() 28 | 29 | 30 | ################################ 31 | # Method 1 (Recommended) # 32 | # nebula3-python<=3.5.0 # 33 | ################################ 34 | def result_to_df(result: ResultSet) -> pd.DataFrame: 35 | """ 36 | build list for each column, and transform to dataframe 37 | """ 38 | assert result.is_succeeded() 39 | columns = result.keys() 40 | d: Dict[str, list] = {} 41 | for col_num in range(result.col_size()): 42 | col_name = columns[col_num] 43 | col_list = result.column_values(col_name) 44 | d[col_name] = [x.cast() for x in col_list] 45 | return pd.DataFrame(d) 46 | 47 | 48 | ################################ 49 | # Method 2 (Customize) # 50 | ################################ 51 | cast_as = { 52 | Value.NVAL: "as_null", 53 | Value.BVAL: "as_bool", 54 | Value.IVAL: "as_int", 55 | Value.FVAL: "as_double", 56 | Value.SVAL: "as_string", 57 | Value.LVAL: "as_list", 58 | Value.UVAL: "as_set", 59 | Value.MVAL: "as_map", 60 | Value.TVAL: "as_time", 61 | Value.DVAL: "as_date", 62 | Value.DTVAL: "as_datetime", 63 | Value.VVAL: "as_node", 64 | Value.EVAL: "as_relationship", 65 | Value.PVAL: "as_path", 66 | Value.GGVAL: "as_geography", 67 | Value.DUVAL: "as_duration", 68 | } 69 | 70 | 71 | def cast(val: ValueWrapper): 72 | _type = val._value.getType() 73 | if _type == Value.__EMPTY__: 74 | return None 75 | if _type in cast_as: 76 | return getattr(val, cast_as[_type])() 77 | if _type == Value.LVAL: 78 | return [x.cast() for x in val.as_list()] 79 | if _type == Value.UVAL: 80 | return {x.cast() for x in val.as_set()} 81 | if _type == Value.MVAL: 82 | return {k: v.cast() for k, v in val.as_map().items()} 83 | 84 | 85 | def print_resp(resp: ResultSet): 86 | assert resp.is_succeeded() 87 | output_table = prettytable.PrettyTable() 88 | output_table.field_names = resp.keys() 89 | for recode in resp: 90 | value_list = [] 91 | for col in recode: 92 | val = cast(col) 93 | value_list.append(val) 94 | output_table.add_row(value_list) 95 | print(output_table) 96 | -------------------------------------------------------------------------------- /example/GraphClientMultiThreadExample.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
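# This example shares a single ConnectionPool across several threads: each
# thread borrows its own session via get_session(), creates a per-thread
# space, inserts sample vertices and edges, runs a GO query, and finally
# releases the session back to the pool.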
7 | 8 | 9 | import threading 10 | import time 11 | 12 | from FormatResp import print_resp 13 | 14 | from nebula3.Config import Config 15 | from nebula3.gclient.net import ConnectionPool 16 | 17 | 18 | def main_test(): 19 | client = None 20 | try: 21 | space_name = "space_" + threading.current_thread().name 22 | print( 23 | "thread name: %s, space_name : %s" 24 | % (threading.current_thread().name, space_name) 25 | ) 26 | # Get one gclient 27 | client = connection_pool.get_session("root", "nebula") 28 | assert client is not None 29 | 30 | # Create space mySpace and schema 31 | resp = client.execute( 32 | "CREATE SPACE IF NOT EXISTS {} (vid_type=FIXED_STRING(30)); USE {};" 33 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 34 | "CREATE EDGE IF NOT EXISTS like(likeness double);".format( 35 | space_name, space_name 36 | ) 37 | ) 38 | assert resp.is_succeeded(), resp.error_msg() 39 | 40 | time.sleep(6) 41 | 42 | # Insert vertexes 43 | client.execute( 44 | "INSERT VERTEX person(name, age) VALUES " 45 | "'Bob':('Bob', 10), " 46 | "'Lily':('Lily', 9), " 47 | "'Tom':('Tom', 10), " 48 | "'Jerry':('Jerry', 13), " 49 | "'John':('John', 11)" 50 | ) 51 | 52 | assert resp.is_succeeded(), resp.error_msg() 53 | 54 | # Insert edges 55 | client.execute( 56 | "INSERT EDGE like(likeness) VALUES " 57 | "'Bob'->'Lily':(80.0), " 58 | "'Bob'->'Tom':(70.0), " 59 | "'Lily'->'Jerry':(84.0), " 60 | "'Tom'->'Jerry':(68.3), " 61 | "'Bob'->'John':(97.2)" 62 | ) 63 | 64 | # Query data 65 | query_resp = client.execute( 66 | 'GO FROM "Bob" OVER like YIELD $^.person.name, ' 67 | "$^.person.age, like.likeness" 68 | ) 69 | if not query_resp.is_succeeded(): 70 | print("Execute failed: %s" % query_resp.error_msg()) 71 | exit(1) 72 | 73 | # Print the result of query 74 | print( 75 | " \n====== The query result of thread[%s]======\n " 76 | % threading.current_thread().name 77 | ) 78 | print_resp(query_resp) 79 | 80 | except Exception as x: 81 | print(x) 82 | import traceback 83 | 84 | print(traceback.format_exc()) 85 | finally: 86 | if client is not None: 87 | client.release() 88 | 89 | 90 | if __name__ == "__main__": 91 | config = Config() 92 | config.max_connection_pool_size = 4 93 | 94 | # init connection pool 95 | connection_pool = ConnectionPool() 96 | assert connection_pool.init([("127.0.0.1", 9669), ("127.0.0.1", 9670)], config) 97 | 98 | # Use multi thread and reuse the session three times 99 | for count in range(0, 3): 100 | threads = list() 101 | for i in range(0, 4): 102 | threads.append( 103 | threading.Thread(target=main_test, name="thread{}".format(i)) 104 | ) 105 | 106 | for thread in threads: 107 | thread.start() 108 | 109 | for thread in threads: 110 | thread.join() 111 | 112 | # close connect pool 113 | connection_pool.close() 114 | -------------------------------------------------------------------------------- /example/GraphClientSimpleExample.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
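# This example covers the basic synchronous workflow: initialize a
# ConnectionPool, get a session, run a query via execute_json(), create a
# schema, insert vertices and edges, fetch properties, convert a subgraph
# result to a pandas DataFrame, round-trip it through CSV, and drop the space.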
7 | 8 | 9 | import json 10 | import time 11 | 12 | from FormatResp import print_resp, result_to_df_buildin, result_to_df 13 | import pandas as pd 14 | 15 | from nebula3.Config import Config 16 | from nebula3.gclient.net import ConnectionPool 17 | 18 | if __name__ == "__main__": 19 | client = None 20 | try: 21 | config = Config() 22 | config.max_connection_pool_size = 2 23 | # init connection pool 24 | connection_pool = ConnectionPool() 25 | assert connection_pool.init([("127.0.0.1", 9669)], config) 26 | 27 | # get session from the pool 28 | client = connection_pool.get_session("root", "nebula") 29 | assert client is not None 30 | 31 | # get the result in json format 32 | resp_json = client.execute_json("yield 1") 33 | json_obj = json.loads(resp_json) 34 | print(json.dumps(json_obj, indent=2, sort_keys=True)) 35 | 36 | client.execute( 37 | "CREATE SPACE IF NOT EXISTS test(vid_type=FIXED_STRING(30)); USE test;" 38 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 39 | "CREATE EDGE IF NOT EXISTS like (likeness double);" 40 | ) 41 | 42 | # insert data need to sleep after create schema 43 | time.sleep(6) 44 | 45 | # insert vertex 46 | resp = client.execute( 47 | 'INSERT VERTEX person(name, age) VALUES "Bob":("Bob", 10), "Lily":("Lily", 9)' 48 | ) 49 | assert resp.is_succeeded(), resp.error_msg() 50 | 51 | # insert edges 52 | resp = client.execute('INSERT EDGE like(likeness) VALUES "Bob"->"Lily":(80.0);') 53 | assert resp.is_succeeded(), resp.error_msg() 54 | 55 | resp = client.execute('FETCH PROP ON person "Bob" YIELD vertex as node') 56 | assert resp.is_succeeded(), resp.error_msg() 57 | print_resp(resp) 58 | 59 | resp = client.execute('FETCH PROP ON like "Bob"->"Lily" YIELD edge as e') 60 | assert resp.is_succeeded(), resp.error_msg() 61 | print_resp(resp) 62 | 63 | # query data 64 | resp = client.execute( 65 | 'GET SUBGRAPH WITH PROP 2 STEPS FROM "Bob" YIELD VERTICES AS nodes, EDGES AS relationships;' 66 | ) 67 | df = result_to_df_buildin(resp) 68 | df_1 = result_to_df(resp) 69 | 70 | print("Testing pandas dataframe operations") 71 | print(df_1) 72 | 73 | # Convert the dataframe 'df' into a CSV file 74 | df.to_csv('subgraph_data.csv', index=False) 75 | print("Dataframe 'df' has been exported to 'subgraph_data.csv'.") 76 | 77 | # Read the CSV file back into a dataframe 78 | df_csv = pd.read_csv('subgraph_data.csv') 79 | print("CSV file 'subgraph_data.csv' has been read into dataframe 'df_csv'.") 80 | 81 | # Display the first 5 rows of the dataframe 82 | print("Displaying the first 5 rows of dataframe 'df_csv':") 83 | print(df_csv.head()) 84 | 85 | # drop space 86 | resp = client.execute("DROP SPACE test") 87 | assert resp.is_succeeded(), resp.error_msg() 88 | 89 | print("Example finished") 90 | 91 | except Exception: 92 | import traceback 93 | 94 | print(traceback.format_exc()) 95 | if client is not None: 96 | client.release() 97 | exit(1) 98 | -------------------------------------------------------------------------------- /example/GraphVis.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2024 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
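# This example fetches a 2-step subgraph from the "nba" space and uses
# ResultSet.dict_for_vis() to dump the nodes and edges as JSON (data.json),
# which the accompanying example/apache_echarts.html page can render.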
7 | 8 | 9 | import json 10 | 11 | 12 | from nebula3.gclient.net import ConnectionPool 13 | 14 | 15 | def get_node_list_and_edge_list_json(result): 16 | # init connection pool 17 | connection_pool = ConnectionPool() 18 | assert connection_pool.init([("127.0.0.1", 9669)]) 19 | 20 | # get session from the pool 21 | client = connection_pool.get_session("root", "nebula") 22 | assert client is not None 23 | 24 | client.execute("USE nba") 25 | 26 | result = client.execute( 27 | 'GET SUBGRAPH WITH PROP 2 STEPS FROM "player101" YIELD VERTICES AS nodes, EDGES AS relationships;' 28 | ) 29 | 30 | assert result.is_succeeded(), result.error_msg() 31 | 32 | data = result.dict_for_vis() 33 | 34 | json_data = json.dumps(data, indent=2, sort_keys=True) 35 | 36 | # save the json data to a file 37 | with open('data.json', 'w') as f: 38 | f.write(json_data) 39 | 40 | # Check the data.json file to see the result 41 | 42 | # See example/apache_echarts.html for a reference implementation of the visualization 43 | # using Apache ECharts 44 | -------------------------------------------------------------------------------- /example/Params.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Any, Dict, List 3 | 4 | from nebula3.gclient.net import ConnectionPool 5 | from nebula3.Config import Config 6 | from nebula3.common import ttypes 7 | from nebula3.data.ResultSet import ResultSet 8 | 9 | # define a config 10 | config = Config() 11 | connection_pool = ConnectionPool() 12 | connection_pool.init([("127.0.0.1", 9669)], config) 13 | 14 | # get session from the connection pool 15 | client = connection_pool.get_session("root", "nebula") 16 | client.execute("CREATE SPACE IF NOT EXISTS test(vid_type=FIXED_STRING(30));") 17 | 18 | 19 | time.sleep( 20 | 6 21 | ) # two cycles of heartbeat; with the default settings of a NebulaGraph cluster, we will need to sleep 20s 22 | 23 | client.execute( 24 | "USE test;" 25 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 26 | "CREATE EDGE IF NOT EXISTS like (likeness double);" 27 | ) 28 | 29 | # prepare NebulaGraph raw Value-typed parameters 30 | 31 | bval = ttypes.Value() 32 | bval.set_bVal(True) 33 | ival = ttypes.Value() 34 | ival.set_iVal(3) 35 | sval = ttypes.Value() 36 | sval.set_sVal("Bob") 37 | 38 | params = {"p1": ival, "p2": bval, "p3": sval} 39 | 40 | 41 | # we can pass raw Value-typed params like these; they will be evaluated on the server side: 42 | resp = client.execute_parameter( 43 | "RETURN abs($p1)+3 AS col1, (toBoolean($p2) AND false) AS col2, toLower($p3)+1 AS col3", 44 | params, 45 | ) 46 | 47 | # It may not be dev-friendly to prepare e.g. a list of string typed params; the NebulaGraph Python client also supports passing primitive typed params. 48 | 49 | params_primitive = { 50 | "p1": 3, 51 | "p2": True, 52 | "p3": "Bob", 53 | "p4": ["Bob", "Lily"], 54 | } 55 | 56 | resp = client.execute_py( 57 | "RETURN abs($p1)+3 AS col1, (toBoolean($p2) and false) AS col2, toLower($p3)+1 AS col3", 58 | params_primitive, 59 | ) 60 | resp = client.execute_py( 61 | "MATCH (v) WHERE id(v) in $p4 RETURN id(v) AS vertex_id", 62 | params_primitive, 63 | ) 64 | -------------------------------------------------------------------------------- /example/ScanVertexEdgeExample.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved.
5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | 8 | 9 | import random 10 | import time 11 | 12 | from nebula3.Config import Config 13 | from nebula3.gclient.net import ConnectionPool 14 | from nebula3.mclient import MetaCache 15 | from nebula3.sclient.GraphStorageClient import GraphStorageClient 16 | 17 | 18 | def prepare_data(): 19 | config = Config() 20 | config.max_connection_pool_size = 1 21 | # init connection pool 22 | connection_pool = ConnectionPool() 23 | # the graphd server's address 24 | assert connection_pool.init([("127.0.0.1", 9671)], config) 25 | client = connection_pool.get_session("root", "nebula") 26 | client.execute( 27 | "CREATE SPACE IF NOT EXISTS ScanSpace(" 28 | "PARTITION_NUM=10," 29 | "vid_type=FIXED_STRING(20));" 30 | "USE ScanSpace;" 31 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 32 | "CREATE EDGE IF NOT EXISTS friend(start int, end int);" 33 | ) 34 | time.sleep(5) 35 | 36 | for id in range(20): 37 | vid = "person" + str(id) 38 | cmd = "INSERT VERTEX person(name, age) " 'VALUES "{}":("{}", {})'.format( 39 | vid, vid, id 40 | ) 41 | client.execute(cmd) 42 | for id in range(20): 43 | src_id = "person" + str(id) 44 | dst_id = "person" + str(20 - id) 45 | start = random.randint(2000, 2010) 46 | end = random.randint(2010, 2020) 47 | cmd = "INSERT EDGE friend(start, end) " 'VALUES "{}"->"{}":({}, {})'.format( 48 | src_id, dst_id, start, end 49 | ) 50 | client.execute(cmd) 51 | client.release() 52 | connection_pool.close() 53 | 54 | 55 | def scan_person_vertex(graph_storage_client): 56 | resp = graph_storage_client.scan_vertex( 57 | space_name="ScanSpace", tag_name="person", limit=1 58 | ) 59 | print("======== Scan vertexes in ScanSpace ======") 60 | while resp.has_next(): 61 | result = resp.next() 62 | if result is not None: 63 | for vertex_data in result: 64 | print(vertex_data) 65 | 66 | 67 | def scan_person_edge(graph_storage_client): 68 | resp = graph_storage_client.scan_edge( 69 | space_name="ScanSpace", edge_name="friend", limit=100 70 | ) 71 | print("======== Scan edges in ScanSpace ======") 72 | while resp.has_next(): 73 | result = resp.next() 74 | if result is not None: 75 | for edge_data in result: 76 | print(edge_data) 77 | 78 | 79 | """ 80 | The scan result 81 | ======== Scan vertexes in ScanSpace ====== 82 | ('person11' :person{'name': "person11", 'age': 11}) 83 | ('person16' :person{'name': "person16", 'age': 16}) 84 | ('person9' :person{'name': "person9", 'age': 9}) 85 | ('person10' :person{'name': "person10", 'age': 10}) 86 | ('person15' :person{'name': "person15", 'age': 15}) 87 | ('person0' :person{'name': "person0", 'age': 0}) 88 | ('person2' :person{'name': "person2", 'age': 2}) 89 | ('person13' :person{'name': "person13", 'age': 13}) 90 | ('person18' :person{'name': "person18", 'age': 18}) 91 | ('person6' :person{'name': "person6", 'age': 6}) 92 | ('person7' :person{'name': "person7", 'age': 7}) 93 | ('person12' :person{'name': "person12", 'age': 12}) 94 | ('person17' :person{'name': "person17", 'age': 17}) 95 | ('person5' :person{'name': "person5", 'age': 5}) 96 | ('person8' :person{'name': "person8", 'age': 8}) 97 | ('person4' :person{'name': "person4", 'age': 4}) 98 | ('person1' :person{'name': "person1", 'age': 1}) 99 | ('person14' :person{'name': "person14", 'age': 14}) 100 | ('person19' :person{'name': "person19", 'age': 19}) 101 | ('person3' :person{'name': "person3", 'age': 3}) 102 | ======== Scan edges in ScanSpace ====== 103 | (person4)-[:friend@0{'start': 2000, 'end': 2015}]->(person16) 104 | 
(person1)-[:friend@0{'start': 2002, 'end': 2020}]->(person19) 105 | (person14)-[:friend@0{'start': 2008, 'end': 2020}]->(person6) 106 | (person19)-[:friend@0{'start': 2009, 'end': 2013}]->(person1) 107 | (person3)-[:friend@0{'start': 2010, 'end': 2011}]->(person17) 108 | (person11)-[:friend@0{'start': 2001, 'end': 2017}]->(person9) 109 | (person16)-[:friend@0{'start': 2007, 'end': 2014}]->(person4) 110 | (person9)-[:friend@0{'start': 2001, 'end': 2017}]->(person11) 111 | (person10)-[:friend@0{'start': 2009, 'end': 2020}]->(person10) 112 | (person15)-[:friend@0{'start': 2002, 'end': 2018}]->(person5) 113 | (person0)-[:friend@0{'start': 2008, 'end': 2017}]->(person20) 114 | (person2)-[:friend@0{'start': 2009, 'end': 2012}]->(person18) 115 | (person13)-[:friend@0{'start': 2003, 'end': 2012}]->(person7) 116 | (person18)-[:friend@0{'start': 2004, 'end': 2012}]->(person2) 117 | (person6)-[:friend@0{'start': 2001, 'end': 2017}]->(person14) 118 | (person7)-[:friend@0{'start': 2009, 'end': 2015}]->(person13) 119 | (person12)-[:friend@0{'start': 2007, 'end': 2010}]->(person8) 120 | (person17)-[:friend@0{'start': 2008, 'end': 2013}]->(person3) 121 | (person5)-[:friend@0{'start': 2005, 'end': 2015}]->(person15) 122 | (person8)-[:friend@0{'start': 2000, 'end': 2019}]->(person12) 123 | """ 124 | 125 | if __name__ == "__main__": 126 | meta_cache = None 127 | graph_storage_client = None 128 | try: 129 | # the metad servers's address 130 | meta_cache = MetaCache( 131 | [("172.28.1.1", 9559), ("172.28.1.2", 9559), ("172.28.1.3", 9559)], 50000 132 | ) 133 | graph_storage_client = GraphStorageClient(meta_cache) 134 | graph_storage_client.set_user_passwd("root", "nebula") 135 | prepare_data() 136 | scan_person_vertex(graph_storage_client) 137 | scan_person_edge(graph_storage_client) 138 | 139 | except Exception: 140 | import traceback 141 | 142 | print(traceback.format_exc()) 143 | if graph_storage_client is not None: 144 | graph_storage_client.close() 145 | exit(1) 146 | finally: 147 | if graph_storage_client is not None: 148 | graph_storage_client.close() 149 | if meta_cache is not None: 150 | meta_cache.close() 151 | -------------------------------------------------------------------------------- /example/SessionPoolExample.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2022 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
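# This example first creates the target space over a raw Connection, then
# initializes a SessionPool bound to that space and runs schema/DML
# statements through it; session creation and reuse are handled by the pool.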
7 | 8 | 9 | import time 10 | 11 | from FormatResp import print_resp 12 | 13 | from nebula3.common.ttypes import ErrorCode 14 | from nebula3.Config import SessionPoolConfig 15 | from nebula3.gclient.net import Connection 16 | from nebula3.gclient.net.SessionPool import SessionPool 17 | 18 | if __name__ == "__main__": 19 | ip = "127.0.0.1" 20 | port = 9669 21 | 22 | try: 23 | config = SessionPoolConfig() 24 | 25 | # prepare space 26 | conn = Connection() 27 | conn.open(ip, port, 1000) 28 | auth_result = conn.authenticate("root", "nebula") 29 | assert auth_result.get_session_id() != 0 30 | resp = conn.execute( 31 | auth_result._session_id, 32 | "CREATE SPACE IF NOT EXISTS session_pool_test(vid_type=FIXED_STRING(30))", 33 | ) 34 | assert resp.error_code == ErrorCode.SUCCEEDED 35 | # insert data need to sleep after create schema 36 | time.sleep(10) 37 | 38 | # init session pool 39 | session_pool = SessionPool("root", "nebula", "session_pool_test", [(ip, port)]) 40 | assert session_pool.init(config) 41 | 42 | # add schema 43 | resp = session_pool.execute( 44 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 45 | "CREATE EDGE like (likeness double);" 46 | ) 47 | 48 | time.sleep(6) 49 | 50 | # insert vertex 51 | resp = session_pool.execute( 52 | 'INSERT VERTEX person(name, age) VALUES "Bob":("Bob", 10), "Lily":("Lily", 9)' 53 | ) 54 | assert resp.is_succeeded(), resp.error_msg() 55 | 56 | # insert edges 57 | resp = session_pool.execute( 58 | 'INSERT EDGE like(likeness) VALUES "Bob"->"Lily":(80.0);' 59 | ) 60 | assert resp.is_succeeded(), resp.error_msg() 61 | 62 | resp = session_pool.execute('FETCH PROP ON person "Bob" YIELD vertex as node') 63 | assert resp.is_succeeded(), resp.error_msg() 64 | print_resp(resp) 65 | 66 | resp = session_pool.execute('FETCH PROP ON like "Bob"->"Lily" YIELD edge as e') 67 | assert resp.is_succeeded(), resp.error_msg() 68 | print_resp(resp) 69 | 70 | # drop space 71 | conn.execute( 72 | auth_result._session_id, 73 | "DROP SPACE session_pool_test", 74 | ) 75 | 76 | print("Example finished") 77 | 78 | except Exception: 79 | import traceback 80 | 81 | print(traceback.format_exc()) 82 | exit(1) 83 | -------------------------------------------------------------------------------- /nebula3/Config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | 8 | 9 | import ssl 10 | 11 | 12 | class Config(object): 13 | # the min connection always in pool 14 | min_connection_pool_size = 0 15 | # the max connection in pool 16 | max_connection_pool_size = 10 17 | # connection or execute timeout, unit ms, 0 means no timeout 18 | timeout = 0 19 | # 0 means will never close the idle connection, unit ms, 20 | idle_time = 0 21 | # the interval to check idle time connection, unit second, -1 means no check 22 | interval_check = -1 23 | # use http2 or not 24 | use_http2 = False 25 | # headers for http2, dict type 26 | http_headers = None 27 | 28 | 29 | class SSL_config(object): 30 | """configs used to Initialize a TSSLSocket. 31 | @ ssl_version(int) protocol version. see ssl module. If none is 32 | specified, we will default to the most 33 | reasonably secure and compatible configuration 34 | if possible. 35 | For Python versions >= 2.7.9, we will default 36 | to at least TLS 1.1. 
37 | For Python versions < 2.7.9, we can only 38 | default to TLS 1.0, which is the best that 39 | Python guarantees to offers at this version. 40 | If you specify ssl.PROTOCOL_SSLv23, and 41 | the OpenSSL linked with Python is new enough, 42 | it is possible for a TLS 1.2 connection be 43 | established; however, there is no way in 44 | < Python 2.7.9 to explicitly disable SSLv2 45 | and SSLv3. For that reason, we default to 46 | TLS 1.0. 47 | 48 | @ cert_reqs(int) whether to verify peer certificate. see ssl 49 | module. 50 | 51 | @ ca_certs(str) filename containing trusted root certs. 52 | 53 | @ verify_name if False, no peer name validation is performed 54 | if True, verify subject name of peer vs 'host' 55 | if a str, verify subject name of peer vs given 56 | str 57 | 58 | @ keyfile filename containing the client's private key 59 | 60 | @ certfile filename containing the client's cert and 61 | optionally the private key 62 | 63 | @ allow_weak_ssl_versions(bool) By default, we try to disable older 64 | protocol versions. Only set this 65 | if you know what you are doing. 66 | """ 67 | 68 | unix_socket = None 69 | ssl_version = None 70 | cert_reqs = ssl.CERT_NONE 71 | ca_certs = None 72 | verify_name = False 73 | keyfile = None 74 | certfile = None 75 | allow_weak_ssl_versions = False 76 | 77 | 78 | class SessionPoolConfig(object): 79 | """The configs for the session pool 80 | @ timeout(int): the timeout of the session 81 | @ idle_time(int): the idle time of the session 82 | @ max_size(int): the max size of the session 83 | @ min_size(int): the min size of the session 84 | @ interval_check(int): the interval to check the idle time of the session 85 | """ 86 | 87 | timeout = 0 88 | idle_time = 0 89 | max_size = 30 90 | min_size = 1 91 | interval_check = -1 92 | # use http2 or not 93 | use_http2 = False 94 | # headers for http2, dict type 95 | http_headers = None 96 | -------------------------------------------------------------------------------- /nebula3/Exception.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
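# Exceptions raised by the client: data-access errors (out-of-range index,
# invalid key or value type), auth/session/connection failures, meta-cache
# lookup misses (tag/edge/space/partition not found), invalid hostnames,
# IO errors, and client/server version incompatibility.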
7 | 8 | 9 | class OutOfRangeException(Exception): 10 | def __init__(self): 11 | Exception.__init__(self) 12 | self.message = 'list index out of range' 13 | 14 | 15 | class InvalidKeyException(Exception): 16 | def __init__(self, message): 17 | Exception.__init__(self, message) 18 | self.message = "KeyError: `{}'".format(message) 19 | 20 | 21 | class InvalidValueTypeException(Exception): 22 | def __init__(self, message): 23 | Exception.__init__(self, message) 24 | self.message = "Invalid value type: `{}'".format(message) 25 | 26 | 27 | class AuthFailedException(Exception): 28 | def __init__(self, message): 29 | Exception.__init__(self, message) 30 | self.message = 'Auth failed: {}'.format(message) 31 | 32 | 33 | class TagNotFoundException(Exception): 34 | def __init__(self, name): 35 | Exception.__init__(self) 36 | self.message = 'Tag:{} not found'.format(name) 37 | 38 | 39 | class EdgeNotFoundException(Exception): 40 | def __init__(self, name): 41 | Exception.__init__(self) 42 | self.message = 'Edge:{} not found'.format(name) 43 | 44 | 45 | class SpaceNotFoundException(Exception): 46 | def __init__(self, name): 47 | Exception.__init__(self, name) 48 | self.message = 'Space:{} not found'.format(name) 49 | 50 | 51 | class PartNotFoundException(Exception): 52 | def __init__(self, part): 53 | Exception.__init__(self) 54 | self.message = 'Partition:{} not found'.format(part) 55 | 56 | 57 | class NotValidConnectionException(Exception): 58 | def __init__(self): 59 | Exception.__init__(self) 60 | self.message = 'No extra connection' 61 | 62 | 63 | class NoValidSessionException(Exception): 64 | def __init__(self, message): 65 | Exception.__init__(self, message) 66 | self.message = 'Failed to get a valid session from the pool: {}'.format(message) 67 | 68 | 69 | class InValidHostname(Exception): 70 | def __init__(self, message): 71 | Exception.__init__(self, message) 72 | self.message = 'Invalid hostname: {}'.format(message) 73 | 74 | 75 | class SessionException(Exception): 76 | E_SESSION_INVALID = -1002 77 | E_SESSION_TIMEOUT = -1003 78 | 79 | def __init__(self, code=E_SESSION_INVALID, message=None): 80 | Exception.__init__(self, message) 81 | self.type = code 82 | self.message = message 83 | 84 | 85 | class ExecutionErrorException(Exception): 86 | E_EXECUTION_ERROR = -1005 87 | 88 | def __init__(self, message=None): 89 | Exception.__init__(self, message) 90 | self.type = self.E_EXECUTION_ERROR 91 | self.message = message 92 | 93 | 94 | class IOErrorException(Exception): 95 | E_UNKNOWN = 0 96 | E_ALL_BROKEN = 1 97 | E_CONNECT_BROKEN = 2 98 | E_TIMEOUT = 3 99 | E_NOT_OPEN = 4 100 | 101 | def __init__(self, code=E_UNKNOWN, message=None): 102 | Exception.__init__(self, message) 103 | self.type = code 104 | self.message = message 105 | 106 | 107 | class ClientServerIncompatibleException(Exception): 108 | def __init__(self, message): 109 | Exception.__init__( 110 | self, 111 | f'Current client is not compatible with the remote server, please check the version: {message}', 112 | ) 113 | -------------------------------------------------------------------------------- /nebula3/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
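# The assignment below makes the thrift-generated Value type hashable by
# delegating to the hash of its underlying payload, so Value instances can be
# used in sets and as dict keys.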
7 | 8 | 9 | from nebula3.common.ttypes import Value 10 | 11 | Value.__hash__ = lambda self: self.value.__hash__() 12 | -------------------------------------------------------------------------------- /nebula3/common/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | __all__ = ['ttypes', 'constants'] 8 | -------------------------------------------------------------------------------- /nebula3/common/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | 8 | from __future__ import absolute_import 9 | import sys 10 | from nebula3.fbthrift.util.Recursive import fix_spec 11 | from nebula3.fbthrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef 12 | from nebula3.fbthrift.protocol.TProtocol import TProtocolException 13 | 14 | 15 | 16 | from .ttypes import UTF8STRINGS, NullType, PropertyType, ErrorCode, SchemaID, Date, Time, DateTime, Value, NList, NMap, NSet, Row, DataSet, Coordinate, Point, LineString, Polygon, Geography, Tag, Vertex, Edge, Step, Path, HostAddr, KeyValue, Duration, LogInfo, DirInfo, CheckpointInfo, LogEntry, ClusterID, GraphSpaceID, PartitionID, TagID, EdgeType, EdgeRanking, LogID, TermID, Timestamp, IndexID, Port, SessionID, ExecutionPlanID 17 | 18 | version = "3.0.0" 19 | 20 | -------------------------------------------------------------------------------- /nebula3/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vesoft-inc/nebula-python/ba345a6e26e480201c7abe6314687f744b12bb1b/nebula3/data/__init__.py -------------------------------------------------------------------------------- /nebula3/fbthrift/TMultiplexedProcessor.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | import sys 23 | 24 | from nebula3.fbthrift.Thrift import TProcessor, TMessageType, TException 25 | from nebula3.fbthrift.protocol import TProtocolDecorator, TMultiplexedProtocol 26 | 27 | class TMultiplexedProcessor(TProcessor): 28 | def __init__(self): 29 | self.services = {} 30 | 31 | def registerProcessor(self, serviceName, processor): 32 | self.services[serviceName] = processor 33 | 34 | def setEventHandler(self, event_handler, serviceName=None): 35 | """ Set event handler for a service. If serviceName is None, 36 | set event handler for all services""" 37 | if serviceName is not None: 38 | if not serviceName in self.services: 39 | raise TException("Cannot set event handler for service " + 40 | serviceName + ": no such service") 41 | else: 42 | self.services[serviceName].setEventHandler(event_handler) 43 | else: 44 | for processor in self.services.values(): 45 | processor.setEventHandler(event_handler) 46 | 47 | def process(self, iprot, oprot, server_ctx=None): 48 | (name, type, seqid) = iprot.readMessageBegin() 49 | if type != TMessageType.CALL and type != TMessageType.ONEWAY: 50 | raise TException("TMultiplex protocol only supports CALL & ONEWAY") 51 | 52 | if sys.version_info[0] >= 3 and isinstance(name, bytes): 53 | name = name.decode('utf-8') 54 | index = name.find(TMultiplexedProtocol.SEPARATOR) 55 | if index < 0: 56 | raise TException("Service name not found in message name: " + 57 | name + ". Did you forget to use TMultiplexProtocol " + 58 | "in your client?") 59 | 60 | serviceName = name[0:index] 61 | call = name[index + len(TMultiplexedProtocol.SEPARATOR):] 62 | if sys.version_info[0] >= 3: 63 | call = call.encode('utf-8') 64 | if not serviceName in self.services: 65 | raise TException("Service name not found: " + serviceName + 66 | ". Did you forget to call registerProcessor()?") 67 | 68 | standardMessage = ( 69 | call, 70 | type, 71 | seqid 72 | ) 73 | return self.services[serviceName].process(StoredMessageProtocol(iprot, 74 | standardMessage), oprot, server_ctx) 75 | 76 | 77 | class StoredMessageProtocol(TProtocolDecorator.TProtocolDecorator): 78 | def __init__(self, protocol, messageBegin): 79 | TProtocolDecorator.TProtocolDecorator.__init__(self, protocol) 80 | self.messageBegin = messageBegin 81 | 82 | def readMessageBegin(self): 83 | return self.messageBegin 84 | -------------------------------------------------------------------------------- /nebula3/fbthrift/TSCons.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
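# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: TMultiplexedProcessor above routes calls by a "Service:method" name
# prefix written by TMultiplexedProtocol (defined later in this package).
# The sketch shows only the client-side framing with in-memory transports;
# the service name "Calculator" and method "add" are placeholders.
from nebula3.fbthrift.transport import TTransport
from nebula3.fbthrift.protocol import TBinaryProtocol
from nebula3.fbthrift.protocol.TMultiplexedProtocol import TMultiplexedProtocol
from nebula3.fbthrift.Thrift import TMessageType

buf = TTransport.TMemoryBuffer()
base = TBinaryProtocol.TBinaryProtocol(buf)
muxed = TMultiplexedProtocol(base, 'Calculator')
muxed.writeMessageBegin('add', TMessageType.CALL, 0)   # wire name: "Calculator:add"
# On the server side, a TMultiplexedProcessor with
# registerProcessor('Calculator', calculator_processor) strips the prefix
# and dispatches to the registered processor.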
14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | from os import path 21 | from SCons.Builder import Builder 22 | 23 | def scons_env(env, add=''): 24 | opath = path.dirname(path.abspath('$TARGET')) 25 | lstr = 'thrift --gen cpp -o ' + opath + ' ' + add + ' $SOURCE' 26 | cppbuild = Builder(action=lstr) 27 | env.Append(BUILDERS={'ThriftCpp': cppbuild}) 28 | 29 | def gen_cpp(env, dir, file): 30 | scons_env(env) 31 | suffixes = ['_data.h', '_data.cpp', '_types.h', '_types.cpp'] 32 | targets = ['gen-cpp' + file + s for s in suffixes] 33 | return env.ThriftCpp(targets, dir + file + '.thrift') 34 | -------------------------------------------------------------------------------- /nebula3/fbthrift/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | __all__ = ['Thrift', 'TSCons'] 16 | -------------------------------------------------------------------------------- /nebula3/fbthrift/protocol/TMultiplexedProtocol.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.Thrift import TMessageType 23 | from nebula3.fbthrift.protocol import TProtocolDecorator 24 | 25 | SEPARATOR = ":" 26 | 27 | class TMultiplexedProtocol(TProtocolDecorator.TProtocolDecorator): 28 | def __init__(self, protocol, serviceName): 29 | TProtocolDecorator.TProtocolDecorator.__init__(self, protocol) 30 | self.serviceName = serviceName 31 | 32 | def writeMessageBegin(self, name, type, seqid): 33 | if type == TMessageType.CALL or type == TMessageType.ONEWAY: 34 | self.protocol.writeMessageBegin( 35 | self.serviceName + SEPARATOR + name, 36 | type, 37 | seqid 38 | ) 39 | else: 40 | self.protocol.writeMessageBegin(name, type, seqid) 41 | -------------------------------------------------------------------------------- /nebula3/fbthrift/protocol/TProtocol.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.Thrift import * 23 | 24 | class TProtocolException(TException): 25 | 26 | """Custom Protocol Exception class""" 27 | 28 | UNKNOWN = 0 29 | INVALID_DATA = 1 30 | NEGATIVE_SIZE = 2 31 | SIZE_LIMIT = 3 32 | BAD_VERSION = 4 33 | INVALID_PROTOCOL = 5 34 | MISSING_REQUIRED_FIELD = 6 35 | 36 | def __init__(self, type=UNKNOWN, message=None): 37 | TException.__init__(self, message) 38 | self.type = type 39 | 40 | class TProtocolBase: 41 | 42 | """Base class for Thrift protocol driver.""" 43 | 44 | def __init__(self, trans): 45 | self.trans = trans 46 | 47 | def writeMessageBegin(self, name, ttype, seqid): 48 | pass 49 | 50 | def writeMessageEnd(self): 51 | pass 52 | 53 | def writeStructBegin(self, name): 54 | pass 55 | 56 | def writeStructEnd(self): 57 | pass 58 | 59 | def writeUnionBegin(self, name): 60 | self.writeStructBegin(name) 61 | 62 | def writeUnionEnd(self): 63 | self.writeStructEnd() 64 | 65 | def writeFieldBegin(self, name, type, id): 66 | pass 67 | 68 | def writeFieldEnd(self): 69 | pass 70 | 71 | def writeFieldStop(self): 72 | pass 73 | 74 | def writeMapBegin(self, ktype, vtype, size): 75 | pass 76 | 77 | def writeMapEnd(self): 78 | pass 79 | 80 | def writeListBegin(self, etype, size): 81 | pass 82 | 83 | def writeListEnd(self): 84 | pass 85 | 86 | def writeSetBegin(self, etype, size): 87 | pass 88 | 89 | def writeSetEnd(self): 90 | pass 91 | 92 | def writeBool(self, bool_val): 93 | pass 94 | 95 | def writeByte(self, byte): 96 | pass 97 | 98 | def writeI16(self, i16): 99 | pass 100 | 101 | def writeI32(self, i32): 102 | pass 103 | 104 | def writeI64(self, i64): 
105 | pass 106 | 107 | def writeDouble(self, dub): 108 | pass 109 | 110 | def writeFloat(self, flt): 111 | pass 112 | 113 | def writeString(self, str): 114 | pass 115 | 116 | def readMessageBegin(self): 117 | pass 118 | 119 | def readMessageEnd(self): 120 | pass 121 | 122 | def readStructBegin(self): 123 | pass 124 | 125 | def readStructEnd(self): 126 | pass 127 | 128 | def readFieldBegin(self): 129 | pass 130 | 131 | def readFieldEnd(self): 132 | pass 133 | 134 | def readMapBegin(self): 135 | pass 136 | 137 | def readMapEnd(self): 138 | pass 139 | 140 | def readListBegin(self): 141 | pass 142 | 143 | def readListEnd(self): 144 | pass 145 | 146 | def readSetBegin(self): 147 | pass 148 | 149 | def readSetEnd(self): 150 | pass 151 | 152 | def readBool(self): 153 | pass 154 | 155 | def readByte(self): 156 | pass 157 | 158 | def readI16(self): 159 | pass 160 | 161 | def readI32(self): 162 | pass 163 | 164 | def readI64(self): 165 | pass 166 | 167 | def readDouble(self): 168 | pass 169 | 170 | def readFloat(self): 171 | pass 172 | 173 | def readString(self): 174 | pass 175 | 176 | def skip(self, type): 177 | if type == TType.BOOL: 178 | self.readBool() 179 | elif type == TType.BYTE: 180 | self.readByte() 181 | elif type == TType.I16: 182 | self.readI16() 183 | elif type == TType.I32: 184 | self.readI32() 185 | elif type == TType.I64: 186 | self.readI64() 187 | elif type == TType.DOUBLE: 188 | self.readDouble() 189 | elif type == TType.FLOAT: 190 | self.readFloat() 191 | elif type == TType.STRING: 192 | self.readString() 193 | elif type == TType.STRUCT: 194 | name = self.readStructBegin() 195 | while True: 196 | (name, type, id) = self.readFieldBegin() 197 | if type == TType.STOP: 198 | break 199 | self.skip(type) 200 | self.readFieldEnd() 201 | self.readStructEnd() 202 | elif type == TType.MAP: 203 | (ktype, vtype, size) = self.readMapBegin() 204 | for _ in range(size): 205 | self.skip(ktype) 206 | self.skip(vtype) 207 | self.readMapEnd() 208 | elif type == TType.SET: 209 | (etype, size) = self.readSetBegin() 210 | for _ in range(size): 211 | self.skip(etype) 212 | self.readSetEnd() 213 | elif type == TType.LIST: 214 | (etype, size) = self.readListBegin() 215 | for _ in range(size): 216 | self.skip(etype) 217 | self.readListEnd() 218 | else: 219 | raise TProtocolException( 220 | TProtocolException.INVALID_DATA, 221 | "Unexpected type for skipping {}".format(type) 222 | ) 223 | 224 | def readIntegral(self, type): 225 | if type == TType.BOOL: 226 | return self.readBool() 227 | elif type == TType.BYTE: 228 | return self.readByte() 229 | elif type == TType.I16: 230 | return self.readI16() 231 | elif type == TType.I32: 232 | return self.readI32() 233 | elif type == TType.I64: 234 | return self.readI64() 235 | else: 236 | raise Exception("Unknown integral type: %s" % str(type)) 237 | 238 | def readFloatingPoint(self, type): 239 | if type == TType.FLOAT: 240 | return self.readFloat() 241 | elif type == TType.DOUBLE: 242 | return self.readDouble() 243 | else: 244 | raise Exception("Unknown floating point type: %s" % str(type)) 245 | 246 | class TProtocolFactory: 247 | def getProtocol(self, trans): 248 | pass 249 | -------------------------------------------------------------------------------- /nebula3/fbthrift/protocol/TProtocolDecorator.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 
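# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: TProtocolBase.skip() above reads and discards a value of a given type,
# which is how generated readers tolerate fields they do not know about.
# Shown with a concrete protocol/transport pair from this package.
from nebula3.fbthrift.transport import TTransport
from nebula3.fbthrift.protocol import TBinaryProtocol
from nebula3.fbthrift.Thrift import TType

out_buf = TTransport.TMemoryBuffer()
TBinaryProtocol.TBinaryProtocol(out_buf).writeI32(42)

in_prot = TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(out_buf.getvalue()))
in_prot.skip(TType.I32)    # the unknown value is consumed and thrown away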
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.protocol.TProtocol import TProtocolBase 23 | from types import * 24 | 25 | class TProtocolDecorator(): 26 | def __init__(self, protocol): 27 | TProtocolBase(protocol) 28 | self.protocol = protocol 29 | 30 | def __getattr__(self, name): 31 | if hasattr(self.protocol, name): 32 | member = getattr(self.protocol, name) 33 | if type(member) in [MethodType, FunctionType, LambdaType, 34 | BuiltinFunctionType, BuiltinMethodType]: 35 | return lambda *args, **kwargs: self._wrap(member, args, kwargs) 36 | else: 37 | return member 38 | raise AttributeError(name) 39 | 40 | def _wrap(self, func, args, kwargs): 41 | if type(func) == MethodType: 42 | result = func(*args, **kwargs) 43 | else: 44 | result = func(self.protocol, *args, **kwargs) 45 | return result 46 | -------------------------------------------------------------------------------- /nebula3/fbthrift/protocol/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | __all__ = [ 16 | 'TProtocol', 17 | 'TBinaryProtocol', 18 | 'fastproto', 19 | 'TSimpleJSONProtocol', 20 | 'TCompactProtocol', 21 | 'THeaderProtocol', 22 | ] 23 | -------------------------------------------------------------------------------- /nebula3/fbthrift/protocol/exceptions.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
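# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: TProtocolDecorator above forwards every attribute of the wrapped
# protocol via __getattr__, so a decorator only overrides what it changes;
# TMultiplexedProtocol is built on exactly this mechanism. CountingProtocol
# is a hypothetical example, not a class shipped with the package.
from nebula3.fbthrift.protocol import TProtocolDecorator

class CountingProtocol(TProtocolDecorator.TProtocolDecorator):
    """Counts outgoing messages and delegates everything else."""

    def __init__(self, protocol):
        TProtocolDecorator.TProtocolDecorator.__init__(self, protocol)
        self.messages_written = 0

    def writeMessageBegin(self, name, type, seqid):
        self.messages_written += 1
        self.protocol.writeMessageBegin(name, type, seqid)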
14 | 15 | 16 | def create_ThriftUnicodeDecodeError_from_UnicodeDecodeError(error, field_name): 17 | if isinstance(error, ThriftUnicodeDecodeError): 18 | error.field_names.append(field_name) 19 | return error 20 | return ThriftUnicodeDecodeError( 21 | error.encoding, error.object, error.start, error.end, error.reason, field_name 22 | ) 23 | 24 | 25 | class ThriftUnicodeDecodeError(UnicodeDecodeError): 26 | def __init__(self, encoding, object, start, end, reason, field_name): 27 | super(ThriftUnicodeDecodeError, self).__init__(encoding, object, start, end, reason) 28 | self.field_names = [field_name] 29 | 30 | def __str__(self): 31 | return "{error} when decoding field '{field}'".format( 32 | error=super(ThriftUnicodeDecodeError, self).__str__(), field="->".join(reversed(self.field_names)) 33 | ) 34 | -------------------------------------------------------------------------------- /nebula3/fbthrift/server/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | __all__ = ['TServer', 'TAsyncioServer'] 21 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/THttp2Client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 vesoft inc. All rights reserved. 2 | # 3 | # This source code is licensed under Apache 2.0 License. 
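# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: the helper above chains field names onto a UnicodeDecodeError so the
# failing field path shows up in the error message. The field names 'name'
# and 'person' are placeholders.
from nebula3.fbthrift.protocol.exceptions import (
    create_ThriftUnicodeDecodeError_from_UnicodeDecodeError,
)

try:
    b'\xff'.decode('utf-8')
except UnicodeDecodeError as e:
    wrapped = create_ThriftUnicodeDecodeError_from_UnicodeDecodeError(e, 'name')
    wrapped = create_ThriftUnicodeDecodeError_from_UnicodeDecodeError(wrapped, 'person')
    print(wrapped)   # "... invalid start byte when decoding field 'person->name'"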
4 | # 5 | from nebula3.fbthrift.transport.TTransport import * 6 | import httpx 7 | 8 | default_timeout = 60 9 | 10 | class THttp2Client(TTransportBase): 11 | def __init__(self, url, 12 | timeout=None, 13 | verify=None, 14 | certfile=None, 15 | keyfile=None, 16 | password=None, 17 | http_headers=None, 18 | ): 19 | self.__wbuf = StringIO() 20 | self.__rbuf = StringIO() 21 | self.__http = None 22 | if timeout is not None and timeout > 0 : 23 | self.timeout = timeout 24 | if timeout is None: 25 | self.timeout = default_timeout 26 | 27 | self.url = url 28 | if verify is None: 29 | self.verify = False 30 | else: 31 | self.verify = verify 32 | if certfile is not None : 33 | self.cert = (certfile, keyfile, password) 34 | else: 35 | self.cert = None 36 | self.response = None 37 | self.http_headers = http_headers 38 | 39 | def isOpen(self): 40 | return self.__http is not None and self.__http.is_closed is False 41 | 42 | def open(self): 43 | if self.cert is None: 44 | self.__http = httpx.Client(http1=False,http2=True, verify=False, timeout=self.timeout) 45 | else: 46 | self.__http = httpx.Client(http1=False,http2=True, verify=self.verify, cert=self.cert, timeout=self.timeout) 47 | 48 | def close(self): 49 | self.__http.close() 50 | self.__http = None 51 | 52 | def read(self, sz): 53 | return self.__rbuf.read(sz) 54 | 55 | 56 | def write(self, buf): 57 | self.__wbuf.write(buf) 58 | 59 | def flush(self): 60 | if self.isOpen(): 61 | self.close() 62 | self.open() 63 | 64 | # Pull data out of buffer 65 | data = self.__wbuf.getvalue() 66 | self.__wbuf = StringIO() 67 | 68 | # HTTP2 request 69 | header = { 70 | 'Content-Type': 'application/x-thrift', 71 | 'Content-Length': str(len(data)), 72 | 'User-Agent': 'Python/THttpClient', 73 | } 74 | if self.http_headers is not None and isinstance(self.http_headers, dict): 75 | header.update(self.http_headers) 76 | try: 77 | self.response= self.__http.post(self.url, headers=header, data=data) 78 | except Exception as e: 79 | raise TTransportException(TTransportException.UNKNOWN, str(e)) 80 | # Get reply to flush the request 81 | self.code = self.response.status_code 82 | self.headers = self.response.headers 83 | self.__rbuf = StringIO() 84 | self.__rbuf.write(self.response.read()) 85 | self.__rbuf.seek(0) 86 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/THttpClient.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
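# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: THttp2Client above buffers writes and ships them as a single HTTP/2
# POST on flush(), so it slots into the usual transport/protocol/client
# stack. The URL, header, and generated client class are placeholders.
from nebula3.fbthrift.transport.THttp2Client import THttp2Client
from nebula3.fbthrift.protocol import TBinaryProtocol

transport = THttp2Client('https://graphd.example.com:9669', timeout=30,
                         http_headers={'X-Request-Id': 'demo'})
transport.open()
protocol = TBinaryProtocol.TBinaryProtocol(transport)
# client = GraphService.Client(protocol)   # a generated client would go here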
14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.transport.TTransport import * 23 | 24 | import os 25 | import socket 26 | import sys 27 | import warnings 28 | 29 | if sys.version_info[0] >= 3: 30 | from io import BytesIO as StringIO 31 | from urllib import parse 32 | from http import client 33 | # pyre-fixme[11]: Annotation `parse` is not defined as a type. 34 | urlparse = parse 35 | urllib = parse 36 | # pyre-fixme[11]: Annotation `client` is not defined as a type. 37 | httplib = client 38 | else: 39 | from cStringIO import StringIO 40 | import urlparse 41 | import httplib # @manual 42 | import urllib 43 | 44 | class THttpClient(TTransportBase): 45 | 46 | """Http implementation of TTransport base.""" 47 | 48 | def __init__(self, uri_or_host, port=None, path=None, ssl_context=None): 49 | """THttpClient supports two different types constructor parameters. 50 | 51 | THttpClient(host, port, path) - deprecated 52 | THttpClient(uri) 53 | 54 | Only the second supports https.""" 55 | 56 | if port is not None: 57 | warnings.warn( 58 | "Please use the THttpClient('http://host:port/path') syntax", 59 | DeprecationWarning, 60 | stacklevel=2) 61 | self.host = uri_or_host 62 | self.http_host = self.host 63 | self.port = port 64 | assert path 65 | self.path = path 66 | self.scheme = 'http' 67 | else: 68 | parsed = urlparse.urlparse(uri_or_host) 69 | self.scheme = parsed.scheme 70 | assert self.scheme in ('http', 'https') 71 | if self.scheme == 'http': 72 | self.port = parsed.port or httplib.HTTP_PORT 73 | elif self.scheme == 'https': 74 | self.port = parsed.port or httplib.HTTPS_PORT 75 | self.host = parsed.hostname 76 | self.http_host = parsed.netloc 77 | self.path = parsed.path 78 | if parsed.query: 79 | self.path += '?%s' % parsed.query 80 | self.__wbuf = StringIO() 81 | self.__http = None 82 | self.__timeout = None 83 | self.__custom_headers = None 84 | self.ssl_context = ssl_context 85 | 86 | def open(self): 87 | if self.scheme == 'http': 88 | self.__http = httplib.HTTPConnection(self.host, self.port, 89 | timeout=self.__timeout) 90 | else: 91 | self.__http = httplib.HTTPSConnection(self.host, self.port, 92 | context=self.ssl_context, 93 | timeout=self.__timeout) 94 | 95 | def close(self): 96 | self.__http.close() 97 | self.__http = None 98 | 99 | def isOpen(self): 100 | return self.__http is not None 101 | 102 | def setTimeout(self, ms): 103 | if ms is None: 104 | self.__timeout = None 105 | else: 106 | self.__timeout = ms / 1000.0 107 | 108 | def setCustomHeaders(self, headers): 109 | self.__custom_headers = headers 110 | 111 | def setCustomHeader(self, name, value): 112 | if self.__custom_headers is None: 113 | self.__custom_headers = {} 114 | self.__custom_headers[name] = value 115 | 116 | def read(self, sz): 117 | return self.response.read(sz) 118 | 119 | def write(self, buf): 120 | self.__wbuf.write(buf) 121 | 122 | def flush(self): 123 | if self.isOpen(): 124 | self.close() 125 | self.open() 126 | 127 | # Pull data out of buffer 128 | data = self.__wbuf.getvalue() 129 | self.__wbuf = StringIO() 130 | 131 | # HTTP request 132 | self.__http.putrequest('POST', self.path, skip_host=True) 133 | 134 | if not self.__custom_headers or 'Host' not in self.__custom_headers: 135 | self.__http.putheader('Host', self.http_host) 136 | 137 | self.__http.putheader('Content-Type', 'application/x-thrift') 138 | 
self.__http.putheader('Content-Length', str(len(data))) 139 | 140 | if not self.__custom_headers or 'User-Agent' not in \ 141 | self.__custom_headers: 142 | user_agent = 'Python/THttpClient' 143 | script = os.path.basename(sys.argv[0]) 144 | if script: 145 | user_agent = '%s (%s)' % (user_agent, urllib.quote(script)) 146 | self.__http.putheader('User-Agent', user_agent) 147 | 148 | if self.__custom_headers: 149 | if sys.version_info[0] >= 3: 150 | custom_headers_iter = self.__custom_headers.items() 151 | else: 152 | custom_headers_iter = self.__custom_headers.items() 153 | for key, val in custom_headers_iter: 154 | self.__http.putheader(key, val) 155 | 156 | try: 157 | self.__http.endheaders() 158 | 159 | # Write payload 160 | self.__http.send(data) 161 | except socket.gaierror as e: 162 | raise TTransportException(TTransportException.NOT_OPEN, str(e)) 163 | except Exception as e: 164 | raise TTransportException(TTransportException.UNKNOWN, str(e)) 165 | 166 | 167 | # Get reply to flush the request 168 | self.response = self.__http.getresponse() 169 | self.code = self.response.status 170 | self.headers = self.response.getheaders() 171 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/TSSLSocketOverHttpTunnel.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
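# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: THttpClient above supports two constructor forms; the single-URI form
# is the one that also handles https. setTimeout() takes milliseconds.
# The URL and generated client class are placeholders.
from nebula3.fbthrift.transport.THttpClient import THttpClient
from nebula3.fbthrift.protocol import TBinaryProtocol

transport = THttpClient('http://thrift.example.com:8080/service')
transport.setTimeout(5000)      # 5 seconds
transport.open()
protocol = TBinaryProtocol.TBinaryProtocol(transport)
# client = SomeService.Client(protocol)    # generated client (placeholder)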
14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | import socket 21 | import ssl 22 | 23 | from nebula3.fbthrift.transport.TSocketOverHttpTunnel import TSocketOverHttpTunnel 24 | from nebula3.fbthrift.transport.TTransport import TTransportException 25 | 26 | class TSSLSocketOverHttpTunnel(TSocketOverHttpTunnel): 27 | def __init__(self, host, port, proxy_host, proxy_port, 28 | ssl_version=ssl.PROTOCOL_TLSv1, 29 | cert_reqs=ssl.CERT_NONE, 30 | ca_certs=None, 31 | keyfile=None, 32 | certfile=None): 33 | TSocketOverHttpTunnel.__init__(self, host, port, proxy_host, proxy_port) 34 | self.ssl_version = ssl_version 35 | self.cert_reqs = cert_reqs 36 | self.keyfile, self.certfile, self.ca_certs = \ 37 | keyfile, certfile, ca_certs 38 | 39 | def open(self): 40 | TSocketOverHttpTunnel.open(self) 41 | try: 42 | sslh = ssl.SSLSocket(self.handle, 43 | ssl_version=self.ssl_version, 44 | cert_reqs=self.cert_reqs, 45 | keyfile=self.keyfile, 46 | certfile=self.certfile, 47 | ca_certs=self.ca_certs) 48 | self.handle = sslh 49 | except ssl.SSLError as e: 50 | self.close() 51 | raise TTransportException(TTransportException.NOT_OPEN, 52 | "SSL error during handshake: " + str(e)) 53 | except socket.error as e: 54 | self.close() 55 | raise TTransportException(TTransportException.NOT_OPEN, 56 | "socket error during SSL handshake: " + str(e)) 57 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/TSocketOverHttpTunnel.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
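# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: TSSLSocketOverHttpTunnel above wraps the tunneled socket in TLS after
# the HTTP CONNECT handshake. Hosts, ports, and the CA bundle path are
# placeholders.
import ssl
from nebula3.fbthrift.transport.TSSLSocketOverHttpTunnel import TSSLSocketOverHttpTunnel

sock = TSSLSocketOverHttpTunnel(
    'backend.example.com', 9160,    # real destination behind the proxy
    'proxy.example.com', 3128,      # HTTP proxy that accepts CONNECT
    cert_reqs=ssl.CERT_REQUIRED,
    ca_certs='/etc/ssl/certs/ca-bundle.crt',
)
sock.open()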
14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | from nebula3.fbthrift.transport.TSocket import TSocket 21 | from nebula3.fbthrift.transport.TTransport import TTransportException 22 | import socket 23 | 24 | 25 | class TSocketOverHttpTunnel(TSocket): 26 | def __init__(self, host, port, proxy_host, proxy_port): 27 | TSocket.__init__(self, proxy_host, proxy_port) 28 | try: 29 | # Use IP address since sometimes proxy_host cannot resolve 30 | # external hostnames using unbound 31 | info = socket.getaddrinfo( 32 | host, 33 | None, 34 | socket.AF_INET | socket.AF_INET6, 35 | socket.SOCK_STREAM, 36 | socket.IPPROTO_TCP) 37 | self.remote_host = info[0][4][0] 38 | except socket.error as e: 39 | raise TTransportException(TTransportException.NOT_OPEN, str(e)) 40 | self.remote_port = port 41 | 42 | def open(self): 43 | TSocket.open(self) 44 | self.write("CONNECT %s:%d HTTP/1.1\r\nHost: %s:%d\r\n\r\n" % ( 45 | self.remote_host, self.remote_port, 46 | self.remote_host, self.remote_port)) 47 | res = self.read(4096) 48 | try: 49 | status = res.split()[1] 50 | if status != '200': 51 | self.close() 52 | raise TTransportException(TTransportException.NOT_OPEN, 53 | "Error response from proxy server: %s" % res) 54 | except IndexError: 55 | self.close() 56 | raise TTransportException(TTransportException.NOT_OPEN, 57 | "Error response from proxy server: %s" % res) 58 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/TSocketTest.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | import os.path 21 | import socket 22 | import tempfile 23 | import threading 24 | import time 25 | import unittest 26 | 27 | import nebula3.fbthrift.transport.TSocket as TSocket 28 | import nebula3.fbthrift.transport.TTransport as TTransport 29 | 30 | 31 | class TSocketTest(unittest.TestCase): 32 | 33 | def test_usage_as_context_manager(self): 34 | """ 35 | Asserts that both TSocket and TServerSocket can be used with `with` and 36 | that their resources are disposed of at the close of the `with`. 
37 | """ 38 | text = b"hi" # sample text to send over the wire 39 | with TSocket.TServerSocket(port=0, family=socket.AF_INET6) as server: 40 | addr = server.getSocketNames()[0] 41 | with TSocket.TSocket(host=addr[0], port=addr[1]) as conn: 42 | conn.write(text) 43 | self.assertFalse(conn.isOpen()) 44 | with server.accept() as client: 45 | read = client.read(len(text)) 46 | self.assertFalse(conn.isOpen()) 47 | self.assertFalse(server.isListening()) 48 | self.assertEquals(read, text) 49 | 50 | def test_server_context_errors(self): 51 | # Make sure the TServerSocket context manager doesn't 52 | # swallow exceptions 53 | def do_test(): 54 | with TSocket.TServerSocket(port=0, family=socket.AF_INET6): 55 | raise Exception('test_error') 56 | 57 | self.assertRaisesRegexp(Exception, 'test_error', do_test) 58 | 59 | def test_open_failure(self): 60 | # Bind a server socket to an address, but don't actually listen on it. 61 | server_socket = socket.socket(socket.AF_INET6) 62 | try: 63 | server_socket.bind(('::', 0)) 64 | server_port = server_socket.getsockname()[1] 65 | 66 | # Explicitly use "localhost" as the hostname, so that the 67 | # connect code will try both IPv6 and IPv4. We want to 68 | # exercise the failure behavior when trying multiple addresses. 69 | sock = TSocket.TSocket(host='localhost', port=server_port) 70 | sock.setTimeout(50) # ms 71 | try: 72 | sock.open() 73 | self.fail('unexpectedly succeeded to connect to closed socket') 74 | except TTransport.TTransportException: 75 | # sock.open() should not leave the file descriptor open 76 | # when it fails 77 | self.assertEquals(None, sock.handle) 78 | self.assertEquals({}, sock.handles) 79 | 80 | # Calling close() again on the socket should be a no-op, 81 | # and shouldn't throw an error 82 | sock.close() 83 | finally: 84 | server_socket.close() 85 | 86 | def test_poller_process(self): 87 | # Make sure that pollers do not fail when they're given None as timeout 88 | text = "hi" # sample text to send over the wire 89 | with TSocket.TServerSocket(port=0, family=socket.AF_INET6) as server: 90 | addr = server.getSocketNames()[0] 91 | 92 | def write_data(): 93 | # delay writing to verify that poller.process is waiting 94 | time.sleep(1) 95 | with TSocket.TSocket(host=addr[0], port=addr[1]) as conn: 96 | conn.write(text) 97 | 98 | poller = TSocket.ConnectionSelect() 99 | thread = threading.Thread(target=write_data) 100 | thread.start() 101 | for filenos in server.handles.keys(): 102 | poller.read(filenos) 103 | 104 | r, _, x = poller.process(timeout=None) 105 | 106 | thread.join() 107 | # Verify that r is non-empty 108 | self.assertTrue(r) 109 | 110 | def test_deprecated_str_form_of_port(self): 111 | # Make sure that the deprecated form of the `port` parameter is 112 | # accepted in TServerSocket and TSocket. 113 | port = "0" 114 | text = b"hi" # sample text to send over the wire 115 | # NB: unfortunately unittest.TestCase.assertWarns isn't available until 116 | # py3. 
117 | with TSocket.TServerSocket(port=port, family=socket.AF_INET6) as server: 118 | addr = server.getSocketNames()[0] 119 | with TSocket.TSocket(host=addr[0], port=str(addr[1])) as conn: 120 | conn.write(text) 121 | with server.accept() as client: 122 | read = client.read(len(text)) 123 | self.assertEquals(read, text) 124 | 125 | def test_bad_port(self): 126 | port = 'bogus' 127 | with self.assertRaises(ValueError): 128 | with TSocket.TServerSocket(port=port): 129 | pass 130 | 131 | with self.assertRaises(ValueError): 132 | with TSocket.TSocket(port=port): 133 | pass 134 | 135 | def test_unix_socket(self): 136 | text = b"hi" # sample text to send over the wire 137 | with tempfile.NamedTemporaryFile(delete=True) as fh: 138 | unix_socket = fh.name 139 | with TSocket.TServerSocket(unix_socket=unix_socket) as server: 140 | with TSocket.TSocket(unix_socket=unix_socket) as conn: 141 | conn.write(text) 142 | with server.accept() as client: 143 | read = client.read(len(text)) 144 | self.assertEquals(read, text) 145 | # The socket will not be cleaned up when the server has been shutdown. 146 | self.assertTrue(os.path.exists(unix_socket)) 147 | -------------------------------------------------------------------------------- /nebula3/fbthrift/transport/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/BytesStrIO.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
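# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: BytesStrIO above accepts both str and bytes and stores bytes, which
# smooths over py2-style code paths under Python 3.
from nebula3.fbthrift.util.BytesStrIO import BytesStrIO

buf = BytesStrIO()
buf.write('text ')        # str is encoded to UTF-8 before being stored
buf.write(b'and bytes')   # bytes are written unchanged
assert buf.getvalue() == b'text and bytes'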
14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | import sys 23 | 24 | if sys.version_info[0] >= 3: 25 | 26 | from io import BytesIO 27 | 28 | class BytesStrIO(BytesIO): 29 | def __init__(self, *args): 30 | args_new = [] 31 | for arg in args: 32 | if not isinstance(arg, (bytes, memoryview)): 33 | args_new.append(arg.encode()) 34 | else: 35 | args_new.append(arg) 36 | BytesIO.__init__(self, *args_new) 37 | 38 | def write(self, data): 39 | if isinstance(data, (bytes, memoryview)): 40 | BytesIO.write(self, data) 41 | else: 42 | BytesIO.write(self, data.encode()) 43 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/Recursive.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.Thrift import TType 23 | 24 | def fix_spec(all_structs): 25 | for s in all_structs: 26 | spec = s.thrift_spec 27 | for t in spec: 28 | if t is None: 29 | continue 30 | elif t[1] == TType.STRUCT: 31 | t[3][1] = t[3][0].thrift_spec 32 | elif t[1] in (TType.LIST, TType.SET): 33 | _fix_list_or_set(t[3]) 34 | elif t[1] == TType.MAP: 35 | _fix_map(t[3]) 36 | 37 | def _fix_list_or_set(element_type): 38 | if element_type[0] == TType.STRUCT: 39 | element_type[1][1] = element_type[1][0].thrift_spec 40 | elif element_type[0] in (TType.LIST, TType.SET): 41 | _fix_list_or_set(element_type[1]) 42 | elif element_type[0] == TType.MAP: 43 | _fix_map(element_type[1]) 44 | 45 | def _fix_map(element_type): 46 | if element_type[0] == TType.STRUCT: 47 | element_type[1][1] = element_type[1][0].thrift_spec 48 | elif element_type[0] in (TType.LIST, TType.SET): 49 | _fix_list_or_set(element_type[1]) 50 | elif element_type[0] == TType.MAP: 51 | _fix_map(element_type[1]) 52 | 53 | if element_type[2] == TType.STRUCT: 54 | element_type[3][1] = element_type[3][0].thrift_spec 55 | elif element_type[2] in (TType.LIST, TType.SET): 56 | _fix_list_or_set(element_type[3]) 57 | elif element_type[2] == TType.MAP: 58 | _fix_map(element_type[3]) 59 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/Serializer.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from typing import Any, AnyStr, TypeVar # noqa: F401 23 | 24 | from nebula3.fbthrift.transport import TTransport 25 | from nebula3.fbthrift.protocol import THeaderProtocol 26 | 27 | 28 | # pyre-fixme[34]: `Variable[AnyStr <: [str, bytes]]` isn't present in the function's 29 | # parameters. 30 | def serialize(protocol_factory, thr): 31 | # type: (Any, Any) -> AnyStr 32 | """Convenience method for serializing objects using the given 33 | protocol factory and a TMemoryBuffer.""" 34 | transport = TTransport.TMemoryBuffer() 35 | protocol = protocol_factory.getProtocol(transport) 36 | thr.write(protocol) 37 | if isinstance(protocol, THeaderProtocol.THeaderProtocol): 38 | protocol.trans.flush() 39 | return transport.getvalue() 40 | 41 | 42 | T = TypeVar("T") # noqa: F401 43 | 44 | 45 | def deserialize(protocol_factory, data, thr_out): 46 | # type: (Any, AnyStr, T) -> T 47 | """Convenience method for deserializing objects using the given 48 | protocol factory and a TMemoryBuffer. returns its thr_out 49 | argument.""" 50 | transport = TTransport.TMemoryBuffer(data) 51 | try: 52 | # pyre-fixme[16]: `T` has no attribute `thrift_spec`. 53 | protocol = protocol_factory.getProtocol(transport, thr_out.thrift_spec) # noqa: T484 54 | except TypeError: 55 | protocol = protocol_factory.getProtocol(transport) 56 | if isinstance(protocol, THeaderProtocol.THeaderProtocol): 57 | # this reads the THeader headers to detect what the underlying 58 | # protocol is, as well as looking at transforms, etc. 59 | protocol.trans.readFrame(0) 60 | protocol.reset_protocol() 61 | # pyre-fixme[16]: `T` has no attribute `read`. 62 | thr_out.read(protocol) # noqa: T484 63 | return thr_out 64 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/TCppServerTestManager.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
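# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: serialize()/deserialize() above round-trip any generated struct through
# a protocol factory and a TMemoryBuffer. Date from nebula3.common.ttypes
# merely stands in for "some generated struct".
from nebula3.fbthrift.protocol import TBinaryProtocol
from nebula3.fbthrift.util import Serializer
from nebula3.common.ttypes import Date

factory = TBinaryProtocol.TBinaryProtocolFactory()
blob = Serializer.serialize(factory, Date(year=2024, month=1, day=2))

decoded = Serializer.deserialize(factory, blob, Date())
assert decoded.year == 2024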
14 | 15 | from __future__ import absolute_import 16 | from __future__ import division 17 | from __future__ import print_function 18 | from __future__ import unicode_literals 19 | 20 | import threading 21 | from nebula3.fbthrift.Thrift import TProcessor 22 | from nebula3.fbthrift.server.TCppServer import TCppServer 23 | 24 | class TCppServerTestManager(object): 25 | """ 26 | A context manager for running a TCppServer in unit tests. 27 | 28 | Caller may pass either an Iface, a Processor, or a not-running TCppServer. 29 | 30 | Basic example: 31 | 32 | from mylib import MyServiceHandler 33 | from nebula3.fbthrift.util.TCppServerTestManager import TCppServerTestManager 34 | 35 | class MyServiceTest(unittest.TestCase) 36 | 37 | def test_traffic(self): 38 | handler = MyServiceHandler() # derived from MyService.Iface 39 | with TCppServerTestManager(handler) as server: 40 | host, port = server.addr() 41 | # Talk to the server using thrift in here.... 42 | 43 | See the unit-tests for this class for better-worked-out examples. 44 | """ 45 | 46 | @staticmethod 47 | def make_server(processor): 48 | """ 49 | Creates a TCppServer given a processor. This is the function used 50 | internally, but it may be of interest separately as well. 51 | """ 52 | server = TCppServer(processor) 53 | server.setPort(0) 54 | server.setNumCPUWorkerThreads(1) 55 | server.setNumIOWorkerThreads(1) 56 | server.setNewSimpleThreadManager( 57 | count=1, 58 | pendingTaskCountMax=5, 59 | enableTaskStats=False 60 | ) 61 | return server 62 | 63 | def __init__(self, obj, cleanUp=True): 64 | self.__obj = obj 65 | self.__handler = None 66 | self.__processor = None 67 | self.__server = None 68 | self.__thread = None 69 | self.__thread_started_ev = None 70 | self.__do_cleanup = cleanUp 71 | 72 | def __enter__(self): 73 | self.start() 74 | return self 75 | 76 | def __exit__(self, type, value, traceback): 77 | self.stop() 78 | 79 | def start(self): 80 | """ 81 | Starts the server in another thread. 82 | 83 | Returns after the server has bound to and listened on its port. Callers 84 | may immediately open connections without needing to wait or poll. 85 | """ 86 | if self.__is_handler(self.__obj): 87 | self.__handler = self.__obj 88 | self.__processor = self.__make_processor(self.__handler) 89 | self.__server = self.__make_server(self.__processor) 90 | elif self.__is_processor(self.__obj): 91 | self.__processor = self.__obj 92 | self.__server = self.__make_server(self.__processor) 93 | elif self.__is_server(self.__obj): 94 | self.__server = self.__obj 95 | else: 96 | raise Exception("Not a handler, a processor, or a server.") 97 | self.__server_started_ev = threading.Event() 98 | self.__thread = threading.Thread(target=self.__serve) 99 | self.__thread.start() 100 | self.__server_started_ev.wait() 101 | self.__server_started_ev = None 102 | 103 | def stop(self): 104 | """ 105 | Stops the server. 106 | 107 | Returns after the server has been stopped and all resources have been 108 | cleaned up. 109 | """ 110 | self.__server.stop() 111 | self.__thread.join() 112 | self.__thread = None 113 | self.__server = None 114 | self.__processor = None 115 | self.__handler = None 116 | 117 | def addr(self): 118 | """ 119 | Returns a pair of host-addr and port on which the running server is 120 | listening. 121 | 122 | If constructed with a handler or a processor, addr is * or :: and port 123 | is ephemeral. 
124 | """ 125 | addr = self.__server.getAddress() 126 | return addr[0], addr[1] 127 | 128 | def __serve(self): 129 | self.__server.setup() 130 | self.__server_started_ev.set() 131 | try: 132 | self.__server.loop() 133 | finally: 134 | if self.__do_cleanup: 135 | self.__server.cleanUp() 136 | 137 | def __is_handler(self, obj): 138 | return hasattr(obj, '_processor_type') \ 139 | and not self.__is_processor(obj) 140 | 141 | def __is_processor(self, obj): 142 | return isinstance(obj, TProcessor) 143 | 144 | def __is_server(self, obj): 145 | return isinstance(obj, TCppServer) 146 | 147 | def __make_processor(self, handler): 148 | return handler._processor_type(handler) 149 | 150 | def __make_server(self, processor): 151 | return self.__class__.make_server(self.__processor) 152 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/TValidator.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | from __future__ import unicode_literals 21 | 22 | from nebula3.fbthrift.Thrift import TType 23 | 24 | import logging 25 | _log = logging.getLogger('nebula3.fbthrift.validator') 26 | 27 | import sys 28 | if sys.version_info[0] >= 3: 29 | basestring = str 30 | unicode = str 31 | long = int 32 | 33 | class TValidator: 34 | tinfo = { 35 | # ttype: (type_name, python_type, min_value, max_value) 36 | TType.BOOL: ('BOOL', bool, None, None), 37 | TType.BYTE: ('BYTE', int, -128, 127), 38 | TType.DOUBLE: ('DOUBLE', float, None, None), 39 | TType.I16: ('I16', int, -32768, 32767), 40 | TType.I32: ('I32', int, -2147483648, 2147483647), 41 | TType.I64: ('I64', (int, long), None, None), 42 | TType.STRING: ('STRING', basestring, None, None), 43 | TType.UTF8: ('UTF8', unicode, None, None), 44 | } 45 | 46 | def __init__(self): 47 | self.custom_validators = {} 48 | 49 | def addClassValidator(self, name, validator): 50 | self.custom_validators[name] = validator 51 | 52 | def validate(self, msg): 53 | if not hasattr(msg, 'thrift_spec'): 54 | _log.error("Not a valid thrift object") 55 | return False 56 | 57 | name = msg.__class__.__name__ 58 | return self.check_struct(name, msg, msg.thrift_spec) 59 | 60 | def check_basic(self, name, value, thrift_type): 61 | if thrift_type not in self.tinfo: 62 | _log.warn("%s Unrecognized thrift type %d. 
No validation done!", 63 | name, thrift_type) 64 | return True 65 | 66 | t_name, python_type, v_min, v_max = self.tinfo[thrift_type] 67 | if not isinstance(value, python_type): 68 | error = "Value %s is not a %s" % (str(value), t_name) 69 | elif (v_min is not None and value < v_min) \ 70 | or (v_max is not None and value > v_max): 71 | error = "Value %s not within %s boundaries" % (str(value), t_name) 72 | else: 73 | error = None 74 | 75 | if error is None: 76 | _log.debug("%s -> %s (type: %s) OK", name, str(value), t_name) 77 | else: 78 | _log.error("ERROR: %s is WRONG. %s", name, error) 79 | return error is None 80 | 81 | def check_map(self, name, value, k_type, k_specs, v_type, v_specs): 82 | _log.debug('%s - MAP check:', name) 83 | ok = True 84 | for k, v in value.items(): 85 | if not self.check_type("%s key" % (name), k, k_type, k_specs): 86 | ok = False 87 | if not self.check_type("%s[%s]" % (name, k), v, v_type, v_specs): 88 | ok = False 89 | return ok 90 | 91 | def check_listset(self, name, value, v_type, v_specs): 92 | _log.debug('%s - LIST/SET check:', name) 93 | ok = True 94 | for i, v in enumerate(value): 95 | if not self.check_type("%s[%d]" % (name, i), v, v_type, v_specs): 96 | ok = False 97 | return ok 98 | 99 | def check_type(self, name, value, v_type, specs): 100 | if value is None: 101 | _log.debug("%s - NOT set", name) 102 | return True 103 | 104 | if v_type == TType.STRUCT: 105 | struct_specs = specs[1] 106 | ok = self.check_struct(name, value, struct_specs) 107 | elif v_type == TType.MAP: 108 | k_type = specs[0] 109 | k_specs = specs[1] 110 | v_type = specs[2] 111 | v_specs = specs[3] 112 | ok = self.check_map(name, value, k_type, k_specs, v_type, v_specs) 113 | elif v_type in (TType.LIST, TType.SET): 114 | v_type = specs[0] 115 | v_specs = specs[1] 116 | ok = self.check_listset(name, value, v_type, v_specs) 117 | else: 118 | ok = self.check_basic(name, value, v_type) 119 | 120 | return ok 121 | 122 | def check_struct(self, name, value, specs): 123 | _log.debug('%s - STRUCT check:', name) 124 | if specs is None: 125 | _log.error("%s - Empty thrift specs, can not be validated", name) 126 | return False 127 | 128 | ok = True 129 | for spec in specs: 130 | if spec is None: 131 | # Some fields of the struct might be not defined or 132 | # skipped old fields and their spec will be None 133 | continue 134 | f_name = name + "." + str(spec[2]) 135 | f_type = spec[1] 136 | f_value = getattr(value, spec[2]) 137 | f_specs = spec[3] 138 | if not self.check_type(f_name, f_value, f_type, f_specs): 139 | ok = False 140 | 141 | class_name = value.__class__.__name__ 142 | if ok and class_name in self.custom_validators: 143 | if self.custom_validators[class_name](value): 144 | _log.debug('%s - Custom validator for class %s OK', 145 | name, class_name) 146 | else: 147 | _log.error('%s - Custom validator for class %s failed!', 148 | name, class_name) 149 | ok = False 150 | return ok 151 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) Facebook, Inc. and its affiliates. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
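# ---- Usage sketch (illustrative; not part of the package source) ----
# Why: TValidator above walks a struct's thrift_spec, checks each field's type
# and range, and then runs any custom per-class validator. Date is just a
# convenient struct from this package; the month check is a made-up rule.
from nebula3.fbthrift.util.TValidator import TValidator
from nebula3.common.ttypes import Date

validator = TValidator()
validator.addClassValidator('Date', lambda d: 1 <= d.month <= 12)

ok = validator.validate(Date(year=2024, month=13, day=1))
print(ok)   # False: the field types pass, but the custom check rejects month 13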
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # pyre-unsafe 16 | 17 | from __future__ import absolute_import, division, print_function, unicode_literals 18 | 19 | from collections import OrderedDict, namedtuple 20 | 21 | from nebula3.fbthrift.Thrift import TType 22 | 23 | 24 | __all__ = ["create_client", "Serializer", "struct_to_dict", "parse_struct_spec"] 25 | StructField = namedtuple("StructField", "id type name type_args default req_type") 26 | 27 | 28 | def create_client( 29 | client_klass, 30 | host=None, 31 | port=None, 32 | client_type=None, 33 | path=None, 34 | timeout=None, 35 | ): 36 | """ 37 | Given a thrift client class, and a host/port 38 | return a client using HeaderTransport 39 | """ 40 | from nebula3.fbthrift.transport.TSocket import TSocket 41 | from nebula3.fbthrift.protocol.THeaderProtocol import THeaderProtocol 42 | 43 | sock = TSocket(host=host, port=port, unix_socket=path) 44 | sock.setTimeout(timeout) 45 | protocol = THeaderProtocol( 46 | sock, 47 | client_types=[client_type] 48 | if client_type 49 | else None, # We accept the same as our inital send_ 50 | client_type=client_type, # Used for the inital send_ 51 | ) 52 | sock.open() 53 | return client_klass(protocol) 54 | 55 | 56 | def parse_struct_spec(struct): 57 | """ 58 | Given a thrift struct return a generator of parsed field information 59 | 60 | StructField fields: 61 | id - the field number 62 | type - a Thrift.TType 63 | name - the field name 64 | type_args - type arguments (ex: Key type Value type for maps) 65 | default - the default value 66 | req_type - the field required setting 67 | (0: Required, 1: Optional, 2: Optional IN, Required OUT) 68 | 69 | :param struct: a thrift struct 70 | :return: a generator of StructField tuples 71 | """ 72 | for field in struct.thrift_spec: 73 | if not field: 74 | continue 75 | yield StructField._make(field) 76 | 77 | 78 | def struct_to_dict(struct, defaults=False): 79 | """ 80 | Given a Thrift Struct convert it into a dict 81 | :param struct: a thrift struct 82 | :param defaults: return default values 83 | :return: OrderedDict 84 | """ 85 | adict = OrderedDict() 86 | union = struct.isUnion() 87 | if union and struct.field == 0: 88 | # if struct.field is 0 then it is unset escape 89 | return adict 90 | for field in parse_struct_spec(struct): 91 | if union: 92 | if field.id == struct.field: 93 | value = struct.value 94 | else: 95 | continue 96 | else: 97 | value = getattr(struct, field.name, field.default) 98 | if value != field.default or defaults: 99 | if field.type == TType.STRUCT: 100 | if value is not None: 101 | sub_dict = struct_to_dict(value, defaults=defaults) 102 | if sub_dict or defaults: # Do not include empty sub structs 103 | adict[field.name] = sub_dict 104 | elif field.type == TType.LIST: 105 | sub_list = __list_to_dict(value, field.type_args, defaults=defaults) 106 | if sub_list or defaults: 107 | adict[field.name] = sub_list 108 | elif field.type == TType.SET: 109 | sub_set = __set_to_dict(value, field.type_args, defaults=defaults) 110 | if sub_set or defaults: 111 | adict[field.name] = sub_set 112 | elif field.type == TType.MAP: 113 | sub_map = 
__map_to_dict(value, field.type_args, defaults=defaults) 114 | if sub_map or defaults: 115 | adict[field.name] = sub_map 116 | else: 117 | adict[field.name] = value 118 | if union: # If we got this far then we have the union value 119 | break 120 | return adict 121 | 122 | 123 | def __list_to_dict(alist, type_args, defaults=False): 124 | """ 125 | Given a python list-like collection, potentially containing Thrift Structs, 126 | convert it into a dict 127 | :param alist: a list or set 128 | :param defaults: return default values 129 | :return: List 130 | """ 131 | if not alist: 132 | return alist 133 | 134 | element_type = type_args[0] 135 | if element_type == TType.STRUCT: 136 | return [struct_to_dict(element, defaults=defaults) for element in alist] 137 | if element_type == TType.LIST: 138 | return [ 139 | __list_to_dict(element, type_args[1], defaults=defaults) 140 | for element in alist 141 | ] 142 | if element_type == TType.SET: 143 | return [ 144 | __set_to_dict(element, type_args[1], defaults=defaults) for element in alist 145 | ] 146 | else: 147 | return alist 148 | 149 | 150 | def __set_to_dict(aset, type_args, defaults=False): 151 | """ 152 | Given a python set-like collection, potentially containing Thrift Structs 153 | and recursively parsing the elements 154 | :param aset: a set 155 | :param defaults: return default values 156 | :return: Set 157 | """ 158 | if not aset: 159 | return aset 160 | 161 | element_type = type_args[0] 162 | if element_type == TType.STRUCT: 163 | return {struct_to_dict(element, defaults=defaults) for element in aset} 164 | if element_type == TType.LIST: 165 | return { 166 | __list_to_dict(element, type_args[1], defaults=defaults) 167 | for element in aset 168 | } 169 | if element_type == TType.SET: 170 | return { 171 | __set_to_dict(element, type_args[1], defaults=defaults) for element in aset 172 | } 173 | else: 174 | return aset 175 | 176 | 177 | def __map_to_dict(amap, type_args, defaults=False): 178 | """ 179 | Given a python dictionary, potentially containing Thrift Structs, convert it 180 | into a dict 181 | :param amap: a map 182 | :param defaults: return default values 183 | :return: Dict 184 | """ 185 | if not amap: 186 | return amap 187 | 188 | keys, values = zip(*amap.items()) 189 | 190 | keys = __list_to_dict(keys, type_args[:2], defaults=defaults) 191 | values = __list_to_dict(values, type_args[2:4], defaults=defaults) 192 | 193 | return dict(zip(keys, values)) 194 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/asyncio.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright (c) Facebook, Inc. and its affiliates. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
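# Usage sketch for this module (illustrative only, never called): the
# create_client() helper defined below returns an async context manager, as
# its docstring describes. `EchoService` and its `echo()` method are
# hypothetical names invented for this sketch; substitute any real
# thrift-generated client class.
async def _create_client_sketch():
    from echo_thrift import EchoService  # hypothetical generated module

    async with create_client(EchoService.Client, host="127.0.0.1", port=9090) as client:
        # Any RPC defined by the (hypothetical) service can be awaited here.
        return await client.echo(b"hello")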
15 | 16 | import asyncio 17 | 18 | from nebula3.fbthrift.server.TAsyncioServer import ThriftClientProtocolFactory 19 | from nebula3.fbthrift.util.Decorators import protocol_manager 20 | 21 | 22 | class async_protocol_manager: 23 | def __init__(self, coro): 24 | """ 25 | Given a coro from create_connection create a context manager 26 | around the protocol returned 27 | """ 28 | self.coro = coro 29 | 30 | def __await__(self): 31 | async def as_protocol_manager(): 32 | _, protocol = await self.coro 33 | return protocol_manager(protocol) 34 | 35 | return as_protocol_manager().__await__() 36 | 37 | __iter__ = __await__ 38 | 39 | async def __aenter__(self): 40 | _, self.protocol = await self.coro 41 | return self.protocol.client 42 | 43 | async def __aexit__(self, exc_type, exc, tb): 44 | self.protocol.close() 45 | 46 | 47 | def create_client( 48 | client_klass, 49 | *, 50 | host=None, 51 | port=None, 52 | sock=None, 53 | loop=None, 54 | timeouts=None, 55 | client_type=None, 56 | ssl=None, 57 | ): 58 | """ 59 | create an asyncio thrift client and return an async context 60 | manager that can be used as follows: 61 | 62 | async with create_client(smc2_client, port=1421) as smc: 63 | await smc.getStatus() 64 | 65 | This can be used in the old way: 66 | 67 | with (await create_client(smc2_client, port=1421)) as smc: 68 | await smc.getStatus() 69 | 70 | or even the old deprecated way: 71 | 72 | with (yield from create_client(smc2_client, port=1421) as smc: 73 | yield from smc.getStatus() 74 | 75 | :param client_klass: thrift Client class 76 | :param host: hostname/ip, None = loopback 77 | :param port: port number 78 | :param sock: socket.socket object 79 | :param loop: asyncio event loop 80 | :returns: an Async Context Manager 81 | """ 82 | if not loop: 83 | loop = asyncio.get_event_loop() 84 | 85 | coro = loop.create_connection( 86 | ThriftClientProtocolFactory( 87 | client_klass, 88 | loop=loop, 89 | timeouts=timeouts, 90 | client_type=client_type, 91 | ), 92 | host=host, 93 | port=port, 94 | sock=sock, 95 | ssl=ssl, 96 | ) 97 | return async_protocol_manager(coro) 98 | 99 | 100 | def call_as_future(f, loop, *args, **kwargs): 101 | """call_as_future(callable, *args, **kwargs) -> asyncio.Task 102 | 103 | Like asyncio.ensure_future() but takes any callable and converts 104 | it to a coroutine function first. 105 | """ 106 | if not asyncio.iscoroutinefunction(f): 107 | f = asyncio.coroutine(f) 108 | 109 | return asyncio.ensure_future(f(*args, **kwargs), loop=loop) 110 | -------------------------------------------------------------------------------- /nebula3/fbthrift/util/converter.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python3 2 | # Copyright (c) Facebook, Inc. and its affiliates. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
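# Usage sketch for this module (illustrative only, never called): the
# to_py_struct() helper defined below copies a thrift-py3 struct instance
# into its legacy thrift-py counterpart field by field. `addressbook` and its
# `Person` structs are hypothetical generated modules used only in this sketch.
def _to_py_struct_sketch(py3_person):
    # Hypothetical legacy (thrift-py) class generated from the same IDL as
    # the py3 struct instance held in `py3_person`.
    from addressbook.ttypes import Person as LegacyPerson

    return to_py_struct(LegacyPerson, py3_person)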
15 | 16 | # pyre-unsafe 17 | 18 | from typing import Any, Type, TypeVar 19 | 20 | from nebula3.fbthrift.py3.types import Enum, Struct 21 | from nebula3.fbthrift.Thrift import TType 22 | from nebula3.fbthrift.util import parse_struct_spec 23 | 24 | 25 | T = TypeVar("T") 26 | 27 | 28 | def to_py_struct(cls: Type[T], obj: Struct) -> T: 29 | return _to_py_struct(cls, obj) 30 | 31 | 32 | def _to_py_struct(cls: Type[T], obj: Struct) -> T: 33 | # pyre-fixme[16]: `T` has no attribute `isUnion`. 34 | if cls.isUnion(): 35 | return cls( 36 | **{ 37 | field.name: _to_py_field( 38 | field.type, field.type_args, getattr(obj, field.name) 39 | ) 40 | for field in parse_struct_spec(cls) 41 | # pyre-fixme[16]: `Struct` has no attribute `type`. 42 | if field.name == obj.type.name 43 | } 44 | ) 45 | else: 46 | return cls( 47 | **{ 48 | field.name: _to_py_field( 49 | field.type, field.type_args, getattr(obj, field.name) 50 | ) 51 | for field in parse_struct_spec(cls) 52 | } 53 | ) 54 | 55 | 56 | def _to_py_field(field_type: TType, type_args: Any, obj: Any) -> Any: 57 | if obj is None: 58 | return None 59 | if field_type == TType.STRUCT: 60 | return _to_py_struct(type_args[0], obj) 61 | if field_type == TType.LIST: 62 | return [_to_py_field(type_args[0], type_args[1], elem) for elem in obj] 63 | if field_type == TType.SET: 64 | return {_to_py_field(type_args[0], type_args[1], elem) for elem in obj} 65 | if field_type == TType.MAP: 66 | return { 67 | _to_py_field(type_args[0], type_args[1], k): _to_py_field( 68 | type_args[2], type_args[3], v 69 | ) 70 | for k, v in obj.items() 71 | } 72 | if isinstance(obj, Enum): 73 | return obj.value 74 | return obj 75 | -------------------------------------------------------------------------------- /nebula3/gclient/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vesoft-inc/nebula-python/ba345a6e26e480201c7abe6314687f744b12bb1b/nebula3/gclient/__init__.py -------------------------------------------------------------------------------- /nebula3/gclient/net/AuthResult.py: -------------------------------------------------------------------------------- 1 | # --coding:utf-8-- 2 | # 3 | # Copyright (c) 2021 vesoft inc. All rights reserved. 4 | # 5 | # This source code is licensed under Apache 2.0 License. 6 | 7 | 8 | class AuthResult(object): 9 | def __init__(self, session_id, timezone_offset, timezone_name): 10 | self._session_id = session_id 11 | self._timezone_offset = timezone_offset 12 | self._timezone_name = timezone_name 13 | 14 | def get_session_id(self): 15 | return self._session_id 16 | 17 | def get_timezone_offset(self): 18 | return self._timezone_offset 19 | 20 | def get_timezone_name(self): 21 | return self._timezone_name 22 | -------------------------------------------------------------------------------- /nebula3/gclient/net/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
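# Usage sketch for this package (illustrative only, never called): initialise
# a pool against graphd, borrow a Session, run a statement, and release the
# session. The address, credentials and statements are assumptions for this
# sketch; adjust them to your deployment.
def _connection_pool_sketch():
    from nebula3.Config import Config

    pool = ConnectionPool()
    assert pool.init([("127.0.0.1", 9669)], Config())  # assumed local graphd address
    session = pool.get_session("root", "nebula")       # assumed credentials
    try:
        result = session.execute("SHOW HOSTS;")
        assert result.is_succeeded(), result.error_msg()
        # execute_py() (see gclient/net/base.py) accepts plain Python values
        # as parameters and raises ExecuteError when the statement fails.
        session.execute_py("RETURN abs($p) AS col;", {"p": -1})
    finally:
        session.release()
        pool.close()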
7 | 8 | 9 | from nebula3.common.ttypes import ErrorCode 10 | from nebula3.Exception import ( 11 | AuthFailedException, 12 | IOErrorException, 13 | NotValidConnectionException, 14 | InValidHostname, 15 | ) 16 | 17 | from nebula3.data.ResultSet import ResultSet 18 | 19 | from nebula3.gclient.net.AuthResult import AuthResult 20 | from nebula3.gclient.net.Session import Session 21 | from nebula3.gclient.net.Connection import Connection 22 | from nebula3.gclient.net.ConnectionPool import ConnectionPool 23 | from nebula3.gclient.net.base import BaseExecutor, ExecuteError 24 | -------------------------------------------------------------------------------- /nebula3/gclient/net/base.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from abc import abstractmethod 3 | from typing import Dict, Any, Optional 4 | from nebula3.data.ResultSet import ResultSet 5 | from nebula3.common.ttypes import ErrorCode, Value, NList, Date, Time, DateTime 6 | 7 | 8 | class ExecuteError(Exception): 9 | def __init__(self, stmt: str, param: Any, code: ErrorCode, msg: str): 10 | self.stmt = stmt 11 | self.param = param 12 | self.code = code 13 | self.msg = msg 14 | 15 | def __str__(self): 16 | return ( 17 | f"ExecuteError. err_code: {self.code}, err_msg: {self.msg}.\n" 18 | + f"Statement: \n{self.stmt}\n" 19 | + f"Parameter: \n{self.param}" 20 | ) 21 | 22 | 23 | class BaseExecutor: 24 | @abstractmethod 25 | def execute_parameter( 26 | self, stmt: str, params: Optional[Dict[str, Any]] 27 | ) -> ResultSet: 28 | pass 29 | 30 | @abstractmethod 31 | def execute_json_with_parameter( 32 | self, stmt: str, params: Optional[Dict[str, Any]] 33 | ) -> bytes: 34 | pass 35 | 36 | def execute(self, stmt: str) -> ResultSet: 37 | return self.execute_parameter(stmt, None) 38 | 39 | def execute_json(self, stmt: str) -> bytes: 40 | return self.execute_json_with_parameter(stmt, None) 41 | 42 | def execute_py( 43 | self, 44 | stmt: str, 45 | params: Optional[Dict[str, Any]] = None, 46 | ): 47 | """**Recommended** Execute a statement with parameters in Python type instead of thrift type.""" 48 | if params is None: 49 | result = self.execute_parameter(stmt, None) 50 | else: 51 | result = self.execute_parameter(stmt, _build_byte_param(params)) 52 | 53 | if not result.is_succeeded(): 54 | raise ExecuteError(stmt, params, result.error_code(), result.error_msg()) 55 | 56 | return result 57 | 58 | 59 | def _build_byte_param(params: dict) -> dict: 60 | byte_params = {} 61 | for k, v in params.items(): 62 | if isinstance(v, Value): 63 | byte_params[k] = v 64 | elif str(type(v)).startswith("nebula3.common.ttypes"): 65 | byte_params[k] = v 66 | else: 67 | byte_params[k] = _cast_value(v) 68 | return byte_params 69 | 70 | 71 | def _cast_value(value: Any) -> Value: 72 | """ 73 | Cast the value to nebula Value type 74 | ref: https://github.com/vesoft-inc/nebula/blob/master/src/common/datatypes/Value.cpp 75 | :param value: the value to be casted 76 | :return: the casted value 77 | """ 78 | casted_value = Value() 79 | if isinstance(value, bool): 80 | casted_value.set_bVal(value) 81 | elif isinstance(value, int): 82 | casted_value.set_iVal(value) 83 | elif isinstance(value, str): 84 | casted_value.set_sVal(value) 85 | elif isinstance(value, float): 86 | casted_value.set_fVal(value) 87 | elif isinstance(value, datetime.date): 88 | date_value = Date(year=value.year, month=value.month, day=value.day) 89 | casted_value.set_dVal(date_value) 90 | elif isinstance(value, datetime.time): 91 | time_value = Time( 
92 | hour=value.hour, 93 | minute=value.minute, 94 | sec=value.second, 95 | microsec=value.microsecond, 96 | ) 97 | casted_value.set_tVal(time_value) 98 | elif isinstance(value, datetime.datetime): 99 | datetime_value = DateTime( 100 | year=value.year, 101 | month=value.month, 102 | day=value.day, 103 | hour=value.hour, 104 | minute=value.minute, 105 | sec=value.second, 106 | microsec=value.microsecond, 107 | ) 108 | casted_value.set_dtVal(datetime_value) 109 | # TODO: add support for GeoSpatial 110 | elif isinstance(value, list): 111 | byte_list = [] 112 | for item in value: 113 | byte_list.append(_cast_value(item)) 114 | casted_value.set_lVal(NList(values=byte_list)) 115 | elif isinstance(value, dict): 116 | # TODO: add support for NMap 117 | raise TypeError("Unsupported type: dict") 118 | else: 119 | raise TypeError(f"Unsupported type: {type(value)}") 120 | return casted_value 121 | -------------------------------------------------------------------------------- /nebula3/graph/GraphService-fuzzer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import absolute_import 10 | from __future__ import division 11 | from __future__ import print_function 12 | from __future__ import unicode_literals 13 | 14 | import os 15 | import sys 16 | 17 | if (not sys.argv[0].endswith("par") and 18 | not sys.argv[0].endswith("xar") and 19 | os.getenv('PAR_UNPACK_TMP') == None): 20 | 21 | f = open(sys.argv[0], "r") 22 | 23 | f.readline() # This will be #!/bin/bash 24 | line = f.readline() 25 | f.close() 26 | 27 | # The par generator tool always has '# This par was made' as the 28 | # second line. See fbcode/tools/make_par/make_par.py 29 | if (not line.startswith('# This par was made')): 30 | print("""WARNING 31 | You are trying to run *-fuzzer.py which is 32 | incorrect as the paths are not set up correctly. 33 | Instead, you should generate your thrift file with 34 | thrift_library and then run the resulting 35 | *-fuzzer.par. 36 | For more information, please read 37 | http://fburl.com/python-remotes""") 38 | exit() 39 | 40 | from . import GraphService 41 | from . import ttypes 42 | from . import constants 43 | 44 | import nebula3.fbthrift.util.fuzzer 45 | nebula3.fbthrift.util.fuzzer.fuzz_service(GraphService, ttypes, constants) 46 | -------------------------------------------------------------------------------- /nebula3/graph/GraphService-remote: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import print_function 10 | from __future__ import absolute_import 11 | 12 | import os 13 | import sys 14 | 15 | if (not sys.argv[0].endswith("par") and 16 | not sys.argv[0].endswith("xar") and 17 | os.getenv('PAR_UNPACK_TMP') == None): 18 | 19 | f = open(sys.argv[0], "r") 20 | 21 | f.readline() # This will be #!/bin/bash 22 | line = f.readline() 23 | f.close() 24 | 25 | # The par generator tool always has '# This par was made' as the 26 | # second line. See fbcode/tools/make_par/make_par.py 27 | if (not line.startswith('# This par was made')): 28 | print("""WARNING 29 | You are trying to run *-remote.py which is 30 | incorrect as the paths are not set up correctly. 
31 | Instead, you should generate your thrift file with 32 | thrift_library and then run the resulting 33 | *-remote.par. 34 | For more information, please read 35 | http://fburl.com/python-remotes""") 36 | exit() 37 | 38 | from . import GraphService 39 | from . import ttypes 40 | 41 | from nebula3.fbthrift.util.remote import Function 42 | from nebula3.fbthrift.remote import Remote 43 | 44 | FUNCTIONS = { 45 | 'authenticate': Function('authenticate', 'GraphService', 'AuthResponse', [('binary', 'username', 'binary'), ('binary', 'password', 'binary')]), 46 | 'signout': Function('signout', 'GraphService', None, [('i64', 'sessionId', 'i64')]), 47 | 'execute': Function('execute', 'GraphService', 'ExecutionResponse', [('i64', 'sessionId', 'i64'), ('binary', 'stmt', 'binary')]), 48 | 'executeWithParameter': Function('executeWithParameter', 'GraphService', 'ExecutionResponse', [('i64', 'sessionId', 'i64'), ('binary', 'stmt', 'binary'), ('map', 'parameterMap', 'map')]), 49 | 'executeJson': Function('executeJson', 'GraphService', 'binary', [('i64', 'sessionId', 'i64'), ('binary', 'stmt', 'binary')]), 50 | 'executeJsonWithParameter': Function('executeJsonWithParameter', 'GraphService', 'binary', [('i64', 'sessionId', 'i64'), ('binary', 'stmt', 'binary'), ('map', 'parameterMap', 'map')]), 51 | 'verifyClientVersion': Function('verifyClientVersion', 'GraphService', 'VerifyClientVersionResp', [('VerifyClientVersionReq', 'req', 'VerifyClientVersionReq')]), 52 | } 53 | 54 | SERVICE_NAMES = ['GraphService', ] 55 | 56 | if __name__ == '__main__': 57 | Remote.run(FUNCTIONS, SERVICE_NAMES, GraphService, ttypes, sys.argv, default_port=9090) 58 | -------------------------------------------------------------------------------- /nebula3/graph/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | __all__ = ['ttypes', 'constants', 'GraphService'] 8 | -------------------------------------------------------------------------------- /nebula3/graph/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | 8 | from __future__ import absolute_import 9 | import sys 10 | from nebula3.fbthrift.util.Recursive import fix_spec 11 | from nebula3.fbthrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef 12 | from nebula3.fbthrift.protocol.TProtocol import TProtocolException 13 | 14 | 15 | import nebula3.common.ttypes 16 | 17 | 18 | from .ttypes import UTF8STRINGS, ProfilingStats, PlanNodeBranchInfo, Pair, PlanNodeDescription, PlanDescription, ExecutionResponse, AuthResponse, VerifyClientVersionResp, VerifyClientVersionReq 19 | 20 | -------------------------------------------------------------------------------- /nebula3/logger.py: -------------------------------------------------------------------------------- 1 | # --coding:utf-8-- 2 | # 3 | # Copyright (c) 2021 vesoft inc. All rights reserved. 4 | # 5 | # This source code is licensed under Apache 2.0 License. 
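# Usage sketch (illustrative only, never called): the client modules import
# the module-level logger created below, so applications can tune client
# verbosity with the standard logging machinery. The format string mirrors
# the one used in tests/conftest.py.
def _enable_debug_logging_sketch():
    import logging

    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s] %(levelname)-8s [%(filename)s:%(lineno)d]:%(message)s',
    )
    logging.getLogger('nebula3').setLevel(logging.DEBUG)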
6 | 7 | import logging 8 | 9 | logger = logging.getLogger(__name__) 10 | -------------------------------------------------------------------------------- /nebula3/meta/MetaService-fuzzer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import absolute_import 10 | from __future__ import division 11 | from __future__ import print_function 12 | from __future__ import unicode_literals 13 | 14 | import os 15 | import sys 16 | 17 | if (not sys.argv[0].endswith("par") and 18 | not sys.argv[0].endswith("xar") and 19 | os.getenv('PAR_UNPACK_TMP') == None): 20 | 21 | f = open(sys.argv[0], "r") 22 | 23 | f.readline() # This will be #!/bin/bash 24 | line = f.readline() 25 | f.close() 26 | 27 | # The par generator tool always has '# This par was made' as the 28 | # second line. See fbcode/tools/make_par/make_par.py 29 | if (not line.startswith('# This par was made')): 30 | print("""WARNING 31 | You are trying to run *-fuzzer.py which is 32 | incorrect as the paths are not set up correctly. 33 | Instead, you should generate your thrift file with 34 | thrift_library and then run the resulting 35 | *-fuzzer.par. 36 | For more information, please read 37 | http://fburl.com/python-remotes""") 38 | exit() 39 | 40 | from . import MetaService 41 | from . import ttypes 42 | from . import constants 43 | 44 | import nebula3.fbthrift.util.fuzzer 45 | nebula3.fbthrift.util.fuzzer.fuzz_service(MetaService, ttypes, constants) 46 | -------------------------------------------------------------------------------- /nebula3/meta/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | __all__ = ['ttypes', 'constants', 'MetaService'] 8 | -------------------------------------------------------------------------------- /nebula3/meta/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | 8 | from __future__ import absolute_import 9 | import sys 10 | from nebula3.fbthrift.util.Recursive import fix_spec 11 | from nebula3.fbthrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef 12 | from nebula3.fbthrift.protocol.TProtocol import TProtocolException 13 | 14 | 15 | import nebula3.common.ttypes 16 | 17 | 18 | from .ttypes import UTF8STRINGS, AlterSchemaOp, RoleType, GeoShape, IsolationLevel, HostStatus, SnapshotStatus, AlterSpaceOp, JobOp, JobType, JobStatus, ListHostType, HostRole, TaskResult, ConfigModule, ConfigMode, ListenerType, ExternalServiceType, QueryStatus, ID, ColumnTypeDef, ColumnDef, SchemaProp, Schema, IdName, SpaceDesc, SpaceItem, TagItem, AlterSchemaItem, EdgeItem, IndexParams, IndexItem, HostItem, UserItem, RoleItem, ExecResp, AlterSpaceReq, AdminJobReq, JobDesc, TaskDesc, AdminJobResult, AdminJobResp, Correlativity, StatsItem, CreateSpaceReq, CreateSpaceAsReq, DropSpaceReq, ClearSpaceReq, ListSpacesReq, ListSpacesResp, GetSpaceReq, GetSpaceResp, CreateTagReq, AlterTagReq, DropTagReq, ListTagsReq, ListTagsResp, GetTagReq, 
GetTagResp, CreateEdgeReq, AlterEdgeReq, GetEdgeReq, GetEdgeResp, DropEdgeReq, ListEdgesReq, ListEdgesResp, AddHostsReq, DropHostsReq, ListHostsReq, ListHostsResp, PartItem, ListPartsReq, ListPartsResp, GetPartsAllocReq, GetPartsAllocResp, GetWorkerIdReq, GetWorkerIdResp, GetSegmentIdReq, GetSegmentIdResp, HBResp, LeaderInfo, PartitionList, HBReq, ServiceInfo, AgentHBReq, AgentHBResp, IndexFieldDef, CreateTagIndexReq, DropTagIndexReq, GetTagIndexReq, GetTagIndexResp, ListTagIndexesReq, ListTagIndexesResp, CreateEdgeIndexReq, DropEdgeIndexReq, GetEdgeIndexReq, GetEdgeIndexResp, ListEdgeIndexesReq, ListEdgeIndexesResp, RebuildIndexReq, CreateUserReq, DropUserReq, AlterUserReq, GrantRoleReq, RevokeRoleReq, ListUsersReq, ListUsersResp, ListRolesReq, ListRolesResp, GetUserRolesReq, ChangePasswordReq, BalanceTask, ConfigItem, RegConfigReq, GetConfigReq, GetConfigResp, SetConfigReq, ListConfigsReq, ListConfigsResp, CreateSnapshotReq, DropSnapshotReq, ListSnapshotsReq, Snapshot, ListSnapshotsResp, ListIndexStatusReq, IndexStatus, ListIndexStatusResp, MergeZoneReq, DropZoneReq, DivideZoneReq, RenameZoneReq, AddHostsIntoZoneReq, GetZoneReq, GetZoneResp, ListZonesReq, Zone, ListZonesResp, AddListenerReq, RemoveListenerReq, ListListenerReq, ListenerInfo, ListListenerResp, GetStatsReq, GetStatsResp, HostBackupInfo, SpaceBackupInfo, BackupMeta, CreateBackupReq, CreateBackupResp, HostPair, RestoreMetaReq, PartInfo, RestoreMetaResp, ServiceClient, SignInServiceReq, SignOutServiceReq, ListServiceClientsReq, ListServiceClientsResp, FTIndex, CreateFTIndexReq, DropFTIndexReq, ListFTIndexesReq, ListFTIndexesResp, QueryDesc, Session, CreateSessionReq, CreateSessionResp, UpdateSessionsReq, UpdateSessionsResp, ListSessionsReq, ListSessionsResp, GetSessionReq, GetSessionResp, RemoveSessionReq, RemoveSessionResp, KillQueryReq, ReportTaskReq, ListClusterInfoResp, ListClusterInfoReq, GetMetaDirInfoResp, GetMetaDirInfoReq, VerifyClientVersionResp, VerifyClientVersionReq, SaveGraphVersionResp, SaveGraphVersionReq, SchemaVer, ClusterID 19 | 20 | -------------------------------------------------------------------------------- /nebula3/sclient/ScanResult.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
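# Usage sketch for this module (illustrative only, never called): a ScanResult
# is normally obtained from GraphStorageClient.scan_vertex()/scan_edge() and
# drained with the has_next()/next() loop implemented below. The meta address,
# space name and tag name are assumptions for this sketch, and the client
# construction may differ slightly between versions.
def _scan_vertex_sketch():
    from nebula3.mclient import MetaCache
    from nebula3.sclient.GraphStorageClient import GraphStorageClient

    meta_cache = MetaCache([("127.0.0.1", 9559)], 50000)  # assumed metad address
    client = GraphStorageClient(meta_cache)
    try:
        result = client.scan_vertex(space_name="demo", tag_name="person")
        while result.has_next():
            vertex_result = result.next()
            if vertex_result is None:  # next() returns None on an empty batch
                continue
            for node in vertex_result.as_nodes():
                print(node)
    finally:
        client.close()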
7 | 8 | 9 | import concurrent 10 | 11 | from nebula3.sclient import PartManager, do_scan_job, PartInfo 12 | 13 | from nebula3.sclient.BaseResult import BaseResult, VertexData, EdgeData 14 | from nebula3.logger import logger 15 | 16 | 17 | class VertexResult(BaseResult): 18 |     def __init__(self, data_sets, decode_type='utf-8'): 19 |         super().__init__(data_sets=data_sets, decode_type=decode_type, is_vertex=True) 20 | 21 |     def as_nodes(self): 22 |         """convert the vertexes to nodes 23 | 24 |         :return: list 25 |         """ 26 |         nodes = [] 27 |         for data_set in self._data_sets: 28 |             for row in data_set.rows: 29 |                 vertex_data = VertexData(row, data_set.column_names, self._decode_type) 30 |                 nodes.append(vertex_data.as_node()) 31 |         return nodes 32 | 33 | 34 | class EdgeResult(BaseResult): 35 |     def __init__(self, data_sets: list, decode_type='utf-8'): 36 |         super().__init__(data_sets=data_sets, decode_type=decode_type, is_vertex=False) 37 | 38 |     def as_relationships(self): 39 |         """convert the edges to relationships 40 | 41 |         :return: list 42 |         """ 43 |         relationships = [] 44 |         for data_set in self._data_sets: 45 |             for row in data_set.rows: 46 |                 edge_data = EdgeData(row, data_set.column_names, self._decode_type) 47 |                 relationships.append(edge_data.as_relationship()) 48 |         return relationships 49 | 50 | 51 | class ScanResult(object): 52 |     """the scan result""" 53 | 54 |     def __init__( 55 |         self, 56 |         graph_storage_client, 57 |         req, 58 |         part_addrs, 59 |         partial_success=False, 60 |         is_vertex=True, 61 |         decode_type='utf-8', 62 |     ): 63 |         self._is_vertex = is_vertex 64 |         self._decode_type = decode_type 65 |         self._data_sets = [] 66 |         self._graph_storage_client = graph_storage_client 67 |         self._partial_success = partial_success 68 |         self._req = req 69 |         part_infos = {} 70 |         for part_id in part_addrs.keys(): 71 |             part_infos[part_id] = PartInfo(part_id, part_addrs[part_id]) 72 |         self._parts_manager = PartManager(part_infos) 73 | 74 |     def has_next(self): 75 |         """whether there is more data to read; the first call always returns True 76 | 77 |         :return: True or False 78 |         """ 79 |         return self._parts_manager.has_next() 80 | 81 |     def next(self): 82 |         """get the next batch of scan data 83 | 84 |         :return: VertexResult or EdgeResult 85 |         """ 86 |         conns = self._graph_storage_client.get_conns() 87 |         num = len(conns) 88 |         if num == 0: 89 |             raise RuntimeError('There is no storage connection') 90 |         logger.debug('Graph storage client num: {}'.format(num)) 91 |         exceptions = [] 92 |         result = [] 93 |         with concurrent.futures.ThreadPoolExecutor(num) as executor: 94 |             do_scan = [] 95 |             for i, conn in enumerate(conns): 96 |                 future = executor.submit( 97 |                     do_scan_job, 98 |                     conns[i], 99 |                     self._parts_manager, 100 |                     self._req, 101 |                     self._is_vertex, 102 |                     self._partial_success, 103 |                 ) 104 |                 do_scan.append(future) 105 | 106 |             for future in concurrent.futures.as_completed(do_scan): 107 |                 if future.exception() is not None: 108 |                     logger.error(future.exception()) 109 |                     exceptions.append(future.exception()) 110 |                 else: 111 |                     ret, data_sets = future.result() 112 |                     if ret is not None: 113 |                         logger.error('Scan failed: {}'.format(ret)) 114 |                         exceptions.append(RuntimeError('Scan failed: {}'.format(ret))) 115 |                         continue 116 |                     if len(data_sets) != 0: 117 |                         result.extend(data_sets) 118 |         self._parts_manager.reset_jobs() 119 |         if len(exceptions) == 0: 120 |             if len(result) == 0: 121 |                 logger.warning('Get empty result') 122 |                 return None 123 |             else: 124 |                 if self._is_vertex: 125 |                     return VertexResult(result, self._decode_type) 126 |                 else: 127 |                     return EdgeResult(result, 
self._decode_type) 128 | else: 129 | raise exceptions[0] 130 | -------------------------------------------------------------------------------- /nebula3/sclient/net/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | 8 | 9 | import socket 10 | 11 | from nebula3.Exception import InValidHostname 12 | from nebula3.storage import GraphStorageService 13 | from nebula3.fbthrift.transport import TSocket, THeaderTransport, TTransport 14 | from nebula3.fbthrift.protocol import THeaderProtocol 15 | 16 | 17 | class GraphStorageConnection(object): 18 | def __init__(self, address, timeout, meta_cache): 19 | self._address = address 20 | self._timeout = timeout 21 | self._meta_cache = meta_cache 22 | self._connection = None 23 | self._ip = '' 24 | try: 25 | self._ip = socket.gethostbyname(address.host) 26 | if not isinstance(address.port, int): 27 | raise RuntimeError('Wrong port type: {}'.format(type(address.port))) 28 | except Exception: 29 | raise InValidHostname(str(address.host)) 30 | 31 | def open(self): 32 | try: 33 | self.close() 34 | s = TSocket.TSocket(self._address.host, self._address.port) 35 | if self._timeout > 0: 36 | s.setTimeout(self._timeout) 37 | 38 | buffered_transport = TTransport.TBufferedTransport(s) 39 | header_transport = THeaderTransport.THeaderTransport(buffered_transport) 40 | protocol = THeaderProtocol.THeaderProtocol(header_transport) 41 | header_transport.open() 42 | 43 | self._connection = GraphStorageService.Client(protocol) 44 | except Exception: 45 | raise 46 | 47 | def scan_vertex(self, req): 48 | return self._connection.scanVertex(req) 49 | 50 | def scan_edge(self, req): 51 | return self._connection.scanEdge(req) 52 | 53 | def storage_addr(self): 54 | return self._address 55 | 56 | def update_leader_info(self, space_id, part_id, address): 57 | self._meta_cache.update_storage_leader(space_id, part_id, address) 58 | 59 | def close(self): 60 | try: 61 | if self._connection is not None: 62 | self._connection._iprot.trans.close() 63 | except Exception: 64 | raise 65 | 66 | def __del__(self): 67 | self.close() 68 | -------------------------------------------------------------------------------- /nebula3/storage/GraphStorageService-fuzzer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import absolute_import 10 | from __future__ import division 11 | from __future__ import print_function 12 | from __future__ import unicode_literals 13 | 14 | import os 15 | import sys 16 | 17 | if (not sys.argv[0].endswith("par") and 18 | not sys.argv[0].endswith("xar") and 19 | os.getenv('PAR_UNPACK_TMP') == None): 20 | 21 | f = open(sys.argv[0], "r") 22 | 23 | f.readline() # This will be #!/bin/bash 24 | line = f.readline() 25 | f.close() 26 | 27 | # The par generator tool always has '# This par was made' as the 28 | # second line. See fbcode/tools/make_par/make_par.py 29 | if (not line.startswith('# This par was made')): 30 | print("""WARNING 31 | You are trying to run *-fuzzer.py which is 32 | incorrect as the paths are not set up correctly. 
33 | Instead, you should generate your thrift file with 34 | thrift_library and then run the resulting 35 | *-fuzzer.par. 36 | For more information, please read 37 | http://fburl.com/python-remotes""") 38 | exit() 39 | 40 | from . import GraphStorageService 41 | from . import ttypes 42 | from . import constants 43 | 44 | import nebula3.fbthrift.util.fuzzer 45 | nebula3.fbthrift.util.fuzzer.fuzz_service(GraphStorageService, ttypes, constants) 46 | -------------------------------------------------------------------------------- /nebula3/storage/GraphStorageService-remote: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import print_function 10 | from __future__ import absolute_import 11 | 12 | import os 13 | import sys 14 | 15 | if (not sys.argv[0].endswith("par") and 16 | not sys.argv[0].endswith("xar") and 17 | os.getenv('PAR_UNPACK_TMP') == None): 18 | 19 | f = open(sys.argv[0], "r") 20 | 21 | f.readline() # This will be #!/bin/bash 22 | line = f.readline() 23 | f.close() 24 | 25 | # The par generator tool always has '# This par was made' as the 26 | # second line. See fbcode/tools/make_par/make_par.py 27 | if (not line.startswith('# This par was made')): 28 | print("""WARNING 29 | You are trying to run *-remote.py which is 30 | incorrect as the paths are not set up correctly. 31 | Instead, you should generate your thrift file with 32 | thrift_library and then run the resulting 33 | *-remote.par. 34 | For more information, please read 35 | http://fburl.com/python-remotes""") 36 | exit() 37 | 38 | from . import GraphStorageService 39 | from . 
import ttypes 40 | 41 | from nebula3.fbthrift.util.remote import Function 42 | from nebula3.fbthrift.remote import Remote 43 | 44 | FUNCTIONS = { 45 | 'getNeighbors': Function('getNeighbors', 'GraphStorageService', 'GetNeighborsResponse', [('GetNeighborsRequest', 'req', 'GetNeighborsRequest')]), 46 | 'getDstBySrc': Function('getDstBySrc', 'GraphStorageService', 'GetDstBySrcResponse', [('GetDstBySrcRequest', 'req', 'GetDstBySrcRequest')]), 47 | 'getProps': Function('getProps', 'GraphStorageService', 'GetPropResponse', [('GetPropRequest', 'req', 'GetPropRequest')]), 48 | 'addVertices': Function('addVertices', 'GraphStorageService', 'ExecResponse', [('AddVerticesRequest', 'req', 'AddVerticesRequest')]), 49 | 'addEdges': Function('addEdges', 'GraphStorageService', 'ExecResponse', [('AddEdgesRequest', 'req', 'AddEdgesRequest')]), 50 | 'deleteEdges': Function('deleteEdges', 'GraphStorageService', 'ExecResponse', [('DeleteEdgesRequest', 'req', 'DeleteEdgesRequest')]), 51 | 'deleteVertices': Function('deleteVertices', 'GraphStorageService', 'ExecResponse', [('DeleteVerticesRequest', 'req', 'DeleteVerticesRequest')]), 52 | 'deleteTags': Function('deleteTags', 'GraphStorageService', 'ExecResponse', [('DeleteTagsRequest', 'req', 'DeleteTagsRequest')]), 53 | 'updateVertex': Function('updateVertex', 'GraphStorageService', 'UpdateResponse', [('UpdateVertexRequest', 'req', 'UpdateVertexRequest')]), 54 | 'updateEdge': Function('updateEdge', 'GraphStorageService', 'UpdateResponse', [('UpdateEdgeRequest', 'req', 'UpdateEdgeRequest')]), 55 | 'scanVertex': Function('scanVertex', 'GraphStorageService', 'ScanResponse', [('ScanVertexRequest', 'req', 'ScanVertexRequest')]), 56 | 'scanEdge': Function('scanEdge', 'GraphStorageService', 'ScanResponse', [('ScanEdgeRequest', 'req', 'ScanEdgeRequest')]), 57 | 'getUUID': Function('getUUID', 'GraphStorageService', 'GetUUIDResp', [('GetUUIDReq', 'req', 'GetUUIDReq')]), 58 | 'lookupIndex': Function('lookupIndex', 'GraphStorageService', 'LookupIndexResp', [('LookupIndexRequest', 'req', 'LookupIndexRequest')]), 59 | 'lookupAndTraverse': Function('lookupAndTraverse', 'GraphStorageService', 'GetNeighborsResponse', [('LookupAndTraverseRequest', 'req', 'LookupAndTraverseRequest')]), 60 | 'chainUpdateEdge': Function('chainUpdateEdge', 'GraphStorageService', 'UpdateResponse', [('UpdateEdgeRequest', 'req', 'UpdateEdgeRequest')]), 61 | 'chainAddEdges': Function('chainAddEdges', 'GraphStorageService', 'ExecResponse', [('AddEdgesRequest', 'req', 'AddEdgesRequest')]), 62 | 'chainDeleteEdges': Function('chainDeleteEdges', 'GraphStorageService', 'ExecResponse', [('DeleteEdgesRequest', 'req', 'DeleteEdgesRequest')]), 63 | 'get': Function('get', 'GraphStorageService', 'KVGetResponse', [('KVGetRequest', 'req', 'KVGetRequest')]), 64 | 'put': Function('put', 'GraphStorageService', 'ExecResponse', [('KVPutRequest', 'req', 'KVPutRequest')]), 65 | 'remove': Function('remove', 'GraphStorageService', 'ExecResponse', [('KVRemoveRequest', 'req', 'KVRemoveRequest')]), 66 | } 67 | 68 | SERVICE_NAMES = ['GraphStorageService', ] 69 | 70 | if __name__ == '__main__': 71 | Remote.run(FUNCTIONS, SERVICE_NAMES, GraphStorageService, ttypes, sys.argv, default_port=9090) 72 | -------------------------------------------------------------------------------- /nebula3/storage/StorageAdminService-fuzzer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE 
DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import absolute_import 10 | from __future__ import division 11 | from __future__ import print_function 12 | from __future__ import unicode_literals 13 | 14 | import os 15 | import sys 16 | 17 | if (not sys.argv[0].endswith("par") and 18 | not sys.argv[0].endswith("xar") and 19 | os.getenv('PAR_UNPACK_TMP') == None): 20 | 21 | f = open(sys.argv[0], "r") 22 | 23 | f.readline() # This will be #!/bin/bash 24 | line = f.readline() 25 | f.close() 26 | 27 | # The par generator tool always has '# This par was made' as the 28 | # second line. See fbcode/tools/make_par/make_par.py 29 | if (not line.startswith('# This par was made')): 30 | print("""WARNING 31 | You are trying to run *-fuzzer.py which is 32 | incorrect as the paths are not set up correctly. 33 | Instead, you should generate your thrift file with 34 | thrift_library and then run the resulting 35 | *-fuzzer.par. 36 | For more information, please read 37 | http://fburl.com/python-remotes""") 38 | exit() 39 | 40 | from . import StorageAdminService 41 | from . import ttypes 42 | from . import constants 43 | 44 | import nebula3.fbthrift.util.fuzzer 45 | nebula3.fbthrift.util.fuzzer.fuzz_service(StorageAdminService, ttypes, constants) 46 | -------------------------------------------------------------------------------- /nebula3/storage/StorageAdminService-remote: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Autogenerated by Thrift 4 | # 5 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 6 | # @generated 7 | # 8 | 9 | from __future__ import print_function 10 | from __future__ import absolute_import 11 | 12 | import os 13 | import sys 14 | 15 | if (not sys.argv[0].endswith("par") and 16 | not sys.argv[0].endswith("xar") and 17 | os.getenv('PAR_UNPACK_TMP') == None): 18 | 19 | f = open(sys.argv[0], "r") 20 | 21 | f.readline() # This will be #!/bin/bash 22 | line = f.readline() 23 | f.close() 24 | 25 | # The par generator tool always has '# This par was made' as the 26 | # second line. See fbcode/tools/make_par/make_par.py 27 | if (not line.startswith('# This par was made')): 28 | print("""WARNING 29 | You are trying to run *-remote.py which is 30 | incorrect as the paths are not set up correctly. 31 | Instead, you should generate your thrift file with 32 | thrift_library and then run the resulting 33 | *-remote.par. 34 | For more information, please read 35 | http://fburl.com/python-remotes""") 36 | exit() 37 | 38 | from . import StorageAdminService 39 | from . 
import ttypes 40 | 41 | from nebula3.fbthrift.util.remote import Function 42 | from nebula3.fbthrift.remote import Remote 43 | 44 | FUNCTIONS = { 45 | 'transLeader': Function('transLeader', 'StorageAdminService', 'AdminExecResp', [('TransLeaderReq', 'req', 'TransLeaderReq')]), 46 | 'addPart': Function('addPart', 'StorageAdminService', 'AdminExecResp', [('AddPartReq', 'req', 'AddPartReq')]), 47 | 'addLearner': Function('addLearner', 'StorageAdminService', 'AdminExecResp', [('AddLearnerReq', 'req', 'AddLearnerReq')]), 48 | 'removePart': Function('removePart', 'StorageAdminService', 'AdminExecResp', [('RemovePartReq', 'req', 'RemovePartReq')]), 49 | 'memberChange': Function('memberChange', 'StorageAdminService', 'AdminExecResp', [('MemberChangeReq', 'req', 'MemberChangeReq')]), 50 | 'waitingForCatchUpData': Function('waitingForCatchUpData', 'StorageAdminService', 'AdminExecResp', [('CatchUpDataReq', 'req', 'CatchUpDataReq')]), 51 | 'createCheckpoint': Function('createCheckpoint', 'StorageAdminService', 'CreateCPResp', [('CreateCPRequest', 'req', 'CreateCPRequest')]), 52 | 'dropCheckpoint': Function('dropCheckpoint', 'StorageAdminService', 'DropCPResp', [('DropCPRequest', 'req', 'DropCPRequest')]), 53 | 'blockingWrites': Function('blockingWrites', 'StorageAdminService', 'BlockingSignResp', [('BlockingSignRequest', 'req', 'BlockingSignRequest')]), 54 | 'getLeaderParts': Function('getLeaderParts', 'StorageAdminService', 'GetLeaderPartsResp', [('GetLeaderReq', 'req', 'GetLeaderReq')]), 55 | 'checkPeers': Function('checkPeers', 'StorageAdminService', 'AdminExecResp', [('CheckPeersReq', 'req', 'CheckPeersReq')]), 56 | 'addAdminTask': Function('addAdminTask', 'StorageAdminService', 'AddTaskResp', [('AddTaskRequest', 'req', 'AddTaskRequest')]), 57 | 'stopAdminTask': Function('stopAdminTask', 'StorageAdminService', 'StopTaskResp', [('StopTaskRequest', 'req', 'StopTaskRequest')]), 58 | 'clearSpace': Function('clearSpace', 'StorageAdminService', 'ClearSpaceResp', [('ClearSpaceReq', 'req', 'ClearSpaceReq')]), 59 | } 60 | 61 | SERVICE_NAMES = ['StorageAdminService', ] 62 | 63 | if __name__ == '__main__': 64 | Remote.run(FUNCTIONS, SERVICE_NAMES, StorageAdminService, ttypes, sys.argv, default_port=9090) 65 | -------------------------------------------------------------------------------- /nebula3/storage/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | __all__ = ['ttypes', 'constants', 'GraphStorageService', 'StorageAdminService', 'InternalStorageService'] 8 | -------------------------------------------------------------------------------- /nebula3/storage/constants.py: -------------------------------------------------------------------------------- 1 | # 2 | # Autogenerated by Thrift 3 | # 4 | # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING 5 | # @generated 6 | # 7 | 8 | from __future__ import absolute_import 9 | import sys 10 | from nebula3.fbthrift.util.Recursive import fix_spec 11 | from nebula3.fbthrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef 12 | from nebula3.fbthrift.protocol.TProtocol import TProtocolException 13 | 14 | 15 | import nebula3.common.ttypes 16 | import nebula3.meta.ttypes 17 | 18 | 19 | from .ttypes import UTF8STRINGS, StatType, OrderDirection, 
EdgeDirection, ScanType, EngineSignType, RequestCommon, PartitionResult, ResponseCommon, StatProp, Expr, EdgeProp, VertexProp, OrderBy, TraverseSpec, GetNeighborsRequest, GetNeighborsResponse, GetDstBySrcRequest, GetDstBySrcResponse, ExecResponse, GetPropRequest, GetPropResponse, NewTag, NewVertex, EdgeKey, NewEdge, AddVerticesRequest, AddEdgesRequest, DeleteVerticesRequest, DeleteEdgesRequest, DelTags, DeleteTagsRequest, UpdateResponse, UpdatedProp, UpdateVertexRequest, UpdateEdgeRequest, GetUUIDReq, GetUUIDResp, LookupIndexResp, IndexColumnHint, IndexQueryContext, IndexSpec, LookupIndexRequest, LookupAndTraverseRequest, ScanCursor, ScanVertexRequest, ScanEdgeRequest, ScanResponse, TaskPara, KVGetRequest, KVGetResponse, KVPutRequest, KVRemoveRequest, AdminExecResp, TransLeaderReq, AddPartReq, AddLearnerReq, RemovePartReq, MemberChangeReq, CatchUpDataReq, GetLeaderReq, CreateCPRequest, CreateCPResp, DropCPRequest, DropCPResp, BlockingSignRequest, BlockingSignResp, GetLeaderPartsResp, CheckPeersReq, RebuildIndexRequest, ListClusterInfoResp, ListClusterInfoReq, AddTaskRequest, AddTaskResp, StopTaskRequest, StopTaskResp, ClearSpaceReq, ClearSpaceResp, ChainAddEdgesRequest, ChainUpdateEdgeRequest, ChainDeleteEdgesRequest 20 | 21 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "nebula3-python" 3 | version = "3.8.2" 4 | description = "Python client for NebulaGraph v3" 5 | authors = [ 6 | {name = "vesoft-inc", email = "info@vesoft.com"}, 7 | ] 8 | dependencies = [ 9 | "future >= 0.18.0", 10 | "httplib2 >= 0.20.0", 11 | "pytz >= 2021.1", 12 | "six >= 1.16.0", 13 | "httpx[http2] >= 0.22.0", 14 | ] 15 | requires-python = ">=3.6.2" 16 | readme = "README.md" 17 | license = {text = "Apache 2.0"} 18 | 19 | [project.urls] 20 | Homepage = "https://github.com/vesoft-inc/nebula-python" 21 | 22 | [build-system] 23 | requires = ["pdm-backend"] 24 | build-backend = "pdm.backend" 25 | 26 | [tool.black] 27 | extend-exclude = ''' 28 | /( 29 | nebula3/common 30 | | nebula3/storage 31 | | nebula3/graph 32 | | nebula3/meta 33 | | nebula3/common 34 | | nebula3/fbthrift 35 | | docs/source 36 | )/ 37 | ''' 38 | 39 | [tool.pdm] 40 | distribution = true 41 | [tool.pdm.build] 42 | includes = ["nebula3"] 43 | 44 | [tool.pdm.dev-dependencies] 45 | dev = [ 46 | "black==22.8.0", 47 | ] 48 | test = [ 49 | "pytest; python_full_version >= \"3.7.1\"", 50 | "pytest-cov; python_full_version >= \"3.7.1\"", 51 | ] 52 | example = [ 53 | "prettytable; python_version >= \"3.10\"", 54 | "pandas; python_version >= \"3.10\"", 55 | ] 56 | 57 | [tool.pdm.scripts] 58 | fmt = "black -S ." 59 | fmt-check = "black -S --check ." 60 | test = 'pytest -s -v -k "not SSL" --cov=nebula3/ --cov-report=xml --cov-append' 61 | test-ssl = 'pytest -s -v -k "SSL" --cov=nebula3/ --cov-report=xml --cov-append' 62 | 63 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2020 vesoft inc. All rights reserved. 4 | # 5 | # This source code is licensed under Apache 2.0 License. 
6 | 7 | import sys 8 | from setuptools import setup, find_packages 9 | from pathlib import Path 10 | 11 | base_dir = Path(__file__).parent 12 | long_description = (base_dir / "README.md").read_text() 13 | 14 | requirements = [ 15 | "httplib2 >= 0.20.0", 16 | "future >= 0.18.0", 17 | "six >= 1.16.0", 18 | "pytz >= 2021.1", 19 | "httpx[http2] >= 0.22.0", 20 | ] 21 | 22 | if sys.version_info < (3, 7): 23 | # httpcore-->anyio-->contextvars when it's < 3.7 24 | # while setuptools doesn't handle the dependency well 25 | requirements.append("contextvars==2.4") 26 | 27 | setup( 28 | name="nebula3-python", 29 | version="3.8.2", 30 | license="Apache 2.0", 31 | author="vesoft-inc", 32 | author_email="info@vesoft.com", 33 | description="Python client for NebulaGraph v3", 34 | long_description=long_description, 35 | long_description_content_type="text/markdown", 36 | url="https://github.com/vesoft-inc/nebula-python", 37 | install_requires=requirements, 38 | packages=find_packages(), 39 | platforms=["3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12"], 40 | package_dir={"nebula3": "nebula3"}, 41 | ) 42 | -------------------------------------------------------------------------------- /tests/.env: -------------------------------------------------------------------------------- 1 | TZ=UTC 2 | 3 | ca_path=/secrets/root.crt 4 | cert_path=/secrets/server.crt 5 | key_path=/secrets/server.key 6 | enable_ssl=false 7 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logging.basicConfig( 4 | level=logging.INFO, 5 | format='[%(asctime)s] %(levelname)-8s [%(filename)s:%(lineno)d]:%(message)s', 6 | ) 7 | 8 | logging.getLogger('nebula3').setLevel(logging.DEBUG) 9 | -------------------------------------------------------------------------------- /tests/secrets/client.cnf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 2048 3 | prompt = no 4 | distinguished_name = dn 5 | req_extensions = req_ext 6 | 7 | [ dn ] 8 | C = CH 9 | O = test-ca 10 | CN = client 11 | 12 | [ v3_ca ] 13 | basicConstraints = critical,CA:TRUE 14 | subjectKeyIdentifier = hash 15 | authorityKeyIdentifier = keyid:always,issuer:always 16 | 17 | [ req_ext ] 18 | subjectAltName = @alt_names 19 | 20 | [alt_names] 21 | IP.1 = 127.0.0.1 22 | -------------------------------------------------------------------------------- /tests/secrets/client.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIC/zCCAeegAwIBAgIUDufwf8ZK2xZXqZUF/P/tbo1psVEwDQYJKoZIhvcNAQEL 3 | BQAwLjELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDTALBgNVBAMMBHJv 4 | b3QwHhcNMjQwMzA3MDI1NDA0WhcNMzQwMzA1MDI1NDA0WjAwMQswCQYDVQQGEwJD 5 | SDEQMA4GA1UECgwHdGVzdC1jYTEPMA0GA1UEAwwGY2xpZW50MIIBIjANBgkqhkiG 6 | 9w0BAQEFAAOCAQ8AMIIBCgKCAQEApdLcd4tpW/t5cR6qYgHvDO5lUEdIScOoO7e+ 7 | GeIII7kAPJoPRMx5MtYBZfxJL3GWQkO1LxQaOOQSeowwCipQAsV2BkU72YR/Su39 8 | 9rFUvXtaX/m+E4fspiLnA/sQRxQGJWP03a7QnxMo9uQoFDgp9BDxWTJXE9h2yGAt 9 | zX8WPCuElXyroagXNqM/gVim6vMjfAS1wcmeJA2dGhuohABVZYPc8sqd90KOdEwM 10 | NO/tTEewmS38FWqQguuXUKiYgNgbJPk17OXlKRkhtNNuomw4Rxy9d1FX+OsMsFw1 11 | jXeGJmSrodlSWIm6M3tr+os3OZdhK/9aENaU1ug34gr4JvKQgwIDAQABoxMwETAP 12 | BgNVHREECDAGhwR/AAABMA0GCSqGSIb3DQEBCwUAA4IBAQAyU86c/4xbmwKbei5a 13 | EmCgnRZULw0dHv53PC3DjGJFKRy9XesV+aH9p+yDI9+u32mChHyDNYLN8Xr96PYh 14 | EwJ8eFSBqqCGxK8jIEyxBWAMLEx/XhtBwwQ7Ng6e0zn4rWLTp25O34AI75bUeMh2 15 | 
J4VNpoNClKo/xdOBrTbpA8l/E0dXEHH3KsMJEEFwLKItAiuuHdgruZjq6QS1xyhf 16 | 56r6ze8tWtbllnASfoPWDrEXuSZK5EapeljNOUvvVmaOuCouGPcbSOt40o609N27 17 | nuvm7NnaswWj4M2qnxcUxyvOgryl/MQC7gEHr+1skOZP1m8+YEBWtMB85iC03BKF 18 | j4ah 19 | -----END CERTIFICATE----- 20 | -------------------------------------------------------------------------------- /tests/secrets/client.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIClzCCAX8CAQAwMDELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDzAN 3 | BgNVBAMMBmNsaWVudDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKXS 4 | 3HeLaVv7eXEeqmIB7wzuZVBHSEnDqDu3vhniCCO5ADyaD0TMeTLWAWX8SS9xlkJD 5 | tS8UGjjkEnqMMAoqUALFdgZFO9mEf0rt/faxVL17Wl/5vhOH7KYi5wP7EEcUBiVj 6 | 9N2u0J8TKPbkKBQ4KfQQ8VkyVxPYdshgLc1/FjwrhJV8q6GoFzajP4FYpurzI3wE 7 | tcHJniQNnRobqIQAVWWD3PLKnfdCjnRMDDTv7UxHsJkt/BVqkILrl1ComIDYGyT5 8 | Nezl5SkZIbTTbqJsOEccvXdRV/jrDLBcNY13hiZkq6HZUliJujN7a/qLNzmXYSv/ 9 | WhDWlNboN+IK+CbykIMCAwEAAaAiMCAGCSqGSIb3DQEJDjETMBEwDwYDVR0RBAgw 10 | BocEfwAAATANBgkqhkiG9w0BAQsFAAOCAQEAo4dfWo//pOSZarRDBffwHdRVuDhl 11 | /g9PmosAIn8nnMi3gAof0PssPTdhFQbpSpCkEceYNC4aSJSn+asOpv8Z9X3mh6C1 12 | z8GIZW/7iof0QU5W3QQHAw6uvIalMS9wJOO+Am5zKZsZJuncTcw5hA8tt3ruqXkx 13 | o26OUA76QUUI9aIfAo0tRFbuSZMzWsiYDRyiQvq+6kaObC9pvSV5qxcqbeDICR1Q 14 | g+9YYdVW6I93wu9mCIZ7EBgjLgmKQ/wcQDrwlCfM7tY84cSEzCrI7xQD+84k57wt 15 | VGnjEnocbNyNkFrKDwVPgRoJkCAWRlb/Kw0kriOEbEzI3mgYml+tgGk7fQ== 16 | -----END CERTIFICATE REQUEST----- 17 | -------------------------------------------------------------------------------- /tests/secrets/client.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEApdLcd4tpW/t5cR6qYgHvDO5lUEdIScOoO7e+GeIII7kAPJoP 3 | RMx5MtYBZfxJL3GWQkO1LxQaOOQSeowwCipQAsV2BkU72YR/Su399rFUvXtaX/m+ 4 | E4fspiLnA/sQRxQGJWP03a7QnxMo9uQoFDgp9BDxWTJXE9h2yGAtzX8WPCuElXyr 5 | oagXNqM/gVim6vMjfAS1wcmeJA2dGhuohABVZYPc8sqd90KOdEwMNO/tTEewmS38 6 | FWqQguuXUKiYgNgbJPk17OXlKRkhtNNuomw4Rxy9d1FX+OsMsFw1jXeGJmSrodlS 7 | WIm6M3tr+os3OZdhK/9aENaU1ug34gr4JvKQgwIDAQABAoIBAA8ZDse3osUkNV9u 8 | SlcYE/rabdEMXHGS5etiBGX45YLTIkliQYejElnlkEUH3mX6cpLf9UwvCWAqq+AK 9 | iYkzZwuJjQBvJ07buHdLCfd/F9cUtxrL81l1478QqHX9UvMVyKV431+bvhTtuqoY 10 | BeopXvCA5miXo8QDznTaBPGT46NgsWm3jcnZ4u6w8guzFZnsRVrikx2QYkqhQ2cS 11 | m7JzsXDHoa0nRlknL5uiW+0pRbLca55U1K1eOtcXc1+I7iwU3ovtfoqX4z0ZOK34 12 | qC/o9A+kaC0z9IWqnv7HCfXoA60kasiMKcWFZsZdydHv7TDBLL/T/l3jReUeYlaL 13 | 5J3PSfkCgYEA1q4r8uJ2WRnsm1tAkjThCHocQbF8RALQxz7uCHAc6xF5Q+zgTZ/p 14 | cKRjjOif5+F2SLWf5i4k++yw9LGxuH8qCqM8ANl3+FG4EjdBaVo1SYG/AtM/deKn 15 | 5kKgBrG2Krrs+dfrTzsQ8oFPN5FjzGFdOiZvsTFBhqDHLD0jW0WpM78CgYEAxb1m 16 | n4uupDeTYvO4hhpA/HQ/Hd3IygTw5XVTsJe2HWIyOF9piqClUmAl8+KXi71biB6R 17 | 3FHNkqR10iheU2oEcYSylxrKTT5/UP6baNWzkOrXy3m4JIQ3Md3WlFNTdV6Bx6Cz 18 | 3YvEXJ74lbnHZVLpuzUQvs08QaKINOtiQ7K9xD0CgYEAwmqzhF/Qm/qdS0iFnqLi 19 | 1MF5S4kHp7QBDSARg3S8y6lkb75amBHk380QFrEo9184tro9F8DcX4XH76Ho99O5 20 | 6C5SDdbkDe87ystdoqI+NdaZfF6BTyo3sZDOyaEw/1uPdugywusT+vpeGk3B2nqm 21 | ah4a8pZP6DM8jPTFS0g35pMCgYAMX6a9UxtRkVy6s2+XjKsGYSvV7Zvz8E02vpnH 22 | EidGPW/RET9kelr+BaA6W7KJ/wZsRSVxePj5NSteREJqy9aNV/Vo+TcxkDidU+Fb 23 | tQuwhNGOHQbF2jQt/ajAzVoKnShX2pmGwok+FoCNtfthPsf7e/2ZQvSDUu1dCmGy 24 | xMuVLQKBgFTzVoLWViXwC6tDAuAfZSljDiePEaE7W/SXss+aXWgcTsp0iqDu+0yQ 25 | aCiY2ZzhqJ31XybJ0zRSCSIiJIMrGSrHuyJzPkEuew5geB2xxfUxEPNtm0jMUrKL 26 | V/7bNhX1HocEIBbvAyDBBdzS7hpDAU24aPJdIP9eyl1hvtbXcWTz 27 | -----END RSA PRIVATE KEY----- 28 | 
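# Usage sketch (illustrative only): how a test client might load the secrets
# in this directory with the Python standard library. Whether an
# ssl.SSLContext or individual file paths are handed to the nebula3 SSL
# configuration depends on the client API in use; only the stdlib calls below
# are shown as fact.
def _build_test_ssl_context_sketch():
    import ssl

    ctx = ssl.create_default_context(
        ssl.Purpose.SERVER_AUTH, cafile="tests/secrets/root.crt"
    )
    ctx.load_cert_chain(
        certfile="tests/secrets/client.crt", keyfile="tests/secrets/client.key"
    )
    # The test certificates only carry an IP SAN (127.0.0.1); relax hostname
    # checking for local test setups that connect by other names.
    ctx.check_hostname = False
    return ctx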
-------------------------------------------------------------------------------- /tests/secrets/root.cnf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 2048 3 | prompt = no 4 | distinguished_name = dn 5 | req_extensions = req_ext 6 | 7 | [ dn ] 8 | C = CH 9 | O = test-ca 10 | CN = root 11 | 12 | [ v3_ca ] 13 | basicConstraints = critical,CA:TRUE 14 | subjectKeyIdentifier = hash 15 | authorityKeyIdentifier = keyid:always,issuer:always 16 | 17 | [ req_ext ] 18 | subjectAltName = @alt_names 19 | 20 | [alt_names] 21 | IP.1 = 127.0.0.1 22 | -------------------------------------------------------------------------------- /tests/secrets/root.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDiTCCAnGgAwIBAgIUHIvWv3PL7M7JWCr6HWBlX0L56sswDQYJKoZIhvcNAQEL 3 | BQAwLjELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDTALBgNVBAMMBHJv 4 | b3QwHhcNMjQwMzA3MDI1MzU0WhcNMzQwMzA1MDI1MzU0WjAuMQswCQYDVQQGEwJD 5 | SDEQMA4GA1UECgwHdGVzdC1jYTENMAsGA1UEAwwEcm9vdDCCASIwDQYJKoZIhvcN 6 | AQEBBQADggEPADCCAQoCggEBALifTxrLCagG5c73511A+QsYRUxLCZQ8w9apEGEc 7 | Zcd7j81kNr9RHqpg9yCSEJ+48ruoBYlCUXnBjZvLEI9uAi6Fxe17qPeJAyol8sQw 8 | yVY7AtjeOXidvDEWdhTiXEdneUlOF5ESYp9FxwJH/Tim/vEGyZiS/ufeC7hrkTfK 9 | hC83tindfFIe95xXptbGvbEjURWn3VTWl3/hGpvmeflwDwM7iZv48qJRvdlKvMsZ 10 | wQz8b7/DEUQgV1wRMpTBbYWDop7cnLBPW9P8r0A0JaZisvoqv1Fif2r901DLAs7E 11 | 3hX9EJ1TcoGDPtjUxj68WQEdzwBBuYXvaoP7YQSHh8Ic7hkCAwEAAaOBnjCBmzAP 12 | BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTLtoOhHdz9kCVWWi2vswFD3Dm8PzBp 13 | BgNVHSMEYjBggBTLtoOhHdz9kCVWWi2vswFD3Dm8P6EypDAwLjELMAkGA1UEBhMC 14 | Q0gxEDAOBgNVBAoMB3Rlc3QtY2ExDTALBgNVBAMMBHJvb3SCFByL1r9zy+zOyVgq 15 | +h1gZV9C+erLMA0GCSqGSIb3DQEBCwUAA4IBAQBkPyxzKXuLS5TyIwUw0V/84IPk 16 | Z5qG0NtOfc4xBX16mozkVXDJ+QboC/GTEwPP8UAllFyoQc4FR4igdLdLotztTI20 17 | SG7isrBjMB4S67rq8bRnlO1EgWDQ7nFvDRXQjVzFi/l+moc11uCfAzRTtB7ozaz9 18 | qhEvBrI5HmwRSaFS6TkeWQ2sATGmc7VTULC3T3o6/xyh1xub9ENyJej0sqpxLekH 19 | QH5t5EIN/82CHb6OYzFr/Lna/7wt+h3uJaPUJ6iVc8zLM8/yABv6UL7FjpnrU8oY 20 | PL9xWF9N6VewqDA4WyHkuRoknvb6qWOFL8HCPTWUnwaZ4Z0fzIIHIrchQAxj 21 | -----END CERTIFICATE----- 22 | -------------------------------------------------------------------------------- /tests/secrets/root.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIClTCCAX0CAQAwLjELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDTAL 3 | BgNVBAMMBHJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC4n08a 4 | ywmoBuXO9+ddQPkLGEVMSwmUPMPWqRBhHGXHe4/NZDa/UR6qYPcgkhCfuPK7qAWJ 5 | QlF5wY2byxCPbgIuhcXte6j3iQMqJfLEMMlWOwLY3jl4nbwxFnYU4lxHZ3lJTheR 6 | EmKfRccCR/04pv7xBsmYkv7n3gu4a5E3yoQvN7Yp3XxSHvecV6bWxr2xI1EVp91U 7 | 1pd/4Rqb5nn5cA8DO4mb+PKiUb3ZSrzLGcEM/G+/wxFEIFdcETKUwW2Fg6Ke3Jyw 8 | T1vT/K9ANCWmYrL6Kr9RYn9q/dNQywLOxN4V/RCdU3KBgz7Y1MY+vFkBHc8AQbmF 9 | 72qD+2EEh4fCHO4ZAgMBAAGgIjAgBgkqhkiG9w0BCQ4xEzARMA8GA1UdEQQIMAaH 10 | BH8AAAEwDQYJKoZIhvcNAQELBQADggEBAAfhFkpaOZXWKyyjlra+EAB4prOb9eZ2 11 | ECdLkaczjXUGC8JYb+653sxWY/Ntf0Yh2YloGa6kGweJxG6xXXgq5WaVC+tT22kC 12 | WHWwCTI6TlbYJSQqkNDM/E/k+rlYVpKyCqLkiPq4aNmN7w0LcDOX1GSQzJ1ujJPr 13 | 3R5TiIdlNpHxRBU+lD2Mq4rttBLXrFfUWHgAh2n3kYMdxG4ZQv98OWlTKx668p+j 14 | Nov0tF0PiJ2jVqlSerCLJ0hreOc1/+UjtAYrUDzF5F2rdOO6AZxNIkcgIZd+4cei 15 | 5vppVLv9BjvDbnZgroKHIlxko+LCDf3SWx8g1ZRlCzZvw6ZC9zTFByU= 16 | -----END CERTIFICATE REQUEST----- 17 | -------------------------------------------------------------------------------- /tests/secrets/root.key: 
-------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAuJ9PGssJqAblzvfnXUD5CxhFTEsJlDzD1qkQYRxlx3uPzWQ2 3 | v1EeqmD3IJIQn7jyu6gFiUJRecGNm8sQj24CLoXF7Xuo94kDKiXyxDDJVjsC2N45 4 | eJ28MRZ2FOJcR2d5SU4XkRJin0XHAkf9OKb+8QbJmJL+594LuGuRN8qELze2Kd18 5 | Uh73nFem1sa9sSNRFafdVNaXf+Eam+Z5+XAPAzuJm/jyolG92Uq8yxnBDPxvv8MR 6 | RCBXXBEylMFthYOintycsE9b0/yvQDQlpmKy+iq/UWJ/av3TUMsCzsTeFf0QnVNy 7 | gYM+2NTGPrxZAR3PAEG5he9qg/thBIeHwhzuGQIDAQABAoIBABKlbFzdYsVIpQ8p 8 | NIHTnROEyQLylS4WkQTuIExejNz/2r84vtrFu54jmvH8LpejG0jeqGh7fTUZ2k6A 9 | zNRfnPLL6fiZQ7B0cxBfsswjzUHbRedXUmCfNbGTfiuPzRgdGD/SFkk9nR5O8tGF 10 | Kcr9+sOnFtdFCV+rqm1e19TVrFIEV4M2aAycAAZJjw3ik1FIA1wv0KWca3TjlDxW 11 | uYCGQTOXp3huLdeE6UYDIj+Rambka2gIgXRhB8ne/qKzsPdXyQmQ7TaaMWIsipQT 12 | q0u3nM/k+dlwgDCTlCx/v7UIS6qksRXixcHQ84iluYoeGL9/4kKjcGr8K8ZC4Ccb 13 | 3swy/I0CgYEA3MqujCFWxmWdpP8slIL0+hbPyQTVZQ4lFBCU4PFh81+tvBiGi3rG 14 | ExLy38vJvzxcTRphKF88ZgDrpw304RjF6se0s0cqMo87zi/dlszQtXSIECGf8Mwh 15 | 1NoOHVqkpW7Pt24QNnMBCH+5nwGuYyB6XYTJoFruHtFinxkzPJT2J7MCgYEA1hAX 16 | zK9UaKoxMfBKyez1p2VyNjciEcoR/6m4VC12+HziCbDd3+AogirZCGdncjxYuhX5 17 | J6HEkdQshqBaAGAuSnMZ4XYp13QYrzl6+hXBJ7y4FRVKrJeFDw2UUQ9pfH55vroU 18 | Bjwem08x40o39+d/mXQh2PLS7ow+PQb7ayjALQMCgYAxrp3mRF49WIJHrW+/n1Q2 19 | rkllR2QpUCyBvlVx5H8Q/5edertTyXWmtx3VxAqKsK9oXiY8qmlx3oUoWxieSi/G 20 | 69c6fCn/pIFlnRm0pDPWkPlI//f4fEakmciRTGM+Ipg1A1w8lFZILk17fSpbfeRr 21 | BMVGAxAP+TfC6y2/VRmqxwKBgQC6jd3f8XETojcreWoEXrGcCGon25aigEN9V1Eg 22 | c1r6pgNsRKqowDctceAhVdSulu8QhbPVGUMxgAKp9TdamnfdREBq4EsIzNHya5rE 23 | No9WbT7UiPDCR26bts7qcg8Rgzvgv1RSNrTJz5P/rJ/oOeVXPAF/i+fOI4TeNKaK 24 | NkJ9NwKBgQDInmMPfhdwuLcqj+RQMxgUtdjebcIwiuMZyICrj1OTYLExhnSRcD41 25 | vz3rRuMEQecMrsLfBhvm2mCyYyVaJxDNBHpC1yBjdMDgVBESLircK7hSmFb8Lkdb 26 | z6xV69xHm1olv18Q6b6CsTq+4okMZVD2SCCZvQtG9ZfV3+q9A+D3Yw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/secrets/root.srl: -------------------------------------------------------------------------------- 1 | 0EE7F07FC64ADB1657A99505FCFFED6E8D69B151 2 | -------------------------------------------------------------------------------- /tests/secrets/run.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # used to generate certs for nebula-graph 4 | # usage: 5 | # 1. ./cert.sh root 6 | # 2. ./cert.sh server 7 | # 3. 
./cert.sh client 8 | 9 | # config: 10 | # server: 11 | # --cert_path=server.crt 12 | # --key_path=server.key 13 | # --ca_path=root.crt 14 | 15 | # client: follow per client repo 16 | 17 | set -eu 18 | DN_C=CH 19 | DN_O=vesoft 20 | DN_OU=Eng 21 | DN_CN= 22 | DN_EMAIL=harris.chu@xxxx.com 23 | 24 | SERVER_ADDRESS_IP="" 25 | SERVER_ADDRESS_DNS="localhost graphd0 graphd1 graphd2" 26 | CLIENT_ADDRESS_IP="" 27 | CLIENT_ADDRESS_DNS="" 28 | 29 | if [ $# != 1 ]; then 30 | echo "USAGE: $0 " 31 | exit 1; 32 | fi 33 | 34 | function gen_cert { 35 | cert_type=$1 36 | subject_name_ip=$2 37 | subject_name_dns=$3 38 | cat << EOF > ${cert_type}.cnf 39 | [ req ] 40 | default_bits = 2048 41 | prompt = no 42 | distinguished_name = dn 43 | req_extensions = req_ext 44 | 45 | [ dn ] 46 | C = CH 47 | O = test-ca 48 | CN = ${cert_type} 49 | 50 | [ v3_ca ] 51 | basicConstraints = critical,CA:TRUE 52 | subjectKeyIdentifier = hash 53 | authorityKeyIdentifier = keyid:always,issuer:always 54 | 55 | [ req_ext ] 56 | subjectAltName = @alt_names 57 | 58 | [alt_names] 59 | IP.1 = 127.0.0.1 60 | EOF 61 | if [ "$subject_name_ip" != "" ];then 62 | start=2 63 | for i in ${subject_name_ip}; do 64 | cat << EOF >> ${cert_type}.cnf 65 | IP.${start} = ${i} 66 | EOF 67 | start=$(($start+1)) 68 | done 69 | fi 70 | if [ "$subject_name_dns" != "" ];then 71 | start=1 72 | for i in ${subject_name_dns}; do 73 | cat << EOF >> ${cert_type}.cnf 74 | DNS.${start} = ${i} 75 | EOF 76 | start=$(($start+1)) 77 | done 78 | fi 79 | openssl genrsa -out ${cert_type}.key 2048 80 | openssl req -new -config ${cert_type}.cnf -out ${cert_type}.csr -key ${cert_type}.key 81 | if [ ${cert_type} == "root" ]; then 82 | openssl x509 -req -in ${cert_type}.csr -out ${cert_type}.crt -extfile ${cert_type}.cnf -extensions v3_ca -signkey ${cert_type}.key -CAcreateserial -days 3650 83 | else 84 | openssl x509 -req -in ${cert_type}.csr -out ${cert_type}.crt -CA root.crt -CAkey root.key -CAcreateserial -days 3650 -extfile ${cert_type}.cnf -extensions req_ext 85 | fi 86 | 87 | } 88 | 89 | cert_type=${1} 90 | if [ ${cert_type} != "root" ] && [ ! 
-e root.crt ];then 91 | echo "root.crt not exist" 92 | exit 1 93 | fi 94 | echo "generate ${cert_type} cert" 95 | if [ ${cert_type} == "server" ]; then 96 | gen_cert ${cert_type} "${SERVER_ADDRESS_IP[*]}" "${SERVER_ADDRESS_DNS[*]}" 97 | elif [ ${cert_type} == "client" ]; then 98 | gen_cert ${cert_type} "${CLIENT_ADDRESS_IP[*]}" "${CLIENT_ADDRESS_DNS[*]}" 99 | else 100 | gen_cert ${cert_type} "" "" 101 | fi 102 | echo "finish" 103 | -------------------------------------------------------------------------------- /tests/secrets/server.cnf: -------------------------------------------------------------------------------- 1 | [ req ] 2 | default_bits = 2048 3 | prompt = no 4 | distinguished_name = dn 5 | req_extensions = req_ext 6 | 7 | [ dn ] 8 | C = CH 9 | O = test-ca 10 | CN = server 11 | 12 | [ v3_ca ] 13 | basicConstraints = critical,CA:TRUE 14 | subjectKeyIdentifier = hash 15 | authorityKeyIdentifier = keyid:always,issuer:always 16 | 17 | [ req_ext ] 18 | subjectAltName = @alt_names 19 | 20 | [alt_names] 21 | IP.1 = 127.0.0.1 22 | IP.2 = 192.168.8.202 23 | DNS.1 = localhost 24 | DNS.2 = graphd0 25 | DNS.3 = graphd1 26 | DNS.4 = graphd2 27 | -------------------------------------------------------------------------------- /tests/secrets/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDKzCCAhOgAwIBAgIUDufwf8ZK2xZXqZUF/P/tbo1psVAwDQYJKoZIhvcNAQEL 3 | BQAwLjELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDTALBgNVBAMMBHJv 4 | b3QwHhcNMjQwMzA3MDI1NDAyWhcNMzQwMzA1MDI1NDAyWjAwMQswCQYDVQQGEwJD 5 | SDEQMA4GA1UECgwHdGVzdC1jYTEPMA0GA1UEAwwGc2VydmVyMIIBIjANBgkqhkiG 6 | 9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuBC99fcKfK2myIpU4G7BufjJqwEH66PGUeay 7 | mEbj8V7wfm8CoAuE/UcX/0r+U8cgxbRfv9zmtXt/6j6bk1lLmQ123+JHBOGN5HDc 8 | DjGl4y/eL7/uwFJnQ0FAd/Jf/vU8iOpx1MWrrSuEklq3eacoCMqAPkD+oXp1Eeq4 9 | L5YvsecSYYOa25WrDYvgFhVUa4BIB3K/j8H2f+T0NVJank2b597pmtAxox+ds8+4 10 | WI5Aex0DCYd1EBoZMYJPZzSWYzPVCardM9uOcFPDZ1ynGdWgDlIhUFrl/IcWIqNb 11 | wTVZ0WoUZTJsKoZlX8H2fowr4M5pZMHw7MQfT9BPyVu5Yt1sxwIDAQABoz8wPTA7 12 | BgNVHREENDAyhwR/AAABhwTAqAjKgglsb2NhbGhvc3SCB2dyYXBoZDCCB2dyYXBo 13 | ZDGCB2dyYXBoZDIwDQYJKoZIhvcNAQELBQADggEBAIDb2/S4oflgpsySMdbsqfh3 14 | siwRY2dkZ5sNYHRxG3rZZXtyYnsj+KWWFZrYZrbPGF2qDxTl5AWj4LL3GUZGfgPK 15 | HarGhb8Qtr25fvYhPQ6azxGmolZ1n5i+9kjquupANW89KiEs8f5dgXChKjN9i6um 16 | gmCTh9di+Dtmtsy66bl/jMiRxX0XHi5E1XMANfXUMPtbZz/kW1wCy+XU439hdMds 17 | U8/MJaS9uYL763O5Ept6tWFDIlbveUmcoZBPmZXb9LdXjWgPeBHx6y83AoUFViGE 18 | lxB+1zJJyBwPTNz+IPzBKVJkiEttL/MC9YSS2pJOtCK5FXYNcdOxOaV617ENbMY= 19 | -----END CERTIFICATE----- 20 | -------------------------------------------------------------------------------- /tests/secrets/server.csr: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIICwzCCAasCAQAwMDELMAkGA1UEBhMCQ0gxEDAOBgNVBAoMB3Rlc3QtY2ExDzAN 3 | BgNVBAMMBnNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALgQ 4 | vfX3CnytpsiKVOBuwbn4yasBB+ujxlHmsphG4/Fe8H5vAqALhP1HF/9K/lPHIMW0 5 | X7/c5rV7f+o+m5NZS5kNdt/iRwThjeRw3A4xpeMv3i+/7sBSZ0NBQHfyX/71PIjq 6 | cdTFq60rhJJat3mnKAjKgD5A/qF6dRHquC+WL7HnEmGDmtuVqw2L4BYVVGuASAdy 7 | v4/B9n/k9DVSWp5Nm+fe6ZrQMaMfnbPPuFiOQHsdAwmHdRAaGTGCT2c0lmMz1Qmq 8 | 3TPbjnBTw2dcpxnVoA5SIVBa5fyHFiKjW8E1WdFqFGUybCqGZV/B9n6MK+DOaWTB 9 | 8OzEH0/QT8lbuWLdbMcCAwEAAaBOMEwGCSqGSIb3DQEJDjE/MD0wOwYDVR0RBDQw 10 | MocEfwAAAYcEwKgIyoIJbG9jYWxob3N0ggdncmFwaGQwggdncmFwaGQxggdncmFw 11 | aGQyMA0GCSqGSIb3DQEBCwUAA4IBAQBhnzQa1F8ukbNIBH8JE8NxW2PDR3RYGxT2 12 | 
YuR/cGKghBdkau4gw/MKVGk2bs/ixaTopOXFSPEtusvxx6p3/qFXjb79h5XM5G4T 13 | O1LOCxd4KValtwvJkzJhgzaXv3nQaCgmh1TKMsu1A0w/erYCLaDhUyFEIESUyilI 14 | lFVq7qfJX1CDz8jMPeFPNkjn5hZn+dPLmsP5JMgSvTmBstO3aPG/46DImkpHFZ5s 15 | lA3Rvhk/E1WCXBQBf0ri+fPw9cHtR366zv1sEpLmmPIHdjZdbRuRToQBQ4o7kOpL 16 | yaI+Tg6vp6nhagqqwVoLrkDRfwkj2nXGB7d56/MkyIXFnm+Yqnkq 17 | -----END CERTIFICATE REQUEST----- 18 | -------------------------------------------------------------------------------- /tests/secrets/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAuBC99fcKfK2myIpU4G7BufjJqwEH66PGUeaymEbj8V7wfm8C 3 | oAuE/UcX/0r+U8cgxbRfv9zmtXt/6j6bk1lLmQ123+JHBOGN5HDcDjGl4y/eL7/u 4 | wFJnQ0FAd/Jf/vU8iOpx1MWrrSuEklq3eacoCMqAPkD+oXp1Eeq4L5YvsecSYYOa 5 | 25WrDYvgFhVUa4BIB3K/j8H2f+T0NVJank2b597pmtAxox+ds8+4WI5Aex0DCYd1 6 | EBoZMYJPZzSWYzPVCardM9uOcFPDZ1ynGdWgDlIhUFrl/IcWIqNbwTVZ0WoUZTJs 7 | KoZlX8H2fowr4M5pZMHw7MQfT9BPyVu5Yt1sxwIDAQABAoIBAD0MEHTLgobncTGB 8 | 77SgB1CO2xQEO4RDxTrUsNcga+lZ+5lzaAN8zpbSRXs+fCt9F7l6oWcY9MusMjiW 9 | mzK7ov8YIB0RB0zvIghqBhIPFV3MOnzLw3u12NnNgnxFvuDopTMjzq0rAk1k6YtA 10 | 2ylV70k7IF2FYO5dlwZiOsRNo3P1B2O8jVl52u3KQmEefRLdylgGRnTT4la8ePIW 11 | Qn79VtbV4ytJI8frM9Tk87SvIbZFhXEUSKWSWV6MeNWNA7VXIeDJV96xbjw6q3TV 12 | ugZKYyZt3F8JP8Xge1RgTzNlt1DDskQCzAbkL5TZqIRbgnZgLJgKS9EKvl9C0p7W 13 | 1IMxioECgYEA4WgTUQbjtIoDLx3rWj/lmCgldv20I0oS7g+PXLIt9uJKOeTwk8Ix 14 | kFk1vEaut4D6aOFaa3ZLnAMJTOkFzrt4ysFfzAj7tJDEYOHMg6lDsbcS2kZ44UF1 15 | UYTPnPv8TzMl5/pKPjDqi5DK4jXOENfXH6XV/keW77SRKhNM8/Fdbf8CgYEA0Qw8 16 | jEe/wvqRo+1RejOUUBMd0E6oc9O/7+2dcA6cU0hUBtoA+22dWrVr/1kqXapuyJ/p 17 | RZkcnHPYXoYHSUa+QL5Qnk+bAuMhPlizG6qO/kEv+wnREpdzXgvVxqQv9tl/BruA 18 | QLSKuHEbGlZzlxpFjRyXSpvdk10L3l2pKTM+ETkCgYEAugIqvpjS269whTdJesZz 19 | 7yZiFFm0h9Ke4HlSswoOyEdhPXlh1m1QfXBA2cfyclgrTNV6ymdqaapPyHobYshu 20 | G9OL/FIFdLJqfBCZtqS9yi/rjNjO9AQSjlzs/v0q7yikRVTUY5H2W4n3Asmn7VpQ 21 | mjs95oKpWdJGYNRgkoZBtX0CgYAJn5+V8aOhTTmxKzaqAn/cY9Tmhwh4NJJMcErk 22 | IrgNmXHM0vJioLwytdFf8vHhV/w7nij1/EN17FtUlqT1OZ1+i4RYlV+UdXJNVXYB 23 | Wwu0JroXu271hSVZegAAYgSXP5H6dF1GHUmZjhNqVfqCRUJYaa2JUL2VkB4AwZ0M 24 | +X/s+QKBgQCmv3RJIVJDwiXu2uTuEYoagqOH7sso7Kpm1A8EVpQpZHJGAAKb/IQi 25 | RWxP/qg9sW1YP2tFvV+0Z3UOMyAA3yOF4Q6OXVi9nsY5Mq1WwzpbYyFZ7BkmFQl/ 26 | DSK56h9uG49OSRjj9iSy5GcdEeRGFPtsHUaez3guUGHTHSJTUD/erw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
7 | 8 | 9 | import time 10 | from unittest import TestCase 11 | 12 | from nebula3.common import ttypes 13 | from nebula3.Exception import IOErrorException 14 | from nebula3.gclient.net import Connection 15 | 16 | AddrIp = ["127.0.0.1", "::1"] 17 | port = 9669 18 | 19 | 20 | class TestConnection(TestCase): 21 | def test_create(self): 22 | for ip in AddrIp: 23 | try: 24 | conn = Connection() 25 | conn.open(ip, port, 1000) 26 | auth_result = conn.authenticate("root", "nebula") 27 | assert auth_result.get_session_id() != 0 28 | conn.close() 29 | except Exception as ex: 30 | assert False, ex 31 | 32 | def test_release(self): 33 | for ip in AddrIp: 34 | try: 35 | conn = Connection() 36 | conn.open(ip, port, 1000) 37 | auth_result = conn.authenticate("root", "nebula") 38 | session_id = auth_result.get_session_id() 39 | assert session_id != 0 40 | resp = conn.execute(session_id, "SHOW SPACES") 41 | assert resp.error_code == ttypes.ErrorCode.SUCCEEDED, resp.error_msg 42 | conn.signout(session_id) 43 | # the session delete later 44 | time.sleep(12) 45 | resp = conn.execute(session_id, "SHOW SPACES") 46 | assert resp.error_code != ttypes.ErrorCode.SUCCEEDED 47 | conn.close() 48 | except Exception as ex: 49 | assert False, ex 50 | 51 | def test_close(self): 52 | for ip in AddrIp: 53 | conn = Connection() 54 | conn.open(ip, port, 1000) 55 | auth_result = conn.authenticate("root", "nebula") 56 | assert auth_result.get_session_id() != 0 57 | conn.close() 58 | try: 59 | conn.authenticate("root", "nebula") 60 | except IOErrorException: 61 | assert True 62 | -------------------------------------------------------------------------------- /tests/test_meta_cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
7 | 8 | 9 | import time 10 | 11 | from nebula3.common import ttypes 12 | from nebula3.gclient.net import Connection 13 | from nebula3.mclient import MetaCache 14 | 15 | 16 | class TestMetaCache(object): 17 | @classmethod 18 | def setup_class(cls): 19 | # create schema 20 | try: 21 | conn = Connection() 22 | conn.open("127.0.0.1", 9669, 1000) 23 | auth_result = conn.authenticate("root", "nebula") 24 | session_id = auth_result.get_session_id() 25 | assert session_id != 0 26 | resp = conn.execute( 27 | session_id, 28 | "CREATE SPACE IF NOT EXISTS test_meta_cache1(REPLICA_FACTOR=3, vid_type=FIXED_STRING(8));" 29 | "USE test_meta_cache1;" 30 | "CREATE TAG IF NOT EXISTS tag11(name string);" 31 | "CREATE EDGE IF NOT EXISTS edge11(name string);" 32 | "CREATE SPACE IF NOT EXISTS test_meta_cache2(vid_type=FIXED_STRING(8));" 33 | "USE test_meta_cache2;" 34 | "CREATE TAG IF NOT EXISTS tag22(name string);" 35 | "CREATE EDGE IF NOT EXISTS edge22(name string);", 36 | ) 37 | assert resp.error_code == 0 38 | conn.close() 39 | time.sleep(10) 40 | cls.meta_cache = MetaCache( 41 | [("127.0.0.1", 9559), ("127.0.0.1", 9560), ("127.0.0.1", 9561)], 50000 42 | ) 43 | except Exception: 44 | import traceback 45 | 46 | print(traceback.format_exc()) 47 | assert False 48 | 49 | def test_get_space_id(self): 50 | space_id1 = self.meta_cache.get_space_id("test_meta_cache1") 51 | space_id2 = self.meta_cache.get_space_id("test_meta_cache2") 52 | assert 0 < space_id1 < space_id2 53 | 54 | # test not existed 55 | try: 56 | space_id = self.meta_cache.get_tag_id( 57 | "test_meta_cache1", "space_not_existed" 58 | ) 59 | assert False 60 | except Exception: 61 | assert True 62 | 63 | def test_get_tag_id(self): 64 | tag_id1 = self.meta_cache.get_tag_id("test_meta_cache1", "tag11") 65 | tag_id2 = self.meta_cache.get_tag_id("test_meta_cache2", "tag22") 66 | assert 0 < tag_id1 < tag_id2 67 | 68 | # test not existed 69 | try: 70 | tag_id = self.meta_cache.get_tag_id("test_meta_cache1", "tag_not_existed") 71 | assert False 72 | except Exception: 73 | assert True 74 | 75 | def test_get_edge_type(self): 76 | edge_id1 = self.meta_cache.get_edge_type("test_meta_cache1", "edge11") 77 | edge_id2 = self.meta_cache.get_edge_type("test_meta_cache2", "edge22") 78 | assert 0 < edge_id1 < edge_id2 79 | 80 | # test not existed 81 | try: 82 | edge_id = self.meta_cache.get_edge_type( 83 | "test_meta_cache1", "edge_not_existed" 84 | ) 85 | assert False 86 | except Exception: 87 | assert True 88 | 89 | def test_get_tag_schema(self): 90 | tag_schema1 = self.meta_cache.get_tag_schema("test_meta_cache1", "tag11") 91 | tag_schema2 = self.meta_cache.get_tag_schema("test_meta_cache2", "tag22") 92 | assert tag_schema1.columns[0].name.decode("utf-8") == "name" 93 | assert tag_schema1.columns[0].type.type == ttypes.PropertyType.STRING 94 | assert tag_schema1.columns[0].type.type_length == 0 95 | assert tag_schema2.columns[0].name.decode("utf-8") == "name" 96 | assert tag_schema2.columns[0].type.type == ttypes.PropertyType.STRING 97 | assert tag_schema2.columns[0].type.type_length == 0 98 | 99 | # test not existed 100 | try: 101 | tag_item = self.meta_cache.get_tag_schema( 102 | "test_meta_cache1", "tag_not_existed" 103 | ) 104 | assert False 105 | except Exception: 106 | assert True 107 | 108 | def test_get_edge_schema(self): 109 | edge_schema1 = self.meta_cache.get_edge_schema("test_meta_cache1", "edge11") 110 | edge_schema2 = self.meta_cache.get_edge_schema("test_meta_cache2", "edge22") 111 | assert edge_schema1.columns[0].name.decode("utf-8") == "name" 112 | 
assert edge_schema1.columns[0].type.type == ttypes.PropertyType.STRING 113 | assert edge_schema1.columns[0].type.type_length == 0 114 | assert edge_schema2.columns[0].name.decode("utf-8") == "name" 115 | assert edge_schema2.columns[0].type.type == ttypes.PropertyType.STRING 116 | assert edge_schema2.columns[0].type.type_length == 0 117 | 118 | # test not existed 119 | try: 120 | edge_item = self.meta_cache.get_edge_schema( 121 | "test_meta_cache1", "edge_not_existed" 122 | ) 123 | assert False 124 | except Exception: 125 | assert True 126 | 127 | def test_get_part_leader(self): 128 | address = self.meta_cache.get_part_leader("test_meta_cache1", 1) 129 | assert address.host.find("172.28.2") == 0 130 | assert address.port == 9779 131 | 132 | def test_get_part_leaders(self): 133 | part_addresses = self.meta_cache.get_part_leaders("test_meta_cache1") 134 | 135 | parts = [part for part in part_addresses.keys()] 136 | assert len(parts) == 100 137 | expected_parts = [i for i in range(1, 101)] 138 | assert sorted(parts) == sorted(expected_parts) 139 | 140 | for part in part_addresses.keys(): 141 | assert part_addresses[part].host in [ 142 | "172.28.2.1", 143 | "172.28.2.2", 144 | "172.28.2.3", 145 | ] 146 | 147 | ports = [part_addresses[part].port for part in part_addresses.keys()] 148 | expected_hosts = [9779 for i in range(1, 101)] 149 | assert ports == expected_hosts 150 | 151 | def test_get_all_storage_addrs(self): 152 | addresses = self.meta_cache.get_all_storage_addrs() 153 | assert len(addresses) == 3 154 | hosts = [addr.host for addr in addresses] 155 | expected_hosts = ["172.28.2.1", "172.28.2.2", "172.28.2.3"] 156 | hosts = sorted(hosts) 157 | expected_hosts = sorted(expected_hosts) 158 | assert hosts == expected_hosts 159 | 160 | ports = [addr.port for addr in addresses] 161 | expected_hosts = [9779, 9779, 9779] 162 | assert ports == expected_hosts 163 | 164 | def test_get_part_alloc(self): 165 | part_alloc = self.meta_cache.get_part_alloc("test_meta_cache1") 166 | assert len(part_alloc) == 100 167 | 168 | expected_parts = [i for i in range(1, 101)] 169 | parts = [part for part in part_alloc] 170 | assert sorted(expected_parts) == sorted(parts) 171 | 172 | hosts = [addr.host for addr in part_alloc[1]] 173 | expected_hosts = ["172.28.2.1", "172.28.2.2", "172.28.2.3"] 174 | assert sorted(hosts) == sorted(expected_hosts) 175 | 176 | ports = [addr.port for addr in part_alloc[1]] 177 | expected_ports = [9779, 9779, 9779] 178 | assert sorted(ports) == sorted(expected_ports) 179 | -------------------------------------------------------------------------------- /tests/test_parameter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2021 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
7 | 8 | import time 9 | import json 10 | 11 | from nebula3.gclient.net import ConnectionPool, ExecuteError 12 | from nebula3.Config import Config 13 | from nebula3.common import * 14 | from unittest import TestCase 15 | 16 | 17 | class TestParameter(TestCase): 18 | @classmethod 19 | def setUp(self) -> None: 20 | super().setUpClass() 21 | self.user_name = "root" 22 | self.password = "nebula" 23 | self.configs = Config() 24 | self.configs.max_connection_pool_size = 6 25 | self.pool = ConnectionPool() 26 | self.pool.init([("127.0.0.1", 9671)], self.configs) 27 | 28 | # get session from the pool 29 | client = self.pool.get_session("root", "nebula") 30 | assert client is not None 31 | 32 | # prepare space and insert data 33 | resp = client.execute( 34 | "CREATE SPACE IF NOT EXISTS parameter_test(vid_type=FIXED_STRING(30));USE parameter_test" 35 | ) 36 | assert resp.is_succeeded(), resp.error_msg() 37 | resp = client.execute( 38 | "CREATE TAG IF NOT EXISTS person(name string, age int);" 39 | "CREATE EDGE like (likeness double);" 40 | ) 41 | 42 | time.sleep(6) 43 | # insert data need to sleep after create schema 44 | resp = client.execute("CREATE TAG INDEX person_age_index on person(age)") 45 | time.sleep(6) 46 | # insert vertex 47 | resp = client.execute( 48 | 'INSERT VERTEX person(name, age) VALUES "Bob":("Bob", 10), "Lily":("Lily", 9)' 49 | ) 50 | assert resp.is_succeeded(), resp.error_msg() 51 | # insert edges 52 | resp = client.execute('INSERT EDGE like(likeness) VALUES "Bob"->"Lily":(80.0);') 53 | assert resp.is_succeeded(), resp.error_msg() 54 | resp = client.execute("REBUILD TAG INDEX person_age_index") 55 | assert resp.is_succeeded(), resp.error_msg() 56 | 57 | # prepare parameters 58 | bval = ttypes.Value() 59 | bval.set_bVal(True) 60 | ival = ttypes.Value() 61 | ival.set_iVal(3) 62 | sval = ttypes.Value() 63 | sval.set_sVal("Bob") 64 | self.params = {"p1": ival, "p2": bval, "p3": sval} 65 | self.params_premitive = { 66 | "p1": 3, 67 | "p2": True, 68 | "p3": "Bob", 69 | "p4": ["Bob", "Lily"], 70 | } 71 | 72 | assert self.pool.connects() == 1 73 | assert self.pool.in_used_connects() == 1 74 | 75 | def test_parameter(self): 76 | # get session from the pool 77 | client = self.pool.get_session("root", "nebula") 78 | assert client is not None 79 | resp = client.execute_parameter( 80 | "USE parameter_test", 81 | self.params, 82 | ) 83 | assert resp.is_succeeded() 84 | # test basic parameter 85 | resp = client.execute_parameter( 86 | "RETURN abs($p1)+3 AS col1, (toBoolean($p2) and false) AS col2, toLower($p3)+1 AS col3", 87 | self.params, 88 | ) 89 | assert resp.is_succeeded(), resp.error_msg() 90 | assert 1 == resp.row_size() 91 | names = ["col1", "col2", "col3"] 92 | assert names == resp.keys() 93 | assert 6 == resp.row_values(0)[0].as_int() 94 | assert False == resp.row_values(0)[1].as_bool() 95 | assert "bob1" == resp.row_values(0)[2].as_string() 96 | 97 | # test cypher parameter 98 | resp = client.execute_parameter( 99 | f"""MATCH (v:person)--() WHERE v.person.age>abs($p1)+3 100 | RETURN v.person.name AS vname,v.person.age AS vage ORDER BY vage, $p3 LIMIT $p1+1""", 101 | self.params, 102 | ) 103 | assert resp.is_succeeded(), resp.error_msg() 104 | assert 2 == resp.row_size() 105 | names = ["vname", "vage"] 106 | assert names == resp.keys() 107 | assert "Lily" == resp.row_values(0)[0].as_string() 108 | assert 9 == resp.row_values(0)[1].as_int() 109 | assert "Bob" == resp.row_values(1)[0].as_string() 110 | assert 10 == resp.row_values(1)[1].as_int() 111 | # test ngql parameter 112 | resp = 
client.execute_parameter( 113 | '$p1=go from "Bob" over like yield like._dst;', 114 | self.params, 115 | ) 116 | assert not resp.is_succeeded() 117 | resp = client.execute_parameter( 118 | "go from $p3 over like yield like._dst;", 119 | self.params, 120 | ) 121 | assert not resp.is_succeeded() 122 | resp = client.execute_parameter( 123 | "fetch prop on person $p3 yield vertex as v", 124 | self.params, 125 | ) 126 | assert not resp.is_succeeded() 127 | resp = client.execute_parameter( 128 | 'find all path from $p3 to "Yao Ming" over like yield path as p', 129 | self.params, 130 | ) 131 | assert not resp.is_succeeded() 132 | resp = client.execute_parameter( 133 | "get subgraph from $p3 both like yield vertices as v", 134 | self.params, 135 | ) 136 | assert not resp.is_succeeded() 137 | resp = client.execute_parameter( 138 | 'go 3 steps from "Bob" over like yield like._dst limit [1,$p1,3]', 139 | self.params, 140 | ) 141 | assert not resp.is_succeeded() 142 | 143 | # same test with premitive params 144 | resp = client.execute_py( 145 | "RETURN abs($p1)+3 AS col1, (toBoolean($p2) and false) AS col2, toLower($p3)+1 AS col3", 146 | self.params_premitive, 147 | ).as_primitive() 148 | assert 1 == len(resp) 149 | assert ["col1", "col2", "col3"] == list(resp[0].keys()) 150 | assert resp[0]["col1"] == 6 151 | assert resp[0]["col2"] == False 152 | assert resp[0]["col3"] == "bob1" 153 | try: 154 | resp = client.execute_py( 155 | '$p1=go from "Bob" over like yield like._dst;', 156 | self.params_premitive, 157 | ) 158 | except ExecuteError: 159 | pass 160 | else: 161 | raise AssertionError("should raise exception") 162 | try: 163 | resp = client.execute_py( 164 | "go from $p3 over like yield like._dst;", 165 | self.params_premitive, 166 | ) 167 | except ExecuteError: 168 | pass 169 | else: 170 | raise AssertionError("should raise exception") 171 | resp = client.execute_py( 172 | "MATCH (v) WHERE id(v) in $p4 RETURN id(v) AS vertex_id", 173 | self.params_premitive, 174 | ).as_primitive() 175 | assert 2 == len(resp) 176 | 177 | def tearDown(self) -> None: 178 | client = self.pool.get_session("root", "nebula") 179 | assert client is not None 180 | resp = client.execute("DROP SPACE parameter_test") 181 | assert resp.is_succeeded(), resp.error_msg() 182 | -------------------------------------------------------------------------------- /tests/test_session.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2020 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
7 | 8 | 9 | import os 10 | import time 11 | from unittest import TestCase 12 | 13 | from nebula3.Config import Config 14 | from nebula3.gclient.net import ConnectionPool 15 | 16 | 17 | class TestSession(TestCase): 18 | @classmethod 19 | def setup_class(self): 20 | self.user_name = "root" 21 | self.password = "nebula" 22 | self.configs = Config() 23 | self.configs.max_connection_pool_size = 6 24 | self.pool = ConnectionPool() 25 | assert self.pool.init( 26 | [("127.0.0.1", 9669), ("127.0.0.1", 9670), ("127.0.0.1", 9671)], 27 | self.configs, 28 | ) 29 | assert self.pool.connects() == 0 30 | assert self.pool.in_used_connects() == 0 31 | 32 | def test_1_release_by_del(self): 33 | def get_local_session(pool): 34 | session = pool.get_session("root", "nebula") 35 | assert pool.in_used_connects() == 1 36 | 37 | get_local_session(self.pool) 38 | assert self.pool.in_used_connects() == 0 39 | 40 | def test_2_reconnect(self): 41 | try: 42 | session = self.pool.get_session("root", "nebula") 43 | time.sleep(2) 44 | 45 | # wait for the session space info to be updated to meta service 46 | resp = session.execute( 47 | "CREATE SPACE IF NOT EXISTS test_session(vid_type=FIXED_STRING(8)); USE test_session;" 48 | ) 49 | assert resp.is_succeeded(), resp.error_msg() 50 | time.sleep(10) 51 | for i in range(0, 5): 52 | if i == 3: 53 | os.system( 54 | "docker stop tests_graphd0_1 || docker stop tests-graphd0-1" 55 | ) 56 | os.system( 57 | "docker stop tests_graphd1_1 || docker stop tests-graphd1-1" 58 | ) 59 | time.sleep(3) 60 | resp = session.execute("SHOW SESSIONS") 61 | assert resp.is_succeeded(), resp.error_msg() 62 | assert resp.space_name() == "test_session" 63 | time.sleep(2) 64 | session.release() 65 | new_session = self.pool.get_session("root", "nebula") 66 | new_session.execute("SHOW SPACES") 67 | except Exception as e: 68 | assert False, e 69 | finally: 70 | os.system("docker start tests_graphd0_1 || docker start tests-graphd0-1") 71 | os.system("docker start tests_graphd1_1 || docker start tests-graphd1-1") 72 | time.sleep(2) 73 | 74 | def test_3_session_context(self): 75 | in_used_connects = self.pool.in_used_connects() 76 | with self.pool.session_context("root", "nebula") as session: 77 | assert self.pool.in_used_connects() == in_used_connects + 1 78 | assert self.pool.in_used_connects() == in_used_connects 79 | 80 | def test_4_timeout(self): 81 | try: 82 | configs = Config() 83 | configs.timeout = 100 84 | configs.max_connection_pool_size = 1 85 | pool = ConnectionPool() 86 | assert pool.init([("127.0.0.1", 9669)], configs) 87 | session = pool.get_session(self.user_name, self.password) 88 | ngql = "" 89 | for n in range(0, 500): 90 | ngql = ngql + "show hosts;" 91 | session.execute(ngql) 92 | assert False, "expect to get exception" 93 | except Exception as ex: 94 | assert str(ex).find("timed out") > 0 95 | assert True, ex 96 | -------------------------------------------------------------------------------- /tests/test_session_pool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2022 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 
7 | 8 | 9 | import json 10 | import threading 11 | import time 12 | from unittest import TestCase 13 | 14 | from nebula3.common.ttypes import ErrorCode 15 | from nebula3.Config import SessionPoolConfig 16 | from nebula3.Exception import ( 17 | InValidHostname, 18 | ) 19 | from nebula3.gclient.net import Connection 20 | from nebula3.gclient.net.SessionPool import SessionPool 21 | 22 | # ports for test 23 | test_port = 9669 24 | test_port2 = 9670 25 | 26 | 27 | def prepare_space(space_name="session_pool_test"): 28 | # prepare space 29 | conn = Connection() 30 | conn.open("127.0.0.1", test_port, 1000) 31 | auth_result = conn.authenticate("root", "nebula") 32 | assert auth_result.get_session_id() != 0 33 | resp = conn.execute( 34 | auth_result._session_id, 35 | "CREATE SPACE IF NOT EXISTS {}(partition_num=32, replica_factor=1, vid_type = FIXED_STRING(30))".format( 36 | space_name 37 | ), 38 | ) 39 | assert resp.error_code == ErrorCode.SUCCEEDED 40 | 41 | 42 | def drop_space(space_name="session_pool_test"): 43 | # drop space 44 | conn = Connection() 45 | conn.open("127.0.0.1", test_port, 1000) 46 | auth_result = conn.authenticate("root", "nebula") 47 | assert auth_result.get_session_id() != 0 48 | 49 | # drop space 50 | resp = conn.execute( 51 | auth_result._session_id, 52 | "DROP SPACE IF EXISTS {}".format(space_name), 53 | ) 54 | assert resp.error_code == ErrorCode.SUCCEEDED 55 | 56 | 57 | class TestSessionPoolBasic(TestCase): 58 | @classmethod 59 | def setup_class(self): 60 | self.addresses = list() 61 | self.addresses.append(("127.0.0.1", test_port)) 62 | self.addresses.append(("127.0.0.1", test_port2)) 63 | self.configs = SessionPoolConfig() 64 | self.configs.min_size = 2 65 | self.configs.max_size = 4 66 | self.configs.idle_time = 2000 67 | self.configs.interval_check = 2 68 | 69 | # prepare space 70 | prepare_space("session_pool_test") 71 | prepare_space("session_pool_test_2") 72 | 73 | # insert data need to sleep after create schema 74 | time.sleep(10) 75 | 76 | self.session_pool = SessionPool( 77 | "root", "nebula", "session_pool_test", self.addresses 78 | ) 79 | assert self.session_pool.init(self.configs) 80 | 81 | def tearDown_Class(self): 82 | drop_space("session_pool_test") 83 | drop_space("session_pool_test_2") 84 | 85 | def test_pool_init(self): 86 | # basic 87 | session_pool = SessionPool( 88 | "root", "nebula", "session_pool_test", self.addresses 89 | ) 90 | assert session_pool.init(self.configs) 91 | 92 | # handle wrong service port 93 | pool = SessionPool( 94 | "root", "nebula", "session_pool_test", [("127.0.0.1", 3800)] 95 | ) # wrong port 96 | try: 97 | pool.init(self.configs) 98 | assert False 99 | except Exception: 100 | assert True 101 | 102 | # handle invalid hostname 103 | try: 104 | session_pool = SessionPool( 105 | "root", "nebula", "session_pool_test", [("wrong_host", test_port)] 106 | ) 107 | session_pool.init(self.configs) 108 | assert False 109 | except InValidHostname: 110 | assert True, "We expected get the exception" 111 | 112 | def test_ping(self): 113 | assert self.session_pool.ping(self.addresses[0]) 114 | assert self.session_pool.ping(("127.0.0.1", 5000)) is False 115 | 116 | def test_execute(self): 117 | resp = self.session_pool.execute("SHOW HOSTS") 118 | assert resp.is_succeeded() 119 | 120 | def test_execute_json(self): 121 | resp = self.session_pool.execute_json("SHOW HOSTS") 122 | json_obj = json.loads(resp) 123 | # Get errorcode 124 | resp_error_code = json_obj["errors"][0]["code"] 125 | assert 0 == resp_error_code 126 | 127 | def 
test_switch_space(self): 128 | # This test is used to test if the space bond to session is the same as the space in the session pool config after executing 129 | # a query contains `USE ` statement. 130 | session_pool = SessionPool( 131 | "root", "nebula", "session_pool_test", self.addresses 132 | ) 133 | configs = SessionPoolConfig() 134 | configs.min_size = 1 135 | configs.max_size = 1 136 | assert session_pool.init(configs) 137 | 138 | resp = session_pool.execute("USE session_pool_test_2; SHOW HOSTS;") 139 | assert resp.is_succeeded() 140 | 141 | # The space in the session pool config should be the same as the space in the session. 142 | resp = session_pool.execute("SHOW HOSTS;") 143 | assert resp.is_succeeded() 144 | assert resp.space_name() == "session_pool_test" 145 | 146 | def test_session_renew_when_invalid(self): 147 | # This test is used to test if the session will be renewed when the session is invalid. 148 | session_pool = SessionPool( 149 | "root", "nebula", "session_pool_test", self.addresses 150 | ) 151 | configs = SessionPoolConfig() 152 | configs.min_size = 1 153 | configs.max_size = 1 154 | assert session_pool.init(configs) 155 | 156 | # kill all sessions of the pool, size 1 here though 157 | for session in session_pool._idle_sessions: 158 | session_id = session._session_id 159 | session.execute(f"KILL SESSION {session_id}") 160 | try: 161 | session_pool.execute("SHOW HOSTS;") 162 | except Exception: 163 | pass 164 | # - session_id is not in the pool 165 | # - session_pool is still usable after renewing 166 | assert ( 167 | session_id not in session_pool._idle_sessions 168 | ), "session should be renewed" 169 | resp = session_pool.execute("SHOW HOSTS;") 170 | assert resp.is_succeeded(), "session_pool should be usable after renewing" 171 | session_pool.close() 172 | 173 | 174 | def test_session_pool_multi_thread(): 175 | # prepare space 176 | prepare_space() 177 | 178 | # Test multi thread 179 | addresses = [("127.0.0.1", test_port), ("127.0.0.1", test_port2)] 180 | configs = SessionPoolConfig() 181 | configs.min_size = 2 182 | configs.max_size = 4 183 | configs.idle_time = 2000 184 | configs.interval_check = 2 185 | 186 | session_pool = SessionPool("root", "nebula", "session_pool_test", addresses) 187 | assert session_pool.init(configs) 188 | 189 | global success_flag 190 | success_flag = True 191 | 192 | def main_test(): 193 | global success_flag 194 | try: 195 | resp = session_pool.execute("SHOW HOSTS") 196 | if not resp.is_succeeded(): 197 | raise RuntimeError( 198 | "Failed to execute the query in thread {} : {}".format( 199 | threading.current_thread().getName(), resp.error_msg() 200 | ) 201 | ) 202 | 203 | except Exception as x: 204 | print(x) 205 | success_flag = False 206 | return 207 | 208 | thread1 = threading.Thread(target=main_test, name="thread1") 209 | thread2 = threading.Thread(target=main_test, name="thread2") 210 | thread3 = threading.Thread(target=main_test, name="thread3") 211 | thread4 = threading.Thread(target=main_test, name="thread4") 212 | 213 | thread1.start() 214 | thread2.start() 215 | thread3.start() 216 | thread4.start() 217 | 218 | thread1.join() 219 | thread2.join() 220 | thread3.join() 221 | thread4.join() 222 | assert len(session_pool._active_sessions) == 0 223 | assert success_flag 224 | -------------------------------------------------------------------------------- /tests/test_ssl_connection.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # 
Copyright (c) 2021 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | 8 | import os 9 | import ssl 10 | import time 11 | from unittest import TestCase 12 | 13 | import pytest 14 | 15 | from nebula3.common import ttypes 16 | from nebula3.Config import SSL_config 17 | from nebula3.Exception import IOErrorException 18 | from nebula3.gclient.net import Connection 19 | 20 | current_dir = os.path.dirname(os.path.abspath(__file__)) 21 | 22 | # set SSL config 23 | ssl_config = SSL_config() 24 | ssl_config.cert_reqs = ssl.CERT_OPTIONAL 25 | ssl_config.ca_certs = os.path.join(current_dir, "secrets/root.crt") 26 | ssl_config.keyfile = os.path.join(current_dir, "secrets/client.key") 27 | ssl_config.certfile = os.path.join(current_dir, "secrets/client.crt") 28 | 29 | # self signed SSL config 30 | ssl_selfs_signed_config = SSL_config() 31 | ssl_selfs_signed_config.cert_reqs = ssl.CERT_OPTIONAL 32 | ssl_selfs_signed_config.cert_reqs = ssl.CERT_OPTIONAL 33 | ssl_selfs_signed_config.ca_certs = os.path.join(current_dir, "secrets/root.crt") 34 | ssl_selfs_signed_config.keyfile = os.path.join(current_dir, "secrets/client.key") 35 | ssl_selfs_signed_config.certfile = os.path.join(current_dir, "secrets/client.crt") 36 | 37 | host = "127.0.0.1" 38 | port = 9669 39 | 40 | 41 | @pytest.mark.SSL 42 | class TestSSLConnection(TestCase): 43 | def test_create(self): 44 | try: 45 | conn = Connection() 46 | conn.open_SSL(host, port, 1000, ssl_config) 47 | auth_result = conn.authenticate("root", "nebula") 48 | assert auth_result.get_session_id() != 0 49 | conn.close() 50 | except Exception as ex: 51 | assert False, ex 52 | 53 | def test_release(self): 54 | try: 55 | conn = Connection() 56 | conn.open_SSL(host, port, 1000, ssl_config) 57 | auth_result = conn.authenticate("root", "nebula") 58 | session_id = auth_result.get_session_id() 59 | assert session_id != 0 60 | resp = conn.execute(session_id, "SHOW SPACES") 61 | assert resp.error_code == ttypes.ErrorCode.SUCCEEDED, resp.error_msg 62 | conn.signout(session_id) 63 | # the session delete later 64 | time.sleep(12) 65 | resp = conn.execute(session_id, "SHOW SPACES") 66 | assert resp.error_code != ttypes.ErrorCode.SUCCEEDED 67 | conn.close() 68 | except Exception as ex: 69 | assert False, ex 70 | 71 | def test_close(self): 72 | conn = Connection() 73 | conn.open_SSL(host, port, 1000, ssl_config) 74 | auth_result = conn.authenticate("root", "nebula") 75 | assert auth_result.get_session_id() != 0 76 | conn.close() 77 | try: 78 | conn.authenticate("root", "nebula") 79 | except IOErrorException: 80 | assert True 81 | 82 | 83 | @pytest.mark.SSL 84 | class TestSSLConnectionSelfSigned(TestCase): 85 | def test_create_self_signed(self): 86 | try: 87 | conn = Connection() 88 | conn.open_SSL(host, port, 1000, ssl_selfs_signed_config) 89 | auth_result = conn.authenticate("root", "nebula") 90 | assert auth_result.get_session_id() != 0 91 | conn.close() 92 | except Exception as ex: 93 | assert False, ex 94 | 95 | def test_release_self_signed(self): 96 | try: 97 | conn = Connection() 98 | conn.open_SSL(host, port, 1000, ssl_selfs_signed_config) 99 | auth_result = conn.authenticate("root", "nebula") 100 | session_id = auth_result.get_session_id() 101 | assert session_id != 0 102 | resp = conn.execute(session_id, "SHOW SPACES") 103 | assert resp.error_code == ttypes.ErrorCode.SUCCEEDED, resp.error_msg 104 | conn.signout(session_id) 105 | # the session delete later 106 | time.sleep(12) 107 | resp = conn.execute(session_id, "SHOW SPACES") 
108 | assert resp.error_code != ttypes.ErrorCode.SUCCEEDED 109 | conn.close() 110 | except Exception as ex: 111 | assert False, ex 112 | 113 | def test_close_self_signed(self): 114 | conn = Connection() 115 | conn.open_SSL(host, port, 1000, ssl_selfs_signed_config) 116 | auth_result = conn.authenticate("root", "nebula") 117 | assert auth_result.get_session_id() != 0 118 | conn.close() 119 | try: 120 | conn.authenticate("root", "nebula") 121 | except IOErrorException: 122 | assert True 123 | -------------------------------------------------------------------------------- /tests/test_ssl_pool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # --coding:utf-8-- 3 | 4 | # Copyright (c) 2021 vesoft inc. All rights reserved. 5 | # 6 | # This source code is licensed under Apache 2.0 License. 7 | import copy 8 | import os 9 | import ssl 10 | from unittest import TestCase 11 | 12 | import pytest 13 | 14 | from nebula3.Config import Config, SSL_config 15 | from nebula3.gclient.net import ConnectionPool 16 | 17 | current_dir = os.path.dirname(os.path.abspath(__file__)) 18 | 19 | 20 | @pytest.mark.SSL 21 | class TestConnectionPool(TestCase): 22 | @classmethod 23 | def setup_class(self): 24 | self.addresses = list() 25 | self.addresses.append(("127.0.0.1", 9669)) 26 | self.configs = Config() 27 | self.configs.min_connection_pool_size = 2 28 | self.configs.max_connection_pool_size = 4 29 | self.configs.idle_time = 2000 30 | self.configs.interval_check = 2 31 | 32 | # set SSL config 33 | self.ssl_config = SSL_config() 34 | self.ssl_config.cert_reqs = ssl.CERT_OPTIONAL 35 | self.ssl_config.ca_certs = os.path.join(current_dir, "secrets/root.crt") 36 | self.ssl_config.keyfile = os.path.join(current_dir, "secrets/client.key") 37 | self.ssl_config.certfile = os.path.join(current_dir, "secrets/client.crt") 38 | # self signed SSL config 39 | self.ssl_selfs_signed_config = SSL_config() 40 | self.ssl_selfs_signed_config.cert_reqs = ssl.CERT_OPTIONAL 41 | self.ssl_selfs_signed_config.ca_certs = os.path.join( 42 | current_dir, "secrets/root.crt" 43 | ) 44 | self.ssl_selfs_signed_config.keyfile = os.path.join( 45 | current_dir, "secrets/client.key" 46 | ) 47 | self.ssl_selfs_signed_config.certfile = os.path.join( 48 | current_dir, "secrets/client.crt" 49 | ) 50 | 51 | def test_ssl_with_ca(self): 52 | pool = ConnectionPool() 53 | assert pool.init(self.addresses, self.configs, self.ssl_config) 54 | session = pool.get_session("root", "nebula") 55 | resp = session.execute("SHOW HOSTS") 56 | assert resp.is_succeeded() 57 | 58 | def test_ssl_with_invalid_ca(self): 59 | pool = ConnectionPool() 60 | config = copy.copy(self.ssl_config) 61 | config.ca_certs = "invalid" 62 | 63 | with self.assertRaises(Exception): 64 | pool.init(self.addresses, self.configs, config) 65 | --------------------------------------------------------------------------------
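
For reference, the SSL-enabled tests above (tests/test_ssl_pool.py and tests/test_ssl_connection.py) all reduce to the same client-side flow. The sketch below is not a file from the repository; it only restrings calls already exercised in those tests, and it assumes the docker-compose test environment shown earlier: a graphd reachable at 127.0.0.1:9669, the root/nebula credentials, and the certificates generated into tests/secrets/ by run.sh.

#!/usr/bin/env python
# Minimal sketch assembled from the test code above -- not part of the repository.
import os
import ssl

from nebula3.Config import Config, SSL_config
from nebula3.gclient.net import ConnectionPool

# Certificate layout assumed from the tests: root.crt / client.key / client.crt
# produced by tests/secrets/run.sh.
secrets_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "secrets")

ssl_config = SSL_config()
ssl_config.cert_reqs = ssl.CERT_OPTIONAL
ssl_config.ca_certs = os.path.join(secrets_dir, "root.crt")
ssl_config.keyfile = os.path.join(secrets_dir, "client.key")
ssl_config.certfile = os.path.join(secrets_dir, "client.crt")

config = Config()
config.max_connection_pool_size = 4

pool = ConnectionPool()
# As in the tests, init() is truthy on success and raises (or returns False)
# when no graphd in the address list is reachable.
assert pool.init([("127.0.0.1", 9669)], config, ssl_config)

session = pool.get_session("root", "nebula")
resp = session.execute("SHOW HOSTS")
assert resp.is_succeeded(), resp.error_msg()
session.release()
pool.close()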