├── .cargo └── audit.toml ├── .dockerignore ├── .github ├── actions │ ├── check │ │ └── action.yml │ ├── publish_binary │ │ └── action.yml │ └── setup │ │ └── action.yml └── workflows │ ├── bindings.nodejs.yml │ ├── bindings.python.yml │ ├── ci.yml │ ├── frontend.yml │ ├── pr.yml │ ├── release.yml │ └── ttc.yml ├── .gitignore ├── .typos.toml ├── Cargo.toml ├── LICENSE ├── Makefile ├── README.md ├── bindings ├── nodejs │ ├── .gitignore │ ├── .npmignore │ ├── .prettierignore │ ├── .yarnrc.yml │ ├── Cargo.toml │ ├── README.md │ ├── build.rs │ ├── cucumber.json │ ├── generated.js │ ├── index.d.ts │ ├── index.js │ ├── npm │ │ ├── darwin-arm64 │ │ │ └── package.json │ │ ├── darwin-x64 │ │ │ └── package.json │ │ ├── linux-arm64-gnu │ │ │ └── package.json │ │ ├── linux-arm64-musl │ │ │ └── package.json │ │ ├── linux-x64-gnu │ │ │ └── package.json │ │ ├── linux-x64-musl │ │ │ └── package.json │ │ ├── win32-arm64-msvc │ │ │ └── package.json │ │ └── win32-x64-msvc │ │ │ └── package.json │ ├── package.json │ ├── pnpm-lock.yaml │ ├── scripts │ │ └── header.js │ ├── src │ │ └── lib.rs │ ├── tests │ │ ├── binding.feature │ │ ├── binding.js │ │ └── data │ ├── tsconfig.json │ └── typedoc.json ├── python │ ├── .gitignore │ ├── Cargo.toml │ ├── README.md │ ├── package │ │ └── databend_driver │ │ │ ├── __init__.py │ │ │ └── __init__.pyi │ ├── pyproject.toml │ ├── src │ │ ├── asyncio.rs │ │ ├── blocking.rs │ │ ├── lib.rs │ │ ├── types.rs │ │ └── utils.rs │ ├── tests │ │ ├── asyncio │ │ │ ├── binding.feature │ │ │ └── steps │ │ │ │ └── binding.py │ │ ├── blocking │ │ │ ├── binding.feature │ │ │ └── steps │ │ │ │ └── binding.py │ │ ├── cursor │ │ │ ├── binding.feature │ │ │ └── steps │ │ │ │ └── binding.py │ │ └── data │ └── uv.lock └── tests │ ├── README.md │ ├── data │ └── test.csv │ └── features │ └── binding.feature ├── cli ├── Cargo.toml ├── README.md ├── build.rs ├── frontend │ └── build │ │ ├── asset-manifest.json │ │ ├── index.html │ │ ├── logo.svg │ │ └── static │ │ ├── css │ │ └── main.a94f3081.css │ │ ├── js │ │ ├── main.900dc5c4.js │ │ └── main.900dc5c4.js.LICENSE.txt │ │ └── media │ │ ├── download.1a2f96254a617a2765b0fca418a3e5bf.svg │ │ ├── full-screen.972c352cc40ca74a17222b04ad2b1316.svg │ │ ├── zoom-in.b8e914df4e9fbb889ceec372b13f62d1.svg │ │ └── zoom-out.9600557781b9acd42f24eddad00f6f36.svg ├── src │ ├── args.rs │ ├── ast │ │ ├── mod.rs │ │ └── query_kind.rs │ ├── config.rs │ ├── display.rs │ ├── gendata.rs │ ├── helper.rs │ ├── main.rs │ ├── session.rs │ ├── trace.rs │ └── web.rs ├── test.sh └── tests │ ├── 00-base.result │ ├── 00-base.sql │ ├── 01-put.result │ ├── 01-put.sh │ ├── data │ ├── books.csv │ ├── books.parquet │ ├── ontime.sql │ └── ontime_200.csv.gz │ └── http │ ├── 01-load_stdin.result │ ├── 01-load_stdin.sh │ ├── 02-load_file.result │ ├── 02-load_file.sh │ ├── 03-load_file_gzip.result │ ├── 03-load_file_gzip.sh │ ├── 04-args-database.result │ ├── 04-args-database.sh │ ├── 05-stream.result │ ├── 05-stream.sh │ ├── 06-update.result │ ├── 06-update.sh │ ├── 07-gendata.result │ └── 07-gendata.sh ├── core ├── Cargo.toml ├── README.md ├── src │ ├── auth.rs │ ├── client.rs │ ├── error.rs │ ├── error_code.rs │ ├── global_cookie_store.rs │ ├── lib.rs │ ├── login.rs │ ├── pages.rs │ ├── presign.rs │ ├── request.rs │ ├── response.rs │ ├── session.rs │ └── stage.rs └── tests │ └── core │ ├── common │ └── mod.rs │ ├── data │ └── sample.csv │ ├── main.rs │ ├── simple.rs │ └── stage.rs ├── deny.toml ├── driver ├── Cargo.toml ├── README.md ├── src │ ├── client.rs │ ├── conn.rs │ ├── flight_sql.rs 
│ ├── lib.rs │ ├── params.rs │ ├── placeholder.rs │ └── rest_api.rs └── tests │ └── driver │ ├── common │ └── mod.rs │ ├── connection.rs │ ├── data │ └── books.csv │ ├── load.rs │ ├── main.rs │ ├── select_iter.rs │ ├── select_simple.rs │ ├── session.rs │ └── temp_table.rs ├── frontend ├── .env ├── .gitignore ├── README.md ├── package.json ├── pnpm-lock.yaml ├── postcss.config.js ├── public │ ├── index.html │ └── logo.svg ├── src │ ├── ProfileGraphDashboard.tsx │ ├── components │ │ ├── Attributes.tsx │ │ ├── FlowAnalysisGraph.tsx │ │ ├── MostExpensiveNodes.tsx │ │ ├── ProfileOverview.tsx │ │ ├── ProfileOverviewNode.tsx │ │ └── Statistics.tsx │ ├── constants │ │ └── index.ts │ ├── css │ │ └── ProfileGraphDashboard.css │ ├── hooks │ │ ├── useFlowAnalysisGraphConfig.tsx │ │ ├── useGraphEvents.ts │ │ ├── useGraphSize.ts │ │ ├── useNodeSelection.ts │ │ ├── useProfileData.ts │ │ └── useReshape.ts │ ├── images │ │ └── icons │ │ │ ├── download.svg │ │ │ ├── full-screen.svg │ │ │ ├── zoom-in.svg │ │ │ └── zoom-out.svg │ ├── index.css │ ├── index.tsx │ ├── react-app-env.d.ts │ ├── setupTests.ts │ ├── types │ │ └── ProfileGraphDashboard.ts │ └── utills │ │ ├── graph.ts │ │ └── index.ts ├── tailwind.config.js └── tsconfig.json ├── licenserc.toml ├── licenses └── Apache-2.0.txt ├── macros ├── Cargo.toml └── src │ ├── from_row.rs │ ├── lib.rs │ └── parser.rs ├── nfpm.yaml ├── rust-toolchain.toml ├── rustfmt.toml ├── scripts └── bump.sh ├── sql ├── Cargo.toml └── src │ ├── cursor_ext │ ├── cursor_checkpoint_ext.rs │ ├── cursor_read_bytes_ext.rs │ ├── cursor_read_number_ext.rs │ ├── cursor_read_string_ext.rs │ └── mod.rs │ ├── error.rs │ ├── lib.rs │ ├── raw_rows.rs │ ├── rows.rs │ ├── schema.rs │ └── value.rs ├── taplo.toml ├── tests ├── Makefile ├── config │ ├── databend-meta-node-1.toml │ ├── databend-query-node-1.toml │ └── nginx.conf └── docker-compose.yaml └── ttc ├── Cargo.toml ├── Dockerfile ├── README.md └── src ├── client.rs └── server.rs /.cargo/audit.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | ignore = [ 3 | # time: Potential segfault in the time crate 4 | # We are not affected by this CVE. 5 | # There is no action we can take; we are waiting for upstream.
6 | "RUSTSEC-2020-0071", 7 | ] 8 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | target/ 2 | frontend 3 | ttc/Dockerfile 4 | -------------------------------------------------------------------------------- /.github/actions/check/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Check' 2 | description: 'Check will do all essential checks' 3 | runs: 4 | using: "composite" 5 | steps: 6 | 7 | - name: Check Apache License Header 8 | uses: korandoru/hawkeye@v3 9 | 10 | - name: Typos 11 | uses: crate-ci/typos@master 12 | 13 | - name: Format 14 | shell: bash 15 | run: | 16 | cargo fmt --all -- --check 17 | 18 | - name: Install Check Tools 19 | env: 20 | GH_TOKEN: ${{ github.token }} 21 | shell: bash 22 | run: | 23 | cargo install cargo-quickinstall 24 | cargo quickinstall cargo-binstall 25 | cargo binstall -y cargo-machete 26 | cargo binstall -y cargo-deny 27 | cargo binstall -y cargo-xwin 28 | cargo binstall -y sccache 29 | 30 | - name: Machete 31 | shell: bash 32 | run: | 33 | cargo machete 34 | 35 | - name: Deny Check 36 | shell: bash 37 | run: | 38 | cargo deny check 39 | 40 | - name: Clippy 41 | shell: bash 42 | run: | 43 | cargo clippy --all --all-targets -- -D warnings 44 | -------------------------------------------------------------------------------- /.github/actions/publish_binary/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Publish Binary' 2 | description: 'Publish Binary to GitHub Releases and Cloudflare R2' 3 | inputs: 4 | os: 5 | description: "Operating system" 6 | required: true 7 | target: 8 | description: "Release target" 9 | required: true 10 | version: 11 | description: "Release version" 12 | required: true 13 | runs: 14 | using: "composite" 15 | steps: 16 | 17 | - name: Pack binary 18 | id: package 19 | shell: bash 20 | run: | 21 | mkdir -p dist 22 | case ${{ inputs.os }} in 23 | linux) 24 | tar -czf dist/bendsql-${{ inputs.target }}.tar.gz -C target/${{ inputs.target }}/release bendsql 25 | echo "file=bendsql-${{ inputs.target }}.tar.gz" >> $GITHUB_OUTPUT 26 | ;; 27 | macos) 28 | tar -czf dist/bendsql-${{ inputs.target }}.tar.gz -C target/${{ inputs.target }}/release bendsql 29 | echo "file=bendsql-${{ inputs.target }}.tar.gz" >> $GITHUB_OUTPUT 30 | ;; 31 | windows) 32 | 7z a -tzip dist/bendsql-${{ inputs.target }}.zip target/${{ inputs.target }}/release/bendsql.exe 33 | echo "file=bendsql-${{ inputs.target }}.zip" >> $GITHUB_OUTPUT 34 | ;; 35 | *) 36 | echo "Unsupported OS: ${{ inputs.os }}" 37 | exit 1 38 | ;; 39 | esac 40 | 41 | - name: Publish to Github Releases 42 | id: name 43 | shell: bash 44 | run: | 45 | gh release upload ${{ inputs.version }} dist/${{ steps.package.outputs.file }} --clobber 46 | 47 | - name: Upload package to Cloudflare R2 48 | id: upload 49 | shell: bash 50 | run: | 51 | aws s3 cp dist/${{ steps.package.outputs.file }} s3://repo/bendsql/${{ inputs.version }}/${{ steps.package.outputs.file }} --no-progress --checksum-algorithm=CRC32 52 | -------------------------------------------------------------------------------- /.github/actions/setup/action.yml: -------------------------------------------------------------------------------- 1 | name: Setup Rust Builder 2 | description: 'Prepare Rust Build Environment' 3 | inputs: 4 | runner: 5 | description: 'runner provider' 6 | required: true 7 | default: 
'github' 8 | cache-key: 9 | description: 'the rust cache key suffix' 10 | required: false 11 | default: '' 12 | target: 13 | description: 'the rust target to build' 14 | required: false 15 | default: x86_64-unknown-linux-gnu 16 | 17 | runs: 18 | using: "composite" 19 | steps: 20 | - uses: dtolnay/rust-toolchain@stable 21 | id: toolchain 22 | with: 23 | toolchain: stable 24 | targets: ${{ inputs.target }} 25 | 26 | - name: Setup sccache 27 | uses: mozilla-actions/sccache-action@main 28 | with: 29 | disable_annotations: true 30 | 31 | - name: Cache Cargo for Github Runner 32 | if: inputs.runner == 'github' 33 | uses: actions/cache@v4 34 | with: 35 | path: | 36 | ~/.cargo/registry/ 37 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }}-${{ steps.toolchain.outputs.cachekey }}-${{ inputs.cache-key }} 38 | restore-keys: | 39 | ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }}-${{ steps.toolchain.outputs.cachekey }} 40 | ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }} 41 | ${{ runner.os }}-cargo 42 | 43 | - name: Cache Cargo for AWS Self-Hosted Runner 44 | if: inputs.runner == 'aws' 45 | uses: everpcpc/actions-cache@v2 46 | env: 47 | AWS_REGION: us-east-2 48 | with: 49 | provider: s3 50 | bucket: "databend-ci" 51 | root: "bendsql/cache" 52 | path: | 53 | ~/.cargo/registry/ 54 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }}-${{ steps.toolchain.outputs.cachekey }}-${{ inputs.cache-key }} 55 | restore-keys: | 56 | ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }}-${{ steps.toolchain.outputs.cachekey }} 57 | ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }} 58 | ${{ runner.os }}-cargo 59 | 60 | - name: Setup rust related environment variables 61 | shell: bash 62 | run: | 63 | 64 | # Enable terminal color 65 | echo "CARGO_TERM_COLOR=always" >> $GITHUB_ENV 66 | # Disable full debug symbol generation to speed up CI build and keep memory down 67 | # "1" means line tables only, which is useful for panic tracebacks. 
68 | echo "RUSTFLAGS=-C debuginfo=1" >> $GITHUB_ENV 69 | # Enable backtraces 70 | echo "RUST_BACKTRACE=1" >> $GITHUB_ENV 71 | # Enable logging 72 | echo "RUST_LOG=info" >> $GITHUB_ENV 73 | # Enable sparse index 74 | echo "CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse" >> $GITHUB_ENV 75 | 76 | - name: Enable sccache for Github Actions 77 | shell: bash 78 | if: inputs.runner == 'github' 79 | run: | 80 | echo "SCCACHE_GHA_ENABLED=true" >> $GITHUB_ENV 81 | echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV 82 | 83 | - name: Enable sccache for AWS Self-Hosted Actions 84 | shell: bash 85 | if: inputs.runner == 'aws' 86 | run: | 87 | echo "RUSTC_WRAPPER=sccache" >> $GITHUB_ENV 88 | echo "SCCACHE_BUCKET=databend-ci" >> $GITHUB_ENV 89 | echo "SCCACHE_REGION=us-east-2" >> $GITHUB_ENV 90 | echo "SCCACHE_S3_KEY_PREFIX=bendsql/sccache/" >> $GITHUB_ENV 91 | echo "SCCACHE_S3_USE_SSL=true" >> $GITHUB_ENV 92 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | check: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v4 18 | - uses: ./.github/actions/setup 19 | with: 20 | cache-key: check 21 | - uses: ./.github/actions/check 22 | 23 | build: 24 | needs: check 25 | runs-on: ${{ matrix.os }}-latest 26 | name: build-${{ matrix.os }}-${{ matrix.arch }} 27 | strategy: 28 | matrix: 29 | include: 30 | - { os: ubuntu, arch: x86_64, target: x86_64-unknown-linux-gnu } 31 | - { os: macos, arch: aarch64, target: aarch64-apple-darwin } 32 | - { os: windows, arch: x86_64, target: x86_64-pc-windows-msvc } 33 | steps: 34 | - uses: actions/checkout@v4 35 | - uses: ./.github/actions/setup 36 | with: 37 | cache-key: build 38 | target: ${{ matrix.target }} 39 | - run: cargo build 40 | 41 | unit: 42 | needs: check 43 | runs-on: ubuntu-latest 44 | steps: 45 | - uses: actions/checkout@v4 46 | - uses: ./.github/actions/setup 47 | with: 48 | cache-key: unit 49 | - run: cargo test --all-features --lib -- --show-output 50 | 51 | integration: 52 | needs: check 53 | runs-on: [self-hosted, Linux, X64, 2c8g, aws] 54 | steps: 55 | - uses: actions/checkout@v4 56 | - uses: ./.github/actions/setup 57 | with: 58 | runner: aws 59 | cache-key: integration 60 | - name: Get License from S3 61 | run: | 62 | aws s3 cp s3://databend-ci/misc/license-trial.key license.key 63 | aws s3 cp s3://databend-ci/misc/license-trial.json license.json 64 | cat license.json 65 | echo "QUERY_DATABEND_ENTERPRISE_LICENSE=$(cat license.key)" >> $GITHUB_ENV 66 | - run: make -C tests test-core 67 | - run: make -C tests test-driver 68 | - run: make -C tests test-bendsql 69 | -------------------------------------------------------------------------------- /.github/workflows/frontend.yml: -------------------------------------------------------------------------------- 1 | name: Frontend 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | paths: 8 | - "frontend/**" 9 | - ".github/workflows/frontend.yml" 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | build: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | 21 | - uses: actions/setup-node@v4 22 | with: 
23 | node-version: "22" 24 | 25 | - run: npm i -g --force corepack && corepack enable 26 | 27 | - name: Install dependencies 28 | working-directory: frontend/ 29 | run: pnpm install 30 | 31 | - name: Build 32 | working-directory: frontend/ 33 | run: pnpm run build 34 | -------------------------------------------------------------------------------- /.github/workflows/pr.yml: -------------------------------------------------------------------------------- 1 | name: PR Assistant 2 | 3 | on: 4 | pull_request_target: 5 | branches: 6 | - main 7 | types: 8 | - opened 9 | - edited 10 | - synchronize 11 | 12 | jobs: 13 | title: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: amannn/action-semantic-pull-request@v5 17 | env: 18 | GITHUB_TOKEN: ${{ github.token }} 19 | -------------------------------------------------------------------------------- /.github/workflows/ttc.yml: -------------------------------------------------------------------------------- 1 | # .github/workflows/ttc.yml 2 | 3 | name: TTC docker publish 4 | 5 | on: 6 | push: 7 | branches: 8 | - main 9 | tags: 10 | - v* 11 | paths: 12 | - 'core/**' 13 | - 'driver/**' 14 | - 'ttc/**' 15 | - 'sql/**' 16 | 17 | jobs: 18 | docker: 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - name: Checkout code 23 | uses: actions/checkout@v4 24 | 25 | - name: Set up Docker Buildx 26 | uses: docker/setup-buildx-action@v3 27 | 28 | - name: Login to DockerHub 29 | uses: docker/login-action@v3 30 | with: 31 | username: ${{ secrets.DOCKERHUB_USERNAME }} 32 | password: ${{ secrets.DOCKERHUB_TOKEN }} 33 | 34 | - name: Set Docker tag 35 | shell: bash 36 | run: | 37 | ref_v="${{ github.ref }}" 38 | if [[ $ref_v == refs/tags/* ]]; then 39 | DOCKER_TAG=${ref_v:10} 40 | else 41 | DOCKER_TAG="latest" 42 | fi 43 | echo "DOCKER_TAG=$DOCKER_TAG" >> $GITHUB_ENV 44 | 45 | - name: Print Docker tag 46 | shell: bash 47 | run: | 48 | echo "Building and publishing: datafuselabs/ttc-rust:$DOCKER_TAG" 49 | 50 | - name: TTC Build and push 51 | uses: docker/build-push-action@v6 52 | with: 53 | push: true 54 | file: ./ttc/Dockerfile 55 | tags: datafuselabs/ttc-rust:${{ env.DOCKER_TAG }} 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | .idea 3 | **/target 4 | Cargo.lock 5 | venv/ 6 | 7 | /tests/data 8 | *.output 9 | 10 | /dist 11 | 12 | **/.DS_Store 13 | -------------------------------------------------------------------------------- /.typos.toml: -------------------------------------------------------------------------------- 1 | # If there are words that should not be treated as typos, 2 | # please list them here along with a comment.
3 | [default.extend-words] 4 | # Preserved for NdJson 5 | "Nd" = "Nd" 6 | # Microsoft K8s Server 7 | "AKS" = "AKS" 8 | # Wrong cases 9 | "ba" = "ba" 10 | "ue" = "ue" 11 | "INOUT" = "INOUT" 12 | "ser" = "ser" 13 | "Ser" = "Ser" 14 | "flate" = "flate" 15 | "Tke" = "Tke" 16 | 17 | [files] 18 | extend-exclude = [ 19 | "cli/frontend" 20 | ] 21 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | default-members = ["core", "sql", "driver", "macros", "cli"] 3 | members = [ 4 | "core", 5 | "sql", 6 | "driver", 7 | "macros", 8 | "cli", 9 | "bindings/python", 10 | "bindings/nodejs", 11 | "ttc", 12 | ] 13 | resolver = "2" 14 | 15 | [workspace.package] 16 | edition = "2021" 17 | version = "0.27.4" 18 | license = "Apache-2.0" 19 | authors = ["Databend Authors "] 20 | categories = ["database"] 21 | keywords = ["databend", "database", "rust"] 22 | repository = "https://github.com/databendlabs/bendsql" 23 | 24 | [workspace.dependencies] 25 | databend-client = { path = "core", version = "0.27.4" } 26 | databend-driver = { path = "driver", version = "0.27.4" } 27 | databend-driver-core = { path = "sql", version = "0.27.4" } 28 | databend-driver-macros = { path = "macros", version = "0.27.4" } 29 | 30 | jsonb = { version = "0.5.1" } 31 | tokio-stream = "0.1" 32 | chrono = { version = "0.4.40", default-features = false, features = ["clock"] } 33 | arrow = { version = "55.0" } 34 | arrow-array = { version = "55.0" } 35 | arrow-buffer = { version = "55.0" } 36 | arrow-schema = { version = "55.0" } 37 | arrow-flight = { version = "55.0", features = ["flight-sql-experimental"] } 38 | tonic = { version = "0.12", default-features = false, features = [ 39 | "transport", 40 | "codegen", 41 | "tls", 42 | "tls-webpki-roots", 43 | "prost", 44 | ] } 45 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: check build test integration 2 | 3 | default: build 4 | 5 | lint: check 6 | 7 | check: 8 | cargo fmt --all -- --check 9 | cargo clippy --all-targets --all-features -- -D warnings 10 | cargo deny check 11 | # cargo install cargo-machete 12 | cargo machete 13 | # cargo install hawkeye 14 | hawkeye check 15 | # cargo install typos-cli 16 | typos 17 | 18 | build-frontend: 19 | rm -rf cli/frontend 20 | mkdir -p cli/frontend 21 | cd frontend && \ 22 | if [ ! 
-d node_modules ]; then pnpm install; fi && \ 23 | pnpm build && cp -rf build ../cli/frontend/ 24 | 25 | run: 26 | make build-frontend 27 | cargo run 28 | 29 | build: 30 | make build-frontend 31 | cargo build --release 32 | 33 | test: 34 | cargo test --all --all-features --lib -- --nocapture 35 | 36 | integration: 37 | make -C tests 38 | 39 | integration-down: 40 | make -C tests down 41 | 42 | integration-core: 43 | make -C tests test-core 44 | 45 | integration-driver: 46 | make -C tests test-driver 47 | 48 | integration-bendsql: 49 | make -C tests test-bendsql 50 | 51 | integration-bindings-python: 52 | make -C tests test-bindings-python 53 | 54 | integration-bindings-nodejs: 55 | make -C tests test-bindings-nodejs 56 | -------------------------------------------------------------------------------- /bindings/nodejs/.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/node 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=node 3 | 4 | ### Node ### 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | artifacts/ 44 | 45 | # Dependency directories 46 | node_modules/ 47 | jspm_packages/ 48 | 49 | # TypeScript v1 declaration files 50 | typings/ 51 | 52 | # TypeScript cache 53 | *.tsbuildinfo 54 | 55 | # Optional npm cache directory 56 | .npm 57 | 58 | # Optional eslint cache 59 | .eslintcache 60 | 61 | # Microbundle cache 62 | .rpt2_cache/ 63 | .rts2_cache_cjs/ 64 | .rts2_cache_es/ 65 | .rts2_cache_umd/ 66 | 67 | # Optional REPL history 68 | .node_repl_history 69 | 70 | # Output of 'npm pack' 71 | *.tgz 72 | 73 | # Yarn Integrity file 74 | .yarn-integrity 75 | 76 | # dotenv environment variables file 77 | .env 78 | .env.test 79 | 80 | # parcel-bundler cache (https://parceljs.org/) 81 | .cache 82 | 83 | # Next.js build output 84 | .next 85 | 86 | # Nuxt.js build / generate output 87 | .nuxt 88 | dist 89 | 90 | # Gatsby files 91 | .cache/ 92 | # Comment in the public line in if your project uses Gatsby and not Next.js 93 | # https://nextjs.org/blog/next-9-1#public-directory-support 94 | # public 95 | 96 | # vuepress build output 97 | .vuepress/dist 98 | 99 | # Serverless directories 100 | .serverless/ 101 | 102 | # FuseBox cache 103 | .fusebox/ 104 | 105 | # DynamoDB Local files 106 | .dynamodb/ 107 | 108 | # TernJS port file 109 | .tern-port 110 | 111 | # Stores VSCode versions used for testing VSCode extensions 112 | .vscode-test 113 | 114 | # End of https://www.toptal.com/developers/gitignore/api/node 115 | 116 | # Created by https://www.toptal.com/developers/gitignore/api/macos 117 | # Edit at https://www.toptal.com/developers/gitignore?templates=macos 118 | 119 | ### macOS ### 
120 | # General 121 | .DS_Store 122 | .AppleDouble 123 | .LSOverride 124 | 125 | # Icon must end with two 126 | Icon 127 | 128 | 129 | # Thumbnails 130 | ._* 131 | 132 | # Files that might appear in the root of a volume 133 | .DocumentRevisions-V100 134 | .fseventsd 135 | .Spotlight-V100 136 | .TemporaryItems 137 | .Trashes 138 | .VolumeIcon.icns 139 | .com.apple.timemachine.donotpresent 140 | 141 | # Directories potentially created on remote AFP share 142 | .AppleDB 143 | .AppleDesktop 144 | Network Trash Folder 145 | Temporary Items 146 | .apdisk 147 | 148 | ### macOS Patch ### 149 | # iCloud generated files 150 | *.icloud 151 | 152 | # End of https://www.toptal.com/developers/gitignore/api/macos 153 | 154 | # Created by https://www.toptal.com/developers/gitignore/api/windows 155 | # Edit at https://www.toptal.com/developers/gitignore?templates=windows 156 | 157 | ### Windows ### 158 | # Windows thumbnail cache files 159 | Thumbs.db 160 | Thumbs.db:encryptable 161 | ehthumbs.db 162 | ehthumbs_vista.db 163 | 164 | # Dump file 165 | *.stackdump 166 | 167 | # Folder config file 168 | [Dd]esktop.ini 169 | 170 | # Recycle Bin used on file shares 171 | $RECYCLE.BIN/ 172 | 173 | # Windows Installer files 174 | *.cab 175 | *.msi 176 | *.msix 177 | *.msm 178 | *.msp 179 | 180 | # Windows shortcuts 181 | *.lnk 182 | 183 | # End of https://www.toptal.com/developers/gitignore/api/windows 184 | 185 | #Added by cargo 186 | 187 | /target 188 | Cargo.lock 189 | 190 | .pnp.* 191 | .yarn 192 | *.node 193 | docs/ 194 | -------------------------------------------------------------------------------- /bindings/nodejs/.npmignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | .cargo 4 | .github 5 | npm 6 | .eslintrc 7 | .prettierignore 8 | rustfmt.toml 9 | yarn.lock 10 | *.node 11 | .yarn 12 | __test__ 13 | renovate.json 14 | -------------------------------------------------------------------------------- /bindings/nodejs/.prettierignore: -------------------------------------------------------------------------------- 1 | target 2 | generated.js 3 | index.d.ts 4 | .yarn 5 | -------------------------------------------------------------------------------- /bindings/nodejs/.yarnrc.yml: -------------------------------------------------------------------------------- 1 | nodeLinker: node-modules 2 | -------------------------------------------------------------------------------- /bindings/nodejs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-nodejs" 3 | publish = false 4 | 5 | version = { workspace = true } 6 | edition = { workspace = true } 7 | license = { workspace = true } 8 | authors = { workspace = true } 9 | repository = { workspace = true } 10 | 11 | [lib] 12 | crate-type = ["cdylib"] 13 | doc = false 14 | 15 | [dependencies] 16 | chrono = { workspace = true } 17 | databend-driver = { workspace = true, features = ["rustls", "flight-sql"] } 18 | tokio-stream = { workspace = true } 19 | 20 | napi = { version = "2.16", default-features = false, features = [ 21 | "napi6", 22 | "async", 23 | "serde-json", 24 | "chrono_date", 25 | ] } 26 | napi-derive = "2.16" 27 | once_cell = "1.21" 28 | serde_json = "1.0" 29 | 30 | [build-dependencies] 31 | napi-build = "2" 32 | -------------------------------------------------------------------------------- /bindings/nodejs/README.md: -------------------------------------------------------------------------------- 1 | # 
databend-driver 2 | 3 | Databend Node.js Client 4 | 5 | [![image](https://img.shields.io/npm/v/databend-driver.svg)](https://www.npmjs.com/package/databend-driver) 6 | ![License](https://img.shields.io/npm/l/databend-driver.svg) 7 | [![image](https://img.shields.io/npm/types/databend-driver.svg)](https://www.npmjs.com/package/databend-driver) 8 | 9 | ## Usage 10 | 11 | ```javascript 12 | const { Client } = require("databend-driver"); 13 | 14 | const client = new Client( 15 | "databend://root:root@localhost:8000/?sslmode=disable", 16 | ); 17 | const conn = await client.getConn(); 18 | 19 | await conn.exec(`CREATE TABLE test ( 20 | i64 Int64, 21 | u64 UInt64, 22 | f64 Float64, 23 | s String, 24 | s2 String, 25 | d Date, 26 | t DateTime 27 | );`); 28 | 29 | // get rows of value array 30 | const rows = await conn.queryIter("SELECT * FROM test"); 31 | let row = await rows.next(); 32 | while (row) { 33 | console.log(row.values()); 34 | row = await rows.next(); 35 | } 36 | 37 | // get rows of map 38 | const rows = await conn.queryIter("SELECT * FROM test"); 39 | let row = await rows.next(); 40 | while (row) { 41 | console.log(row.data()); 42 | row = await rows.next(); 43 | } 44 | 45 | // iter rows 46 | const rows = await conn.queryIter("SELECT * FROM test"); 47 | for await (const row of rows) { 48 | console.log(row.values()); 49 | } 50 | 51 | // pipe rows 52 | import { Transform } from "node:stream"; 53 | import { finished, pipeline } from "node:stream/promises"; 54 | 55 | const rows = await conn.queryIter("SELECT * FROM test"); 56 | const stream = rows.stream(); 57 | const transformer = new Transform({ 58 | readableObjectMode: true, 59 | writableObjectMode: true, 60 | transform(row, _, callback) { 61 | console.log(row.data()); callback(); 62 | }, 63 | }); 64 | await pipeline(stream, transformer); 65 | await finished(stream); 66 | ``` 67 | 68 | ## Type Mapping 69 | 70 | [Databend Types](https://docs.databend.com/sql/sql-reference/data-types/) 71 | 72 | ### General Data Types 73 | 74 | | Databend | Node.js | 75 | | ----------- | --------- | 76 | | `BOOLEAN` | `Boolean` | 77 | | `TINYINT` | `Number` | 78 | | `SMALLINT` | `Number` | 79 | | `INT` | `Number` | 80 | | `BIGINT` | `BigInt` | 81 | | `FLOAT` | `Number` | 82 | | `DOUBLE` | `Number` | 83 | | `DECIMAL` | `String` | 84 | | `DATE` | `Date` | 85 | | `TIMESTAMP` | `Date` | 86 | | `VARCHAR` | `String` | 87 | | `BINARY` | `Buffer` | 88 | 89 | ### Semi-Structured Data Types 90 | 91 | | Databend | Node.js | 92 | | ----------- | ----------------- | 93 | | `ARRAY` | `Array` | 94 | | `TUPLE` | `Array` | 95 | | `MAP` | `Object` | 96 | | `VARIANT` | `String / Object` | 97 | | `BITMAP` | `String` | 98 | | `GEOMETRY` | `String` | 99 | | `GEOGRAPHY` | `String` | 100 | 101 | Note: `VARIANT` is a JSON-encoded string.
Example: 102 | 103 | ```sql 104 | CREATE TABLE example ( 105 | data VARIANT 106 | ); 107 | INSERT INTO example VALUES ('{"a": 1, "b": "hello"}'); 108 | ``` 109 | 110 | ```javascript 111 | const row = await conn.queryRow("SELECT * FROM example limit 1;"); 112 | const data = row.values()[0]; 113 | const value = JSON.parse(data); 114 | console.log(value); 115 | ``` 116 | 117 | The row object also provides an option to convert `VARIANT` values to `Object`: 118 | 119 | ```javascript 120 | const row = await conn.queryRow("SELECT * FROM example limit 1;"); 121 | row.setOpts({ variantAsObject: true }); 122 | console.log(row.data()); 123 | ``` 124 | 125 | ## Parameter Binding 126 | 127 | ```javascript 128 | const row = await conn.queryRow( 129 | "SELECT $1, $2, $3, $4", 130 | [3, false, 4, "55"], 131 | ); 132 | const row = await conn.queryRow( 133 | "SELECT :a, :b, :c, :d", 134 | { a: 3, b: false, c: 4, d: "55" }, 135 | ); 136 | const row = await conn.queryRow("SELECT ?, ?, ?, ?", [3, false, 4, "55"]); 137 | ``` 138 | 139 | ## Development 140 | 141 | ```shell 142 | cd bindings/nodejs 143 | pnpm install 144 | pnpm run build:debug 145 | pnpm run test 146 | ``` 147 | -------------------------------------------------------------------------------- /bindings/nodejs/build.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | extern crate napi_build; 16 | 17 | fn main() { 18 | napi_build::setup(); 19 | } 20 | -------------------------------------------------------------------------------- /bindings/nodejs/cucumber.json: -------------------------------------------------------------------------------- 1 | { 2 | "default": { 3 | "paths": ["tests/*.feature"], 4 | "require": ["tests/*.js"] 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /bindings/nodejs/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Datafuse Labs 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | 17 | /// 18 | 19 | const { Readable } = require("node:stream"); 20 | 21 | const { Client, RowIterator } = require("./generated.js"); 22 | 23 | class RowsStream extends Readable { 24 | constructor(reader, options) { 25 | super({ objectMode: true, ...options }); 26 | this.reader = reader; 27 | } 28 | 29 | _read() { 30 | this.reader 31 | .next() 32 | .then((item) => { 33 | this.push(item); 34 | }) 35 | .catch((e) => { 36 | this.emit("error", e); 37 | }); 38 | } 39 | } 40 | 41 | RowIterator.prototype[Symbol.asyncIterator] = async function* () { 42 | while (true) { 43 | const item = await this.next(); 44 | if (item === null) { 45 | break; 46 | } 47 | yield item; 48 | } 49 | }; 50 | 51 | RowIterator.prototype.stream = function () { 52 | return new RowsStream(this); 53 | }; 54 | 55 | module.exports.Client = Client; 56 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/darwin-arm64/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-darwin-arm64", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "darwin" 7 | ], 8 | "cpu": [ 9 | "arm64" 10 | ], 11 | "main": "databend-driver.darwin-arm64.node", 12 | "files": [ 13 | "databend-driver.darwin-arm64.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/darwin-x64/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-darwin-x64", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "darwin" 7 | ], 8 | "cpu": [ 9 | "x64" 10 | ], 11 | "main": "databend-driver.darwin-x64.node", 12 | "files": [ 13 | "databend-driver.darwin-x64.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/linux-arm64-gnu/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-linux-arm64-gnu", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "linux" 7 | ], 8 | "cpu": [ 9 | "arm64" 10 | ], 11 | "main": "databend-driver.linux-arm64-gnu.node", 12 | "files": [ 13 | "databend-driver.linux-arm64-gnu.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | }, 19 | "libc": [ 20 | "glibc" 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/linux-arm64-musl/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-linux-arm64-musl", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "linux" 7 | ], 8 | "cpu": [ 9 | "arm64" 10 | ], 11 | "main": "databend-driver.linux-arm64-musl.node", 12 | "files": [ 13 | "databend-driver.linux-arm64-musl.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | }, 19 | "libc": [ 20 | "musl" 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/linux-x64-gnu/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-linux-x64-gnu", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "linux" 7 | ], 8 | "cpu": [ 9 | "x64" 10 | ], 11 | "main": "databend-driver.linux-x64-gnu.node", 12 | "files": [ 13 | "databend-driver.linux-x64-gnu.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | }, 19 | "libc": [ 20 | "glibc" 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/linux-x64-musl/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-linux-x64-musl", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "linux" 7 | ], 8 | "cpu": [ 9 | "x64" 10 | ], 11 | "main": "databend-driver.linux-x64-musl.node", 12 | "files": [ 13 | "databend-driver.linux-x64-musl.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | }, 19 | "libc": [ 20 | "musl" 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/win32-arm64-msvc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-win32-arm64-msvc", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "win32" 7 | ], 8 | "cpu": [ 9 | "arm64" 10 | ], 11 | "main": "databend-driver.win32-arm64-msvc.node", 12 | "files": [ 13 | "databend-driver.win32-arm64-msvc.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /bindings/nodejs/npm/win32-x64-msvc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@databend-driver/lib-win32-x64-msvc", 3 | "repository": "https://github.com/databendlabs/bendsql.git", 4 | "version": "0.27.4", 5 | "os": [ 6 | "win32" 7 | ], 8 | "cpu": [ 9 | "x64" 10 | ], 11 | "main": "databend-driver.win32-x64-msvc.node", 12 | "files": [ 13 | "databend-driver.win32-x64-msvc.node" 14 | ], 15 | "license": "Apache-2.0", 16 | "engines": { 17 | "node": ">= 10" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /bindings/nodejs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databend-driver", 3 | "author": "Databend Authors ", 4 | "version": "0.27.4", 5 | "license": "Apache-2.0", 6 | "main": "index.js", 7 | "types": "index.d.ts", 8 | "description": "Databend Driver Node.js Binding", 9 | "repository": { 10 | "url": "git+https://github.com/databendlabs/bendsql.git" 11 | }, 12 | "napi": { 13 | "name": "databend-driver", 14 | "package": { 15 | "name": "@databend-driver/lib" 16 | }, 17 | "triples": { 18 | "defaults": false, 19 | "additional": [ 20 | "x86_64-unknown-linux-gnu", 21 | "aarch64-unknown-linux-gnu", 22 | "x86_64-unknown-linux-musl", 23 | "aarch64-unknown-linux-musl", 24 | "x86_64-pc-windows-msvc", 25 | "aarch64-pc-windows-msvc", 26 | "x86_64-apple-darwin", 27 | "aarch64-apple-darwin" 28 | ] 29 | } 30 | }, 31 | "keywords": [ 32 | "api", 33 | "databend", 34 | "driver" 35 | ], 36 | "files": [ 37 | "index.d.ts", 38 | "index.js", 39 | 
"generated.js", 40 | "LICENSE" 41 | ], 42 | "devDependencies": { 43 | "@cucumber/cucumber": "^11.1.0", 44 | "@napi-rs/cli": "^2.18.4", 45 | "@types/node": "^22.10.1", 46 | "prettier": "^3.4.2", 47 | "typedoc": "^0.27.4", 48 | "typescript": "^5.7.2" 49 | }, 50 | "engines": { 51 | "node": ">= 16" 52 | }, 53 | "scripts": { 54 | "build": "napi build --platform --target=$NAPI_TARGET --release --js generated.js && node ./scripts/header.js", 55 | "build:debug": "napi build --platform --target=$NAPI_TARGET --js generated.js && node ./scripts/header.js", 56 | "docs": "typedoc", 57 | "format": "prettier --write .", 58 | "test": "cucumber-js", 59 | "prepublishOnly": "napi prepublish -t npm" 60 | }, 61 | "prettier": { 62 | "overrides": [ 63 | { 64 | "files": "./**/*.{js,ts,mjs}", 65 | "options": { 66 | "printWidth": 120 67 | } 68 | } 69 | ] 70 | }, 71 | "publishConfig": { 72 | "registry": "https://registry.npmjs.org/", 73 | "access": "public" 74 | }, 75 | "packageManager": "pnpm@9.15.0+sha512.76e2379760a4328ec4415815bcd6628dee727af3779aaa4c914e3944156c4299921a89f976381ee107d41f12cfa4b66681ca9c718f0668fa0831ed4c6d8ba56c" 76 | } 77 | -------------------------------------------------------------------------------- /bindings/nodejs/scripts/header.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Datafuse Labs 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | const fs = require("fs"); 18 | 19 | let files = ["generated.js", "index.d.ts"]; 20 | 21 | for (path of files) { 22 | let data = fs.readFileSync(path, "utf8"); 23 | fs.writeFileSync( 24 | path, 25 | `/* 26 | * Copyright 2021 Datafuse Labs 27 | * 28 | * Licensed under the Apache License, Version 2.0 (the "License"); 29 | * you may not use this file except in compliance with the License. 30 | * You may obtain a copy of the License at 31 | * 32 | * http://www.apache.org/licenses/LICENSE-2.0 33 | * 34 | * Unless required by applicable law or agreed to in writing, software 35 | * distributed under the License is distributed on an "AS IS" BASIS, 36 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 37 | * See the License for the specific language governing permissions and 38 | * limitations under the License. 
39 | */ 40 | 41 | ` + data, 42 | ); 43 | } 44 | -------------------------------------------------------------------------------- /bindings/nodejs/tests/binding.feature: -------------------------------------------------------------------------------- 1 | ../../tests/features/binding.feature -------------------------------------------------------------------------------- /bindings/nodejs/tests/data: -------------------------------------------------------------------------------- 1 | ../../tests/data -------------------------------------------------------------------------------- /bindings/nodejs/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2021", 4 | "strict": true, 5 | "moduleResolution": "node", 6 | "module": "CommonJS", 7 | "noUnusedLocals": true, 8 | "noUnusedParameters": true, 9 | "esModuleInterop": true, 10 | "allowSyntheticDefaultImports": true 11 | }, 12 | "include": ["."], 13 | "exclude": ["node_modules"] 14 | } 15 | -------------------------------------------------------------------------------- /bindings/nodejs/typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "entryPoints": ["index.d.ts"], 3 | "out": "docs", 4 | "name": "Databend Driver", 5 | "tsconfig": "tsconfig.json", 6 | "excludePrivate": true, 7 | "excludeProtected": true, 8 | "excludeExternals": true, 9 | "includeVersion": true, 10 | "githubPages": false, 11 | "navigationLinks": { 12 | "Homepage": "https://databend.rs/doc/sql-clients/bendsql", 13 | "GitHub": "https://github.com/databendlabs/bendsql" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /bindings/python/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | .pytest_cache/ 6 | *.py[cod] 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | .venv/ 14 | env/ 15 | bin/ 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | include/ 26 | man/ 27 | venv/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | pip-selfcheck.json 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | 45 | # Translations 46 | *.mo 47 | 48 | # Mr Developer 49 | .mr.developer.cfg 50 | .project 51 | .pydevproject 52 | 53 | # Rope 54 | .ropeproject 55 | 56 | # Django stuff: 57 | *.log 58 | *.pot 59 | 60 | .DS_Store 61 | 62 | # Sphinx documentation 63 | docs/_build/ 64 | 65 | # PyCharm 66 | .idea/ 67 | 68 | # VSCode 69 | .vscode/ 70 | 71 | # Pyenv 72 | .python-version 73 | 74 | # Generated docs 75 | docs 76 | 77 | .ruff_cache/ 78 | -------------------------------------------------------------------------------- /bindings/python/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-python" 3 | publish = false 4 | 5 | version = { workspace = true } 6 | edition = { workspace = true } 7 | license = { workspace = true } 8 | authors = { workspace = true } 9 | 10 | [lib] 11 | crate-type = ["cdylib"] 12 | name = "databend_driver" 13 | doc = false 14 | 15 | [dependencies] 16 | chrono = { workspace = true } 17 | databend-driver = { workspace = true, features = 
["rustls", "flight-sql"] } 18 | tokio-stream = { workspace = true } 19 | 20 | csv = "1.3" 21 | ctor = "0.2" 22 | once_cell = "1.21" 23 | pyo3 = { version = "0.24.2", features = ["abi3-py37", "chrono"] } 24 | pyo3-async-runtimes = { version = "0.24", features = ["tokio-runtime"] } 25 | tokio = "1.44" 26 | -------------------------------------------------------------------------------- /bindings/python/package/databend_driver/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Datafuse Labs 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # flake8: noqa 16 | 17 | from ._databend_driver import * 18 | -------------------------------------------------------------------------------- /bindings/python/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "maturin" 3 | requires = ["maturin>=1.0,<2.0"] 4 | 5 | [project] 6 | classifiers = [ 7 | "Programming Language :: Rust", 8 | "Programming Language :: Python :: Implementation :: CPython", 9 | "Programming Language :: Python :: Implementation :: PyPy", 10 | "Programming Language :: Python :: 3.8", 11 | "Programming Language :: Python :: 3.9", 12 | "Programming Language :: Python :: 3.10", 13 | "Programming Language :: Python :: 3.11", 14 | "Programming Language :: Python :: 3.12", 15 | "Programming Language :: Python :: 3.13", 16 | 17 | "License :: OSI Approved :: Apache Software License", 18 | 19 | "Operating System :: POSIX", 20 | "Operating System :: MacOS", 21 | "Operating System :: Microsoft :: Windows", 22 | "Operating System :: Unix", 23 | 24 | "Topic :: Database", 25 | "Topic :: Software Development", 26 | "Topic :: Software Development :: Libraries", 27 | "Topic :: Software Development :: Libraries :: Application Frameworks", 28 | "Topic :: Software Development :: Libraries :: Python Modules", 29 | "Topic :: Scientific/Engineering :: Information Analysis" 30 | ] 31 | description = "Databend Driver Python Binding" 32 | license = { text = "Apache-2.0" } 33 | name = "databend-driver" 34 | readme = "README.md" 35 | requires-python = ">=3.8, < 3.14" 36 | dynamic = ["version"] 37 | 38 | [project.urls] 39 | Repository = "https://github.com/databendlabs/bendsql" 40 | 41 | [tool.maturin] 42 | features = ["pyo3/extension-module"] 43 | module-name = "databend_driver._databend_driver" 44 | python-source = "package" 45 | 46 | [dependency-groups] 47 | dev = [ 48 | "behave>=1.2.6", 49 | "maturin>=1.7.8", 50 | "ruff>=0.8.4", 51 | ] 52 | -------------------------------------------------------------------------------- /bindings/python/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod asyncio; 16 | mod blocking; 17 | mod types; 18 | mod utils; 19 | 20 | use pyo3::prelude::*; 21 | 22 | use crate::asyncio::{AsyncDatabendClient, AsyncDatabendConnection}; 23 | use crate::blocking::{BlockingDatabendClient, BlockingDatabendConnection, BlockingDatabendCursor}; 24 | use crate::types::{ConnectionInfo, Field, Row, RowIterator, Schema, ServerStats}; 25 | 26 | #[pymodule] 27 | fn _databend_driver(_py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> { 28 | m.add_class::<AsyncDatabendClient>()?; 29 | m.add_class::<AsyncDatabendConnection>()?; 30 | m.add_class::<BlockingDatabendClient>()?; 31 | m.add_class::<BlockingDatabendConnection>()?; 32 | m.add_class::<BlockingDatabendCursor>()?; 33 | m.add_class::<ConnectionInfo>()?; 34 | m.add_class::<Field>()?; 35 | m.add_class::<Row>()?; 36 | m.add_class::<RowIterator>()?; 37 | m.add_class::<Schema>()?; 38 | m.add_class::<ServerStats>()?; 39 | Ok(()) 40 | } 41 | -------------------------------------------------------------------------------- /bindings/python/src/utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License.
14 | 15 | use std::collections::BTreeMap; 16 | use std::collections::HashMap; 17 | 18 | use databend_driver::Param; 19 | use databend_driver::Params; 20 | use pyo3::exceptions::PyAttributeError; 21 | use pyo3::types::PyTuple; 22 | use pyo3::{ 23 | prelude::*, 24 | types::{PyDict, PyList}, 25 | }; 26 | 27 | #[ctor::ctor] 28 | pub(crate) static RUNTIME: tokio::runtime::Runtime = tokio::runtime::Builder::new_multi_thread() 29 | .enable_all() 30 | .build() 31 | .unwrap(); 32 | 33 | /// Utility to collect rust futures with GIL released 34 | pub(crate) fn wait_for_future<F>(py: Python, f: F) -> F::Output 35 | where 36 | F: std::future::Future + Send, 37 | F::Output: Send, 38 | { 39 | py.allow_threads(|| RUNTIME.block_on(f)) 40 | } 41 | 42 | pub(crate) fn to_sql_params(v: Option<Bound<PyAny>>) -> Params { 43 | match v { 44 | Some(v) => { 45 | if let Ok(v) = v.downcast::<PyDict>() { 46 | let mut params = HashMap::new(); 47 | for (k, v) in v.iter() { 48 | let k = k.extract::<String>().unwrap(); 49 | let v = to_sql_string(v).unwrap(); 50 | params.insert(k, v); 51 | } 52 | Params::NamedParams(params) 53 | } else if let Ok(v) = v.downcast::<PyList>() { 54 | let mut params = vec![]; 55 | for v in v.iter() { 56 | let v = to_sql_string(v).unwrap(); 57 | params.push(v); 58 | } 59 | Params::QuestionParams(params) 60 | } else if let Ok(v) = v.downcast::<PyTuple>() { 61 | let mut params = vec![]; 62 | for v in v.iter() { 63 | let v = to_sql_string(v).unwrap(); 64 | params.push(v); 65 | } 66 | Params::QuestionParams(params) 67 | } else { 68 | Params::QuestionParams(vec![to_sql_string(v).unwrap()]) 69 | } 70 | } 71 | None => Params::default(), 72 | } 73 | } 74 | 75 | fn to_sql_string(v: Bound<PyAny>) -> PyResult<String> { 76 | if v.is_none() { 77 | return Ok("NULL".to_string()); 78 | } 79 | match v.downcast::<PyAny>() { 80 | Ok(v) => { 81 | if let Ok(v) = v.extract::<String>() { 82 | Ok(v.as_sql_string()) 83 | } else if let Ok(v) = v.extract::<bool>() { 84 | Ok(v.as_sql_string()) 85 | } else if let Ok(v) = v.extract::<i64>() { 86 | Ok(v.as_sql_string()) 87 | } else if let Ok(v) = v.extract::<f64>() { 88 | Ok(v.as_sql_string()) 89 | } else { 90 | Err(PyAttributeError::new_err(format!( 91 | "Invalid parameter type for: {:?}, expected str, bool, int or float", 92 | v 93 | ))) 94 | } 95 | } 96 | Err(e) => Err(e.into()), 97 | } 98 | } 99 | 100 | pub(super) fn options_as_ref( 101 | format_options: &Option<BTreeMap<String, String>>, 102 | ) -> Option<BTreeMap<&str, &str>> { 103 | format_options 104 | .as_ref() 105 | .map(|opts| opts.iter().map(|(k, v)| (k.as_str(), v.as_str())).collect()) 106 | } 107 | -------------------------------------------------------------------------------- /bindings/python/tests/asyncio/binding.feature: -------------------------------------------------------------------------------- 1 | ../../../tests/features/binding.feature -------------------------------------------------------------------------------- /bindings/python/tests/blocking/binding.feature: -------------------------------------------------------------------------------- 1 | ../../../tests/features/binding.feature -------------------------------------------------------------------------------- /bindings/python/tests/cursor/binding.feature: -------------------------------------------------------------------------------- 1 | ../../../tests/features/binding.feature -------------------------------------------------------------------------------- /bindings/python/tests/data: -------------------------------------------------------------------------------- 1 | ../../tests/data --------------------------------------------------------------------------------
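The `binding.feature` files above are symlinks into the shared Gherkin suite described in the next file, and each binding supplies its own step definitions for the same scenarios (for the Python binding, behave modules such as `tests/blocking/steps/binding.py`). As a rough illustration only — the DSN environment variable and the `get_conn`/`query_row` method names below are assumptions, not copied from the repository's actual step modules — such a module might begin like this:

```python
# Hypothetical sketch of a behave step module for the blocking Python binding.
# Assumptions: the TEST_DATABEND_DSN env var name and the get_conn()/query_row()
# method names are illustrative and may differ from the real binding API.
import os

from behave import given, then

import databend_driver


@given("A new Databend Driver Client")
def create_client(context):
    dsn = os.getenv(
        "TEST_DATABEND_DSN",
        "databend://root:root@localhost:8000/?sslmode=disable",
    )
    client = databend_driver.BlockingDatabendClient(dsn)
    # Keep the connection on the behave context so later steps can reuse it.
    context.conn = client.get_conn()


@then('Select string "{input}" should be equal to "{output}"')
def select_string(context, input, output):
    # query_row returns a single row; values() unpacks it as a tuple.
    row = context.conn.query_row(f"SELECT '{input}'")
    assert row.values()[0] == output
```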
/bindings/tests/README.md: -------------------------------------------------------------------------------- 1 | # Databend Driver Binding Tests 2 | 3 | Databend Driver uses [Cucumber](https://github.com/cucumber) for BDD (Behaviour-Driven Development) tests. This module provides the shared [Gherkin](https://cucumber.io/docs/gherkin/) feature files, and every binding should implement step definitions against them. -------------------------------------------------------------------------------- /bindings/tests/data/test.csv: -------------------------------------------------------------------------------- 1 | -1,1,1.0,"'",,2011-03-06,2011-03-06 06:20:00 2 | -2,2,2.0,"""","",2012-05-31,2012-05-31 11:20:00 3 | -3,3,3.0,"\",NULL,2016-04-04,2016-04-04 11:30:00 4 | -------------------------------------------------------------------------------- /bindings/tests/features/binding.feature: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Datafuse Labs 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | Feature: Databend Driver 16 | 17 | Scenario: Select Simple 18 | Given A new Databend Driver Client 19 | Then Select string "Hello, Databend!" should be equal to "Hello, Databend!" 20 | 21 | Scenario: Select params 22 | Given A new Databend Driver Client 23 | Then Select params binding 24 | 25 | Scenario: Select Types 26 | Given A new Databend Driver Client 27 | Then Select types should be expected native types 28 | 29 | Scenario: Select Iter 30 | Given A new Databend Driver Client 31 | Then Select numbers should iterate all rows 32 | 33 | Scenario: Insert and Select 34 | Given A new Databend Driver Client 35 | When Create a test table 36 | Then Insert and Select should be equal 37 | 38 | Scenario: Stream Load 39 | Given A new Databend Driver Client 40 | When Create a test table 41 | Then Stream load and Select should be equal 42 | 43 | Scenario: Load file 44 | Given A new Databend Driver Client 45 | When Create a test table 46 | Then Load file and Select should be equal 47 | 48 | Scenario: Temp table 49 | Given A new Databend Driver Client 50 | Then Temp table should work with cluster 51 | -------------------------------------------------------------------------------- /cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bendsql" 3 | description = "Databend Native Command Line Tool" 4 | categories = ["database"] 5 | keywords = ["databend", "database", "cli"] 6 | 7 | version = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | authors = { workspace = true } 11 | repository = { workspace = true } 12 | 13 | [target.'cfg(all(target_arch = "x86_64", target_os = "linux"))'.dependencies] 14 | duckdb = { version = "1.2.2", features = ["bundled"]} 15 | 16 | [target.'cfg(all(target_arch = "aarch64", target_os = "macos"))'.dependencies] 17 | duckdb = { version = "1.2.2", features = ["bundled"]} 18 | 19 | [dependencies] 20 | chrono
= { workspace = true } 21 | databend-client = { workspace = true } 22 | databend-driver = { workspace = true, features = ["rustls", "flight-sql"] } 23 | tokio-stream = { workspace = true } 24 | 25 | actix-web = "4.10" 26 | anyhow = "1.0" 27 | arrow = { workspace = true } 28 | async-recursion = "1.1.1" 29 | async-trait = "0.1" 30 | clap = { version = "4.5", features = ["derive", "env"] } 31 | comfy-table = "7.1" 32 | csv = "1.3" 33 | ctrlc = { version = "3.4.6", features = ["termination"] } 34 | databend-common-ast = "0.2.1" 35 | fern = { version = "0.7", features = ["colored"] } 36 | indicatif = "0.17" 37 | log = "0.4" 38 | mime_guess = "2.0" 39 | nom = "8.0.0" 40 | once_cell = "1.21" 41 | percent-encoding = "2.3" 42 | rust-embed = "8.7.0" 43 | rustyline = "15.0" 44 | serde = { version = "1.0", features = ["derive"] } 45 | serde_json = "1.0" 46 | sled = "0.34" 47 | sqlformat = "0.3.3" 48 | tempfile = "3.19.1" 49 | terminal_size = "0.4" 50 | tokio = { version = "1.44.2", features = [ 51 | "macros", 52 | "rt", 53 | "rt-multi-thread", 54 | "sync", 55 | "parking_lot", 56 | "full", 57 | ] } 58 | toml = "0.8" 59 | tracing-appender = "0.2" 60 | unicode-segmentation = "1.12" 61 | url = { version = "2.5", default-features = false } 62 | webbrowser = "1.0.4" 63 | 64 | [build-dependencies] 65 | vergen-gix = { version = "1.0.9", features = ["build"] } 66 | 67 | [[bin]] 68 | name = "bendsql" 69 | path = "src/main.rs" 70 | doctest = false 71 | test = false 72 | -------------------------------------------------------------------------------- /cli/build.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::{env, error::Error}; 16 | use vergen_gix::{BuildBuilder, Emitter, GixBuilder}; 17 | 18 | fn main() -> Result<(), Box<dyn Error>> { 19 | let gix = GixBuilder::default().sha(false).build()?; 20 | let build = BuildBuilder::default().build_timestamp(true).build()?; 21 | Emitter::default() 22 | .fail_on_error() 23 | .add_instructions(&gix)? 24 | .add_instructions(&build)?
25 | .emit() 26 | .unwrap_or_else(|_| { 27 | let info = env::var("BENDSQL_BUILD_INFO").unwrap_or_else(|_| "unknown".to_string()); 28 | println!("cargo:rustc-env=BENDSQL_BUILD_INFO={}", info); 29 | }); 30 | 31 | Ok(()) 32 | } 33 | -------------------------------------------------------------------------------- /cli/frontend/build/asset-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": { 3 | "main.css": "/static/css/main.a94f3081.css", 4 | "main.js": "/static/js/main.900dc5c4.js", 5 | "static/media/download.svg": "/static/media/download.1a2f96254a617a2765b0fca418a3e5bf.svg", 6 | "static/media/zoom-in.svg": "/static/media/zoom-in.b8e914df4e9fbb889ceec372b13f62d1.svg", 7 | "static/media/full-screen.svg": "/static/media/full-screen.972c352cc40ca74a17222b04ad2b1316.svg", 8 | "index.html": "/index.html", 9 | "static/media/zoom-out.svg": "/static/media/zoom-out.9600557781b9acd42f24eddad00f6f36.svg" 10 | }, 11 | "entrypoints": [ 12 | "static/css/main.a94f3081.css", 13 | "static/js/main.900dc5c4.js" 14 | ] 15 | } -------------------------------------------------------------------------------- /cli/frontend/build/index.html: -------------------------------------------------------------------------------- 1 | Databend
-------------------------------------------------------------------------------- /cli/frontend/build/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /cli/frontend/build/static/js/main.900dc5c4.js.LICENSE.txt: -------------------------------------------------------------------------------- 1 | /* 2 | object-assign 3 | (c) Sindre Sorhus 4 | @license MIT 5 | */ 6 | 7 | /*! 8 | Copyright (c) 2018 Jed Watson. 9 | Licensed under the MIT License (MIT), see 10 | http://jedwatson.github.io/classnames 11 | */ 12 | 13 | /*! ***************************************************************************** 14 | Copyright (c) Microsoft Corporation. All rights reserved. 15 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use 16 | this file except in compliance with the License. You may obtain a copy of the 17 | License at http://www.apache.org/licenses/LICENSE-2.0 18 | 19 | THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 20 | KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED 21 | WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, 22 | MERCHANTABLITY OR NON-INFRINGEMENT. 23 | 24 | See the Apache Version 2.0 License for specific language governing permissions 25 | and limitations under the License. 26 | ***************************************************************************** */ 27 | 28 | /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */ 29 | 30 | /** @license React v0.19.1 31 | * scheduler.production.min.js 32 | * 33 | * Copyright (c) Facebook, Inc. and its affiliates. 34 | * 35 | * This source code is licensed under the MIT license found in the 36 | * LICENSE file in the root directory of this source tree. 37 | */ 38 | 39 | /** @license React v16.14.0 40 | * react-dom.production.min.js 41 | * 42 | * Copyright (c) Facebook, Inc. and its affiliates. 43 | * 44 | * This source code is licensed under the MIT license found in the 45 | * LICENSE file in the root directory of this source tree. 46 | */ 47 | 48 | /** @license React v16.14.0 49 | * react-jsx-runtime.production.min.js 50 | * 51 | * Copyright (c) Facebook, Inc. and its affiliates. 52 | * 53 | * This source code is licensed under the MIT license found in the 54 | * LICENSE file in the root directory of this source tree. 55 | */ 56 | 57 | /** @license React v16.14.0 58 | * react.production.min.js 59 | * 60 | * Copyright (c) Facebook, Inc. and its affiliates. 61 | * 62 | * This source code is licensed under the MIT license found in the 63 | * LICENSE file in the root directory of this source tree. 
64 | */ 65 | -------------------------------------------------------------------------------- /cli/frontend/build/static/media/download.1a2f96254a617a2765b0fca418a3e5bf.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cli/frontend/build/static/media/full-screen.972c352cc40ca74a17222b04ad2b1316.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cli/frontend/build/static/media/zoom-in.b8e914df4e9fbb889ceec372b13f62d1.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cli/frontend/build/static/media/zoom-out.9600557781b9acd42f24eddad00f6f36.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /cli/src/ast/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | mod query_kind; 16 | pub use query_kind::quote_string_in_box_display; 17 | pub use query_kind::GenType; 18 | pub use query_kind::QueryKind; 19 | 20 | use databend_common_ast::parser::{parse_sql, token::TokenKind, tokenize_sql, Dialect}; 21 | use sqlformat::{FormatOptions, QueryParams}; 22 | 23 | pub fn format_query(query: &str) -> String { 24 | let kind = QueryKind::from(query); 25 | if matches!(kind, QueryKind::Get(_, _) | QueryKind::Put(_, _)) { 26 | return query.to_owned(); 27 | } 28 | 29 | if let Ok(tokens) = databend_common_ast::parser::tokenize_sql(query) { 30 | if let Ok((stmt, _)) = parse_sql(&tokens, Dialect::Experimental) { 31 | let options = FormatOptions::default(); 32 | 33 | let pretty_sql = sqlformat::format(query, &QueryParams::None, &options); 34 | // if pretty sql could be parsed into same stmt, return pretty sql 35 | if let Ok(pretty_tokens) = databend_common_ast::parser::tokenize_sql(&pretty_sql) { 36 | if let Ok((pretty_stmt, _)) = parse_sql(&pretty_tokens, Dialect::Experimental) { 37 | if stmt.to_string() == pretty_stmt.to_string() { 38 | return pretty_sql; 39 | } 40 | } 41 | } 42 | 43 | return stmt.to_string(); 44 | } 45 | } 46 | query.to_string() 47 | } 48 | 49 | pub fn highlight_query(line: &str) -> String { 50 | let tokens = tokenize_sql(line); 51 | let mut line = line.to_owned(); 52 | 53 | if let Ok(tokens) = tokens { 54 | for token in tokens.iter().rev() { 55 | if TokenKind::is_keyword(&token.kind) 56 | || TokenKind::is_reserved_ident(&token.kind, false) 57 | || TokenKind::is_reserved_function_name(&token.kind) 58 | { 59 | line.replace_range( 60 | std::ops::Range::from(token.span), 61 | &format!("\x1b[1;32m{}\x1b[0m", token.text()), 62 | ); 63 | } else if TokenKind::is_literal(&token.kind) { 64 | line.replace_range( 65 | std::ops::Range::from(token.span), 66 | &format!("\x1b[1;33m{}\x1b[0m", token.text()), 67 | ); 68 | } 69 | } 70 | } 71 | 72 | line 73 | } 74 | -------------------------------------------------------------------------------- /cli/src/trace.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use log::LevelFilter; 16 | use std::io::BufWriter; 17 | use std::io::Write; 18 | use std::str::FromStr; 19 | 20 | use anyhow::Result; 21 | use tracing_appender::rolling::RollingFileAppender; 22 | use tracing_appender::rolling::Rotation; 23 | 24 | const MAX_LOG_FILES: usize = 10; 25 | 26 | #[allow(dyn_drop)] 27 | pub async fn init_logging( 28 | dir: &str, 29 | level: &str, 30 | ) -> Result<Vec<Box<dyn Drop>>> { 31 | let mut guards: Vec<Box<dyn Drop>> = Vec::new(); 32 | let mut logger = fern::Dispatch::new(); 33 | 34 | let rolling = RollingFileAppender::builder() 35 | .rotation(Rotation::DAILY) 36 | .filename_prefix("bendsql.log") 37 | .max_log_files(MAX_LOG_FILES) 38 | .build(dir)?; 39 | let (non_blocking, flush_guard) = tracing_appender::non_blocking(rolling); 40 | let buffered_non_blocking = BufWriter::with_capacity(64 * 1024 * 1024, non_blocking); 41 | 42 | guards.push(Box::new(flush_guard)); 43 | let dispatch_file = fern::Dispatch::new() 44 | .format(|out, message, record| { 45 | out.finish(format_args!( 46 | "[{}] - {} - [{}] {}", 47 | chrono::Local::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true), 48 | record.level(), 49 | record.target(), 50 | message 51 | )) 52 | }) 53 | .level(LevelFilter::from_str(level)?) 54 | .chain(Box::new(buffered_non_blocking) as Box<dyn Write + Send>); 55 | logger = logger.chain(dispatch_file); 56 | 57 | let dispatch_stderr = fern::Dispatch::new() 58 | .level(LevelFilter::Warn) 59 | .filter(|metadata| metadata.target() == "server_warnings") 60 | .format(|out, message, _| { 61 | out.finish(format_args!( 62 | "\x1B[{}m{}\x1B[0m", 63 | fern::colors::Color::Yellow.to_fg_str(), 64 | message 65 | )) 66 | }) 67 | .chain(std::io::stderr()) 68 | logger = logger.chain(dispatch_stderr); 69 | 70 | if logger.apply().is_err() { 71 | eprintln!("logger has already been set"); 72 | return Ok(Vec::new()); 73 | } 74 | 75 | Ok(guards) 76 | } 77 |
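The subtlety in `init_logging` above is the returned `Vec<Box<dyn Drop>>`: the flush guard from `tracing_appender::non_blocking` must outlive all logging, or buffered lines can be lost on exit. A minimal caller sketch, with a hypothetical log directory; this is illustrative wiring, not the actual `bendsql` startup code:

```rust
// Hypothetical caller of the init_logging defined above (assumes the cli
// crate's dependencies: anyhow, log, tokio). The guards are bound to a
// named variable so they live until main returns.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let _guards = init_logging("/tmp/bendsql-logs", "info").await?;
    log::info!("bendsql started"); // goes to the daily-rolling file
    Ok(())
} // `_guards` dropped here: the non-blocking writer flushes before exit
```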
-------------------------------------------------------------------------------- /cli/src/web.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::collections::HashMap; 16 | use std::net::TcpListener; 17 | use std::sync::atomic::AtomicUsize; 18 | use std::sync::{Arc, Mutex}; 19 | 20 | use actix_web::dev::Server; 21 | use actix_web::middleware::Logger; 22 | use actix_web::web::Query; 23 | use actix_web::{get, web, App, HttpResponse, HttpServer, Responder}; 24 | use mime_guess::from_path; 25 | use once_cell::sync::Lazy; 26 | use rust_embed::RustEmbed; 27 | use serde::Deserialize; 28 | 29 | #[derive(RustEmbed)] 30 | #[folder = "frontend/build/"] 31 | struct Asset; 32 | 33 | async fn embed_file(path: web::Path<String>) -> HttpResponse { 34 | let file_path = if path.is_empty() { 35 | "index.html".to_string() 36 | } else { 37 | path.into_inner() 38 | }; 39 | 40 | match Asset::get(&file_path) { 41 | Some(content) => { 42 | let mime_type = from_path(&file_path).first_or_octet_stream(); 43 | HttpResponse::Ok() 44 | .content_type(mime_type.as_ref()) 45 | .body(content.data) 46 | } 47 | None => HttpResponse::NotFound().body("File not found"), 48 | } 49 | } 50 | 51 | static PERF_ID: AtomicUsize = AtomicUsize::new(0); 52 | static APP_DATA: Lazy<Arc<Mutex<HashMap<usize, String>>>> = 53 | Lazy::new(|| Arc::new(Mutex::new(HashMap::new()))); 54 | 55 | #[derive(Deserialize, Debug)] 56 | struct MessageQuery { 57 | perf_id: Option<String>, 58 | } 59 | 60 | pub fn set_data(result: String) -> usize { 61 | let perf_id = PERF_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst); 62 | let l = APP_DATA.as_ref(); 63 | l.lock().unwrap().insert(perf_id, result); 64 | perf_id 65 | } 66 | 67 | #[get("/api/message")] 68 | async fn get_message(query: Query<MessageQuery>) -> impl Responder { 69 | query 70 | .perf_id 71 | .as_deref() 72 | .unwrap_or("") 73 | .parse::<usize>() 74 | .ok() 75 | .and_then(|id| { 76 | APP_DATA.as_ref().lock().unwrap().get(&id).map(|result| { 77 | HttpResponse::Ok().json(serde_json::json!({ 78 | "result": result, 79 | })) 80 | }) 81 | }) 82 | .unwrap_or_else(|| { 83 | HttpResponse::InternalServerError().json(serde_json::json!({ 84 | "error": format!("Perf ID {:?} not found", query.perf_id), 85 | })) 86 | }) 87 | } 88 | 89 | pub fn start_server(listener: TcpListener) -> Server { 90 | HttpServer::new(move || { 91 | App::new() 92 | .wrap(Logger::default()) 93 | .service(get_message) 94 | .route("/{filename:.*}", web::get().to(embed_file)) 95 | }) 96 | .listen(listener) 97 | .unwrap_or_else(|e| panic!("Cannot listen to address: {}", e)) 98 | .run() 99 | } 100 | -------------------------------------------------------------------------------- /cli/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2021 Datafuse Labs 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License.
16 | 17 | set -e 18 | 19 | CARGO_TARGET_DIR=${CARGO_TARGET_DIR:-./target} 20 | DATABEND_USER=${DATABEND_USER:-root} 21 | DATABEND_PASSWORD=${DATABEND_PASSWORD:-} 22 | DATABEND_HOST=${DATABEND_HOST:-localhost} 23 | DATABEND_PORT=${DATABEND_PORT:-8000} 24 | 25 | TEST_HANDLER=$1 26 | 27 | cargo build --bin bendsql 28 | 29 | case $TEST_HANDLER in 30 | "flight") 31 | echo "==> Testing Flight SQL handler" 32 | export BENDSQL_DSN="databend+flight://${DATABEND_USER}:${DATABEND_PASSWORD}@${DATABEND_HOST}:8900/?sslmode=disable" 33 | ;; 34 | "http") 35 | echo "==> Testing REST API handler" 36 | export BENDSQL_DSN="databend://${DATABEND_USER}:${DATABEND_PASSWORD}@${DATABEND_HOST}:${DATABEND_PORT}/?sslmode=disable&presign=on" 37 | ;; 38 | *) 39 | echo "Usage: $0 [flight|http]" 40 | exit 1 41 | ;; 42 | esac 43 | 44 | export BENDSQL="${CARGO_TARGET_DIR}/debug/bendsql" 45 | 46 | for tf in cli/tests/*.{sql,sh}; do 47 | [[ -e "$tf" ]] || continue 48 | echo " Running test -- ${tf}" 49 | if [[ $tf == *.sh ]]; then 50 | suite=$(basename "${tf}" | sed -e 's#.sh##') 51 | bash "${tf}" >"cli/tests/${suite}.output" 2>&1 || true 52 | elif [[ $tf == *.sql ]]; then 53 | suite=$(basename "${tf}" | sed -e 's#.sql##') 54 | "${BENDSQL}" --output tsv <"${tf}" >"cli/tests/${suite}.output" 2>&1 || true 55 | fi 56 | diff "cli/tests/${suite}.output" "cli/tests/${suite}.result" 57 | done 58 | rm -f cli/tests/*.output 59 | 60 | for tf in cli/tests/"$TEST_HANDLER"/*.{sql,sh}; do 61 | [[ -e "$tf" ]] || continue 62 | echo " Running test -- ${tf}" 63 | if [[ $tf == *.sh ]]; then 64 | suite=$(basename "${tf}" | sed -e 's#.sh##') 65 | bash "${tf}" >"cli/tests/${TEST_HANDLER}/${suite}.output" 2>&1 || true 66 | elif [[ $tf == *.sql ]]; then 67 | suite=$(basename "${tf}" | sed -e 's#.sql##') 68 | "${BENDSQL}" --output tsv <"${tf}" >"cli/tests/${TEST_HANDLER}/${suite}.output" 2>&1 || true 69 | fi 70 | diff "cli/tests/${TEST_HANDLER}/${suite}.output" "cli/tests/${TEST_HANDLER}/${suite}.result" 71 | done 72 | rm -f cli/tests/"$TEST_HANDLER"/*.output 73 | 74 | echo "--> Tests $1 passed" 75 | echo 76 | -------------------------------------------------------------------------------- /cli/tests/00-base.result: -------------------------------------------------------------------------------- 1 | 1 2 | 1 3 | b 2 false "{""k"":""v""}" 4 | a 1 true [1,2] 5 | 100000 6 | 0 99999 99999 100000 7 | 1 8 | 2 9 | 3 10 | 1740-10-08 10:16:40.001000 1740-10-08 10:16:40.000000 1740-10-08 10:16:39.999000 11 | [] {} 12 | with comment 13 | 1 14 | 2 15 | " 16 | a" 17 | 3 18 | -9999999999999999999999999999999999999999999999999999999999999999999999999999 19 | 1 20 | NULL 21 | 3.00 3.00 0.0000000170141183460469231731687303715884105727000 -0.0000000170141183460469231731687303715884105727000 22 | Asia/Shanghai 23 | 3 24 | 0 0.00 25 | 1 1.00 26 | 2 2.00 27 | 2 28 | [1,2,3] NULL (1,'ab') 29 | NULL {'k1':'v1','k2':'v2'} (2,NULL) 30 | 1 NULL 1 ab 31 | NULL v1 2 NULL 32 | {'k1':'v1','k2':'v2'} [6162,78797A] ('[1,2]','2024-04-10') 33 | bye 34 | -------------------------------------------------------------------------------- /cli/tests/00-base.sql: -------------------------------------------------------------------------------- 1 | drop table if exists test; 2 | create table test(a string, b int, c boolean, d variant); 3 | insert into test values('a', 1, true, '[1,2]'); 4 | insert into test values('b', 2, false, '{"k":"v"}'); 5 | select * from test order by a desc; 6 | 7 | truncate table test; 8 | insert into test select to_string(number), number, false, number from 
numbers(100000); 9 | select min(a), max(b), max(d), count() from test; 10 | 11 | select '1';select 2; select 1+2; 12 | select TO_TIMESTAMP(-7233803000000+1), TO_TIMESTAMP(-7233803000000), TO_TIMESTAMP(-7233803000000-1); 13 | 14 | select [], {}; 15 | 16 | -- ignore this line 17 | 18 | select /* ignore this block */ 'with comment'; 19 | 20 | select 1; select 2; select ' 21 | a'; select 3; 22 | 23 | select -9999999999999999999999999999999999999999999999999999999999999999999999999999; 24 | 25 | -- issue 409 26 | drop table if exists t; 27 | create table t(id tuple(STRING, array(tuple(array(int), array(STRING NOT NULL))))); 28 | insert into t values(null); 29 | select * from t; 30 | drop table t; 31 | 32 | -- issue 572 33 | CREATE TABLE 🐳🍞(🐳🐳 INTEGER, 🍞🍞 INTEGER); 34 | 35 | -- enable it after we support code string in databend 36 | -- select $$aa$$; 37 | -- select $$ 38 | -- def add(a, b): 39 | -- a + b 40 | -- $$; 41 | 42 | /* ignore this block /* /* 43 | select 'in comment block'; 44 | */ 45 | 46 | select 1.00 + 2.00, 3.00, 0.0000000170141183460469231731687303715884105727000, -0.0000000170141183460469231731687303715884105727000; 47 | 48 | select/*+ SET_VAR(timezone='Asia/Shanghai') */ timezone(); 49 | 50 | drop table if exists test_decimal; 51 | create table test_decimal(a decimal(40, 0), b decimal(20 , 2)); 52 | insert into test_decimal select number, number from numbers(3); 53 | 54 | select * from test_decimal; 55 | 56 | drop table if exists test_nested; 57 | create table test_nested(a array(int), b map(string, string), c tuple(x int, y string null)); 58 | insert into test_nested values([1,2,3], null, (1, 'ab')), (null, {'k1':'v1', 'k2':'v2'}, (2, null)); 59 | select * from test_nested; 60 | select a[1], b['k1'], c:x, c:y from test_nested; 61 | 62 | select {'k1':'v1','k2':'v2'}, [to_binary('ab'), to_binary('xyz')], (parse_json('[1,2]'), to_date('2024-04-10')); 63 | 64 | select 'bye'; 65 | drop table test; 66 | drop table test_decimal; 67 | drop table test_nested; 68 | -------------------------------------------------------------------------------- /cli/tests/01-put.result: -------------------------------------------------------------------------------- 1 | ---- put ---- 2 | /tmp/abc/books.parquet SUCCESS 1882 3 | ---- get ---- 4 | /tmp/edf/books.parquet SUCCESS 1882 5 | ---- copy ---- 6 | abc/books.parquet 4 0 NULL NULL 7 | ---- select ---- 8 | Transaction Processing Jim Gray 1992 9 | Readings in Database Systems Michael Stonebraker 2004 10 | -------------------------------------------------------------------------------- /cli/tests/01-put.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "DROP STAGE IF EXISTS ss_01" | ${BENDSQL} 4 | echo "CREATE STAGE ss_01" | ${BENDSQL} 5 | 6 | cat <= 0; 25 | """ 2>&1 | grep -oE '([0-9]+) rows written' | grep -oE '([0-9]+)'` 26 | 27 | 28 | b=`${BENDSQL} --stats --query=""" 29 | update t3 set user_id = user_id + 1 where inventory_count % 10 < 0; 30 | """ 2>&1 | grep -oE '([0-9]+) rows written' | grep -oE '([0-9]+)'` 31 | 32 | echo "$[a+b]" 33 | 34 | 35 | cat < bool { 22 | code == SESSION_TOKEN_EXPIRED || code == SESSION_TOKEN_NOT_FOUND 23 | } 24 | 25 | #[derive(Deserialize, Debug, Clone)] 26 | pub struct ErrorCode { 27 | pub code: u16, 28 | pub message: String, 29 | pub detail: Option, 30 | } 31 | 32 | /// try to decode to this when status code is not 200. 33 | /// so the error field is expect to exist. 
34 | #[derive(Deserialize, Debug)] 35 | pub struct ResponseWithErrorCode { 36 | pub error: ErrorCode, 37 | } 38 | 39 | impl Display for ErrorCode { 40 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 41 | match &self.detail { 42 | Some(d) if !d.is_empty() => { 43 | write!(f, "[{}]{}\n{}", self.code, self.message, d) 44 | } 45 | _ => write!(f, "[{}]{}", self.code, self.message,), 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /core/src/global_cookie_store.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use cookie::Cookie; 16 | use reqwest::cookie::CookieStore; 17 | use reqwest::header::HeaderValue; 18 | use std::collections::HashMap; 19 | use std::sync::RwLock; 20 | use url::Url; 21 | 22 | pub(crate) struct GlobalCookieStore { 23 | cookies: RwLock<HashMap<String, Cookie<'static>>>, 24 | } 25 | 26 | impl GlobalCookieStore { 27 | pub fn new() -> Self { 28 | GlobalCookieStore { 29 | cookies: RwLock::new(HashMap::new()), 30 | } 31 | } 32 | } 33 | 34 | impl CookieStore for GlobalCookieStore { 35 | fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &HeaderValue>, _url: &Url) { 36 | let iter = cookie_headers 37 | .filter_map(|val| std::str::from_utf8(val.as_bytes()).ok()) 38 | .filter_map(|kv| Cookie::parse(kv).map(|c| c.into_owned()).ok()); 39 | 40 | let mut guard = self.cookies.write().unwrap(); 41 | for cookie in iter { 42 | guard.insert(cookie.name().to_string(), cookie); 43 | } 44 | } 45 | 46 | fn cookies(&self, _url: &Url) -> Option<HeaderValue> { 47 | let guard = self.cookies.read().unwrap(); 48 | let s: String = guard 49 | .values() 50 | .map(|cookie| cookie.name_value()) 51 | .map(|(name, value)| format!("{name}={value}")) 52 | .collect::<Vec<_>>() 53 | .join("; "); 54 | 55 | if s.is_empty() { 56 | return None; 57 | } 58 | 59 | HeaderValue::from_str(&s).ok() 60 | } 61 | } 62 |
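For orientation, a `CookieStore` implementation like this is the kind of type that gets installed on the shared HTTP client through reqwest's `cookie_provider`. A minimal sketch of that wiring, assuming reqwest is built with its `cookies` feature (the actual hookup presumably lives in `client.rs`, which is shown elsewhere in this repo):

```rust
use std::sync::Arc;

// Sketch (crate-internal): share one cookie jar across all requests by
// handing the store above to reqwest's client builder.
fn build_http_client() -> reqwest::Result<reqwest::Client> {
    let store = Arc::new(GlobalCookieStore::new());
    reqwest::ClientBuilder::new()
        .cookie_provider(store) // requires reqwest's `cookies` feature
        .build()
}
```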
-------------------------------------------------------------------------------- /core/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod client; 16 | 17 | mod auth; 18 | mod error; 19 | mod error_code; 20 | mod global_cookie_store; 21 | mod login; 22 | mod pages; 23 | mod presign; 24 | mod request; 25 | mod response; 26 | 27 | mod session; 28 | mod stage; 29 | 30 | pub use auth::SensitiveString; 31 | pub use client::APIClient; 32 | pub use error::Error; 33 | pub use pages::Page; 34 | pub use pages::Pages; 35 | pub use presign::presign_download_from_stage; 36 | pub use presign::presign_upload_to_stage; 37 | pub use presign::PresignedResponse; 38 | pub use response::QueryStats; 39 | pub use response::SchemaField; 40 | pub use stage::StageLocation; 41 | -------------------------------------------------------------------------------- /core/src/login.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::collections::BTreeMap; 16 | 17 | use crate::error_code::ErrorCode; 18 | use crate::session::SessionState; 19 | use serde::{Deserialize, Serialize}; 20 | 21 | #[derive(Serialize, Debug)] 22 | pub struct LoginRequest { 23 | #[serde(skip_serializing_if = "Option::is_none")] 24 | pub database: Option<String>, 25 | #[serde(skip_serializing_if = "Option::is_none")] 26 | pub role: Option<String>, 27 | #[serde(skip_serializing_if = "Option::is_none")] 28 | pub settings: Option<BTreeMap<String, String>>, 29 | } 30 | 31 | impl From<&SessionState> for LoginRequest { 32 | fn from(value: &SessionState) -> Self { 33 | Self { 34 | role: value.role.clone(), 35 | settings: value.settings.clone(), 36 | database: value.database.clone(), 37 | } 38 | } 39 | } 40 | 41 | fn default_session_token_ttl_in_secs() -> u64 { 42 | 3600 43 | } 44 | 45 | #[derive(Deserialize, Debug, Clone)] 46 | pub struct SessionTokenInfo { 47 | pub session_token: String, 48 | #[serde(default = "default_session_token_ttl_in_secs")] 49 | pub session_token_ttl_in_secs: u64, 50 | pub refresh_token: String, 51 | } 52 | 53 | #[derive(Deserialize, Debug, Clone)] 54 | pub struct LoginResponse { 55 | pub version: String, 56 | pub tokens: Option<SessionTokenInfo>, 57 | } 58 | #[derive(Deserialize, Debug)] 59 | #[serde(untagged)] 60 | pub enum LoginResponseResult { 61 | Ok(LoginResponse), 62 | Err { error: ErrorCode }, 63 | } 64 | 65 | #[derive(Serialize, Debug)] 66 | pub struct RefreshSessionTokenRequest { 67 | pub session_token: String, 68 | } 69 | 70 | #[derive(Deserialize, Debug)] 71 | #[serde(untagged)] 72 | pub enum RefreshResponse { 73 | Ok(SessionTokenInfo), 74 | Err { error: ErrorCode }, 75 | } 76 |
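The two `#[serde(untagged)]` enums above lean on serde trying variants in declaration order: a body whose shape matches `LoginResponse` decodes as `Ok`, and a body carrying only an `error` object falls through to `Err`. A small illustrative test of that dispatch (written for this document, not taken from the repository):

```rust
// Sketch: untagged deserialization picks the first variant whose shape
// matches. `version` is required by LoginResponse, so an error body
// (which lacks it) falls through to the Err variant.
#[test]
fn login_response_shapes() {
    let ok_body = r#"{"version":"v1.0.0","tokens":null}"#;
    let parsed: LoginResponseResult = serde_json::from_str(ok_body).unwrap();
    assert!(matches!(parsed, LoginResponseResult::Ok(_)));

    let err_body = r#"{"error":{"code":401,"message":"unauthorized","detail":null}}"#;
    let parsed: LoginResponseResult = serde_json::from_str(err_body).unwrap();
    assert!(matches!(parsed, LoginResponseResult::Err { .. }));
}
```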
-------------------------------------------------------------------------------- /core/src/presign.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::{collections::BTreeMap, path::Path}; 16 | 17 | use log::info; 18 | use reqwest::{Body, Client as HttpClient, StatusCode}; 19 | use tokio::io::AsyncRead; 20 | use tokio::io::AsyncWriteExt; 21 | use tokio_stream::StreamExt; 22 | use tokio_util::io::ReaderStream; 23 | 24 | use crate::error::{Error, Result}; 25 | 26 | pub type Reader = Box<dyn AsyncRead + Send + Sync + Unpin>; 27 | 28 | #[derive(Debug, Clone, Copy)] 29 | pub enum PresignMode { 30 | Auto, 31 | Detect, 32 | On, 33 | Off, 34 | } 35 | 36 | pub struct PresignedResponse { 37 | pub method: String, 38 | pub headers: BTreeMap<String, String>, 39 | pub url: String, 40 | } 41 | 42 | pub async fn presign_upload_to_stage( 43 | presigned: PresignedResponse, 44 | data: Reader, 45 | size: u64, 46 | ) -> Result<()> { 47 | info!("upload to stage with presigned url, size: {}", size); 48 | let client = HttpClient::new(); 49 | let mut builder = client.put(presigned.url); 50 | for (k, v) in presigned.headers { 51 | if k.to_lowercase() == "content-length" { 52 | continue; 53 | } 54 | builder = builder.header(k, v); 55 | } 56 | builder = builder.header("Content-Length", size.to_string()); 57 | let stream = Body::wrap_stream(ReaderStream::new(data)); 58 | let resp = builder.body(stream).send().await?; 59 | let status = resp.status(); 60 | let body = resp.bytes().await?; 61 | match status { 62 | StatusCode::OK => Ok(()), 63 | _ => Err(Error::IO(format!( 64 | "Upload with presigned url failed: {}", 65 | String::from_utf8_lossy(&body) 66 | ))), 67 | } 68 | } 69 | 70 | pub async fn presign_download_from_stage( 71 | presigned: PresignedResponse, 72 | local_path: &Path, 73 | ) -> Result<u64> { 74 | if let Some(p) = local_path.parent() { 75 | tokio::fs::create_dir_all(p).await?; 76 | } 77 | let client = HttpClient::new(); 78 | let mut builder = client.get(presigned.url); 79 | for (k, v) in presigned.headers { 80 | builder = builder.header(k, v); 81 | } 82 | 83 | let resp = builder.send().await?; 84 | let status = resp.status(); 85 | match status { 86 | StatusCode::OK => { 87 | let mut file = tokio::fs::File::create(local_path).await?; 88 | let mut body = resp.bytes_stream(); 89 | while let Some(chunk) = body.next().await { 90 | file.write_all(&chunk?).await?; 91 | } 92 | file.flush().await?; 93 | let metadata = file.metadata().await?; 94 | Ok(metadata.len()) 95 | } 96 | _ => Err(Error::IO(format!( 97 | "Download with presigned url failed: {}", 98 | status 99 | ))), 100 | } 101 | } 102 |
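As a usage sketch of the upload helper above: the caller first obtains a `PresignedResponse` from the server for a stage path, then streams a local file through it. The URL, file path, and empty header map below are placeholder assumptions for illustration; in practice they come from the presign API response:

```rust
use std::collections::BTreeMap;

// Hypothetical caller: upload a local file through an already-obtained
// presigned PUT URL. All literal values are placeholders.
async fn upload_sample() -> Result<()> {
    let presigned = PresignedResponse {
        method: "PUT".to_string(),
        headers: BTreeMap::new(),
        url: "https://example.com/presigned-put".to_string(),
    };
    let file = tokio::fs::File::open("/tmp/sample.csv").await?;
    let size = file.metadata().await?.len();
    let data: Reader = Box::new(tokio::io::BufReader::new(file));
    presign_upload_to_stage(presigned, data, size).await
}
```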
-------------------------------------------------------------------------------- /core/src/request.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::collections::BTreeMap; 16 | 17 | use crate::session::SessionState; 18 | use serde::{Deserialize, Serialize}; 19 | 20 | #[derive(Deserialize, Serialize, Debug, Clone, PartialEq)] 21 | pub struct ServerInfo { 22 | pub id: String, 23 | pub start_time: String, 24 | } 25 | 26 | #[derive(Serialize, Debug)] 27 | pub struct QueryRequest<'a> { 28 | #[serde(skip_serializing_if = "Option::is_none")] 29 | session: Option<SessionState>, 30 | sql: &'a str, 31 | #[serde(skip_serializing_if = "Option::is_none")] 32 | pagination: Option<PaginationConfig>, 33 | #[serde(skip_serializing_if = "Option::is_none")] 34 | stage_attachment: Option<StageAttachmentConfig<'a>>, 35 | } 36 | 37 | #[derive(Serialize, Debug)] 38 | pub struct PaginationConfig { 39 | #[serde(skip_serializing_if = "Option::is_none")] 40 | pub wait_time_secs: Option<u64>, 41 | #[serde(skip_serializing_if = "Option::is_none")] 42 | pub max_rows_in_buffer: Option<u64>, 43 | #[serde(skip_serializing_if = "Option::is_none")] 44 | pub max_rows_per_page: Option<u64>, 45 | } 46 | 47 | #[derive(Serialize, Debug)] 48 | pub struct StageAttachmentConfig<'a> { 49 | pub location: &'a str, 50 | #[serde(skip_serializing_if = "Option::is_none")] 51 | pub file_format_options: Option<BTreeMap<&'a str, &'a str>>, 52 | #[serde(skip_serializing_if = "Option::is_none")] 53 | pub copy_options: Option<BTreeMap<&'a str, &'a str>>, 54 | } 55 | 56 | impl<'r, 't: 'r> QueryRequest<'r> { 57 | pub fn new(sql: &'r str) -> QueryRequest<'r> { 58 | QueryRequest { 59 | session: None, 60 | sql, 61 | pagination: None, 62 | stage_attachment: None, 63 | } 64 | } 65 | 66 | pub fn with_session(mut self, session: Option<SessionState>) -> Self { 67 | self.session = session; 68 | self 69 | } 70 | 71 | pub fn with_pagination(mut self, pagination: Option<PaginationConfig>) -> Self { 72 | self.pagination = pagination; 73 | self 74 | } 75 | 76 | pub fn with_stage_attachment( 77 | mut self, 78 | stage_attachment: Option<StageAttachmentConfig<'t>>, 79 | ) -> Self { 80 | self.stage_attachment = stage_attachment; 81 | self 82 | } 83 | } 84 | 85 | #[cfg(test)] 86 | mod test { 87 | use super::*; 88 | use crate::error::Result; 89 | 90 | #[test] 91 | fn build_request() -> Result<()> { 92 | let req = QueryRequest::new("select 1") 93 | .with_session(Some( 94 | SessionState::default().with_database(Some("default".to_string())), 95 | )) 96 | .with_pagination(Some(PaginationConfig { 97 | wait_time_secs: Some(1), 98 | max_rows_in_buffer: Some(1), 99 | max_rows_per_page: Some(1), 100 | })) 101 | .with_stage_attachment(Some(StageAttachmentConfig { 102 | location: "@~/my_location", 103 | file_format_options: None, 104 | copy_options: None, 105 | })); 106 | assert_eq!( 107 | serde_json::to_string(&req)?, 108 | r#"{"session":{"database":"default"},"sql":"select 1","pagination":{"wait_time_secs":1,"max_rows_in_buffer":1,"max_rows_per_page":1},"stage_attachment":{"location":"@~/my_location"}}"# 109 | ); 110 | Ok(()) 111 | } 112 | } 113 |
"License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use crate::error_code::ErrorCode; 16 | use crate::session::SessionState; 17 | use serde::{Deserialize, Serialize}; 18 | 19 | #[derive(Deserialize, Debug, Default)] 20 | pub struct QueryStats { 21 | #[serde(flatten)] 22 | pub progresses: Progresses, 23 | pub running_time_ms: f64, 24 | } 25 | 26 | #[derive(Deserialize, Debug, Default)] 27 | pub struct Progresses { 28 | pub scan_progress: ProgressValues, 29 | pub write_progress: ProgressValues, 30 | pub result_progress: ProgressValues, 31 | // make it optional for backward compatibility 32 | pub total_scan: Option, 33 | #[serde(default)] 34 | pub spill_progress: SpillProgress, 35 | } 36 | 37 | impl Progresses { 38 | pub fn has_progress(&self) -> bool { 39 | self.scan_progress.bytes > 0 40 | || self.scan_progress.rows > 0 41 | || self.write_progress.bytes > 0 42 | || self.write_progress.rows > 0 43 | || self.result_progress.bytes > 0 44 | || self.result_progress.rows > 0 45 | || self 46 | .total_scan 47 | .as_ref() 48 | .is_some_and(|v| v.bytes > 0 || v.rows > 0) 49 | } 50 | } 51 | 52 | #[derive(Debug, Deserialize, Default)] 53 | pub struct ProgressValues { 54 | pub rows: usize, 55 | pub bytes: usize, 56 | } 57 | 58 | #[derive(Debug, Clone, Deserialize, Serialize, Default)] 59 | pub struct SpillProgress { 60 | pub file_nums: usize, 61 | pub bytes: usize, 62 | } 63 | 64 | #[derive(Serialize, Deserialize, Debug, Clone)] 65 | pub struct SchemaField { 66 | pub name: String, 67 | #[serde(rename = "type")] 68 | pub data_type: String, 69 | } 70 | 71 | #[derive(Deserialize, Debug)] 72 | pub struct QueryResponse { 73 | pub id: String, 74 | pub node_id: Option, 75 | pub session_id: Option, 76 | pub session: Option, 77 | pub schema: Vec, 78 | pub data: Vec>>, 79 | pub state: String, 80 | pub error: Option, 81 | // make it optional for backward compatibility 82 | pub warnings: Option>, 83 | pub stats: QueryStats, 84 | // pub affect: Option, 85 | pub stats_uri: Option, 86 | pub final_uri: Option, 87 | pub next_uri: Option, 88 | pub kill_uri: Option, 89 | } 90 | 91 | #[cfg(test)] 92 | mod test { 93 | use std::collections::BTreeMap; 94 | 95 | use super::*; 96 | 97 | #[test] 98 | fn deserialize_session_config() { 99 | let session_json = r#"{"database":"default","settings":{}}"#; 100 | let session_config: SessionState = serde_json::from_str(session_json).unwrap(); 101 | assert_eq!(session_config.database, Some("default".to_string())); 102 | assert_eq!(session_config.settings, Some(BTreeMap::default())); 103 | assert_eq!(session_config.role, None); 104 | assert_eq!(session_config.secondary_roles, None); 105 | 106 | let session_json = r#"{"database":"default","settings":{},"role": "role1", "secondary_roles": [], "unknown_field": 1}"#; 107 | let session_config: SessionState = serde_json::from_str(session_json).unwrap(); 108 | assert_eq!(session_config.database, Some("default".to_string())); 109 | assert_eq!(session_config.settings, Some(BTreeMap::default())); 110 | assert_eq!(session_config.role, Some("role1".to_string())); 
111 | assert_eq!(session_config.secondary_roles, Some(vec![])); 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /core/src/session.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use serde::{Deserialize, Serialize}; 16 | use std::collections::{BTreeMap, HashMap}; 17 | 18 | #[derive(Deserialize, Serialize, Debug, Default, Clone)] 19 | pub struct SessionState { 20 | #[serde(skip_serializing_if = "Option::is_none")] 21 | pub catalog: Option<String>, 22 | #[serde(skip_serializing_if = "Option::is_none")] 23 | pub database: Option<String>, 24 | #[serde(skip_serializing_if = "Option::is_none")] 25 | pub settings: Option<BTreeMap<String, String>>, 26 | #[serde(skip_serializing_if = "Option::is_none")] 27 | pub role: Option<String>, 28 | #[serde(skip_serializing_if = "Option::is_none")] 29 | pub secondary_roles: Option<Vec<String>>, 30 | #[serde(skip_serializing_if = "Option::is_none")] 31 | pub txn_state: Option<String>, 32 | #[serde(skip_serializing_if = "Option::is_none")] 33 | pub need_sticky: Option<bool>, 34 | #[serde(skip_serializing_if = "Option::is_none")] 35 | pub need_keep_alive: Option<bool>, 36 | 37 | // hide fields of no interest (but need to send back to server in next query) 38 | #[serde(flatten)] 39 | additional_fields: HashMap<String, serde_json::Value>, 40 | } 41 | 42 | impl SessionState { 43 | pub fn with_settings(mut self, settings: Option<BTreeMap<String, String>>) -> Self { 44 | self.settings = settings; 45 | self 46 | } 47 | 48 | pub fn with_database(mut self, database: Option<String>) -> Self { 49 | self.database = database; 50 | self 51 | } 52 | 53 | pub fn with_role(mut self, role: Option<String>) -> Self { 54 | self.role = role; 55 | self 56 | } 57 | } 58 |
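The flattened `additional_fields` map is what makes this struct forward compatible: session fields this client version does not know about survive a deserialize/serialize round trip and are sent back to the server unchanged. A small illustrative test of that behavior (written for this document, not taken from the repository; the field name is made up):

```rust
// Sketch: unknown keys land in the flattened map and reappear on
// serialization, so the server's session state is echoed back intact.
#[test]
fn session_state_keeps_unknown_fields() {
    let raw = r#"{"database":"default","some_future_field":42}"#;
    let state: SessionState = serde_json::from_str(raw).unwrap();
    let out = serde_json::to_value(&state).unwrap();
    assert_eq!(out["database"], "default");
    assert_eq!(out["some_future_field"], 42);
}
```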
-------------------------------------------------------------------------------- /core/src/stage.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use crate::error::{Error, Result}; 16 | 17 | pub struct StageLocation { 18 | pub name: String, 19 | pub path: String, 20 | } 21 | 22 | impl std::fmt::Display for StageLocation { 23 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 24 | write!(f, "@{}/{}", self.name, self.path) 25 | } 26 | } 27 | 28 | impl TryFrom<&str> for StageLocation { 29 | type Error = Error; 30 | fn try_from(s: &str) -> Result<Self> { 31 | if !s.starts_with('@') { 32 | return Err(Error::BadArgument(format!("Invalid stage location: {}", s))); 33 | } 34 | let mut parts = s.splitn(2, '/'); 35 | let name = parts 36 | .next() 37 | .ok_or_else(|| Error::BadArgument(format!("Invalid stage location: {}", s)))? 38 | .trim_start_matches('@'); 39 | let path = parts.next().unwrap_or_default(); 40 | Ok(Self { 41 | name: name.to_string(), 42 | path: path.to_string(), 43 | }) 44 | } 45 | } 46 | 47 | impl StageLocation { 48 | pub fn file_path(&self, file_name: &str) -> String { 49 | if self.path.ends_with('/') { 50 | format!("{}{}", self, file_name) 51 | } else { 52 | format!("{}/{}", self, file_name) 53 | } 54 | } 55 | } 56 | 57 | #[cfg(test)] 58 | mod test { 59 | use super::*; 60 | 61 | #[test] 62 | fn parse_stage() -> Result<()> { 63 | let location = "@stage_name/path/to/file"; 64 | let stage = StageLocation::try_from(location)?; 65 | assert_eq!(stage.name, "stage_name"); 66 | assert_eq!(stage.path, "path/to/file"); 67 | Ok(()) 68 | } 69 | 70 | #[test] 71 | fn parse_stage_empty_path() -> Result<()> { 72 | let location = "@stage_name"; 73 | let stage = StageLocation::try_from(location)?; 74 | assert_eq!(stage.name, "stage_name"); 75 | assert_eq!(stage.path, ""); 76 | Ok(()) 77 | } 78 | 79 | #[test] 80 | fn parse_stage_fail() -> Result<()> { 81 | let location = "stage_name/path/to/file"; 82 | let stage = StageLocation::try_from(location); 83 | assert!(stage.is_err()); 84 | Ok(()) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /core/tests/core/common/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | pub static DEFAULT_DSN: &str = "databend://root:@localhost:8000/default?sslmode=disable"; 16 | -------------------------------------------------------------------------------- /core/tests/core/data/sample.csv: -------------------------------------------------------------------------------- 1 | 1,'Beijing',100 2 | 2,'Shanghai',80 3 | 3,'Guangzhou',60 4 | 4,'Shenzhen',70 5 | 5,'Shenzhen',55 6 | 6,'Beijing',99 7 | -------------------------------------------------------------------------------- /core/tests/core/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod common; 16 | 17 | mod simple; 18 | mod stage; 19 | -------------------------------------------------------------------------------- /core/tests/core/simple.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use databend_client::APIClient; 16 | use tokio_stream::StreamExt; 17 | 18 | use crate::common::DEFAULT_DSN; 19 | 20 | #[tokio::test] 21 | async fn select_simple() { 22 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 23 | let client = APIClient::new(dsn, None).await.unwrap(); 24 | let mut pages = client.start_query("select 15532", true).await.unwrap(); 25 | let page = pages.next().await.unwrap().unwrap(); 26 | assert_eq!(page.data, [[Some("15532".to_string())]]); 27 | } 28 | -------------------------------------------------------------------------------- /core/tests/core/stage.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use tokio::fs::File; 16 | use tokio::io::BufReader; 17 | 18 | use databend_client::APIClient; 19 | 20 | use crate::common::DEFAULT_DSN; 21 | 22 | async fn insert_with_stage(presign: bool) { 23 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 24 | let client = if presign { 25 | APIClient::new(&format!("{}&presign=on", dsn), None) 26 | .await 27 | .unwrap() 28 | } else { 29 | APIClient::new(&format!("{}&presign=off", dsn), None) 30 | .await 31 | .unwrap() 32 | }; 33 | 34 | let file = File::open("tests/core/data/sample.csv").await.unwrap(); 35 | let metadata = file.metadata().await.unwrap(); 36 | let data = BufReader::new(file); 37 | 38 | let path = chrono::Utc::now().format("%Y%m%d%H%M%S%9f").to_string(); 39 | let stage_location = format!("@~/{}/sample.csv", path); 40 | let table = if presign { 41 | format!("sample_insert_presigned_{}", path) 42 | } else { 43 | format!("sample_insert_stream_{}", path) 44 | }; 45 | 46 | client 47 | .upload_to_stage(&stage_location, Box::new(data), metadata.len()) 48 | .await 49 | .unwrap(); 50 | let sql = format!( 51 | "CREATE TABLE `{}` (id UInt64, city String, number UInt64)", 52 | table 53 | ); 54 | client.query_all(&sql).await.unwrap(); 55 | 56 | let sql = format!("INSERT INTO `{}` VALUES", table); 57 | let file_format_options = vec![ 58 | ("type", "CSV"), 59 | ("field_delimiter", ","), 60 | ("record_delimiter", "\n"), 61 | ("skip_header", "0"), 62 | ("quote", "'"), 63 | ] 64 | .into_iter() 65 | .collect(); 66 | let copy_options = vec![("purge", "true")].into_iter().collect(); 67 | 68 | client 69 | .insert_with_stage(&sql, &stage_location, file_format_options, copy_options) 70 | .await 71 | .unwrap(); 72 | 73 | let sql = format!("SELECT * FROM `{}`", table); 74 | let resp = client.query_all(&sql).await.unwrap(); 75 | assert_eq!(resp.data.len(), 6); 76 | let expect = [ 77 | ["1", "Beijing", "100"], 78 | ["2", "Shanghai", "80"], 79 | ["3", "Guangzhou", "60"], 80 | ["4", "Shenzhen", "70"], 81 | ["5", "Shenzhen", "55"], 82 | ["6", "Beijing", "99"], 83 | ]; 84 | let result = resp 85 | .data 86 | .into_iter() 87 | .map(|row| { 88 | row.into_iter() 89 | .map(|v| v.unwrap_or_default()) 90 | .collect::<Vec<String>>() 91 | }) 92 | .collect::<Vec<_>>(); 93 | assert_eq!(result, expect); 94 | 95 | let sql = format!("DROP TABLE `{}`;", table); 96 | client.query_all(&sql).await.unwrap(); 97 | } 98 | 99 | #[tokio::test] 100 | async fn insert_with_stage_presigned() { 101 | insert_with_stage(true).await; 102 | } 103 | 104 | #[tokio::test] 105 | async fn insert_with_stage_stream() { 106 | insert_with_stage(false).await; 107 | } 108 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | version = 2 3 | db-path = "~/.cargo/advisory-db" 4 | db-urls = ["https://github.com/rustsec/advisory-db"] 5 | ignore = [ 6 | "RUSTSEC-2024-0370", 7 | "RUSTSEC-2024-0384", 8 | "RUSTSEC-2024-0436" 9 | ] 10 | 11 | [licenses] 12 | version = 2 13 | allow = [ 14 | "Apache-2.0 WITH LLVM-exception", 15 | "Apache-2.0", 16 | "BSD-3-Clause", 17 | "BSL-1.0", 18 | "CC0-1.0", 19 | "CDLA-Permissive-2.0", 20 | "ISC", 21 | "MIT", 22 | "MPL-2.0", 23 | "Unicode-3.0", 24 | "Zlib" 25 | ] 26 | 27 | [[licenses.clarify]] 28 | name = "ring" 29 | expression = "MIT AND ISC AND OpenSSL" 30 | license-files = [ 31 | { path = "LICENSE", hash = 0xbd0eed23 } 32 | ] 33 | 34 | [bans] 35 | multiple-versions = "allow" 36 | wildcards = "warn" 37 | highlight = "all" 38 | 39 |
[sources] 40 | unknown-git = "deny" 41 | allow-git = [] 42 | -------------------------------------------------------------------------------- /driver/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-driver" 3 | description = "Databend Driver for Rust" 4 | categories = ["database"] 5 | keywords = ["databend", "database", "sdk"] 6 | 7 | version = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | authors = { workspace = true } 11 | repository = { workspace = true } 12 | 13 | [features] 14 | default = ["rustls"] 15 | 16 | # Enable rustls for TLS support 17 | rustls = ["databend-client/rustls"] 18 | # Enable native-tls for TLS support 19 | native-tls = ["databend-client/native-tls"] 20 | 21 | flight-sql = [ 22 | "dep:arrow-flight", 23 | "dep:arrow-schema", 24 | "dep:tonic", 25 | "databend-driver-core/flight-sql", 26 | ] 27 | 28 | [dependencies] 29 | arrow = { workspace = true } 30 | arrow-flight = { workspace = true, optional = true } 31 | arrow-schema = { workspace = true, optional = true } 32 | chrono = { workspace = true } 33 | databend-client = { workspace = true } 34 | databend-driver-core = { workspace = true } 35 | databend-driver-macros = { workspace = true } 36 | tokio-stream = { workspace = true } 37 | tonic = { workspace = true, optional = true } 38 | 39 | async-trait = "0.1" 40 | csv = "1.3" 41 | databend-common-ast = "0.2.1" 42 | derive-visitor = { version = "0.4.0", features = ["std-types-drive"] } 43 | glob = "0.3" 44 | log = "0.4" 45 | once_cell = "1.21" 46 | percent-encoding = "2.3" 47 | serde_json = { version = "1.0", default-features = false, features = ["std"] } 48 | tokio = { version = "1.34", features = ["macros"] } 49 | url = { version = "2.5", default-features = false } 50 | 51 | [dev-dependencies] 52 | -------------------------------------------------------------------------------- /driver/README.md: -------------------------------------------------------------------------------- 1 | # Databend Driver 2 | 3 | Databend unified SQL client for RestAPI and FlightSQL 4 | 5 | [![crates.io](https://img.shields.io/crates/v/databend-driver.svg)](https://crates.io/crates/databend-driver) 6 | ![License](https://img.shields.io/crates/l/databend-driver.svg) 7 | 8 | ## usage 9 | 10 | ### exec 11 | 12 | ```rust 13 | use databend_driver::Client; 14 | 15 | let dsn = "databend://root:@localhost:8000/default?sslmode=disable".to_string(); 16 | let client = Client::new(dsn); 17 | let conn = client.get_conn().await.unwrap(); 18 | 19 | let sql_create = "CREATE TABLE books ( 20 | title VARCHAR, 21 | author VARCHAR, 22 | date Date 23 | );"; 24 | conn.exec(sql_create).await.unwrap(); 25 | let sql_insert = "INSERT INTO books VALUES ('The Little Prince', 'Antoine de Saint-Exupéry', '1943-04-06');"; 26 | conn.exec(sql_insert).await.unwrap(); 27 | ``` 28 | 29 | ### query row 30 | 31 | ```rust 32 | let row = conn.query_row("SELECT * FROM books;", ()).await.unwrap(); 33 | let (title,author,date): (String,String,chrono::NaiveDate) = row.unwrap().try_into().unwrap(); 34 | println!("{} {} {}", title, author, date); 35 | ``` 36 | 37 | ### query iter 38 | 39 | ```rust 40 | let mut rows = conn.query_iter("SELECT * FROM books;").await.unwrap(); 41 | while let Some(row) = rows.next().await { 42 | let (title,author,date): (String,String,chrono::NaiveDate) = row.unwrap().try_into().unwrap(); 43 | println!("{} {} {}", title, author, date); 44 | } 45 | ``` 46 | 47 | ### Parameter bindings 48 | 49 |
```rust 50 | let row = conn 51 | .query_row("SELECT $1, $2, $3, $4", (3, false, 4, "55")) 52 | .await 53 | .unwrap(); 54 | 55 | let params = params! {a => 3, b => false, c => 4, d => "55"}; 56 | let row = conn 57 | .query_row("SELECT :a, :b, :c, :d", params) 58 | .await 59 | .unwrap(); 60 | 61 | let row = conn 62 | .query_row("SELECT ?, ?, ?, ?", (3, false, 4, "55")) 63 | .await 64 | .unwrap(); 65 | ``` 66 | 67 | ## Type Mapping 68 | 69 | See [Databend Types](https://docs.databend.com/sql/sql-reference/data-types/) for the full list of server-side types. 70 | 71 | ### General Data Types 72 | 73 | | Databend | Rust | 74 | | ----------- | ----------------------- | 75 | | `BOOLEAN` | `bool` | 76 | | `TINYINT` | `i8`,`u8` | 77 | | `SMALLINT` | `i16`,`u16` | 78 | | `INT` | `i32`,`u32` | 79 | | `BIGINT` | `i64`,`u64` | 80 | | `FLOAT` | `f32` | 81 | | `DOUBLE` | `f64` | 82 | | `DECIMAL` | `String` | 83 | | `DATE` | `chrono::NaiveDate` | 84 | | `TIMESTAMP` | `chrono::NaiveDateTime` | 85 | | `VARCHAR` | `String` | 86 | | `BINARY` | `Vec<u8>` | 87 | 88 | ### Semi-Structured Data Types 89 | 90 | | Databend | Rust | 91 | | ------------- | --------------- | 92 | | `ARRAY[T]` | `Vec<T>` | 93 | | `TUPLE[T, U]` | `(T, U)` | 94 | | `MAP[K, V]` | `HashMap<K, V>` | 95 | | `VARIANT` | `String` | 96 | | `BITMAP` | `String` | 97 | | `GEOMETRY` | `String` | 98 | | `GEOGRAPHY` | `String` | 99 | 100 | Note: `VARIANT` values are returned as JSON-encoded strings. Example: 101 | 102 | ```sql 103 | CREATE TABLE example ( 104 | data VARIANT 105 | ); 106 | INSERT INTO example VALUES ('{"a": 1, "b": "hello"}'); 107 | ``` 108 | 109 | ```rust 110 | let row = conn.query_row("SELECT * FROM example LIMIT 1;", ()).await.unwrap(); 111 | let (data,): (String,) = row.unwrap().try_into().unwrap(); 112 | let value: serde_json::Value = serde_json::from_str(&data).unwrap(); 113 | println!("{:?}", value); 114 | ``` 115 | -------------------------------------------------------------------------------- /driver/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License.
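// Crate layout: `client` exposes the public `Client`/`Connection` entry points,
// while `rest_api` and the feature-gated `flight_sql` modules implement the two
// transports. A hedged sketch of the re-exported `TryFromRow` derive follows;
// the struct and field names are illustrative, not part of this crate:
//
//     #[derive(databend_driver::TryFromRow)]
//     struct Book {
//         title: String,
//         author: String,
//         date: chrono::NaiveDate,
//     }
//     // let book = Book::try_from(row)?; // column count and types are checked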
14 | 15 | mod client; 16 | pub mod conn; 17 | #[cfg(feature = "flight-sql")] 18 | mod flight_sql; 19 | mod params; 20 | mod placeholder; 21 | pub mod rest_api; 22 | 23 | pub use client::Client; 24 | pub use client::Connection; 25 | pub use conn::ConnectionInfo; 26 | pub use params::Param; 27 | pub use params::Params; 28 | 29 | // pub use for convenience 30 | pub use databend_driver_core::error::{Error, Result}; 31 | pub use databend_driver_core::rows::{ 32 | Row, RowIterator, RowStatsIterator, RowWithStats, ServerStats, 33 | }; 34 | pub use databend_driver_core::schema::{ 35 | DataType, DecimalSize, Field, NumberDataType, Schema, SchemaRef, 36 | }; 37 | pub use databend_driver_core::value::Interval; 38 | pub use databend_driver_core::value::{NumberValue, Value}; 39 | 40 | pub use databend_driver_macros::TryFromRow; 41 | 42 | #[doc(hidden)] 43 | pub mod _macro_internal { 44 | pub use databend_driver_core::_macro_internal::*; 45 | } 46 | -------------------------------------------------------------------------------- /driver/tests/driver/common/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | pub static DEFAULT_DSN: &str = "databend://root:@localhost:8000/default?sslmode=disable"; 16 | -------------------------------------------------------------------------------- /driver/tests/driver/connection.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
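// Checks that the connection remains usable after being wrapped in an `Arc` and cloned, i.e. one connection can be shared between handles.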
14 | 15 | use std::sync::Arc; 16 | 17 | use databend_driver::Client; 18 | 19 | use crate::common::DEFAULT_DSN; 20 | 21 | #[tokio::test] 22 | async fn trait_with_clone() { 23 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 24 | let client = Client::new(dsn.to_string()); 25 | let conn = client.get_conn().await.unwrap(); 26 | let conn = Arc::new(conn); 27 | 28 | let row = conn.query_row("select 'hello'", ()).await.unwrap(); 29 | assert!(row.is_some()); 30 | let row = row.unwrap(); 31 | let (val,): (String,) = row.try_into().unwrap(); 32 | assert_eq!(val, "hello"); 33 | 34 | let conn2 = conn.clone(); 35 | let row = conn2.query_row("select 'world'", ()).await.unwrap(); 36 | assert!(row.is_some()); 37 | let row = row.unwrap(); 38 | let (val,): (String,) = row.try_into().unwrap(); 39 | assert_eq!(val, "world"); 40 | } 41 | -------------------------------------------------------------------------------- /driver/tests/driver/data/books.csv: -------------------------------------------------------------------------------- 1 | Transaction Processing,Jim Gray,1992,2020-01-01 11:11:11.345 2 | Readings in Database Systems,Michael Stonebraker,2004,2020-01-01T11:11:11Z 3 | Three Body,NULL-liucixin,2019,2019-07-04T00:00:00 4 | -------------------------------------------------------------------------------- /driver/tests/driver/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod common; 16 | 17 | mod connection; 18 | mod load; 19 | mod select_iter; 20 | mod select_simple; 21 | mod session; 22 | mod temp_table; 23 | -------------------------------------------------------------------------------- /driver/tests/driver/session.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
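// Session-state tests: the timezone can be changed with `SET timezone` or preset through a DSN variable, and a `session_token` connection keeps working after `ALTER USER ... IDENTIFIED BY` changes the password.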
14 | 15 | use databend_driver::Client; 16 | 17 | use crate::common::DEFAULT_DSN; 18 | 19 | #[tokio::test] 20 | async fn set_timezone() { 21 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 22 | let client = Client::new(dsn.to_string()); 23 | let conn = client.get_conn().await.unwrap(); 24 | 25 | let row = conn.query_row("select timezone()", ()).await.unwrap(); 26 | assert!(row.is_some()); 27 | let row = row.unwrap(); 28 | let (val,): (String,) = row.try_into().unwrap(); 29 | assert_eq!(val, "UTC"); 30 | 31 | conn.exec("set timezone='Europe/London'", ()).await.unwrap(); 32 | let row = conn.query_row("select timezone()", ()).await.unwrap(); 33 | assert!(row.is_some()); 34 | let row = row.unwrap(); 35 | let (val,): (String,) = row.try_into().unwrap(); 36 | assert_eq!(val, "Europe/London"); 37 | } 38 | 39 | #[tokio::test] 40 | async fn set_timezone_with_dsn() { 41 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 42 | if dsn.starts_with("databend+flight://") { 43 | // skip dsn variable test for flight 44 | return; 45 | } 46 | let client = Client::new(format!("{}&timezone=Europe/London", dsn)); 47 | let conn = client.get_conn().await.unwrap(); 48 | 49 | let row = conn.query_row("select timezone()", ()).await.unwrap(); 50 | assert!(row.is_some()); 51 | let row = row.unwrap(); 52 | let (val,): (String,) = row.try_into().unwrap(); 53 | assert_eq!(val, "Europe/London"); 54 | } 55 | 56 | #[tokio::test] 57 | async fn change_password() { 58 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 59 | if dsn.starts_with("databend+flight://") { 60 | return; 61 | } 62 | let client = Client::new(dsn.to_string()); 63 | let conn = client.get_conn().await.unwrap(); 64 | let n = conn.exec("drop user if exists u1 ", ()).await.unwrap(); 65 | assert_eq!(n, 0); 66 | let n = conn 67 | .exec("create user u1 identified by 'p1' ", ()) 68 | .await 69 | .unwrap(); 70 | assert_eq!(n, 0); 71 | 72 | let dsn = "databend://u1:p1@localhost:8000/default?sslmode=disable&session_token=enable"; 73 | let client = Client::new(dsn.to_string()); 74 | let conn = client.get_conn().await.unwrap(); 75 | 76 | let n = conn 77 | .exec("alter user u1 identified by 'p2' ", ()) 78 | .await 79 | .unwrap(); 80 | assert_eq!(n, 0); 81 | 82 | let row = conn.query_row("select 1", ()).await.unwrap(); 83 | assert!(row.is_some()); 84 | let row = row.unwrap(); 85 | let (val,): (i64,) = row.try_into().unwrap(); 86 | assert_eq!(val, 1); 87 | } 88 | -------------------------------------------------------------------------------- /driver/tests/driver/temp_table.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
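// Temporary tables are scoped to a single session, so each case creates, fills, and queries `t1` over one connection, with session tokens both enabled and disabled.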
14 | 15 | use databend_driver::Client; 16 | use std::time::Duration; 17 | use tokio::time::sleep; 18 | 19 | use crate::common::DEFAULT_DSN; 20 | 21 | async fn test_temp_table(session_token_enabled: bool) { 22 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 23 | if dsn.starts_with("databend+flight://") { 24 | return; 25 | } 26 | 27 | let session_token = if session_token_enabled { 28 | "enable" 29 | } else { 30 | "disable" 31 | }; 32 | let dsn = option_env!("TEST_DATABEND_DSN").unwrap_or(DEFAULT_DSN); 33 | let dsn = format!("{}&session_token={}", dsn, session_token); 34 | let client = Client::new(dsn.to_string()); 35 | let conn = client.get_conn().await.unwrap(); 36 | 37 | let row = conn.query_row("select version()", ()).await.unwrap(); 38 | assert!(row.is_some()); 39 | let row = row.unwrap(); 40 | let (val,): (String,) = row.try_into().unwrap(); 41 | println!("version = {}", val); 42 | 43 | let _ = conn.exec("create temp table t1 (a int)", ()).await.unwrap(); 44 | let n = conn 45 | .exec("insert into t1 values (1),(2)", ()) 46 | .await 47 | .unwrap(); 48 | assert_eq!(n, 2); 49 | 50 | let row = conn.query_row("select count(*) from t1", ()).await.unwrap(); 51 | assert!(row.is_some()); 52 | let row = row.unwrap(); 53 | let (val,): (i64,) = row.try_into().unwrap(); 54 | assert_eq!(val, 2); 55 | drop(conn); 56 | sleep(Duration::from_millis(100)).await; 57 | } 58 | #[tokio::test] 59 | async fn test_temp_table_session_token() { 60 | test_temp_table(true).await; 61 | } 62 | 63 | #[tokio::test] 64 | async fn test_temp_table_password() { 65 | test_temp_table(false).await; 66 | } 67 | -------------------------------------------------------------------------------- /frontend/.env: -------------------------------------------------------------------------------- 1 | GENERATE_SOURCEMAP=false -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | 25 | src/test -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started with Create React App 2 | 3 | This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). 4 | 5 | ## Available Scripts 6 | 7 | In the project directory, you can run: 8 | 9 | ### `pnpm start` 10 | 11 | Runs the app in the development mode.\ 12 | Open [http://localhost:3000](http://localhost:3000) to view it in the browser. 13 | 14 | The page will reload if you make edits.\ 15 | You will also see any lint errors in the console. 16 | 17 | ### `pnpm test` 18 | 19 | Launches the test runner in the interactive watch mode.\ 20 | See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. 21 | 22 | ### `pnpm run build` 23 | 24 | Builds the app for production to the `build` folder.\ 25 | It correctly bundles React in production mode and optimizes the build for the best performance. 
26 | 27 | The build is minified and the filenames include the hashes.\ 28 | Your app is ready to be deployed! 29 | 30 | See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. 31 | 32 | ### `pnpm run eject` 33 | 34 | **Note: this is a one-way operation. Once you `eject`, you can’t go back!** 35 | 36 | If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. 37 | 38 | Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. 39 | 40 | You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. 41 | 42 | ## Learn More 43 | 44 | You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). 45 | 46 | To learn React, check out the [React documentation](https://reactjs.org/). 47 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@ant-design/charts": "v1", 7 | "antd": "^4.6.3", 8 | "lodash-es": "^4.17.21", 9 | "pretty-ms": "^9.1.0", 10 | "react": "^16.8.4", 11 | "react-dom": "^16.8.4", 12 | "react-scripts": "5.0.1", 13 | "typescript": "^4.9.5", 14 | "web-vitals": "^2.1.4" 15 | }, 16 | "scripts": { 17 | "start": "react-scripts start", 18 | "build": "cross-env CI=false react-scripts build", 19 | "test": "react-scripts test", 20 | "eject": "react-scripts eject" 21 | }, 22 | "eslintConfig": { 23 | "extends": [ 24 | "react-app", 25 | "react-app/jest" 26 | ] 27 | }, 28 | "browserslist": { 29 | "production": [ 30 | ">0.2%", 31 | "not dead", 32 | "not op_mini all" 33 | ], 34 | "development": [ 35 | "last 1 chrome version", 36 | "last 1 firefox version", 37 | "last 1 safari version" 38 | ] 39 | }, 40 | "devDependencies": { 41 | "@babel/plugin-transform-private-property-in-object": "^7.25.9", 42 | "@testing-library/jest-dom": "^5.17.0", 43 | "@testing-library/react": "^10.4.9", 44 | "@testing-library/user-event": "^13.5.0", 45 | "@types/jest": "^27.5.2", 46 | "@types/lodash-es": "^4.17.12", 47 | "@types/node": "^16.18.101", 48 | "@types/react": "^16.8.4", 49 | "@types/react-dom": "^16.8.4", 50 | "autoprefixer": "^10.4.20", 51 | "cross-env": "^7.0.3", 52 | "postcss": "^8.4.47", 53 | "prettier": "^3.4.1", 54 | "tailwindcss": "^3.4.13" 55 | }, 56 | "packageManager": "pnpm@9.14.2+sha512.6e2baf77d06b9362294152c851c4f278ede37ab1eba3a55fda317a4a17b209f4dbb973fb250a77abc463a341fcb1f17f17cfa24091c4eb319cda0d9b84278387" 57 | } 58 | -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | 
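// PostCSS plugin order: Tailwind generates utilities first (see tailwind.config.js for the content globs), then Autoprefixer adds vendor prefixes.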
-------------------------------------------------------------------------------- /frontend/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Databend 9 | 10 | 11 | 12 |
13 | 14 | 15 | -------------------------------------------------------------------------------- /frontend/public/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /frontend/src/components/Attributes.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { AttributeItem } from '../types/ProfileGraphDashboard'; 3 | 4 | interface AttributesProps { 5 | attributesData: AttributeItem[]; 6 | } 7 | 8 | const Attributes: React.FC = ({ attributesData }) => { 9 | return ( 10 | <> 11 | {attributesData?.length > 0 && ( 12 |
13 |
14 |

Attributes

15 |
16 | {attributesData?.map((item, index) => ( 17 |
18 |
{item.name}
19 |
20 | {item?.value?.map((value, idx) => ( 21 |
22 | {value} 23 |
24 | ))} 25 |
26 |
27 | ))} 28 |
29 | )} 30 | 31 | ); 32 | }; 33 | 34 | export default Attributes; 35 | -------------------------------------------------------------------------------- /frontend/src/components/FlowAnalysisGraph.tsx: -------------------------------------------------------------------------------- 1 | import { Edge, FlowAnalysisGraph, FlowAnalysisGraphConfig, IGraph } from "@ant-design/charts"; 2 | import { debounce, isEqual } from "lodash-es"; 3 | import { memo, useEffect, useMemo, useState } from "react"; 4 | 5 | import { IGraphSize, IOverview, Profile } from "../types/ProfileGraphDashboard"; 6 | 7 | import { mapEdgesLineWidth, getEdges, getDealData } from "../utills"; 8 | import { useFlowAnalysisGraphConfig } from "../hooks/useFlowAnalysisGraphConfig"; 9 | 10 | export interface EdgeWithLineWidth extends Edge { 11 | lineWidth: number; 12 | _value: number; 13 | } 14 | 15 | const CacheFlowAnalysisGraph = ({ 16 | plainData, 17 | graphSize, 18 | graphRef, 19 | overviewInfoCurrent, 20 | onReady, 21 | }: { 22 | plainData: Profile[]; 23 | graphSize: IGraphSize; 24 | graphRef: React.RefObject; 25 | overviewInfoCurrent: React.RefObject; 26 | onReady: (graph: IGraph) => void; 27 | }) => { 28 | const [renderKey, setRenderKey] = useState(0); 29 | 30 | const handleResetView = () => { 31 | const graph = graphRef?.current; 32 | if (graph) { 33 | graph.fitView(); 34 | graph.refresh(); 35 | } 36 | }; 37 | 38 | const edgesWithLineWidth = mapEdgesLineWidth(getEdges(plainData) as EdgeWithLineWidth[]); 39 | const data = useMemo(() => { 40 | return { 41 | nodes: getDealData(plainData), 42 | edges: edgesWithLineWidth, 43 | }; 44 | }, [plainData, edgesWithLineWidth]); 45 | 46 | const config: FlowAnalysisGraphConfig = useFlowAnalysisGraphConfig({ 47 | graphSize, 48 | onReady, 49 | data, 50 | graphRef, 51 | overviewInfoCurrent, 52 | handleResetView, 53 | edgesWithLineWidth, 54 | }); 55 | 56 | // Debounce the rendering to reduce the number of renders 57 | const debouncedRender = debounce(() => { 58 | setRenderKey(prevKey => prevKey + 1); 59 | }, 300); 60 | 61 | useEffect(() => { 62 | debouncedRender(); 63 | return () => { 64 | debouncedRender.cancel(); 65 | }; 66 | }, [plainData, graphSize]); 67 | 68 | return ; 69 | }; 70 | 71 | export default memo(CacheFlowAnalysisGraph, (pre, next) => { 72 | const isPlainDataEqual = isEqual(pre.plainData, next.plainData); 73 | const isGraphSizeEqual = pre.graphSize === next.graphSize; 74 | const isGraphSizeHeightEqual = pre.graphSize.height === next.graphSize.height; 75 | return isPlainDataEqual && isGraphSizeEqual && isGraphSizeHeightEqual; 76 | }); 77 | 78 | 79 | -------------------------------------------------------------------------------- /frontend/src/components/MostExpensiveNodes.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Profile } from '../types/ProfileGraphDashboard'; 3 | interface MostExpensiveNodesProps { 4 | data: Profile[]; 5 | plainData: Profile[]; 6 | selectedNodeId: string | null; 7 | handleNodeSelection: (nodeId: string) => void; 8 | } 9 | 10 | const MostExpensiveNodes: React.FC = ({ data, plainData, selectedNodeId, handleNodeSelection }) => { 11 | 12 | return ( 13 |
14 |
15 |

Most Expensive Nodes ({data?.length} of {plainData?.length})

16 |
17 | {data?.map((node) => ( 18 |
handleNodeSelection(node?.id!)} 22 | > 23 |
24 | {node?.name} [{node?.id}] 25 |
26 |
27 | {node?.totalTimePercent} 28 |
29 |
30 | ))} 31 |
32 | ); 33 | }; 34 | 35 | export default MostExpensiveNodes; 36 | -------------------------------------------------------------------------------- /frontend/src/components/ProfileOverview.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import prettyMilliseconds from 'pretty-ms'; 3 | 4 | import { IOverview } from '../types/ProfileGraphDashboard'; 5 | 6 | interface ProfileOverviewProps { 7 | overviewInfo?: IOverview; 8 | queryDuration?: number; 9 | } 10 | 11 | const ProfileOverview: React.FC = ({ 12 | overviewInfo, 13 | queryDuration = 0, 14 | }) => { 15 | return ( 16 |
17 |
18 |

19 | Profile Overview (Finished) 20 |

21 |
22 |
23 |
Total Execution Time
24 |
25 | ({prettyMilliseconds(Math.floor(queryDuration/1000/1000))}) 26 | {overviewInfo?.totalTimePercent} 27 |
28 |
29 |
30 |
31 | 35 | CPU Time 36 |
37 |
38 | {overviewInfo?.cpuTimePercent} 39 |
40 |
41 |
42 |
43 | 47 | I/O Time 48 |
49 |
50 | {overviewInfo?.waitTimePercent} 51 |
52 |
53 |
54 | ); 55 | }; 56 | 57 | export default ProfileOverview; 58 | -------------------------------------------------------------------------------- /frontend/src/components/ProfileOverviewNode.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Progress } from 'antd'; 3 | import { IOverview } from '../types/ProfileGraphDashboard'; 4 | 5 | interface ProfileOverviewNodeProps { 6 | overviewInfo?: IOverview; 7 | } 8 | 9 | const ProfileOverviewNode: React.FC = ({ overviewInfo }) => { 10 | const cpuTimePercent = parseFloat(overviewInfo?.cpuTimePercent || "0"); 11 | return ( 12 |
13 |
14 |

Profile Overview

15 |
16 |
17 | 24 |
25 | {overviewInfo?.totalTimePercent} 26 |
27 |
28 |
29 |
30 | 34 | CPU Time 35 |
36 |
37 | {overviewInfo?.cpuTimePercent} 38 |
39 |
40 |
41 |
42 | 46 | I/O Time 47 |
48 |
49 | {overviewInfo?.waitTimePercent} 50 |
51 |
52 |
53 | ); 54 | }; 55 | 56 | export default ProfileOverviewNode; 57 | -------------------------------------------------------------------------------- /frontend/src/components/Statistics.tsx: -------------------------------------------------------------------------------- 1 | import React, { useMemo } from 'react'; 2 | import { StatisticsData } from '../types/ProfileGraphDashboard'; 3 | 4 | interface StatisticsProps { 5 | statisticsData: StatisticsData; 6 | } 7 | 8 | const Statistics: React.FC = ({ statisticsData }) => { 9 | const statistics = useMemo(() => statisticsData?.statistics?.slice(2), [statisticsData]); 10 | 11 | const showStatistics = useMemo(() => statistics?.filter((item) => Boolean(item.value)).length > 0, [statistics]); 12 | 13 | return ( 14 | <> 15 | {showStatistics && ( 16 |
17 |
18 |

Statistics

19 |
20 | {statistics?.map((item, index) => ( 21 | Boolean(item.value) && ( 22 |
23 |
{item.name}
24 |
{item.value} {item.unit}
25 |
26 | ) 27 | ))} 28 |
)} 29 | 30 | ) 31 | } 32 | 33 | export default Statistics; 34 | -------------------------------------------------------------------------------- /frontend/src/constants/index.ts: -------------------------------------------------------------------------------- 1 | export const ALL_NODE_ID = "all"; 2 | 3 | export const OUTPUT_ROWS = "OutputRows"; 4 | 5 | export const pathMoon = [ 6 | ["M", 19.0374 + 2, 4 + 56], 7 | ["H", 13 + 2], 8 | ["C", 10.7909 + 2, 4 + 56, 9 + 2, 5.79086 + 56, 9 + 2, 8 + 56], 9 | ["C", 9 + 2, 10.2091 + 56, 10.7909 + 2, 12 + 56, 13 + 2, 12 + 56], 10 | ["H", 19.1059 + 2], 11 | [ 12 | "C", 13 | 17.8458 + 2, 14 | 13.9437 + 56, 15 | 15.7082 + 2, 16 | 15.2309 + 56, 17 | 13.2964 + 2, 18 | 15.0062 + 56, 19 | ], 20 | ["C", 2.5 + 2, 14 + 56, -1 + 2, 13.5 + 56, 2 + 2, 7.5 + 56], 21 | ["C", -2.5 + 2, 1.00616 + 56, 9.5 + 2, 0.5 + 56, 13.2964 + 2, 1.00616 + 56], 22 | [ 23 | "C", 24 | 15.6724 + 2, 25 | 1.00616 + 56, 26 | 17.772 + 2, 27 | 2.18999 + 56, 28 | 19.0374 + 2, 29 | 4 + 56, 30 | ], 31 | ["Z"], 32 | ]; 33 | export const pathArrow = [ 34 | ["M", 125.5, -18], 35 | ["L", 131.9952, -6.75], 36 | ["L", 119.00481, -6.75], 37 | ["L", 125.5, -18], 38 | ["Z"], 39 | ["M", 125.5, -1], 40 | ["L", 125.5, -7], 41 | ]; -------------------------------------------------------------------------------- /frontend/src/css/ProfileGraphDashboard.css: -------------------------------------------------------------------------------- 1 | .expensive-nodes-card { 2 | margin-top: 20px; 3 | background-color: white; 4 | box-shadow: 0 3px 6px #010e291f; 5 | width: 308px; 6 | box-sizing: border-box; 7 | border-radius: 8px; 8 | padding: 24px; 9 | border: 1px solid rgba(1, 14, 41, .08); 10 | } 11 | 12 | .expensive-nodes-card-header { 13 | margin-bottom: 8px; 14 | } 15 | .expensive-nodes-card-header h2 { 16 | margin: 0; 17 | font-size: 14px; 18 | color: #2c3e50; 19 | } 20 | 21 | .expensive-nodes-card-header span { 22 | font-weight: normal; 23 | color: #bdc3c7; 24 | 25 | } 26 | 27 | .expensive-nodes-node { 28 | font-size: 12px; 29 | display: flex; 30 | justify-content: space-between; 31 | align-items: center; 32 | padding: 8px 6px; 33 | border-bottom: 1px solid #ecf0f1; 34 | } 35 | 36 | .expensive-nodes-node.selected { 37 | /*border-bottom: none;*/ 38 | border: 2px solid #0175f6 !important; 39 | border-radius: 8px; 40 | } 41 | 42 | .expensive-nodes-node-name { 43 | font-size: 12px; 44 | color: #0c162b99; 45 | font-weight: 600; 46 | 47 | } 48 | 49 | .block{ 50 | display: block; 51 | } 52 | .expensive-nodes-node-percentage { 53 | font-size: 12px; 54 | color: rgba(12, 22, 43, 0.8); 55 | font-weight: bold; 56 | } 57 | .expensive-nodes-progress { 58 | display: flex; 59 | align-items: center; 60 | margin-bottom: 16px; 61 | } 62 | 63 | .expensive-nodes-progress .ant-progress-line { 64 | flex: 1; 65 | } 66 | 67 | 68 | .expensive-nodes-percentage { 69 | font-size: 14px; 70 | color: #2c3e50; 71 | font-weight: bold; 72 | margin-left: 8px; 73 | } 74 | .expensive-nodes-node-attributes{ 75 | display: block; 76 | } 77 | 78 | .expensive-nodes-node-title { 79 | color: #0c162bcc; 80 | font-weight: 600; 81 | font-size: 12px; 82 | padding: 8px 6px; 83 | } 84 | 85 | .custom-statistic { 86 | display: flex; 87 | align-items: center; 88 | margin-bottom: 8px; 89 | } 90 | 91 | .custom-dot { 92 | display: inline-block; 93 | width: 8px; 94 | height: 8px; 95 | border-radius: 50%; 96 | margin-right: 8px; 97 | } 98 | 99 | .statistic-title { 100 | font-size: 14px; 101 | color: #595959; 102 | } 103 | 
-------------------------------------------------------------------------------- /frontend/src/hooks/useGraphEvents.ts: -------------------------------------------------------------------------------- 1 | import { useCallback, useEffect, useState } from "react"; 2 | import { IG6GraphEvent, IGraph, Item } from "@ant-design/charts"; 3 | 4 | import { ALL_NODE_ID } from "../constants"; 5 | import { IOverview, Profile } from "../types/ProfileGraphDashboard"; 6 | 7 | export function useGraphEvents( 8 | plainData: Profile[], 9 | setOverInfo: React.Dispatch>, 10 | setSelectedNodeId: React.Dispatch>, 11 | profileWrapRefCanvas: React.MutableRefObject, 12 | profileWrapRef: React.RefObject, 13 | overviewInfoCurrent: React.RefObject, 14 | setOverviewInfo: React.Dispatch>, 15 | ) { 16 | const getAllNodes = useCallback((graph: IGraph) => { 17 | return graph?.getNodes(); 18 | }, []); 19 | 20 | const setNodeActive = useCallback((graph: IGraph, node?: Item | string) => { 21 | if (node) { 22 | graph?.setItemState(node, "highlight", true); 23 | } 24 | }, []); 25 | 26 | const clearNodeActive = useCallback((graph: IGraph) => { 27 | getAllNodes(graph)?.forEach(n => { 28 | graph?.clearItemStates(n); 29 | }); 30 | }, [getAllNodes]); 31 | 32 | const bindGraphEvents = useCallback((graph: IGraph) => { 33 | const handleNodeClick = (evt: IG6GraphEvent) => { 34 | const modal = evt.item?._cfg?.model; 35 | setOverInfo({ 36 | ...plainData.find(item => item.id === modal?.id), 37 | } as IOverview); 38 | setSelectedNodeId(modal?.id as string); 39 | 40 | const nodes = getAllNodes(graph); 41 | const id = evt.item?._cfg?.id; 42 | const node = nodes?.find(node => node?._cfg?.id === id); 43 | nodes 44 | ?.filter(node => node?._cfg?.id !== id) 45 | .forEach(n => { 46 | graph?.clearItemStates(n); 47 | }); 48 | 49 | setNodeActive(graph, node); 50 | }; 51 | 52 | const handleNodeMouseLeave = () => { 53 | if (!profileWrapRefCanvas.current) { 54 | profileWrapRefCanvas.current = document.getElementsByTagName("canvas")[0]; 55 | } 56 | profileWrapRefCanvas.current.style.cursor = "move"; 57 | }; 58 | 59 | const handleCanvasClick = () => { 60 | setSelectedNodeId(ALL_NODE_ID); 61 | setOverviewInfo(overviewInfoCurrent.current || undefined); 62 | clearNodeActive(graph); 63 | }; 64 | 65 | const handleCanvasDragStart = () => { 66 | if (profileWrapRef?.current) { 67 | profileWrapRef.current.style.userSelect = "none"; 68 | } 69 | }; 70 | 71 | const handleCanvasDragEnd = () => { 72 | if (profileWrapRef?.current) { 73 | profileWrapRef.current.style.userSelect = "unset"; 74 | } 75 | }; 76 | 77 | graph.on("node:click", handleNodeClick); 78 | graph.on("node:mouseleave", handleNodeMouseLeave); 79 | graph.on("canvas:click", handleCanvasClick); 80 | graph.on("canvas:dragstart", handleCanvasDragStart); 81 | graph.on("canvas:dragend", handleCanvasDragEnd); 82 | 83 | }, [profileWrapRefCanvas, profileWrapRef, overviewInfoCurrent, setOverviewInfo, getAllNodes, setNodeActive, clearNodeActive]); 84 | 85 | return { 86 | bindGraphEvents, 87 | }; 88 | } 89 | -------------------------------------------------------------------------------- /frontend/src/hooks/useGraphSize.ts: -------------------------------------------------------------------------------- 1 | import { useState, useEffect, useRef } from "react"; 2 | import { useReshape } from "./useReshape"; 3 | import { IGraphSize } from "../types/ProfileGraphDashboard"; 4 | 5 | 6 | export function useGraphSize() { 7 | const [graphSize, setGraphSize] = useState({ 8 | width: 0, 9 | height: window.innerHeight / 2, 10 | 
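// provisional initial size; handleResize() below replaces it with the measured container width and full window height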
}); 11 | 12 | 13 | const profileRef = useRef(null); 14 | 15 | const { reshapeDOM } = useReshape(); 16 | 17 | const handleResize = () => { 18 | if (profileRef?.current) { 19 | setGraphSize({ 20 | width: profileRef.current.offsetWidth - 408, 21 | height: window.innerHeight, 22 | }); 23 | } 24 | }; 25 | 26 | useEffect(() => { 27 | handleResize(); 28 | reshapeDOM(() => { 29 | handleResize(); 30 | }); 31 | }, []); 32 | 33 | return { graphSize, profileRef, handleResize }; 34 | } 35 | -------------------------------------------------------------------------------- /frontend/src/hooks/useNodeSelection.ts: -------------------------------------------------------------------------------- 1 | import { useCallback } from "react"; 2 | import { calculateNodeOffsets, setNodeActiveState } from "../utills"; 3 | import { IGraph } from "@ant-design/charts"; 4 | import { IErrors, IOverview, IStatisticsDesc, Profile } from "../types/ProfileGraphDashboard"; 5 | 6 | export function useNodeSelection( 7 | graphRef: React.RefObject, 8 | plainData: Profile[], 9 | setSelectedNodeId: React.Dispatch>, 10 | setOverviewInfo: React.Dispatch>, 11 | ): { 12 | handleNodeSelection: (nodeId: string) => void; 13 | setOverInfo: (data: IOverview) => void; 14 | } { 15 | const centerNodeInView = useCallback((nodeId) => { 16 | if (!graphRef.current) return; 17 | const graph: IGraph = graphRef.current; 18 | const nodes = graph?.getNodes(); 19 | const node = nodes?.find(n => n?._cfg?.id === nodeId); 20 | if (node) { 21 | const { offsetX, offsetY } = calculateNodeOffsets(graph, node); 22 | setNodeActiveState(graph, node, true); 23 | graph?.moveTo(offsetX, offsetY); 24 | } 25 | }, [graphRef]); 26 | 27 | const setOverInfo = useCallback((data: IOverview) => { 28 | const { 29 | totalTime, 30 | totalTimePercent, 31 | cpuTime, 32 | waitTime, 33 | cpuTimePercent, 34 | waitTimePercent, 35 | labels, 36 | statisticsDescArray, 37 | errors, 38 | name, 39 | } = data; 40 | setOverviewInfo({ 41 | cpuTime, 42 | waitTime, 43 | totalTime, 44 | totalTimePercent, 45 | cpuTimePercent, 46 | waitTimePercent, 47 | labels, 48 | statisticsDescArray: statisticsDescArray as IStatisticsDesc[], 49 | errors: errors as unknown as IErrors[], 50 | name, 51 | }); 52 | }, [setOverviewInfo]); 53 | 54 | const handleNodeSelection = useCallback((nodeId: string) => { 55 | 56 | const selectedItem = plainData?.find(item => item.id === nodeId); 57 | setSelectedNodeId(nodeId); 58 | 59 | if (selectedItem) { 60 | setOverInfo(selectedItem as unknown as IOverview); 61 | centerNodeInView(selectedItem.id); 62 | } 63 | }, [plainData, setSelectedNodeId, setOverInfo, centerNodeInView]); 64 | 65 | return { handleNodeSelection, setOverInfo }; 66 | } 67 | -------------------------------------------------------------------------------- /frontend/src/hooks/useReshape.ts: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef } from 'react'; 2 | 3 | /** 4 | * Custom hook to handle DOM reshaping events and register callbacks. 5 | * 6 | * @returns {Object} - An object containing the reshapeDOM function. 7 | */ 8 | export const useReshape = () => { 9 | const callbackRef = useRef<(() => void) | null>(null); 10 | 11 | /** 12 | * Registers a callback to be called when a DOM reshape event occurs. 13 | * @param {Function} callback - The function to be called on DOM reshape. 
14 | */ 15 | const reshapeDOM = (callback: () => void) => { 16 | // Store the callback in a ref to keep a stable reference 17 | callbackRef.current = callback; 18 | }; 19 | 20 | useEffect(() => { 21 | // Handler to call the stored callback on window resize 22 | const handleResize = () => { 23 | if (callbackRef.current) { 24 | callbackRef.current(); 25 | } 26 | }; 27 | 28 | // Attach resize listener 29 | window.addEventListener('resize', handleResize); 30 | 31 | // Cleanup listener on unmount 32 | return () => { 33 | window.removeEventListener('resize', handleResize); 34 | }; 35 | }, []); 36 | 37 | return { 38 | reshapeDOM, 39 | }; 40 | }; 41 | -------------------------------------------------------------------------------- /frontend/src/images/icons/download.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/src/images/icons/full-screen.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/src/images/icons/zoom-in.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/src/images/icons/zoom-out.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/src/index.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | -------------------------------------------------------------------------------- /frontend/src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import './index.css'; 4 | 5 | import ProfileGraphDashboard from './ProfileGraphDashboard'; 6 | 7 | ReactDOM.render(, document.getElementById('root')); 8 | -------------------------------------------------------------------------------- /frontend/src/react-app-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /frontend/src/setupTests.ts: -------------------------------------------------------------------------------- 1 | // jest-dom adds custom jest matchers for asserting on DOM nodes. 
2 | // allows you to do things like: 3 | // expect(element).toHaveTextContent(/react/i) 4 | // learn more: https://github.com/testing-library/jest-dom 5 | import '@testing-library/jest-dom'; 6 | -------------------------------------------------------------------------------- /frontend/src/types/ProfileGraphDashboard.ts: -------------------------------------------------------------------------------- 1 | export interface NodeItem { 2 | name: string; 3 | title: string; 4 | progress: number; 5 | text: string; 6 | } 7 | 8 | export interface Node { 9 | id: string; 10 | value: { 11 | items: NodeItem[]; 12 | }; 13 | } 14 | 15 | export interface Edge { 16 | lineWidth: any; 17 | _value: undefined; 18 | source: string; 19 | target: string; 20 | } 21 | 22 | export interface GraphData { 23 | nodes: Node[]; 24 | edges: Edge[]; 25 | } 26 | 27 | export interface ProfileData { 28 | id: string; 29 | totalExecutionTime: number; 30 | cpuTimePercentage: number; 31 | ioTimePercentage: number; 32 | } 33 | 34 | export interface StatisticsItem { 35 | name: string; 36 | value: number | string; 37 | unit: string; 38 | } 39 | 40 | export interface StatisticsData { 41 | id: string; 42 | statistics: StatisticsItem[]; 43 | } 44 | 45 | export interface AttributeItem { 46 | name: string; 47 | value: string[]; 48 | } 49 | 50 | export interface AttributeData { 51 | id: string; 52 | labels: AttributeItem[]; 53 | } 54 | 55 | export interface StatisticsDesc { 56 | desc: string; 57 | display_name: string; 58 | index: number; 59 | unit: string; 60 | plain_statistics: boolean; 61 | } 62 | 63 | export interface Label { 64 | name: string; 65 | value: string[]; 66 | } 67 | 68 | export interface Metric { 69 | name: string; 70 | labels: Record; 71 | value: Record; 72 | } 73 | 74 | export interface Profile { 75 | waitTimePercent: string; 76 | cpuTimePercent: string; 77 | totalTimePercent: string; 78 | statisticsDescArray: { _type: string; desc: string; display_name: string | undefined; index: number; unit: ("NanoSeconds" | "Bytes" | "Rows" | "Count" | "MillisSeconds") | undefined; plain_statistics: boolean | undefined; _value: number; }[]; 79 | waitTime: number; 80 | cpuTime: number; 81 | totalTime: number; 82 | id?: string; 83 | name: string; 84 | parent_id: number | string | null; 85 | title: string; 86 | labels: Label[]; 87 | statistics: number[]; 88 | metrics?: Record; 89 | errors: string[]; 90 | } 91 | 92 | export interface MessageResponse { 93 | result: string; 94 | } 95 | 96 | export type TUnit = "NanoSeconds" | "MillisSeconds" | "Bytes" | "Rows" | "Count"; 97 | export interface IStatisticsDesc { 98 | _type: string; 99 | desc: string; 100 | index: number; 101 | _value: any; 102 | display_name: string; 103 | displayName?: string; 104 | plain_statistics?: boolean; 105 | unit?: TUnit; 106 | } 107 | 108 | export interface IErrors { 109 | backtrace: string; 110 | detail: string; 111 | message: string; 112 | _errorType: string; 113 | } 114 | 115 | export interface IOverview { 116 | cpuTime: number; 117 | waitTime: number; 118 | totalTime: number; 119 | isTotalBiggerZero?: boolean; 120 | totalTimePercent?: string; 121 | cpuTimePercent?: string; 122 | waitTimePercent?: string; 123 | id?: string; 124 | labels?: { name: string; value: any[] }[]; 125 | statisticsDescArray?: IStatisticsDesc[]; 126 | errors?: IErrors[]; 127 | name?: string; 128 | } 129 | 130 | export interface IGraphSize { 131 | width: number; 132 | height: number; 133 | } 134 | -------------------------------------------------------------------------------- 
/frontend/src/utills/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./graph"; 2 | 3 | /** 4 | * Formats a given percentage value. 5 | * @param {number} numerator - The numerator of the percentage. 6 | * @param {number} denominator - The denominator of the percentage. 7 | * @returns {string} - The formatted percentage string. 8 | */ 9 | export function getPercent(numerator: number, denominator: number): string { 10 | if (denominator === 0) { 11 | return "0%"; 12 | } 13 | const percent = (numerator / denominator) * 100; 14 | return `${percent.toFixed(1)}%`; 15 | } 16 | 17 | 18 | /** 19 | * Transforms the errors array by extracting the error type and merging it with the error details. 20 | * @param {any[]} errors - The array of errors to be transformed. 21 | * @returns {any[]} - The transformed array of errors. 22 | */ 23 | export function transformErrors(errors) { 24 | return errors.map(error => { 25 | const type = Object.keys(error)[0]; 26 | return { _errorType: type, ...error[type] }; 27 | }); 28 | } -------------------------------------------------------------------------------- /frontend/tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: ['./src/**/*.{js,jsx,ts,tsx}', './public/index.html'], 4 | theme: { 5 | extend: {}, 6 | }, 7 | plugins: [], 8 | } 9 | 10 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": [ 5 | "dom", 6 | "dom.iterable", 7 | "esnext" 8 | ], 9 | "noImplicitAny": false, 10 | "allowJs": true, 11 | "skipLibCheck": true, 12 | "esModuleInterop": true, 13 | "allowSyntheticDefaultImports": true, 14 | "strict": true, 15 | "forceConsistentCasingInFileNames": true, 16 | "noFallthroughCasesInSwitch": true, 17 | "module": "esnext", 18 | "moduleResolution": "node", 19 | "resolveJsonModule": true, 20 | "isolatedModules": true, 21 | "noEmit": true, 22 | "jsx": "react-jsx" 23 | }, 24 | "include": [ 25 | "src" 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /licenserc.toml: -------------------------------------------------------------------------------- 1 | baseDir = "." 
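# License-header checker config: every tracked file must carry the header from `headerPath` unless it matches one of the `excludes` patterns below.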
2 | 3 | headerPath = "Apache-2.0.txt" 4 | 5 | excludes = [ 6 | # hidden files 7 | ".cargo", 8 | ".github", 9 | ".dockerignore", 10 | ".gitignore", 11 | ".gitattributes", 12 | ".editorconfig", 13 | ".npmignore", 14 | ".prettierignore", 15 | 16 | "LICENSE", 17 | "Dockerfile", 18 | "Makefile", 19 | 20 | # docs and generated files 21 | "**/*.md", 22 | "**/*.hbs", 23 | "**/*.template", 24 | "**/*.cue", 25 | "**/*.json", 26 | "**/*.sql", 27 | "**/*.proto", 28 | "**/*.yml", 29 | "**/*.yaml", 30 | "**/*.toml", 31 | "**/*.lock", 32 | "**/*.yapf", 33 | "**/*.test", 34 | "**/*.txt", 35 | 36 | # test files 37 | "core/tests/**", 38 | "driver/tests/**", 39 | "cli/tests/**", 40 | 41 | # frontend files 42 | "frontend/**", 43 | "cli/frontend", 44 | ] 45 | 46 | [properties] 47 | inceptionYear = 2021 48 | copyrightOwner = "Datafuse Labs" 49 | -------------------------------------------------------------------------------- /macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-driver-macros" 3 | description = "Macros for Databend Driver" 4 | categories = ["database"] 5 | keywords = ["databend", "database", "macros"] 6 | 7 | version = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | authors = { workspace = true } 11 | repository = { workspace = true } 12 | 13 | [lib] 14 | proc-macro = true 15 | 16 | [dependencies] 17 | quote = "1.0" 18 | syn = "2.0" 19 | -------------------------------------------------------------------------------- /macros/src/from_row.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use proc_macro::TokenStream; 16 | use quote::{quote, quote_spanned}; 17 | use syn::{spanned::Spanned, DeriveInput}; 18 | 19 | /// #[derive(TryFromRow)] derives TryFromRow for struct 20 | pub fn from_row_derive(tokens_input: TokenStream) -> TokenStream { 21 | let item = syn::parse::(tokens_input).expect("No DeriveInput"); 22 | let struct_fields = crate::parser::parse_named_fields(&item, "TryFromRow"); 23 | 24 | let struct_name = &item.ident; 25 | let (impl_generics, ty_generics, where_clause) = item.generics.split_for_impl(); 26 | 27 | let path = quote!(databend_driver::_macro_internal); 28 | 29 | let set_fields_code = struct_fields.named.iter().map(|field| { 30 | let field_name = &field.ident; 31 | let field_type = &field.ty; 32 | 33 | quote_spanned! {field.span() => 34 | #field_name: { 35 | let (col_ix, col_value) = vals_iter 36 | .next() 37 | .unwrap(); // vals_iter size is checked before this code is reached, so 38 | // it is safe to unwrap 39 | let t = col_value.get_type(); 40 | <#field_type>::try_from(col_value) 41 | .map_err(|_| format!("failed converting column {} from type({:?}) to type({})", col_ix, t, std::any::type_name::<#field_type>()))? 
42 | }, 43 | } 44 | }); 45 | 46 | let fields_count = struct_fields.named.len(); 47 | let generated = quote! { 48 | impl #impl_generics TryFrom<#path::Row> for #struct_name #ty_generics #where_clause { 49 | type Error = String; 50 | fn try_from(row: #path::Row) -> #path::Result<Self, Self::Error> { 51 | if #fields_count != row.len() { 52 | return Err(format!("row size mismatch: expected {} columns, got {}", #fields_count, row.len())); 53 | } 54 | let mut vals_iter = row.into_iter().enumerate(); 55 | Ok(#struct_name { 56 | #(#set_fields_code)* 57 | }) 58 | } 59 | } 60 | }; 61 | 62 | TokenStream::from(generated) 63 | } 64 | -------------------------------------------------------------------------------- /macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use proc_macro::TokenStream; 16 | 17 | mod from_row; 18 | mod parser; 19 | 20 | #[proc_macro_derive(TryFromRow)] 21 | pub fn from_row_derive(tokens_input: TokenStream) -> TokenStream { 22 | from_row::from_row_derive(tokens_input) 23 | } 24 | -------------------------------------------------------------------------------- /macros/src/parser.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use syn::{Data, DeriveInput, Fields, FieldsNamed}; 16 | 17 | /// Returns the named fields of the struct described by the given `DeriveInput`, 18 | /// panicking when the derive target is not a struct with named fields 19 | pub(crate) fn parse_named_fields<'a>( 20 | input: &'a DeriveInput, 21 | current_derive: &str, 22 | ) -> &'a FieldsNamed { 23 | match &input.data { 24 | Data::Struct(data) => match &data.fields { 25 | Fields::Named(named_fields) => named_fields, 26 | _ => panic!( 27 | "derive({}) works only for structs with named fields.
Tuples don't need derive.", 28 | current_derive 29 | ), 30 | }, 31 | _ => panic!("derive({}) works only on structs!", current_derive), 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /nfpm.yaml: -------------------------------------------------------------------------------- 1 | name: "bendsql" 2 | arch: "" 3 | platform: "linux" 4 | version: "" 5 | section: "database" 6 | priority: "extra" 7 | maintainer: "Databend Authors " 8 | description: | 9 | BendSQL is a native command line tool for Databend. 10 | vendor: "Datafuse Labs" 11 | homepage: "https://github.com/databendlabs/bendsql" 12 | license: "Apache-2.0" 13 | contents: 14 | - src: dist/bendsql 15 | dst: /usr/bin/bendsql 16 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "stable" 3 | components = ["rustfmt", "clippy"] 4 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2021" 2 | newline_style = "unix" 3 | reorder_imports = true 4 | # imports_granularity = "Item" 5 | # group_imports = "StdExternalCrate" 6 | # where_single_line = true 7 | # trailing_comma = "Vertical" 8 | # overflow_delimited_expr = true 9 | -------------------------------------------------------------------------------- /scripts/bump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2021 Datafuse Labs 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
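# Usage: scripts/bump.sh <version>
# Creates a bump-<version> branch off upstream/main, rewrites the workspace and
# npm package versions with sed, and commits the change.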
16 | 17 | set -e 18 | 19 | VERSION=$1 20 | 21 | if [ -z "$VERSION" ]; then 22 | echo "Usage: bump.sh " 23 | exit 1 24 | fi 25 | 26 | echo "Bumping version to $VERSION" 27 | 28 | git status 29 | git checkout main 30 | git fetch upstream 31 | git rebase upstream/main 32 | git checkout -b "bump-$VERSION" 33 | 34 | if [[ "$OSTYPE" == "darwin"* ]]; then 35 | sed -ri '' "s/^version = \".*\"/version = \"$VERSION\"/g" Cargo.toml 36 | sed -ri '' "s/^databend-(.*)version = \".*\"/databend-\1version = \"$VERSION\"/g" Cargo.toml 37 | sed -i '' "s/\"version\": \".*\"/\"version\": \"$VERSION\"/g" bindings/nodejs/package.json 38 | sed -i '' "s/\"version\": \".*\"/\"version\": \"$VERSION\"/g" bindings/nodejs/npm/*/package.json 39 | else 40 | sed -ri "s/^version = \".*\"/version = \"$VERSION\"/g" Cargo.toml 41 | sed -ri "s/^databend-(.*)version = \".*\"/databend-\\1version = \"$VERSION\"/g" Cargo.toml 42 | sed -i "s/\"version\": \".*\"/\"version\": \"$VERSION\"/g" bindings/nodejs/package.json 43 | sed -i "s/\"version\": \".*\"/\"version\": \"$VERSION\"/g" bindings/nodejs/npm/*/package.json 44 | fi 45 | 46 | git status 47 | git add Cargo.toml bindings/nodejs/package.json bindings/nodejs/npm/*/package.json 48 | git commit -m "chore: bump version to $VERSION" 49 | -------------------------------------------------------------------------------- /sql/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-driver-core" 3 | description = "Core components for Databend Driver" 4 | categories = ["database"] 5 | keywords = ["databend", "database", "sdk"] 6 | 7 | version = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | authors = { workspace = true } 11 | repository = { workspace = true } 12 | 13 | [features] 14 | flight-sql = ["dep:arrow", "dep:arrow-array", "dep:arrow-schema", "dep:tonic", "dep:jsonb"] 15 | 16 | [dependencies] 17 | arrow = { workspace = true, optional = true } 18 | arrow-array = { workspace = true, optional = true } 19 | arrow-buffer = { workspace = true } 20 | arrow-schema = { workspace = true, optional = true } 21 | chrono = { workspace = true } 22 | databend-client = { workspace = true } 23 | jsonb = { workspace = true, optional = true } 24 | tokio-stream = { workspace = true } 25 | tonic = { workspace = true, optional = true } 26 | 27 | geozero = { version = "0.14.0", features = ["with-wkb"] } 28 | glob = "0.3" 29 | hex = "0.4.3" 30 | itertools = "0.14" 31 | lexical-core = "1.0.5" 32 | memchr = "2.7" 33 | roaring = { version = "0.10.12", features = ["serde"] } 34 | serde = { version = "1.0", default-features = false, features = ["derive"] } 35 | serde_json = { version = "1.0", default-features = false, features = ["std"] } 36 | url = { version = "2.5", default-features = false } 37 | -------------------------------------------------------------------------------- /sql/src/cursor_ext/cursor_checkpoint_ext.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::io::Cursor; 16 | 17 | pub trait ReadCheckPointExt { 18 | fn checkpoint(&self) -> u64; 19 | fn rollback(&mut self, checkpoint: u64); 20 | } 21 | 22 | impl<T> ReadCheckPointExt for Cursor<T> 23 | where 24 | T: AsRef<[u8]>, 25 | { 26 | fn checkpoint(&self) -> u64 { 27 | self.position() 28 | } 29 | 30 | fn rollback(&mut self, checkpoint: u64) { 31 | self.set_position(checkpoint) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /sql/src/cursor_ext/cursor_read_string_ext.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::io::BufRead; 16 | use std::io::Cursor; 17 | use std::io::ErrorKind; 18 | use std::io::Result; 19 | 20 | use crate::cursor_ext::cursor_read_bytes_ext::ReadBytesExt; 21 | 22 | pub trait BufferReadStringExt { 23 | fn read_quoted_text(&mut self, buf: &mut Vec<u8>, quote: u8) -> Result<()>; 24 | } 25 | 26 | impl<T> BufferReadStringExt for Cursor<T> 27 | where 28 | T: AsRef<[u8]>, 29 | { 30 | fn read_quoted_text(&mut self, buf: &mut Vec<u8>, quote: u8) -> Result<()> { 31 | self.must_ignore_byte(quote)?; 32 | 33 | loop { 34 | self.keep_read(buf, |b| b != quote && b != b'\\'); 35 | if self.ignore_byte(quote) { 36 | if self.peek_byte() == Some(quote) { 37 | buf.push(quote); 38 | self.consume(1); 39 | } else { 40 | return Ok(()); 41 | } 42 | } else if self.ignore_byte(b'\\') { 43 | let b = self.fill_buf()?; 44 | if b.is_empty() { 45 | return Err(std::io::Error::new( 46 | ErrorKind::InvalidData, 47 | "Expected to have terminated string literal after escaped char '\\'."
48 | .to_string(), 49 | )); 50 | } 51 | let c = b[0]; 52 | self.ignore_byte(c); 53 | 54 | match c { 55 | b'n' => buf.push(b'\n'), 56 | b't' => buf.push(b'\t'), 57 | b'r' => buf.push(b'\r'), 58 | b'0' => buf.push(b'\0'), 59 | b'\'' => buf.push(b'\''), 60 | b'\\' => buf.push(b'\\'), 61 | _ => { 62 | buf.push(b'\\'); 63 | buf.push(c); 64 | } 65 | } 66 | } else { 67 | break; 68 | } 69 | } 70 | Err(std::io::Error::new( 71 | ErrorKind::InvalidData, 72 | format!( 73 | "Expected to have terminated string literal after quote {:?}, while consumed buf: {:?}", 74 | quote as char, buf 75 | ), 76 | )) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /sql/src/cursor_ext/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod cursor_checkpoint_ext; 16 | mod cursor_read_bytes_ext; 17 | mod cursor_read_number_ext; 18 | mod cursor_read_string_ext; 19 | 20 | pub use cursor_checkpoint_ext::ReadCheckPointExt; 21 | pub use cursor_read_bytes_ext::ReadBytesExt; 22 | pub use cursor_read_number_ext::collect_binary_number; 23 | pub use cursor_read_number_ext::collect_number; 24 | pub use cursor_read_number_ext::ReadNumberExt; 25 | pub use cursor_read_string_ext::BufferReadStringExt; 26 | -------------------------------------------------------------------------------- /sql/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | mod cursor_ext; 16 | pub mod error; 17 | pub mod raw_rows; 18 | pub mod rows; 19 | pub mod schema; 20 | pub mod value; 21 | 22 | #[doc(hidden)] 23 | pub mod _macro_internal { 24 | pub use crate::error::{Error, Result}; 25 | pub use crate::rows::{Row, RowIterator}; 26 | pub use crate::schema::Schema; 27 | pub use crate::value::Value; 28 | } 29 | -------------------------------------------------------------------------------- /sql/src/raw_rows.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use std::pin::Pin; 16 | use std::task::Context; 17 | use std::task::Poll; 18 | 19 | use tokio_stream::{Stream, StreamExt}; 20 | 21 | use crate::error::Error; 22 | use crate::error::Result; 23 | use crate::rows::Row; 24 | use crate::rows::ServerStats; 25 | use crate::schema::SchemaRef; 26 | use crate::value::Value; 27 | 28 | #[derive(Clone, Debug)] 29 | pub enum RawRowWithStats { 30 | Row(RawRow), 31 | Stats(ServerStats), 32 | } 33 | 34 | #[derive(Clone, Debug, Default)] 35 | pub struct RawRow { 36 | pub row: Row, 37 | pub raw_row: Vec<Option<String>>, 38 | } 39 | 40 | impl RawRow { 41 | pub fn new(row: Row, raw_row: Vec<Option<String>>) -> Self { 42 | Self { row, raw_row } 43 | } 44 | 45 | pub fn len(&self) -> usize { 46 | self.raw_row.len() 47 | } 48 | 49 | pub fn is_empty(&self) -> bool { 50 | self.raw_row.is_empty() 51 | } 52 | 53 | pub fn values(&self) -> &[Option<String>] { 54 | &self.raw_row 55 | } 56 | 57 | pub fn schema(&self) -> SchemaRef { 58 | self.row.schema() 59 | } 60 | } 61 | 62 | impl TryFrom<(SchemaRef, Vec<Option<String>>)> for RawRow { 63 | type Error = Error; 64 | 65 | fn try_from((schema, data): (SchemaRef, Vec<Option<String>>)) -> Result<Self> { 66 | let mut values: Vec<Value> = Vec::with_capacity(data.len()); 67 | for (field, val) in schema.fields().iter().zip(data.clone().into_iter()) { 68 | values.push(Value::try_from((&field.data_type, val))?); 69 | } 70 | 71 | let row = Row::new(schema, values); 72 | Ok(RawRow::new(row, data)) 73 | } 74 | } 75 | 76 | impl IntoIterator for RawRow { 77 | type Item = Option<String>; 78 | type IntoIter = std::vec::IntoIter<Self::Item>; 79 | 80 | fn into_iter(self) -> Self::IntoIter { 81 | self.raw_row.into_iter() 82 | } 83 | } 84 | 85 | #[derive(Clone, Debug)] 86 | pub struct RawRows { 87 | rows: Vec<RawRow>, 88 | } 89 | 90 | impl RawRows { 91 | pub fn new(rows: Vec<RawRow>) -> Self { 92 | Self { rows } 93 | } 94 | 95 | pub fn rows(&self) -> &[RawRow] { 96 | &self.rows 97 | } 98 | 99 | pub fn len(&self) -> usize { 100 | self.rows.len() 101 | } 102 | 103 | pub fn is_empty(&self) -> bool { 104 | self.rows.is_empty() 105 | } 106 | } 107 | 108 | impl IntoIterator for RawRows { 109 | type Item = RawRow; 110 | type IntoIter = std::vec::IntoIter<Self::Item>; 111 | 112 | fn into_iter(self) -> Self::IntoIter { 113 | self.rows.into_iter() 114 | } 115 | } 116 | 117 | pub struct RawRowIterator { 118 | schema: SchemaRef, 119 | it: Pin<Box<dyn Stream<Item = Result<RawRow>> + Send>>, 120 | } 121 | 122 | impl RawRowIterator { 123 | pub fn new( 124 | schema: SchemaRef, 125 | it: Pin<Box<dyn Stream<Item = Result<RawRowWithStats>> + Send>>, 126 | ) -> Self { 127 | let it = it.filter_map(|r| match r { 128 | Ok(RawRowWithStats::Row(r)) => Some(Ok(r)), 129 | Ok(_) => None, 130 | Err(err) => Some(Err(err)), 131 | }); 132 | Self { 133 | schema, 134 | it: Box::pin(it), 135 | } 136 | } 137 | 138 | pub fn schema(&self) -> SchemaRef { 139 | self.schema.clone() 140 | } 141 | } 142 | 143 | impl Stream for RawRowIterator { 144 | type Item = Result<RawRow>; 145 | 146 | fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { 147 | Pin::new(&mut self.it).poll_next(cx) 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /taplo.toml:
-------------------------------------------------------------------------------- 1 | include = ["**/Cargo.toml"] 2 | 3 | [formatting] 4 | # Align consecutive entries vertically. 5 | align_entries = false 6 | # Append trailing commas for multi-line arrays. 7 | array_trailing_comma = true 8 | # Expand arrays to multiple lines that exceed the maximum column width. 9 | array_auto_expand = true 10 | # Collapse arrays that don't exceed the maximum column width and don't contain comments. 11 | array_auto_collapse = false 12 | # Omit white space padding from single-line arrays 13 | compact_arrays = true 14 | # Omit white space padding from the start and end of inline tables. 15 | compact_inline_tables = false 16 | # Maximum column width in characters, affects array expansion and collapse, this doesn't take whitespace into account. 17 | # Note that this is not set in stone, and works on a best-effort basis. 18 | column_width = 120 19 | # Indent based on tables and arrays of tables and their subtables, subtables out of order are not indented. 20 | indent_tables = false 21 | # The substring that is used for indentation, should be tabs or spaces (but technically can be anything). 22 | indent_string = ' ' 23 | # Add trailing newline at the end of the file if not present. 24 | trailing_newline = true 25 | # Alphabetically reorder keys that are not separated by empty lines. 26 | reorder_keys = false 27 | # Maximum amount of allowed consecutive blank lines. This does not affect the whitespace at the end of the document, as it is always stripped. 28 | allowed_blank_lines = 1 29 | # Use CRLF for line endings. 30 | crlf = false 31 | 32 | [[rule]] 33 | keys = ["dependencies", "dev-dependencies", "build-dependencies"] 34 | formatting = { reorder_keys = true } 35 | -------------------------------------------------------------------------------- /tests/Makefile: -------------------------------------------------------------------------------- 1 | default: run 2 | 3 | run: test-core test-driver test-bendsql down 4 | 5 | prepare: 6 | mkdir -p data/databend 7 | 8 | up: prepare 9 | docker compose up --scale query=3 --quiet-pull -d --wait 10 | grep -q '127.0.0.1 minio' /etc/hosts || echo '127.0.0.1 minio' | sudo tee -a /etc/hosts > /dev/null 11 | curl -u root: -XPOST "http://localhost:8000/v1/query" -H 'Content-Type: application/json' -d '{"sql": "select version()", "pagination": { "wait_time_secs": 10}}' 12 | 13 | start: up 14 | 15 | test-core: up 16 | cargo test --test core 17 | 18 | test-driver: up 19 | cargo test --test driver 20 | TEST_DATABEND_DSN=databend+flight://root:@localhost:8900/default?sslmode=disable cargo test --features flight-sql --test driver 21 | 22 | test-bendsql: up 23 | cd .. && ./cli/test.sh http 24 | cd .. 
&& ./cli/test.sh flight 25 | 26 | test-bindings-python: up 27 | cd ../bindings/python && behave tests/asyncio 28 | cd ../bindings/python && behave tests/blocking 29 | cd ../bindings/python && behave tests/cursor 30 | 31 | test-bindings-nodejs: up 32 | cd ../bindings/nodejs && pnpm run test 33 | 34 | down: 35 | docker compose down 36 | 37 | stop: down 38 | -------------------------------------------------------------------------------- /tests/config/databend-meta-node-1.toml: -------------------------------------------------------------------------------- 1 | # Usage: 2 | # databend-meta -c databend-meta-node-1.toml 3 | 4 | log_dir = "./.databend/logs1" 5 | admin_api_address = "0.0.0.0:28101" 6 | grpc_api_address = "0.0.0.0:9191" 7 | # databend-query fetches this address to update its databend-meta endpoints list, 8 | # in case the databend-meta cluster changes. 9 | grpc_api_advertise_host = "meta" 10 | 11 | [raft_config] 12 | id = 1 13 | raft_dir = "./.databend/meta1" 14 | raft_api_port = 28103 15 | 16 | # Assign raft_{listen|advertise}_host in test config. 17 | # This allows you to catch bugs in unit tests when something goes wrong in raft meta node communication. 18 | raft_listen_host = "0.0.0.0" 19 | raft_advertise_host = "meta" 20 | 21 | # Start up mode: single node cluster 22 | single = true 23 | -------------------------------------------------------------------------------- /tests/config/databend-query-node-1.toml: -------------------------------------------------------------------------------- 1 | # Usage: 2 | # databend-query -c databend_query_config_spec.toml 3 | 4 | [query] 5 | max_active_sessions = 256 6 | shutdown_wait_timeout_ms = 5000 7 | 8 | # For flight RPC. 9 | flight_api_address = "0.0.0.0:9091" 10 | 11 | # Databend Query http address. 12 | # For the admin REST API. 13 | admin_api_address = "0.0.0.0:8080" 14 | 15 | # Databend Query metrics REST API. 16 | metric_api_address = "0.0.0.0:7070" 17 | 18 | # Databend Query MySQL Handler. 19 | mysql_handler_host = "0.0.0.0" 20 | mysql_handler_port = 3307 21 | 22 | # Databend Query ClickHouse Handler. 23 | clickhouse_http_handler_host = "0.0.0.0" 24 | clickhouse_http_handler_port = 8124 25 | 26 | # Databend Query HTTP Handler. 27 | http_handler_host = "0.0.0.0" 28 | http_handler_port = 8000 29 | 30 | # Databend Query FlightSQL Handler.
31 | flight_sql_handler_host = "0.0.0.0" 32 | flight_sql_handler_port = 8900 33 | 34 | tenant_id = "test_tenant" 35 | cluster_id = "test_cluster" 36 | 37 | table_engine_memory_enabled = true 38 | default_storage_format = 'parquet' 39 | default_compression = 'zstd' 40 | 41 | enable_udf_server = true 42 | udf_server_allow_list = ['http://0.0.0.0:8815'] 43 | udf_server_allow_insecure = true 44 | 45 | cloud_control_grpc_server_address = "http://0.0.0.0:50051" 46 | 47 | [[query.users]] 48 | name = "root" 49 | auth_type = "no_password" 50 | 51 | [[query.users]] 52 | name = "databend" 53 | auth_type = "double_sha1_password" 54 | # echo -n "databend" | sha1sum | cut -d' ' -f1 | xxd -r -p | sha1sum 55 | auth_string = "3081f32caef285c232d066033c89a78d88a6d8a5" 56 | 57 | # This is for testing. 58 | [[query.udfs]] 59 | name = "ping" 60 | definition = "CREATE FUNCTION ping(STRING) RETURNS STRING LANGUAGE python HANDLER = 'ping' ADDRESS = 'http://0.0.0.0:8815'" 61 | 62 | [query.settings] 63 | aggregate_spilling_memory_ratio = 60 64 | join_spilling_memory_ratio = 60 65 | 66 | [log] 67 | level = "INFO" 68 | 69 | [log.file] 70 | format = "text" 71 | dir = "./.databend/logs_1" 72 | 73 | [meta] 74 | # It is a list of `grpc_api_advertise_host:<grpc_api_port>` entries from the databend-meta config 75 | endpoints = ["meta:9191"] 76 | username = "root" 77 | password = "root" 78 | client_timeout_in_second = 60 79 | auto_sync_interval = 60 80 | 81 | # Storage config. 82 | [storage] 83 | type = "s3" 84 | 85 | [storage.s3] 86 | bucket = "databend" 87 | endpoint_url = "http://minio:9000" 88 | #endpoint_url = "http://localhost:9000" 89 | access_key_id = "minioadmin" 90 | secret_access_key = "minioadmin" 91 | enable_virtual_host_style = false 92 | 93 | # Cache config. 94 | [cache] 95 | data_cache_storage = "none" 96 | 97 | [cache.disk] 98 | # cache path 99 | path = "./.databend/_cache" 100 | # max bytes of cached data, 20G 101 | max_bytes = 21474836480 102 | 103 | [spill] 104 | spill_local_disk_path = "./.databend/temp/_query_spill" 105 | -------------------------------------------------------------------------------- /tests/config/nginx.conf: -------------------------------------------------------------------------------- 1 | events { 2 | worker_connections 1024; 3 | } 4 | 5 | http { 6 | upstream backend { 7 | server query:8000; 8 | } 9 | 10 | server { 11 | listen 8000; 12 | 13 | location / { 14 | proxy_pass http://backend; 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /tests/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | minio: 3 | image: docker.io/minio/minio 4 | command: server /data 5 | ports: 6 | - "9000:9000" 7 | volumes: 8 | - ./data:/data 9 | meta: 10 | image: docker.io/datafuselabs/databend-meta:nightly 11 | volumes: 12 | - ./config/databend-meta-node-1.toml:/conf.toml:ro 13 | command: -c /conf.toml 14 | ports: 15 | - "28101:28101" 16 | healthcheck: 17 | test: "/databend-metactl status || exit 1" 18 | interval: 2s 19 | retries: 10 20 | start_period: 2s 21 | timeout: 1s 22 | query: 23 | image: docker.io/datafuselabs/databend-query:nightly 24 | volumes: 25 | - ./config/databend-query-node-1.toml:/conf.toml:ro 26 | command: -c /conf.toml 27 | environment: 28 | - QUERY_DATABEND_ENTERPRISE_LICENSE 29 | ports: 30 | - "8900-8902:8900" # flight sql handler 31 | # - "8001-8003:8000" # http handler 32 | depends_on: 33 | minio: 34 | condition: service_started 35 | meta: 36 | condition: service_healthy 37 | healthcheck: 38 |
test: "curl -f localhost:8080/v1/health || exit 1" 39 | interval: 2s 40 | retries: 10 41 | start_period: 2s 42 | timeout: 1s 43 | lb: 44 | image: docker.io/nginx 45 | volumes: 46 | - ./config/nginx.conf:/etc/nginx/nginx.conf:ro 47 | ports: 48 | - "8000:8000" 49 | depends_on: 50 | query: 51 | condition: service_healthy 52 | -------------------------------------------------------------------------------- /ttc/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databend-ttc" 3 | description = "Databend Rust Tcp Test Container" 4 | categories = ["database"] 5 | keywords = ["databend", "database", "test", "cttttontainer"] 6 | 7 | version = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | authors = { workspace = true } 11 | repository = { workspace = true } 12 | 13 | [dependencies] 14 | databend-driver = { workspace = true } 15 | 16 | bytes = "1" 17 | clap = { version = "4.4", features = ["derive", "env"] } 18 | serde = { version = "1.0", features = ["derive"] } 19 | serde_json = { version = "1.0", default-features = false, features = ["std"] } 20 | tokio = { version = "1.34", features = [ 21 | "macros", 22 | "rt", 23 | "rt-multi-thread", 24 | "sync", 25 | "parking_lot", 26 | "full", 27 | ] } 28 | 29 | [[bin]] 30 | name = "ttc-server" 31 | path = "src/server.rs" 32 | doctest = false 33 | test = false 34 | 35 | [[bin]] 36 | name = "ttc-client" 37 | path = "src/client.rs" 38 | doctest = false 39 | test = false 40 | -------------------------------------------------------------------------------- /ttc/Dockerfile: -------------------------------------------------------------------------------- 1 | # Dockerfile 2 | FROM rust:1.83.0-bullseye as builder 3 | 4 | # Set the current working directory inside the container 5 | WORKDIR /usr/src 6 | 7 | # Copy the source code into the container 8 | COPY . . 9 | 10 | # Build the application 11 | RUN cargo build --bin ttc-server --package databend-ttc --release 12 | 13 | FROM debian:bullseye-slim 14 | COPY --from=builder /usr/src/target/release/ttc-server /usr/local/bin/ttc-server 15 | 16 | # Set the startup command 17 | # docker run --net host datafuselabs/ttc-rust -P 9092 --databend_dsn databend://default:@127.0.0.1:8000 18 | CMD ["ttc-server"] 19 | -------------------------------------------------------------------------------- /ttc/README.md: -------------------------------------------------------------------------------- 1 | ## Rust Tcp Test Container 2 | -------------------------------------------------------------------------------- /ttc/src/client.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Datafuse Labs 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use std::io::Write; 16 | 17 | use tokio::io::{AsyncReadExt, AsyncWriteExt}; 18 | use tokio::net::TcpStream; 19 | 20 | #[tokio::main] 21 | async fn main() -> Result<(), Box<dyn std::error::Error>> { 22 | // Connect to the server 23 | let mut stream = TcpStream::connect("127.0.0.1:9902").await?; 24 | 25 | loop { 26 | // Prompt for a SQL statement 27 | let mut sql = String::new(); 28 | print!("> "); 29 | std::io::stdout().flush().unwrap(); // Make sure the prompt is immediately displayed 30 | std::io::stdin().read_line(&mut sql).unwrap(); 31 | 32 | // If the input is "exit" or "quit", break the loop 33 | if sql.trim() == "exit" || sql.trim() == "quit" { 34 | break; 35 | } 36 | 37 | let len = sql.len() as u32; 38 | let len_bytes = len.to_be_bytes(); 39 | 40 | // Create a buffer with the length prefix followed by the SQL itself 41 | let mut buffer = Vec::with_capacity(4 + sql.len()); 42 | buffer.extend_from_slice(&len_bytes); 43 | buffer.extend_from_slice(sql.as_bytes()); 44 | 45 | // Send the SQL 46 | stream.write_all(&buffer).await?; 47 | 48 | let mut len_bytes = [0; 4]; 49 | stream.read_exact(&mut len_bytes).await?; 50 | let len = u32::from_be_bytes(len_bytes) as usize; 51 | 52 | // Read the response 53 | let mut response = vec![0; len]; 54 | stream.read_exact(&mut response).await?; 55 | 56 | let response: Response = serde_json::from_reader(response.as_slice()).unwrap(); 57 | // Print the response 58 | println!("response: {:?}", response); 59 | } 60 | 61 | Ok(()) 62 | } 63 | 64 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 65 | struct Response { 66 | values: Vec<Vec<Option<String>>>, 67 | error: Option<String>, 68 | } 69 | --------------------------------------------------------------------------------
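
The client above implicitly pins down the TTC wire protocol: each request is a 4-byte big-endian length prefix followed by UTF-8 SQL text, and each response is a 4-byte big-endian length prefix followed by a JSON-encoded Response. The sketch below shows the other end of that framing. It is not the real ttc-server (ttc/src/server.rs, not shown here, runs the SQL through databend-driver); the handle function and its echo logic are stand-ins to illustrate the framing only, assuming the same Response shape and the 127.0.0.1:9902 address hard-coded in the client.

use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};

#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct Response {
    values: Vec<Vec<Option<String>>>,
    error: Option<String>,
}

async fn handle(mut stream: TcpStream) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    loop {
        // Read the 4-byte big-endian length prefix; an error here usually
        // means the client closed the connection, so just stop serving it.
        let mut len_bytes = [0u8; 4];
        if stream.read_exact(&mut len_bytes).await.is_err() {
            return Ok(());
        }
        let len = u32::from_be_bytes(len_bytes) as usize;

        // Read exactly `len` bytes of UTF-8 SQL.
        let mut sql = vec![0u8; len];
        stream.read_exact(&mut sql).await?;
        let sql = String::from_utf8(sql)?;

        // The real server would execute `sql` via databend-driver here;
        // this stand-in echoes it back as a single-cell result set.
        let resp = Response {
            values: vec![vec![Some(sql.trim().to_string())]],
            error: None,
        };

        // Reply with the same framing: 4-byte big-endian length, then JSON.
        let payload = serde_json::to_vec(&resp)?;
        stream.write_all(&(payload.len() as u32).to_be_bytes()).await?;
        stream.write_all(&payload).await?;
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 9902 matches the address hard-coded in ttc/src/client.rs.
    let listener = TcpListener::bind("127.0.0.1:9902").await?;
    loop {
        let (stream, _) = listener.accept().await?;
        tokio::spawn(handle(stream));
    }
}

Pointed at this sketch, the interactive client above would print whatever statement you type back in response.values, which is enough to verify the length-prefixed JSON framing end to end.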