├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── build_metabase_duckdb_driver.yaml │ └── build_metabase_duckdb_driver_musllinux.yaml ├── .gitignore ├── .gitmodules ├── LICENSE.txt ├── README.md ├── ci ├── metabase_drivers_deps.patch └── metabase_test_deps.patch ├── deps.edn ├── resources └── metabase-plugin.yaml ├── src └── metabase │ └── driver │ ├── duckdb.clj │ └── motherduck.clj └── test └── metabase └── test └── data ├── duckdb.clj └── motherduck.clj /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # [Choice] Java version (use -bullseye variants on local arm64/Apple Silicon): 11, 17, 11-bullseye, 17-bullseye, 11-buster, 17-buster 2 | ARG VARIANT=17 3 | FROM mcr.microsoft.com/vscode/devcontainers/java:0-${VARIANT} 4 | 5 | # [Optional] Clojure version 6 | ARG CLOJURE_VERSION=1.10.3 7 | 8 | # [Optional] Clojure tools version 9 | ARG CLOJURE_CLI_VERSION=1.10.3.1075 10 | 11 | # [Optional] Leiningen version 12 | ARG LEININGEN_VERSION="stable" 13 | 14 | # [Optional] POLYLITH version 15 | ARG POLYLITH_VERSION="0.2.13-alpha" 16 | 17 | # [Optional] Boot version 18 | ENV BOOT_VERSION=2.8.3 19 | 20 | # [Optional] Clojure version used by Boot 21 | ENV BOOT_CLOJURE_VERSION=${CLOJURE_VERSION} 22 | 23 | # [Option] Install Clojure CLI tool 24 | ARG INSTALL_CLOJURE_CLI="true" 25 | 26 | # [Option] Install Boot 27 | ARG INSTALL_BOOT="true" 28 | 29 | # [Option] Install Leiningen 30 | ARG INSTALL_LEININGEN="true" 31 | 32 | # [Option] Install Polylith 33 | ARG INSTALL_POLYLITH="true" 34 | 35 | RUN if [ "${INSTALL_CLOJURE_CLI}" = "true" ]; then \ 36 | apt-get update \ 37 | && apt-get -y install rlwrap \ 38 | && curl -OL "https://download.clojure.org/install/linux-install-${CLOJURE_CLI_VERSION}.sh" \ 39 | && chmod +x linux-install-${CLOJURE_CLI_VERSION}.sh \ 40 | && /linux-install-${CLOJURE_CLI_VERSION}.sh \ 41 | && rm /linux-install-${CLOJURE_CLI_VERSION}.sh \ 42 | && su vscode -c "clj --version"; fi 43 | 44 | RUN if [ "${INSTALL_BOOT}" = "true" ]; then \ 45 | curl -OL "https://github.com/boot-clj/boot-bin/releases/download/latest/boot.sh" \ 46 | && chmod +x boot.sh \ 47 | && mv boot.sh /usr/local/sbin/boot \ 48 | && su vscode -c "boot -u"; fi 49 | 50 | RUN if [ "${INSTALL_LEININGEN}" = "true" ]; then \ 51 | curl -OL "https://raw.githubusercontent.com/technomancy/leiningen/${LEININGEN_VERSION}/bin/lein" \ 52 | && chmod +x lein \ 53 | && mv lein /usr/local/sbin; fi 54 | 55 | # Cache Clojure and dependencies 56 | RUN if [ "${INSTALL_LEININGEN}" = "true" ]; then \ 57 | su vscode -c " cd ~ \ 58 | && echo '(defproject dummy \"\" :dependencies [[org.clojure/clojure \"'${CLOJURE_VERSION}'\"]])' > project.clj \ 59 | && lein deps \ 60 | && rm project.clj"; fi 61 | 62 | RUN if [ "${INSTALL_POLYLITH}" = "true" ]; then \ 63 | curl -OL "https://github.com/polyfy/polylith/releases/download/v${POLYLITH_VERSION}/poly-${POLYLITH_VERSION}.jar" \ 64 | && mkdir -p /usr/local/polylith \ 65 | && mv poly-$POLYLITH_VERSION.jar /usr/local/polylith \ 66 | && echo '#!/bin/sh\nARGS=""\nwhile [ "$1" != "" ] ; do\n ARGS="$ARGS $1"\n shift\ndone\nexec "java" $JVM_OPTS "-jar" "/usr/local/polylith/poly-'$POLYLITH_VERSION'.jar" $ARGS\n' > /usr/local/sbin/poly \ 67 | && chmod +x /usr/local/sbin/poly \ 68 | && /usr/local/sbin/poly version; fi 69 | 70 | # [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 71 | ARG NODE_VERSION="lts/*" 72 | RUN if [ "${NODE_VERSION}" != "none" ]; 
then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi 73 | 74 | # [Optional] Uncomment this section to install additional OS packages. 75 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 76 | # && apt-get -y install --no-install-recommends 77 | 78 | # [Optional] Uncomment this line to install global node packages. 79 | # RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 80 | 81 | # Clean up package lists 82 | RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* 83 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/clojure 3 | { 4 | "name": "Clojure (Community)", 5 | "build": { 6 | "dockerfile": "Dockerfile", 7 | "args": { 8 | // Update the VARIANT arg to pick a Java version. 9 | // Append -bullseye or -buster to pin to an OS version. 10 | // Use the -bullseye variants on local arm64/Apple Silicon. 11 | "VARIANT": "17", 12 | // Options 13 | "CLOJURE_VERSION": "1.10.3", 14 | "INSTALL_CLOJURE_CLI": "true", 15 | "INSTALL_BOOT": "true", 16 | "INSTALL_LEININGEN": "true", 17 | "INSTALL_POLYLITH": "true", 18 | "NODE_VERSION": "lts/*" 19 | } 20 | }, 21 | 22 | // Configure tool-specific properties. 23 | "customizations": { 24 | // Configure properties specific to VS Code. 25 | "vscode": { 26 | // Set *default* container specific settings.json values on container create. 27 | "settings": { 28 | }, 29 | 30 | // Add the IDs of extensions you want installed when the container is created. 31 | "extensions": [ 32 | "vscjava.vscode-java-pack", 33 | "borkdude.clj-kondo", 34 | "betterthantomorrow.calva" 35 | ] 36 | } 37 | }, 38 | 39 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 40 | // "forwardPorts": [], 41 | 42 | // Use 'postCreateCommand' to run commands after the container is created. 43 | // "postCreateCommand": "java -version", 44 | 45 | // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 46 | "remoteUser": "vscode" 47 | } 48 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: report an issue 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Environment** 27 | - OS/Platform: 28 | - Metabase version: 29 | - Metabsae DuckDB driver version: 30 | 31 | **Additional context** 32 | Add any other context about the problem here. 
33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/workflows/build_metabase_duckdb_driver.yaml: -------------------------------------------------------------------------------- 1 | name: Build Metabase DuckDB Driver 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | 8 | workflow_dispatch: 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Clone Metabase repository 16 | uses: actions/checkout@v4 17 | with: 18 | repository: metabase/metabase 19 | path: metabase 20 | 21 | - name: Checkout driver code 22 | uses: actions/checkout@v4 23 | with: 24 | path: metabase/modules/drivers/duckdb 25 | 26 | - name: Set up JDK 21 27 | uses: actions/setup-java@v4 28 | with: 29 | distribution: 'temurin' 30 | java-version: '21' 31 | 32 | - name: Set up Node.js 33 | uses: actions/setup-node@v4 34 | with: 35 | node-version: 'lts/*' 36 | 37 | - name: Install Yarn 1.x 38 | run: | 39 | npm install -g yarn@1 40 | yarn --version 41 | 42 | - name: Set up Clojure 43 | uses: DeLaGuardo/setup-clojure@12.3 44 | with: 45 | cli: latest 46 | 47 | - name: Build driver 48 | working-directory: ./metabase 49 | run: | 50 | git apply ./modules/drivers/duckdb/ci/metabase_drivers_deps.patch 51 | chmod +x ./bin/build-driver.sh 52 | ./bin/build-driver.sh duckdb 53 | 54 | - name: Upload driver artifact 55 | uses: actions/upload-artifact@v4 56 | with: 57 | name: metabase-duckdb-driver 58 | path: metabase/resources/modules/duckdb.metabase-driver.jar 59 | if-no-files-found: error 60 | 61 | - name: Run integration tests against MotherDuck 62 | working-directory: ./metabase 63 | continue-on-error: true 64 | env: 65 | motherduck_token: ${{ secrets.motherduck_ci_user_token }} 66 | run: | 67 | git apply ./modules/drivers/duckdb/ci/metabase_test_deps.patch 68 | MB_EDITION=ee yarn build-static-viz 69 | DRIVERS=motherduck clojure -X:dev:drivers:drivers-dev:ee:ee-dev:test 70 | 71 | - name: Run integration tests against DuckDB local 72 | working-directory: ./metabase 73 | continue-on-error: true 74 | run: | 75 | DRIVERS=duckdb clojure -X:dev:drivers:drivers-dev:ee:ee-dev:test 76 | 77 | -------------------------------------------------------------------------------- /.github/workflows/build_metabase_duckdb_driver_musllinux.yaml: -------------------------------------------------------------------------------- 1 | name: Build DuckDB JDBC Musl and Metabase Driver 2 | on: 3 | push: 4 | branches: [ main ] 5 | pull_request: 6 | 7 | 8 | jobs: 9 | java-linux-amd64-musl: 10 | name: Java Musllinux (amd64) 11 | runs-on: ubuntu-latest 12 | container: 13 | image: alpine:latest 14 | env: 15 | 
GEN: ninja 16 | ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true 17 | DUCKDB_PLATFORM: linux_amd64_musl 18 | steps: 19 | - name: Prepare Alpine Container 20 | shell: sh 21 | run: | 22 | # Update package lists and install essential build tools 23 | apk update 24 | apk add --no-cache bash boost-dev build-base cmake gcc g++ git libstdc++ make maven ninja openjdk17 openjdk17-jdk pkgconfig 25 | 26 | rm -rf /var/cache/apk/* 27 | 28 | # Set Java environment variables 29 | echo "JAVA_HOME=/usr/lib/jvm/java-17-openjdk" >> $GITHUB_ENV 30 | echo "/usr/lib/jvm/java-17-openjdk/bin:${PATH}" >> $GITHUB_PATH 31 | 32 | - uses: actions/checkout@v3 33 | with: 34 | fetch-depth: 0 35 | ref: ${{ inputs.git_ref }} 36 | submodules: recursive 37 | 38 | - name: Build 39 | shell: bash 40 | working-directory: duckdb-java 41 | run: make release 42 | 43 | - name: Java Tests 44 | shell: bash 45 | if: ${{ inputs.skip_tests != 'true' }} 46 | working-directory: duckdb-java 47 | run: make test 48 | 49 | - uses: actions/upload-artifact@v4 50 | with: 51 | name: java-linux-amd64-musl 52 | path: | 53 | duckdb-java/build/release/duckdb_jdbc.jar 54 | 55 | build-metabase-driver: 56 | name: Build Metabase Driver 57 | needs: java-linux-amd64-musl 58 | runs-on: ubuntu-latest 59 | 60 | steps: 61 | - name: Clone Metabase repository 62 | uses: actions/checkout@v4 63 | with: 64 | repository: metabase/metabase 65 | path: metabase 66 | 67 | - name: Checkout driver code 68 | uses: actions/checkout@v4 69 | with: 70 | path: metabase/modules/drivers/duckdb 71 | 72 | - name: Set up JDK 21 73 | uses: actions/setup-java@v4 74 | with: 75 | distribution: 'temurin' 76 | java-version: '21' 77 | 78 | - name: Set up Maven 79 | uses: stCarolas/setup-maven@v4.5 80 | with: 81 | maven-version: '3.9.6' 82 | 83 | - name: Set up Node.js 84 | uses: actions/setup-node@v4 85 | with: 86 | node-version: 'lts/*' 87 | 88 | - name: Install Yarn 1.x 89 | run: | 90 | npm install -g yarn@1 91 | yarn --version 92 | 93 | - name: Set up Clojure 94 | uses: DeLaGuardo/setup-clojure@12.3 95 | with: 96 | cli: latest 97 | 98 | - name: Download JDBC Driver 99 | uses: actions/download-artifact@v4 100 | with: 101 | name: java-linux-amd64-musl 102 | path: ./jdbc-driver 103 | 104 | - name: Install DuckDB JDBC Driver (Musllinux) 105 | run: | 106 | VERSION=1.2.0 107 | mvn install:install-file \ 108 | -Dfile=./jdbc-driver/duckdb_jdbc.jar \ 109 | -DgroupId=org.duckdb \ 110 | -DartifactId=duckdb_jdbc \ 111 | -Dversion=$VERSION \ 112 | -Dpackaging=jar 113 | 114 | - name: Build driver 115 | working-directory: ./metabase 116 | run: | 117 | git apply ./modules/drivers/duckdb/ci/metabase_drivers_deps.patch 118 | chmod +x ./bin/build-driver.sh 119 | ./bin/build-driver.sh duckdb 120 | 121 | - name: Upload driver artifact 122 | uses: actions/upload-artifact@v4 123 | with: 124 | name: metabase-duckdb-driver-for-alpine 125 | path: metabase/resources/modules/duckdb.metabase-driver.jar 126 | if-no-files-found: error -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | \#*\# 2 | .\#* 3 | /target 4 | /.nrepl-port 5 | /bin 6 | .classpath 7 | .project 8 | .idea 9 | *.iml 10 | duckdb-driver.iml 11 | .lein-repl-history 12 | .cpcache 13 | .clj-kondo 14 | .lsp 15 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "duckdb-java"] 2 | path = 
duckdb-java 3 | url = https://github.com/motherduckdb/duckdb-java.git 4 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Metabase DuckDB Driver 2 | 3 | The Metabase DuckDB driver allows [Metabase](https://www.metabase.com/) ([GitHub](https://github.com/metabase/metabase)) to use the embedded [DuckDB](https://duckdb.org/) ([GitHub](https://github.com/duckdb/duckdb)) database. 4 | 5 | This driver is supported by [MotherDuck](https://motherduck.com/). If you would like to open a GitHub issue to report a bug or request new features, or would like to open a pull request against it, please do so in this repository, and not in the core Metabase GitHub repository. 6 | 7 | ## DuckDB 8 | 9 | [DuckDB](https://duckdb.org) is an in-process SQL OLAP database management system. It does not run as a separate process, but is embedded completely within a host process, so it **embeds into the Metabase process**, much like SQLite. 10 | 11 | ## Obtaining the DuckDB Metabase driver 12 | 13 | ### Where to find it 14 | 15 | [Click here](https://github.com/MotherDuck-Open-Source/metabase_duckdb_driver/releases/latest) to view the latest release of the Metabase DuckDB driver; click the link to download `duckdb.metabase-driver.jar`. 16 | 17 | You can find past releases of the DuckDB driver [here](https://github.com/MotherDuck-Open-Source/metabase_duckdb_driver/releases), and releases earlier than 0.2.6 (corresponding to DuckDB v0.10.0) [here](https://github.com/AlexR2D2/metabase_duckdb_driver/releases). 18 | 19 | ### How to Install it 20 | 21 | Metabase will automatically make the DuckDB driver available if it finds the driver in the Metabase plugins directory when it starts up. 22 | All you need to do is create the directory `plugins` (if it's not already there), move the JAR you just downloaded into it, and restart Metabase. 23 | 24 | By default, the plugins directory is called `plugins`, and lives in the same directory as the Metabase JAR.
25 | 26 | For example, if you're running Metabase from a directory called `/app/`, you should move the DuckDB driver to `/app/plugins/`: 27 | 28 | ```bash 29 | # example directory structure for running Metabase with DuckDB support 30 | /app/metabase.jar 31 | /app/plugins/duckdb.metabase-driver.jar 32 | ``` 33 | 34 | If you're running Metabase from the Mac App, the plugins directory defaults to `~/Library/Application Support/Metabase/Plugins/`: 35 | 36 | ```bash 37 | # example directory structure for running Metabase Mac App with DuckDB support 38 | /Users/you/Library/Application Support/Metabase/Plugins/duckdb.metabase-driver.jar 39 | ``` 40 | 41 | If you are running the Docker image or you want to use another directory for plugins, you should specify a custom plugins directory by setting the environment variable `MB_PLUGINS_DIR`. 42 | 43 | ## Configuring 44 | 45 | Once you've started up Metabase, go to add a database and select "DuckDB". Provide the path to the DuckDB database file. To use DuckDB in in-memory mode without any database file, you can specify `:memory:` as the database path. 46 | 47 | ## Parquet 48 | 49 | Does it make sense to run DuckDB in in-memory mode, without any data, in a system like Metabase? Of course yes! 50 | DuckDB allows you [to run SQL queries directly on Parquet files](https://duckdb.org/2021/06/25/querying-parquet.html), so you don't even need a DuckDB database file. 51 | 52 | For example (somewhere in the Metabase SQL query editor): 53 | 54 | ```sql 55 | -- DuckDB selected as source 56 | 57 | SELECT originalTitle, startYear, genres, numVotes, averageRating from '/Users/you/movies/title.basics.parquet' x 58 | JOIN (SELECT * from '/Users/you/movies/title.ratings.parquet') y ON x.tconst = y.tconst 59 | ORDER BY averageRating * numVotes DESC 60 | ``` 61 | 62 | ## Docker 63 | 64 | Unfortunately, the DuckDB plugin doesn't work in the default Alpine-based Metabase Docker container due to glibc problems. But thanks to [@ChrisH](https://github.com/ChrisH) and [@lucmartinon](https://github.com/lucmartinon) we have a simple Dockerfile for building a Debian-based Metabase Docker image in which the DuckDB plugin does work. 65 | 66 | ```dockerfile 67 | FROM openjdk:19-buster 68 | 69 | ENV MB_PLUGINS_DIR=/home/plugins/ 70 | 71 | ADD https://downloads.metabase.com/v0.52.4/metabase.jar /home 72 | ADD https://github.com/MotherDuck-Open-Source/metabase_duckdb_driver/releases/download/0.2.12/duckdb.metabase-driver.jar /home/plugins/ 73 | 74 | RUN chmod 744 /home/plugins/duckdb.metabase-driver.jar 75 | 76 | CMD ["java", "-jar", "/home/metabase.jar"] 77 | ``` 78 | 79 | > Note: check that you are using the latest `metabase` and `duckdb.metabase-driver` versions. See the [Where to find it](#where-to-find-it) section for version details. 80 | 81 | Build the image: 82 | ```bash 83 | docker build . --tag metaduck:latest 84 | ``` 85 | 86 | Then create the container: 87 | ```bash 88 | docker run --name metaduck -d -p 80:3000 -m 2GB -e MB_PLUGINS_DIR=/home/plugins metaduck 89 | ``` 90 | 91 | Open Metabase in the browser: http://localhost 92 | 93 | ### Using DB file with Docker 94 | 95 | In order to use a DuckDB database file from your local host in the Docker container, you should mount the folder containing your DB file into the container: 96 | 97 | ```bash 98 | docker run -v /dir_with_my_duck_db_file_in_the_local_host/:/container/directory ...
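# for example, if your .duckdb file lives in /Users/you/duckdb_data on the host, the full command might look like:
# docker run --name metaduck -d -p 80:3000 -e MB_PLUGINS_DIR=/home/plugins -v /Users/you/duckdb_data/:/container/directory metaduck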
99 | ``` 100 | 101 | Next, on the DuckDB settings page in the Metabase Web UI, you can set your DB file path like this: 102 | 103 | ```bash 104 | /container/directory/ 105 | ``` 106 | 107 | In the same way, you can mount a directory of Parquet files into the container and run SQL queries against those files using their path inside the container. 108 | 109 | ## How to build the DuckDB .jar plugin yourself 110 | 111 | 1. Install VS Code with the [DevContainer](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension (see [details](https://code.visualstudio.com/docs/devcontainers/containers)) 112 | 2. Create some folder, let's say `duckdb_plugin` 113 | 3. Clone the `metabase_duckdb_driver` repository into the `duckdb_plugin` folder 114 | 4. Copy `.devcontainer` from `duckdb_plugin/metabase_duckdb_driver` into `duckdb_plugin` 115 | 5. Clone the `metabase` repository at the version you need into the `duckdb_plugin` folder 116 | 6. Now the content of the `duckdb_plugin` folder should look like this: 117 | ``` 118 | .. 119 | .devcontainer 120 | metabase 121 | metabase_duckdb_driver 122 | ``` 123 | 7. Add a duckdb record to the deps file `duckdb_plugin/metabase/modules/drivers/deps.edn` 124 | The end of the file should look like this: 125 | ``` 126 | ... 127 | metabase/sqlserver {:local/root "sqlserver"} 128 | metabase/vertica {:local/root "vertica"} 129 | metabase/duckdb {:local/root "duckdb"}}} <- add this! 130 | ``` 131 | 8. Set the DuckDB version you need in `duckdb_plugin/metabase_duckdb_driver/deps.edn` 132 | 9. Create the duckdb driver directory in the cloned metabase source code: 133 | ``` 134 | > mkdir -p duckdb_plugin/metabase/modules/drivers/duckdb 135 | ``` 136 | 10. Copy the `metabase_duckdb_driver` source code into the created directory: 137 | ``` 138 | > cp -rf duckdb_plugin/metabase_duckdb_driver/* duckdb_plugin/metabase/modules/drivers/duckdb/ 139 | ``` 140 | 11. Open the `duckdb_plugin` folder in VS Code using the DevContainer extension (VS Code will offer to reopen the folder in the devcontainer). Wait until everything is loaded. At the end you will get a terminal opened directly in VS Code, something like this: 141 | ``` 142 | vscode ➜ /workspaces/duckdb_plugin $ 143 | ``` 144 | 12. Build the plugin: 145 | ``` 146 | vscode ➜ /workspaces/duckdb_plugin $ cd metabase 147 | vscode ➜ /workspaces/duckdb_plugin/metabase $ clojure -X:build:drivers:build/driver :driver :duckdb 148 | ``` 149 | 13. The DuckDB plugin jar file will be generated at `duckdb_plugin/metabase/resources/modules/duckdb.metabase-driver.jar` 150 | 151 | 152 | ## Acknowledgement 153 | 154 | Thanks to [@AlexR2D2](https://github.com/AlexR2D2) for originally authoring this connector.
-------------------------------------------------------------------------------- /ci/metabase_drivers_deps.patch: -------------------------------------------------------------------------------- 1 | diff --git a/modules/drivers/deps.edn b/modules/drivers/deps.edn 2 | index 98f65a210d..e0c8da9d68 100644 3 | --- a/modules/drivers/deps.edn 4 | +++ b/modules/drivers/deps.edn 5 | @@ -23,4 +23,6 @@ 6 | metabase/sqlite {:local/root "sqlite"} 7 | metabase/sqlserver {:local/root "sqlserver"} 8 | metabase/starburst {:local/root "starburst"} 9 | - metabase/vertica {:local/root "vertica"}}} 10 | + metabase/vertica {:local/root "vertica"} 11 | + metabase/duckdb {:local/root "duckdb"} 12 | + }} 13 | -------------------------------------------------------------------------------- /ci/metabase_test_deps.patch: -------------------------------------------------------------------------------- 1 | diff --git a/deps.edn b/deps.edn 2 | index 67e3128c77..c656572f23 100644 3 | --- a/deps.edn 4 | +++ b/deps.edn 5 | @@ -430,7 +430,9 @@ 6 | "modules/drivers/sqlite/test" 7 | "modules/drivers/sqlserver/test" 8 | "modules/drivers/starburst/test" 9 | - "modules/drivers/vertica/test"]} 10 | + "modules/drivers/vertica/test" 11 | + "modules/drivers/duckdb/test" 12 | + ]} 13 | 14 | ;;; Linters 15 | 16 | -------------------------------------------------------------------------------- /deps.edn: -------------------------------------------------------------------------------- 1 | {:paths 2 | ["src" "resources"] 3 | 4 | :deps 5 | {org.duckdb/duckdb_jdbc {:mvn/version "1.2.2.0"}}} 6 | -------------------------------------------------------------------------------- /resources/metabase-plugin.yaml: -------------------------------------------------------------------------------- 1 | info: 2 | name: Metabase DuckDB Driver 3 | version: 1.2.1 4 | description: Allows Metabase to connect to DuckDB databases. 
5 | contact-info: 6 | name: MotherDuck 7 | address: support@motherduck.com 8 | driver: 9 | name: duckdb 10 | display-name: DuckDB 11 | lazy-load: true 12 | parent: sql-jdbc 13 | connection-properties: 14 | - name: database_file 15 | display-name: Database file 16 | placeholder: /home/to/the.duckdb (or ':memory:' for 'in memory' mode) 17 | required: false 18 | - name: read_only 19 | display-name: Establish a read-only connection 20 | default: false 21 | type: boolean 22 | - name: old_implicit_casting 23 | display-name: Use DuckDB old_implicit_casting option 24 | default: true 25 | type: boolean 26 | - name: allow_unsigned_extensions 27 | display-name: Allow loading unsigned extensions 28 | default: false 29 | type: boolean 30 | - name: memory_limit 31 | display-name: Limit on the amount of memory that DuckDB can use (e.g., 1GB), defaults to 80% of RAM 32 | required: false 33 | - name: azure_transport_option_type 34 | display-name: Azure transport option type 35 | required: false 36 | - name: motherduck_token 37 | display-name: Motherduck Token 38 | type: secret 39 | secret-kind: password 40 | required: false 41 | - advanced-options-start 42 | - merge: 43 | - additional-options 44 | - display-name: Additional DuckDB connection string options 45 | placeholder: 'http_keep_alive=false' 46 | 47 | init: 48 | - step: load-namespace 49 | namespace: metabase.driver.duckdb 50 | - step: register-jdbc-driver 51 | class: org.duckdb.DuckDBDriver 52 | -------------------------------------------------------------------------------- /src/metabase/driver/duckdb.clj: -------------------------------------------------------------------------------- 1 | (ns metabase.driver.duckdb 2 | (:require 3 | [clojure.java.jdbc :as jdbc] 4 | [clojure.string :as str] 5 | [java-time.api :as t] 6 | [medley.core :as m] 7 | [metabase.driver :as driver] 8 | [metabase.driver.sql-jdbc.common :as sql-jdbc.common] 9 | [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn] 10 | [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] 11 | [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync] 12 | [metabase.driver.sql.query-processor :as sql.qp] 13 | [metabase.util.honey-sql-2 :as h2x] 14 | [metabase.util.log :as log]) 15 | (:import 16 | (java.sql 17 | Connection 18 | PreparedStatement 19 | ResultSet 20 | ResultSetMetaData 21 | Statement 22 | Time 23 | Types) 24 | (java.time LocalDate LocalTime OffsetTime) 25 | (java.time.temporal ChronoField))) 26 | 27 | (set! *warn-on-reflection* true) 28 | 29 | (driver/register! :duckdb, :parent :sql-jdbc) 30 | 31 | (doseq [[feature supported?] {:metadata/key-constraints false ;; fetching metadata about foreign key constraints is not supported, but JOINs generally are. 32 | :upload-with-auto-pk false 33 | :datetime-diff true}] 34 | (defmethod driver/database-supports? [:duckdb feature] [_driver _feature _db] supported?)) 35 | 36 | (defmethod sql-jdbc.conn/data-source-name :duckdb 37 | [_driver details] 38 | ((some-fn :database_file) 39 | details)) 40 | 41 | (def premium-features-namespace 42 | (try 43 | (require '[metabase.premium-features.core :as premium-features]) ;; For Metabase 0.52 or after 44 | 'metabase.premium-features.core 45 | (catch Exception _ 46 | (try 47 | (require '[metabase.public-settings.premium-features :as premium-features]) ;; For Metabase < 0.52 48 | 'metabase.public-settings.premium-features 49 | (catch Exception e 50 | (throw (ex-info "Could not load either premium features namespace" 51 | {:error e}))))))) 52 | 53 | (defn- is-hosted? 
[] 54 | (let [premium-feature-ns (find-ns premium-features-namespace)] 55 | ((ns-resolve premium-feature-ns 'is-hosted?)))) 56 | 57 | (defn- get-motherduck-token [details-map] 58 | (try 59 | ;; For Metabase 0.52 or after 60 | ((requiring-resolve 'metabase.models.secret/value-as-string) :duckdb details-map "motherduck_token") 61 | (catch Exception _ 62 | ;; For Metabase < 0.52 63 | (or (-> ((requiring-resolve 'metabase.models.secret/db-details-prop->secret-map) details-map "motherduck_token") 64 | ((requiring-resolve 'metabase.models.secret/value->string))) 65 | ((requiring-resolve 'metabase.models.secret/get-secret-string) details-map "motherduck_token"))))) 66 | 67 | (defn- database-file-path-split [database_file] 68 | (let [url-parts (str/split database_file #"\?")] 69 | (if (= 2 (count url-parts)) 70 | (let [database-file (first url-parts) 71 | additional-options (second url-parts)] 72 | [database-file additional-options]) 73 | [database_file ""]))) 74 | 75 | (defn- jdbc-spec 76 | "Creates a spec for `clojure.java.jdbc` to use for connecting to DuckDB via JDBC from the given `opts`" 77 | [{:keys [database_file, read_only, allow_unsigned_extensions, old_implicit_casting, 78 | motherduck_token, memory_limit, azure_transport_option_type, attach_mode], :as details}] 79 | (let [[database_file_base database_file_additional_options] (database-file-path-split database_file)] 80 | (-> details 81 | (merge 82 | {:classname "org.duckdb.DuckDBDriver" 83 | :subprotocol "duckdb" 84 | :subname (or database_file "") 85 | "duckdb.read_only" (str read_only) 86 | "custom_user_agent" (str "metabase" (if (is-hosted?) " metabase-cloud" "")) 87 | "temp_directory" (str database_file_base ".tmp") 88 | "jdbc_stream_results" "true" 89 | :TimeZone "UTC"} 90 | (when old_implicit_casting 91 | {"old_implicit_casting" (str old_implicit_casting)}) 92 | (when memory_limit 93 | {"memory_limit" (str memory_limit)}) 94 | (when azure_transport_option_type 95 | {"azure_transport_option_type" (str azure_transport_option_type)}) 96 | (when allow_unsigned_extensions 97 | {"allow_unsigned_extensions" (str allow_unsigned_extensions)}) 98 | (when (seq (re-find #"^md:" database_file)) 99 | ;; attach_mode option is not settable by the user, it's always single mode when 100 | ;; using motherduck, but in tests we need to be able to connect to motherduck in 101 | ;; workspace mode, so it's handled here. 102 | {"motherduck_attach_mode" (or attach_mode "single")}) ;; when connecting to MotherDuck, explicitly connect to a single database 103 | (when (seq motherduck_token) ;; Only configure the option if token is provided 104 | {"motherduck_token" motherduck_token}) 105 | (sql-jdbc.common/additional-options->map (:additional-options details) :url) 106 | (sql-jdbc.common/additional-options->map database_file_additional_options :url)) 107 | ;; remove fields from the metabase config that do not directly go into the jdbc spec 108 | (dissoc :database_file :read_only :port :engine :allow_unsigned_extensions 109 | :old_implicit_casting :motherduck_token :memory_limit :azure_transport_option_type 110 | :advanced-options :additional-options :attach_mode)))) 111 | 112 | (defn- remove-keys-with-prefix [details prefix] 113 | (apply dissoc details (filter #(str/starts-with? 
(name %) prefix) (keys details)))) 114 | 115 | (defmethod sql-jdbc.conn/connection-details->spec :duckdb 116 | [_ details-map] 117 | (-> details-map 118 | (merge {:motherduck_token (get-motherduck-token details-map)}) 119 | (remove-keys-with-prefix "motherduck_token-") 120 | jdbc-spec)) 121 | 122 | (defmethod sql-jdbc.execute/do-with-connection-with-options :duckdb 123 | [driver db-or-id-or-spec {:keys [^String session-timezone report-timezone] :as options} f] 124 | ;; First use the parent implementation to get the connection with standard options 125 | (sql-jdbc.execute/do-with-resolved-connection 126 | driver 127 | db-or-id-or-spec 128 | options 129 | (fn [^Connection conn] 130 | ;; Additionally set timezone if provided and we're not in a recursive connection 131 | (when (and (or report-timezone session-timezone) (not (sql-jdbc.execute/recursive-connection?))) 132 | (let [timezone-to-use (or report-timezone session-timezone)] 133 | (try 134 | (with-open [stmt (.createStatement conn)] 135 | (.execute stmt (format "SET TimeZone='%s';" timezone-to-use))) 136 | (catch Throwable e 137 | (log/debugf e "Error setting timezone '%s' for DuckDB database" timezone-to-use))))) 138 | ;; Call the function with the configured connection 139 | (f conn)))) 140 | 141 | (defmethod sql-jdbc.execute/set-timezone-sql :duckdb [_] 142 | "SET GLOBAL TimeZone=%s;") 143 | 144 | (def ^:private database-type->base-type 145 | (sql-jdbc.sync/pattern-based-database-type->base-type 146 | [[#"BOOLEAN" :type/Boolean] 147 | [#"BOOL" :type/Boolean] 148 | [#"LOGICAL" :type/Boolean] 149 | [#"HUGEINT" :type/BigInteger] 150 | [#"UBIGINT" :type/BigInteger] 151 | [#"BIGINT" :type/BigInteger] 152 | [#"INT8" :type/BigInteger] 153 | [#"LONG" :type/BigInteger] 154 | [#"INT4" :type/Integer] 155 | [#"SIGNED" :type/Integer] 156 | [#"INT2" :type/Integer] 157 | [#"SHORT" :type/Integer] 158 | [#"INT1" :type/Integer] 159 | [#"UINTEGER" :type/Integer] 160 | [#"USMALLINT" :type/Integer] 161 | [#"UTINYINT" :type/Integer] 162 | [#"INTEGER" :type/Integer] 163 | [#"SMALLINT" :type/Integer] 164 | [#"TINYINT" :type/Integer] 165 | [#"INT" :type/Integer] 166 | [#"DECIMAL" :type/Decimal] 167 | [#"DOUBLE" :type/Float] 168 | [#"FLOAT8" :type/Float] 169 | [#"NUMERIC" :type/Float] 170 | [#"REAL" :type/Float] 171 | [#"FLOAT4" :type/Float] 172 | [#"FLOAT" :type/Float] 173 | [#"VARCHAR" :type/Text] 174 | [#"BPCHAR" :type/Text] 175 | [#"CHAR" :type/Text] 176 | [#"TEXT" :type/Text] 177 | [#"STRING" :type/Text] 178 | [#"JSON" :type/JSON] 179 | [#"BLOB" :type/*] 180 | [#"BYTEA" :type/*] 181 | [#"VARBINARY" :type/*] 182 | [#"BINARY" :type/*] 183 | [#"UUID" :type/UUID] 184 | [#"TIMESTAMPTZ" :type/DateTimeWithTZ] 185 | [#"TIMESTAMP WITH TIME ZONE" :type/DateTimeWithTZ] 186 | [#"DATETIME" :type/DateTime] 187 | [#"TIMESTAMP_S" :type/DateTime] 188 | [#"TIMESTAMP_MS" :type/DateTime] 189 | [#"TIMESTAMP_NS" :type/DateTime] 190 | [#"TIMESTAMP" :type/DateTime] 191 | [#"DATE" :type/Date] 192 | [#"TIME" :type/Time] 193 | [#"GEOMETRY" :type/*]])) 194 | 195 | (defmethod sql-jdbc.sync/database-type->base-type :duckdb 196 | [_ field-type] 197 | (database-type->base-type field-type)) 198 | 199 | (defn- local-time-to-time [^LocalTime lt] 200 | (Time. 
(.getLong lt ChronoField/MILLI_OF_DAY))) 201 | 202 | (defmethod sql-jdbc.execute/set-parameter [:duckdb LocalDate] 203 | [_ ^PreparedStatement prepared-statement i t] 204 | (.setObject prepared-statement i (t/local-date-time t (t/local-time 0)))) 205 | 206 | (defmethod sql-jdbc.execute/set-parameter [:duckdb LocalTime] 207 | [_ ^PreparedStatement prepared-statement i t] 208 | (.setObject prepared-statement i (local-time-to-time t))) 209 | 210 | (defmethod sql-jdbc.execute/set-parameter [:duckdb OffsetTime] 211 | [_ ^PreparedStatement prepared-statement i ^OffsetTime t] 212 | (let [adjusted-tz (local-time-to-time (t/local-time (t/with-offset-same-instant t (t/zone-offset 0))))] 213 | (.setObject prepared-statement i adjusted-tz))) 214 | 215 | (defmethod sql-jdbc.execute/set-parameter [:duckdb String] 216 | [_ ^PreparedStatement prepared-statement i t] 217 | (.setObject prepared-statement i t)) 218 | 219 | ;; .getObject of the DuckDB JDBC driver (v0.4.0) doesn't handle java.time.LocalDate, only sql.Date, 220 | ;; so get the sql.Date from DuckDB and convert it to java.time.LocalDate 221 | (defmethod sql-jdbc.execute/read-column-thunk [:duckdb Types/DATE] 222 | [_ ^ResultSet rs _rsmeta ^Integer i] 223 | (fn [] 224 | (when-let [sqlDate (.getDate rs i)] 225 | (.toLocalDate sqlDate)))) 226 | 227 | ;; .getObject of the DuckDB JDBC driver (v0.4.0) doesn't handle java.time.LocalTime, 228 | ;; so read the TIME value as a string from DuckDB and parse it into java.time.LocalTime 229 | (defmethod sql-jdbc.execute/read-column-thunk [:duckdb Types/TIME] 230 | [_ ^ResultSet rs _rsmeta ^Integer i] 231 | (fn [] 232 | (when-let [sql-time-string (.getString rs i)] 233 | (LocalTime/parse sql-time-string)))) 234 | 235 | ;; override the sql-jdbc.execute/read-column-thunk for TIMESTAMP based on 236 | ;; the DuckDB JDBC implementation.
237 | (defmethod sql-jdbc.execute/read-column-thunk [:duckdb Types/TIMESTAMP] 238 | [_ ^ResultSet rs _ ^Integer i] 239 | (fn [] 240 | (when-let [t (.getTimestamp rs i)] 241 | (t/local-date-time t)))) 242 | 243 | ;; date processing for aggregation 244 | (defmethod driver/db-start-of-week :duckdb [_] :monday) 245 | 246 | (defmethod sql.qp/add-interval-honeysql-form :duckdb 247 | [driver hsql-form amount unit] 248 | (if (= unit :quarter) 249 | (recur driver hsql-form (* amount 3) :month) 250 | (h2x/+ (h2x/->timestamp-with-time-zone hsql-form) [:raw (format "(INTERVAL '%d' %s)" (int amount) (name unit))]))) 251 | 252 | (defmethod sql.qp/date [:duckdb :default] [_ _ expr] expr) 253 | (defmethod sql.qp/date [:duckdb :minute] [_ _ expr] [:date_trunc (h2x/literal :minute) expr]) 254 | (defmethod sql.qp/date [:duckdb :minute-of-hour] [_ _ expr] [:minute expr]) 255 | (defmethod sql.qp/date [:duckdb :hour] [_ _ expr] [:date_trunc (h2x/literal :hour) expr]) 256 | (defmethod sql.qp/date [:duckdb :hour-of-day] [_ _ expr] [:hour expr]) 257 | (defmethod sql.qp/date [:duckdb :day] [_ _ expr] [:date_trunc (h2x/literal :day) expr]) 258 | (defmethod sql.qp/date [:duckdb :day-of-month] [_ _ expr] [:day expr]) 259 | (defmethod sql.qp/date [:duckdb :day-of-year] [_ _ expr] [:dayofyear expr]) 260 | 261 | (defmethod sql.qp/date [:duckdb :day-of-week] 262 | [driver _ expr] 263 | (sql.qp/adjust-day-of-week driver [:isodow expr])) 264 | 265 | (defmethod sql.qp/date [:duckdb :week] 266 | [driver _ expr] 267 | (sql.qp/adjust-start-of-week driver (partial conj [:date_trunc] (h2x/literal :week)) expr)) 268 | 269 | (defmethod sql.qp/date [:duckdb :month] [_ _ expr] [:date_trunc (h2x/literal :month) expr]) 270 | (defmethod sql.qp/date [:duckdb :month-of-year] [_ _ expr] [:month expr]) 271 | (defmethod sql.qp/date [:duckdb :quarter] [_ _ expr] [:date_trunc (h2x/literal :quarter) expr]) 272 | (defmethod sql.qp/date [:duckdb :quarter-of-year] [_ _ expr] [:quarter expr]) 273 | (defmethod sql.qp/date [:duckdb :year] [_ _ expr] [:date_trunc (h2x/literal :year) expr]) 274 | 275 | (defmethod sql.qp/datetime-diff [:duckdb :year] 276 | [_driver _unit x y] 277 | [:datesub (h2x/literal :year) (h2x/cast "date" x) (h2x/cast "date" y)]) 278 | 279 | (defmethod sql.qp/datetime-diff [:duckdb :quarter] 280 | [_driver _unit x y] 281 | [:datesub (h2x/literal :quarter) (h2x/cast "date" x) (h2x/cast "date" y)]) 282 | 283 | (defmethod sql.qp/datetime-diff [:duckdb :month] 284 | [_driver _unit x y] 285 | [:datesub (h2x/literal :month) (h2x/cast "date" x) (h2x/cast "date" y)]) 286 | 287 | (defmethod sql.qp/datetime-diff [:duckdb :week] 288 | [_driver _unit x y] 289 | (h2x// [:datesub (h2x/literal :day) (h2x/cast "date" x) (h2x/cast "date" y)] 7)) 290 | 291 | (defmethod sql.qp/datetime-diff [:duckdb :day] 292 | [_driver _unit x y] 293 | [:datesub (h2x/literal :day) (h2x/cast "date" x) (h2x/cast "date" y)]) 294 | 295 | (defmethod sql.qp/datetime-diff [:duckdb :hour] 296 | [_driver _unit x y] 297 | [:datesub (h2x/literal :hour) x y]) 298 | 299 | (defmethod sql.qp/datetime-diff [:duckdb :minute] 300 | [_driver _unit x y] 301 | [:datesub (h2x/literal :minute) x y]) 302 | 303 | (defmethod sql.qp/datetime-diff [:duckdb :second] 304 | [_driver _unit x y] 305 | [:datesub (h2x/literal :second) x y]) 306 | 307 | (defmethod sql.qp/unix-timestamp->honeysql [:duckdb :seconds] 308 | [_ _ expr] 309 | [:to_timestamp (h2x/cast :DOUBLE expr)]) 310 | 311 | (defmethod sql.qp/->honeysql [:duckdb :regex-match-first] 312 | [driver [_ arg pattern]] 313 | [:regexp_extract 
(sql.qp/->honeysql driver arg) (sql.qp/->honeysql driver pattern)]) 314 | 315 | ;; empty result set for queries that return no result (like INSERT ...) 316 | (defn- empty-rs [] 317 | (reify 318 | ResultSet 319 | (getMetaData [_] 320 | (reify 321 | ResultSetMetaData 322 | (getColumnCount [_] 1) 323 | (getColumnLabel [_ _idx] "WARNING") 324 | (getColumnTypeName [_ _] "CHAR") 325 | (getColumnType [_ _] Types/CHAR))) 326 | (next [_] false) 327 | (close [_]))) 328 | 329 | ;; override the native execute-statement! to handle queries that don't return a ResultSet 330 | 331 | (defmethod sql-jdbc.execute/execute-statement! :duckdb 332 | [_driver ^Statement stmt ^String sql] 333 | (if (.execute stmt sql) 334 | (.getResultSet stmt) 335 | (empty-rs))) 336 | 337 | (defn- is_motherduck 338 | [database_file] 339 | (and (seq (re-find #"^md:" database_file)) (> (count database_file) 3))) 340 | 341 | (defn- motherduck_db_name 342 | [database_file] 343 | (subs database_file 3)) 344 | 345 | ;; Creates a new connection to the same DuckDB instance to avoid deadlocks during concurrent operations. 346 | ;; context: observed in tests that sometimes multiple syncs can be triggered on the same db at the same time 347 | ;; (and potentially the deletion of the local duckdb file), which results in bad_weak_ptr errors on the duckdb 348 | ;; connection object and deadlocks, so we create a lightweight clone of the connection to the same duckdb 349 | ;; instance to avoid deadlocks. 350 | (defn- clone-raw-connection [connection] 351 | (let [c3p0-conn (cast com.mchange.v2.c3p0.C3P0ProxyConnection connection) 352 | clone-method (.getMethod org.duckdb.DuckDBConnection "duplicate" (into-array Class [])) 353 | raw-conn-token com.mchange.v2.c3p0.C3P0ProxyConnection/RAW_CONNECTION 354 | args (into-array Object [])] 355 | (.rawConnectionOperation c3p0-conn clone-method raw-conn-token args))) 356 | 357 | (defmethod driver/describe-database :duckdb 358 | [driver database] 359 | (let 360 | [database_file (get (get database :details) :database_file) 361 | database_file (first (database-file-path-split database_file)) ;; remove additional options in connection string 362 | get_tables_query (str "select * from information_schema.tables " 363 | ;; Additionally filter by db_name if connecting to MotherDuck, since 364 | ;; multiple databases can be attached and information about the 365 | ;; non-target database will be present in information_schema.
366 | (if (is_motherduck database_file) 367 | (let [db_name_without_md (motherduck_db_name database_file)] 368 | (format "where table_catalog = '%s' " db_name_without_md)) 369 | ""))] 370 | {:tables 371 | (sql-jdbc.execute/do-with-connection-with-options 372 | driver database nil 373 | (fn [conn] 374 | (set 375 | (for [{:keys [table_schema table_name]} 376 | (jdbc/query {:connection (clone-raw-connection conn)} 377 | [get_tables_query])] 378 | {:name table_name :schema table_schema}))))})) 379 | 380 | (defmethod driver/describe-table :duckdb 381 | [driver database {table_name :name, schema :schema}] 382 | (let [database_file (get (get database :details) :database_file) 383 | database_file (first (database-file-path-split database_file)) ;; remove additional options in connection string 384 | get_columns_query (str 385 | (format 386 | "select * from information_schema.columns where table_name = '%s' and table_schema = '%s'" 387 | table_name schema) 388 | ;; Additionally filter by db_name if connecting to MotherDuck, since 389 | ;; multiple databases can be attached and information about the 390 | ;; non-target database will be present in information_schema. 391 | (if (is_motherduck database_file) 392 | (let [db_name_without_md (motherduck_db_name database_file)] 393 | (format "and table_catalog = '%s' " db_name_without_md)) 394 | ""))] 395 | {:name table_name 396 | :schema schema 397 | :fields 398 | (sql-jdbc.execute/do-with-connection-with-options 399 | driver database nil 400 | (fn [conn] (let [results (jdbc/query 401 | {:connection (clone-raw-connection conn)} 402 | [get_columns_query])] 403 | (set 404 | (for [[idx {column_name :column_name, data_type :data_type}] (m/indexed results)] 405 | {:name column_name 406 | :database-type data_type 407 | :base-type (sql-jdbc.sync/database-type->base-type driver (keyword data_type)) 408 | :database-position idx})))))})) 409 | 410 | ;; The 0.4.0 DuckDB JDBC .getImportedKeys method throws 'not implemented' yet. 411 | ;; There is no support of FK yet. 412 | (defmethod driver/describe-table-fks :duckdb 413 | [_ _ _] 414 | nil) 415 | -------------------------------------------------------------------------------- /src/metabase/driver/motherduck.clj: -------------------------------------------------------------------------------- 1 | (ns metabase.driver.motherduck 2 | (:require 3 | [metabase.driver :as driver])) 4 | 5 | 6 | (driver/register! :motherduck, :parent :duckdb) -------------------------------------------------------------------------------- /test/metabase/test/data/duckdb.clj: -------------------------------------------------------------------------------- 1 | (ns metabase.test.data.duckdb 2 | (:require 3 | [clojure.java.io :as io] 4 | [metabase.config :as config] 5 | [metabase.driver :as driver] 6 | [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn] 7 | [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] 8 | [metabase.driver.sql-jdbc.sync.describe-table-test :as describe-table-test] 9 | [metabase.test.data.interface :as tx] 10 | [metabase.test.data.sql :as sql.tx] 11 | [metabase.test.data.sql-jdbc :as sql-jdbc.tx] 12 | [metabase.test.data.sql-jdbc.load-data :as load-data] 13 | 14 | [metabase.test.data.sql.ddl :as ddl])) 15 | 16 | (set! *warn-on-reflection* true) 17 | 18 | (sql-jdbc.tx/add-test-extensions! :duckdb) 19 | 20 | (doseq [[feature supported?] {:upload-with-auto-pk (not config/is-test?) 
21 | :test/time-type false 22 | ::describe-table-test/describe-materialized-view-fields false ;; duckdb has no materialized views 23 | :test/cannot-destroy-db true}] 24 | (defmethod driver/database-supports? [:duckdb feature] [_driver _feature _db] supported?)) 25 | 26 | (defmethod tx/bad-connection-details :duckdb 27 | [_driver] 28 | {:unknown_config "single"}) 29 | 30 | (defmethod tx/dbdef->connection-details :duckdb [_ _ {:keys [database-name]}] 31 | {:old_implicit_casting true 32 | "temp_directory" (format "%s.ddb.tmp" database-name) 33 | :database_file (format "%s.ddb" database-name) 34 | "custom_user_agent" "metabase_test" 35 | :subname (format "%s.ddb" database-name)}) 36 | 37 | (doseq [[base-type db-type] {:type/BigInteger "BIGINT" 38 | :type/Boolean "BOOL" 39 | :type/Date "DATE" 40 | :type/DateTime "TIMESTAMP" 41 | :type/DateTimeWithTZ "TIMESTAMPTZ" 42 | :type/Decimal "DECIMAL" 43 | :type/Float "DOUBLE" 44 | :type/Integer "INTEGER" 45 | :type/Text "STRING" 46 | :type/Time "TIME" 47 | :type/UUID "UUID"}] 48 | (defmethod sql.tx/field-base-type->sql-type [:duckdb base-type] [_ _] db-type)) 49 | 50 | (defmethod sql.tx/pk-sql-type :duckdb [_] "INTEGER") 51 | 52 | (defmethod sql.tx/drop-db-if-exists-sql :duckdb [& _] nil) 53 | (defmethod ddl/drop-db-ddl-statements :duckdb [& _] nil) 54 | (defmethod sql.tx/create-db-sql :duckdb [& _] nil) 55 | 56 | (defmethod tx/destroy-db! :duckdb 57 | [_driver dbdef] 58 | (let [file (io/file (str (tx/escaped-database-name dbdef) ".ddb")) 59 | wal-file (io/file (str (tx/escaped-database-name dbdef) ".ddb.wal"))] 60 | (when (.exists file) 61 | (.delete file)) 62 | (when (.exists wal-file) 63 | (.delete wal-file)))) 64 | 65 | (defmethod sql.tx/add-fk-sql :duckdb [& _] nil) 66 | 67 | (defmethod load-data/row-xform :duckdb 68 | [_driver _dbdef tabledef] 69 | (load-data/maybe-add-ids-xform tabledef)) 70 | 71 | (defmethod tx/sorts-nil-first? :duckdb 72 | [_driver _base-type] 73 | false) 74 | 75 | (defmethod tx/dataset-already-loaded? :duckdb 76 | [driver dbdef] 77 | ;; check and make sure the first table in the dbdef has been created. 78 | (let [{:keys [table-name], :as _tabledef} (first (:table-definitions dbdef))] 79 | (sql-jdbc.execute/do-with-connection-with-options 80 | driver 81 | (sql-jdbc.conn/connection-details->spec driver (tx/dbdef->connection-details driver :db dbdef)) 82 | {:write? false} 83 | (fn [^java.sql.Connection conn] 84 | (with-open [rset (.getTables (.getMetaData conn) 85 | #_catalog nil 86 | #_schema-pattern nil 87 | #_table-pattern table-name 88 | #_types (into-array String ["BASE TABLE"]))] 89 | ;; if the ResultSet returns anything we know the table is already loaded. 
90 | (.next rset)))))) 91 | -------------------------------------------------------------------------------- /test/metabase/test/data/motherduck.clj: -------------------------------------------------------------------------------- 1 | (ns metabase.test.data.motherduck 2 | (:require 3 | [clojure.tools.logging :as log] 4 | [metabase.config :as config] 5 | [metabase.driver :as driver] 6 | [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn] 7 | [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] 8 | [metabase.driver.sql-jdbc.sync.describe-table-test :as describe-table-test] 9 | [metabase.test.data.interface :as tx] 10 | [metabase.test.data.sql :as sql.tx :refer [qualify-and-quote]] 11 | [metabase.test.data.sql-jdbc.execute :as sql-jdbc.test-execute] 12 | [metabase.test.data.sql-jdbc.load-data :as load-data] 13 | [metabase.test.data.sql-jdbc.spec :refer [dbdef->spec]] 14 | [metabase.test.data.sql.ddl :as ddl])) 15 | 16 | (set! *warn-on-reflection* true) 17 | 18 | (doseq [[feature supported?] {:upload-with-auto-pk (not config/is-test?) 19 | :test/time-type false 20 | ::describe-table-test/describe-materialized-view-fields false ;; motherduck has no materialized views 21 | :test/cannot-destroy-db true}] 22 | (defmethod driver/database-supports? [:motherduck feature] [_driver _feature _db] supported?)) 23 | 24 | (defmethod tx/bad-connection-details :motherduck 25 | [_driver] 26 | {:unknown_config "single"}) 27 | 28 | (defmethod dbdef->spec :motherduck [driver context dbdef] 29 | (sql-jdbc.conn/connection-details->spec driver (tx/dbdef->connection-details driver context dbdef))) 30 | 31 | ;; use this to connect to MotherDuck in workspace mode, to set up and destroy test databases. 32 | (defn- md-workspace-mode-spec 33 | [] 34 | (sql-jdbc.conn/connection-details->spec :motherduck {:old_implicit_casting true 35 | "custom_user_agent" "metabase_test" 36 | :database_file "md:" 37 | :subname "md:" 38 | :attach_mode "workspace"})) 39 | 40 | (defmethod tx/create-db! :motherduck 41 | [driver dbdef & options] 42 | (sql-jdbc.execute/do-with-connection-with-options 43 | driver 44 | (md-workspace-mode-spec) 45 | {:write? true} 46 | (fn [^java.sql.Connection conn] 47 | (try (.setAutoCommit conn true) 48 | (catch Throwable _ 49 | (log/debugf "`.setAutoCommit` failed with engine `%s`" (name driver)))) 50 | (sql-jdbc.test-execute/execute-sql! driver conn (sql.tx/create-db-sql driver dbdef)))) 51 | 52 | (apply load-data/create-db! 
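;; the MotherDuck database now exists; delegate table creation and row loading to the shared sql-jdbc load-data implementation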
driver dbdef options)) 53 | 54 | 55 | (defmethod tx/dbdef->connection-details :motherduck [_ _ {:keys [database-name]}] 56 | {:old_implicit_casting true 57 | "custom_user_agent" "metabase_test" 58 | :database_file (format "md:%s" database-name) 59 | :subname (format "md:%s" database-name)}) 60 | 61 | (doseq [[base-type db-type] {:type/BigInteger "BIGINT" 62 | :type/Boolean "BOOL" 63 | :type/Date "DATE" 64 | :type/DateTime "TIMESTAMP" 65 | :type/DateTimeWithTZ "TIMESTAMPTZ" 66 | :type/Decimal "DECIMAL" 67 | :type/Float "DOUBLE" 68 | :type/Integer "INTEGER" 69 | :type/Text "STRING" 70 | :type/Time "TIME" 71 | :type/UUID "UUID"}] 72 | (defmethod sql.tx/field-base-type->sql-type [:motherduck base-type] [_ _] db-type)) 73 | 74 | (defmethod sql.tx/pk-sql-type :motherduck [_] "INTEGER") 75 | 76 | (defmethod ddl/drop-db-ddl-statements :motherduck [driver {:keys [database-name]}] 77 | ["ATTACH IF NOT EXISTS ':memory:' AS memdb;" 78 | "USE memdb;" 79 | (format "DROP DATABASE %s CASCADE;" (qualify-and-quote driver database-name))]) 80 | 81 | (defmethod sql.tx/create-db-sql :motherduck 82 | [driver {:keys [database-name]}] 83 | (format "CREATE DATABASE IF NOT EXISTS %s;" (qualify-and-quote driver database-name))) 84 | 85 | (defmethod sql.tx/add-fk-sql :motherduck [& _] nil) 86 | 87 | (defmethod load-data/row-xform :motherduck 88 | [_driver _dbdef tabledef] 89 | (load-data/maybe-add-ids-xform tabledef)) 90 | 91 | (defmethod tx/sorts-nil-first? :motherduck 92 | [_driver _base-type] 93 | false) 94 | 95 | (defmethod tx/dataset-already-loaded? :motherduck 96 | [driver dbdef] 97 | ;; check and make sure the first table in the dbdef has been created. 98 | (let [{:keys [table-name database-name], :as _tabledef} (first (:table-definitions dbdef))] 99 | (sql-jdbc.execute/do-with-connection-with-options 100 | driver 101 | (md-workspace-mode-spec) 102 | {:write? true} 103 | (fn [^java.sql.Connection conn] 104 | (sql-jdbc.test-execute/execute-sql! driver conn (sql.tx/create-db-sql driver dbdef)))) 105 | 106 | (sql-jdbc.execute/do-with-connection-with-options 107 | driver 108 | (sql-jdbc.conn/connection-details->spec driver (tx/dbdef->connection-details driver :db dbdef)) 109 | {:write? false} 110 | (fn [^java.sql.Connection conn] 111 | (with-open [rset (.getTables (.getMetaData conn) 112 | #_catalog database-name 113 | #_schema-pattern nil 114 | #_table-pattern table-name 115 | #_types (into-array String ["BASE TABLE"]))] 116 | (.next rset)))))) 117 | 118 | 119 | (defn- delete-old-databases! 120 | "Remove all databases from the MotherDuck account except for the default ones. Test runs can create databases that need to be cleaned up." 121 | [^java.sql.Connection conn] 122 | (let [drop-sql (fn [db-name] (format "DROP DATABASE IF EXISTS \"%s\" CASCADE;" db-name))] 123 | (with-open [stmt (.createStatement conn)] 124 | (with-open [rset (.executeQuery stmt "select database_name from duckdb_databases() where type = 'motherduck' and database_name not in ('my_db', 'sample_data'); ")] 125 | (while (.next rset) 126 | (let [db-name (.getString rset "database_name")] 127 | (with-open [inner-stmt (.createStatement conn)] 128 | (.execute inner-stmt (drop-sql db-name))))))))) 129 | 130 | (defmethod tx/before-run :motherduck 131 | [driver] 132 | (sql-jdbc.execute/do-with-connection-with-options 133 | driver 134 | (md-workspace-mode-spec) 135 | {:write?
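;; a writable workspace connection is required because delete-old-databases! issues DROP DATABASE statements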
true} 136 | delete-old-databases!)) 137 | 138 | 139 | (defmethod tx/after-run :motherduck 140 | [driver] 141 | (sql-jdbc.execute/do-with-connection-with-options 142 | driver 143 | (md-workspace-mode-spec) 144 | {:write? true} 145 | delete-old-databases!)) --------------------------------------------------------------------------------
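A minimal REPL sketch of how the MotherDuck test extensions above fit together, written as a Clojure comment block so it is clearly not part of the driver source. The dbdef map shape (just :database-name plus :table-definitions with :table-name entries), the database name, and the alias names are assumptions for illustration; the real harness builds dbdefs through metabase.test.data.interface, and running this requires a configured MotherDuck token.

(comment
  (require '[metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
           '[metabase.test.data.interface :as tx])

  ;; hypothetical dbdef, only for illustration
  (def dbdef {:database-name     "md_smoke_test"
              :table-definitions [{:table-name "checkins"}]})

  ;; connection details point the JDBC subname at md:<database-name>
  (tx/dbdef->connection-details :motherduck :db dbdef)
  ;; => {:old_implicit_casting true, "custom_user_agent" "metabase_test",
  ;;     :database_file "md:md_smoke_test", :subname "md:md_smoke_test"}

  ;; the details become a clojure.java.jdbc spec via the driver's spec method
  (sql-jdbc.conn/connection-details->spec :motherduck
                                          (tx/dbdef->connection-details :motherduck :db dbdef))

  ;; dataset-already-loaded? ensures the database exists (workspace mode),
  ;; then checks JDBC metadata for the first table definition
  (tx/dataset-already-loaded? :motherduck dbdef))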