├── .clang-format ├── .dockerignore ├── .github └── workflows │ └── build-docker-images.yml ├── .gitignore ├── .gitmodules ├── CMakeLists.txt ├── Dockerfile ├── Dockerfile-slim.ci ├── Dockerfile.ci ├── LICENSE ├── README.md ├── data └── .gitignore ├── helm-chart ├── .helmignore ├── Chart.yaml ├── install_helm_chart.sh ├── secrets │ ├── README.md │ ├── opaque │ │ ├── .gitignore │ │ └── README.md │ └── tls │ │ ├── .gitignore │ │ └── README.md ├── templates │ ├── NOTES.txt │ ├── _helpers.tpl │ ├── global-secret.yaml │ ├── server-service.yaml │ ├── server-statefulset.yaml │ └── tls-secret.yaml └── values.yaml ├── macos └── entitlements.plist ├── queries ├── h01.sql ├── h02.sql ├── h03.sql ├── h04.sql ├── h05.sql ├── h06.sql ├── h07.sql ├── h08.sql ├── h09.sql ├── h10.sql ├── h11.sql ├── h12.sql ├── h13.sql ├── h14.sql ├── h15.sql ├── h16.sql ├── h17.sql ├── h18.sql ├── h19.sql ├── h20.sql ├── h21.sql └── h22.sql ├── requirements.txt ├── scripts ├── create_duckdb_database_file.py ├── start_sqlflite.sh ├── start_sqlflite_slim.sh ├── test_sqlflite.py └── test_sqlflite.sh ├── src ├── duckdb │ ├── duckdb_server.cpp │ ├── duckdb_server.h │ ├── duckdb_sql_info.cpp │ ├── duckdb_sql_info.h │ ├── duckdb_statement.cpp │ ├── duckdb_statement.h │ ├── duckdb_statement_batch_reader.cpp │ ├── duckdb_statement_batch_reader.h │ ├── duckdb_tables_schema_batch_reader.cpp │ └── duckdb_tables_schema_batch_reader.h ├── library │ ├── include │ │ ├── .gitignore │ │ ├── flight_sql_fwd.h │ │ ├── sqlflite_library.h │ │ ├── sqlflite_security.h │ │ └── version.h.in │ ├── sqlflite_library.cpp │ └── sqlflite_security.cpp ├── sqlflite_client.cpp ├── sqlflite_server.cpp └── sqlite │ ├── sqlite_server.cc │ ├── sqlite_server.h │ ├── sqlite_sql_info.cc │ ├── sqlite_sql_info.h │ ├── sqlite_statement.cc │ ├── sqlite_statement.h │ ├── sqlite_statement_batch_reader.cc │ ├── sqlite_statement_batch_reader.h │ ├── sqlite_tables_schema_batch_reader.cc │ ├── sqlite_tables_schema_batch_reader.h │ ├── sqlite_type_info.cc │ └── sqlite_type_info.h ├── third_party ├── Arrow_CMakeLists.txt.in ├── DuckDB_CMakeLists.txt.in ├── JWTCPP_CMakeLists.txt.in └── SQLite_CMakeLists.txt.in └── tls ├── .gitignore ├── README.md ├── gen-certs.sh └── v3_usr.cnf /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | BasedOnStyle: Google 3 | ColumnLimit: 90 4 | IncludeBlocks: Preserve 5 | ReflowComments: false 6 | SortIncludes: false 7 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | Dockerfile* 2 | .idea 3 | .git 4 | .gitignore 5 | docker_build_and_push.sh 6 | cmake-build-debug 7 | build 8 | data 9 | dist 10 | arrow 11 | duckdb 12 | venv 13 | 14 | # Ignore TLS certs/keys 15 | tls/*.csr 16 | tls/*.key 17 | tls/*.pkcs1 18 | tls/*.pem 19 | tls/*.srl 20 | -------------------------------------------------------------------------------- /.github/workflows/build-docker-images.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | 7 | env: 8 | DOCKER_IMAGE_NAME: voltrondata/sqlflite 9 | 10 | jobs: 11 | build-project-macos: 12 | name: Build Project - MacOS 13 | strategy: 14 | matrix: 15 | include: 16 | - platform: amd64 17 | os: macos 18 | runner: macos-13 19 | - platform: arm64 20 | os: macos 21 | runner: macos-13-xlarge 22 | runs-on: ${{ matrix.runner }} 23 | env: 24 | zip_file_name: 
sqlflite_cli_${{ matrix.os }}_${{ matrix.platform }}.zip 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v4.1.1 28 | 29 | - name: Install build requirements 30 | env: 31 | HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 32 | run: | 33 | brew install automake boost gflags 34 | 35 | - name: Configure Project 36 | uses: threeal/cmake-action@v1.3.0 37 | with: 38 | generator: Ninja 39 | run-build: true 40 | 41 | - name: Sign and notarize the server release build 42 | uses: prmoore77/action-macos-sign-notarize@b0f525e0d98a47b0884558b786f21453889a04d7 43 | with: 44 | certificate: ${{ secrets.APPLE_CERTIFICATE }} 45 | certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} 46 | username: ${{ secrets.APPLE_ID_USERNAME }} 47 | password: ${{ secrets.APPLE_ID_PASSWORD }} 48 | apple-team-id: ${{ secrets.APPLE_TEAM_ID }} 49 | app-path: build/sqlflite_server 50 | entitlements-path: macos/entitlements.plist 51 | 52 | - name: Sign and notarize the client release build 53 | uses: prmoore77/action-macos-sign-notarize@b0f525e0d98a47b0884558b786f21453889a04d7 54 | with: 55 | certificate: ${{ secrets.APPLE_CERTIFICATE }} 56 | certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} 57 | username: ${{ secrets.APPLE_ID_USERNAME }} 58 | password: ${{ secrets.APPLE_ID_PASSWORD }} 59 | apple-team-id: ${{ secrets.APPLE_TEAM_ID }} 60 | app-path: build/sqlflite_client 61 | 62 | - name: Zip artifacts 63 | run: | 64 | mv build/sqlflite_server build/sqlflite_client . 65 | zip -j ${{ env.zip_file_name }} sqlflite_server sqlflite_client 66 | 67 | - name: Upload artifacts 68 | uses: actions/upload-artifact@v4 69 | with: 70 | name: ${{ env.zip_file_name }} 71 | path: | 72 | ${{ env.zip_file_name }} 73 | 74 | build-project-linux: 75 | name: Build Project - Linux 76 | strategy: 77 | matrix: 78 | include: 79 | - platform: amd64 80 | os: linux 81 | runner: buildjet-8vcpu-ubuntu-2204 82 | - platform: arm64 83 | os: linux 84 | runner: buildjet-8vcpu-ubuntu-2204-arm 85 | runs-on: ${{ matrix.runner }} 86 | env: 87 | zip_file_name: sqlflite_cli_${{ matrix.os }}_${{ matrix.platform }}.zip 88 | steps: 89 | - name: Checkout 90 | uses: actions/checkout@v4.1.1 91 | 92 | - name: Install build requirements 93 | run: | 94 | sudo apt-get update 95 | sudo apt-get install -y \ 96 | build-essential \ 97 | ninja-build \ 98 | automake \ 99 | cmake \ 100 | gcc \ 101 | git \ 102 | libboost-all-dev \ 103 | libgflags-dev 104 | sudo apt-get clean 105 | sudo rm -rf /var/lib/apt/lists/* 106 | 107 | - name: Configure Project 108 | uses: threeal/cmake-action@v1.3.0 109 | with: 110 | generator: Ninja 111 | run-build: true 112 | 113 | - name: Zip artifacts 114 | run: | 115 | mv build/sqlflite_server build/sqlflite_client . 116 | zip -j ${{ env.zip_file_name }} sqlflite_server sqlflite_client 117 | 118 | - name: Upload artifacts 119 | uses: actions/upload-artifact@v4 120 | with: 121 | name: ${{ env.zip_file_name }} 122 | path: | 123 | ${{ env.zip_file_name }} 124 | 125 | - name: Login to Docker Hub 126 | uses: docker/login-action@v2 127 | with: 128 | username: ${{ secrets.DOCKERHUB_USERNAME }} 129 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 130 | 131 | - name: Build and push full Docker image 132 | uses: docker/build-push-action@v5 133 | with: 134 | context: .
135 | platforms: linux/${{ matrix.platform }} 136 | file: Dockerfile.ci 137 | push: ${{ startsWith(github.ref, 'refs/tags/') }} 138 | tags: | 139 | ${{ env.DOCKER_IMAGE_NAME }}:latest-${{ matrix.platform }} 140 | ${{ env.DOCKER_IMAGE_NAME }}:${{ github.ref_name }}-${{ matrix.platform }} 141 | no-cache: true 142 | provenance: false 143 | 144 | - name: Build and push slim Docker image 145 | uses: docker/build-push-action@v5 146 | with: 147 | context: . 148 | platforms: linux/${{ matrix.platform }} 149 | file: Dockerfile-slim.ci 150 | push: ${{ startsWith(github.ref, 'refs/tags/') }} 151 | tags: | 152 | ${{ env.DOCKER_IMAGE_NAME }}:latest-${{ matrix.platform }}-slim 153 | ${{ env.DOCKER_IMAGE_NAME }}:${{ github.ref_name }}-${{ matrix.platform }}-slim 154 | no-cache: true 155 | provenance: false 156 | 157 | create-release: 158 | name: Create a release 159 | if: startsWith(github.ref, 'refs/tags/') 160 | needs: [build-project-macos, build-project-linux] 161 | runs-on: ubuntu-latest 162 | steps: 163 | - name: Checkout 164 | uses: actions/checkout@v4.1.1 165 | 166 | - name: Download All Artifacts 167 | uses: actions/download-artifact@v4 168 | with: 169 | path: artifacts 170 | pattern: sqlflite_cli_*.zip 171 | merge-multiple: true 172 | 173 | - name: Release 174 | uses: softprops/action-gh-release@v1 175 | with: 176 | files: | 177 | artifacts/sqlflite_cli_*.zip 178 | LICENSE 179 | 180 | update-image-manifest: 181 | name: Update DockerHub image manifest to include all built platforms 182 | if: startsWith(github.ref, 'refs/tags/') 183 | needs: build-project-linux 184 | runs-on: ubuntu-latest 185 | steps: 186 | - name: Login to Docker Hub 187 | uses: docker/login-action@v2 188 | with: 189 | username: ${{ secrets.DOCKERHUB_USERNAME }} 190 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 191 | 192 | - name: Create and push full manifest images 193 | uses: Noelware/docker-manifest-action@master # or use a pinned version in the Releases tab 194 | with: 195 | inputs: ${{ env.DOCKER_IMAGE_NAME }}:latest,${{ env.DOCKER_IMAGE_NAME }}:${{ github.ref_name }} 196 | images: ${{ env.DOCKER_IMAGE_NAME }}:latest-amd64,${{ env.DOCKER_IMAGE_NAME }}:latest-arm64 197 | push: true 198 | 199 | - name: Create and push slim manifest images 200 | uses: Noelware/docker-manifest-action@master # or use a pinned version in the Releases tab 201 | with: 202 | inputs: ${{ env.DOCKER_IMAGE_NAME }}:latest-slim,${{ env.DOCKER_IMAGE_NAME }}:${{ github.ref_name }}-slim 203 | images: ${{ env.DOCKER_IMAGE_NAME }}:latest-amd64-slim,${{ env.DOCKER_IMAGE_NAME }}:latest-arm64-slim 204 | push: true 205 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | arrow 2 | 3 | # build is out 4 | build 5 | 6 | # and so is temp (playground) 7 | temp 8 | 9 | # CMake Files 10 | CMakeFiles 11 | 12 | # Mac Junk 13 | ._.DS* 14 | .DS* 15 | 16 | # vscode 17 | .vscode 18 | 19 | .idea 20 | 21 | venv 22 | dist 23 | cmake-build-debug 24 | cmake-build-debug-event-trace 25 | 26 | data 27 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "jwt-cpp"] 2 | path = jwt-cpp 3 | url = https://github.com/Thalhammer/jwt-cpp 4 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 |
cmake_minimum_required(VERSION 3.25) 2 | project(sqlflite) 3 | 4 | set(CMAKE_CXX_STANDARD 17) 5 | 6 | # Function to retrieve the latest Git tag 7 | function(get_latest_git_tag OUTPUT_VARIABLE) 8 | execute_process( 9 | COMMAND git describe --tags --abbrev=0 10 | WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} 11 | OUTPUT_VARIABLE RETURN_VALUE 12 | OUTPUT_STRIP_TRAILING_WHITESPACE 13 | ) 14 | set(${OUTPUT_VARIABLE} ${RETURN_VALUE} PARENT_SCOPE) 15 | endfunction() 16 | 17 | # Set the version 18 | get_latest_git_tag(LATEST_TAG) 19 | set(PROJECT_VERSION ${LATEST_TAG}) 20 | 21 | # Display variable values using message 22 | message(STATUS "PROJECT_VERSION: ${PROJECT_VERSION}") 23 | 24 | # Configure a header file with the version 25 | configure_file( 26 | ${CMAKE_SOURCE_DIR}/src/library/include/version.h.in 27 | ${CMAKE_SOURCE_DIR}/src/library/include/version.h 28 | ) 29 | 30 | # --------------------- Arrow --------------------- 31 | configure_file(third_party/Arrow_CMakeLists.txt.in arrow/CMakeLists.txt) 32 | execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . 33 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/arrow" 34 | ) 35 | execute_process(COMMAND "${CMAKE_COMMAND}" --build . 36 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/arrow" 37 | ) 38 | 39 | set(ARROW_INSTALL_DIR "${CMAKE_BINARY_DIR}/third_party/arrow") 40 | 41 | find_package(Arrow REQUIRED PATHS ${ARROW_INSTALL_DIR} NO_DEFAULT_PATH) 42 | find_package(ArrowFlight REQUIRED PATHS ${ARROW_INSTALL_DIR} NO_DEFAULT_PATH) 43 | find_package(ArrowFlightSql REQUIRED PATHS ${ARROW_INSTALL_DIR} NO_DEFAULT_PATH) 44 | 45 | # --------------------- SQLite --------------------- 46 | 47 | configure_file(third_party/SQLite_CMakeLists.txt.in sqlite/CMakeLists.txt) 48 | execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . 49 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/sqlite" 50 | ) 51 | execute_process(COMMAND "${CMAKE_COMMAND}" --build . 52 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/sqlite" 53 | ) 54 | 55 | set(SQLITE_INCLUDE_DIR "${CMAKE_BINARY_DIR}/third_party/sqlite/src/sqlite_project") 56 | set(SQLITE_LIBRARY_PATH "${CMAKE_BINARY_DIR}/third_party/sqlite/src/sqlite_project-build/sqlite3-sqlite3.o") 57 | 58 | add_library(sqlite STATIC IMPORTED) 59 | set_property(TARGET sqlite PROPERTY IMPORTED_LOCATION ${SQLITE_LIBRARY_PATH}) 60 | 61 | # --------------------- DuckDB --------------------- 62 | 63 | configure_file(third_party/DuckDB_CMakeLists.txt.in duckdb/CMakeLists.txt) 64 | execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . 65 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/duckdb" 66 | ) 67 | execute_process(COMMAND "${CMAKE_COMMAND}" --build . 
68 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/duckdb" 69 | ) 70 | 71 | set(DUCKDB_INSTALL_DIR "${CMAKE_BINARY_DIR}/third_party/duckdb") 72 | 73 | set(DUCKDB_LIBRARY_PATH "${DUCKDB_INSTALL_DIR}/lib/libduckdb_static.a") 74 | set(DUCKDB_INCLUDE_DIR "${DUCKDB_INSTALL_DIR}/include") 75 | 76 | add_library(duckdb STATIC IMPORTED) 77 | set_property(TARGET duckdb PROPERTY IMPORTED_LOCATION ${DUCKDB_LIBRARY_PATH}) 78 | set_property(TARGET duckdb APPEND PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${DUCKDB_INCLUDE_DIR}) 79 | 80 | target_link_libraries(duckdb 81 | INTERFACE 82 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_fastpforlib.a" 83 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_fmt.a" 84 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_fsst.a" 85 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_hyperloglog.a" 86 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_mbedtls.a" 87 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_miniz.a" 88 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_pg_query.a" 89 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_re2.a" 90 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_skiplistlib.a" 91 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_utf8proc.a" 92 | "${DUCKDB_INSTALL_DIR}/lib/libduckdb_yyjson.a" 93 | "${DUCKDB_INSTALL_DIR}/lib/libparquet_extension.a" 94 | ) 95 | 96 | # Only link jemalloc_extension on Linux x86_64 and i386 (not arm64) 97 | if (CMAKE_SYSTEM_NAME STREQUAL "Linux" AND CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64") 98 | target_link_libraries(duckdb 99 | INTERFACE 100 | "${DUCKDB_INSTALL_DIR}/lib/libjemalloc_extension.a" 101 | ) 102 | endif() 103 | 104 | # --------------------- JWT-CPP --------------------- 105 | 106 | configure_file(third_party/JWTCPP_CMakeLists.txt.in jwt-cpp/CMakeLists.txt) 107 | execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . 108 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/jwt-cpp" 109 | ) 110 | execute_process(COMMAND "${CMAKE_COMMAND}" --build . 
111 | WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/jwt-cpp" 112 | ) 113 | 114 | set(JWT_CPP_INCLUDE_DIR "${CMAKE_BINARY_DIR}/third_party/jwt-cpp/include") 115 | 116 | # -------------------------------------------------- 117 | 118 | find_package(Threads REQUIRED) 119 | 120 | set(Boost_USE_STATIC_LIBS ON) 121 | find_package(Boost COMPONENTS program_options REQUIRED) 122 | 123 | set(SQLFLITE_SQLITE_SERVER_SRCS 124 | src/sqlite/sqlite_sql_info.cc 125 | src/sqlite/sqlite_type_info.cc 126 | src/sqlite/sqlite_statement.cc 127 | src/sqlite/sqlite_statement_batch_reader.cc 128 | src/sqlite/sqlite_server.cc 129 | src/sqlite/sqlite_tables_schema_batch_reader.cc) 130 | 131 | 132 | set(SQLFLITE_DUCKDB_SERVER_SRCS 133 | src/duckdb/duckdb_sql_info.cpp 134 | src/duckdb/duckdb_statement.cpp 135 | src/duckdb/duckdb_statement_batch_reader.cpp 136 | src/duckdb/duckdb_server.cpp 137 | src/duckdb/duckdb_tables_schema_batch_reader.cpp) 138 | 139 | # Add header file 140 | set(HEADER_FILES src/library/include/sqlflite_library.h) 141 | 142 | # Add a library target 143 | add_library(sqlfliteserver STATIC 144 | src/library/sqlflite_library.cpp 145 | src/library/sqlflite_security.cpp 146 | ${SQLFLITE_SQLITE_SERVER_SRCS} 147 | ${SQLFLITE_DUCKDB_SERVER_SRCS} 148 | ) 149 | 150 | set_target_properties(sqlfliteserver PROPERTIES PUBLIC_HEADER ${HEADER_FILES}) 151 | 152 | target_include_directories(sqlfliteserver PRIVATE 153 | src/sqlite 154 | src/duckdb 155 | src/library/include 156 | ${SQLITE_INCLUDE_DIR} 157 | ${DUCKDB_INCLUDE_DIR} 158 | ${JWT_CPP_INCLUDE_DIR} 159 | ) 160 | 161 | target_link_libraries(sqlfliteserver 162 | PRIVATE 163 | Threads::Threads 164 | ArrowFlightSql::arrow_flight_sql_static 165 | sqlite 166 | duckdb 167 | ${Boost_LIBRARIES} 168 | "-lresolv" 169 | ) 170 | 171 | if (APPLE) 172 | # macOS-specific libraries and options 173 | target_link_libraries(sqlfliteserver PRIVATE "-framework CoreFoundation") 174 | elseif (UNIX AND NOT APPLE) 175 | target_link_libraries(sqlfliteserver PRIVATE "-lssl -lcrypto") 176 | endif () 177 | 178 | # Specify the installation directory for the library 179 | install(TARGETS sqlfliteserver 180 | LIBRARY DESTINATION lib 181 | PUBLIC_HEADER DESTINATION include 182 | ) 183 | 184 | # ------------ Server Executable section ------------ 185 | add_executable(sqlflite_server 186 | src/sqlflite_server.cpp 187 | ) 188 | 189 | target_link_libraries(sqlflite_server PRIVATE 190 | sqlfliteserver 191 | ${Boost_LIBRARIES} 192 | ) 193 | 194 | target_compile_options(sqlflite_server PRIVATE "-static") 195 | 196 | install(TARGETS sqlflite_server 197 | DESTINATION bin 198 | ) 199 | 200 | # ------------ Client Executable section ------------ 201 | add_executable(sqlflite_client 202 | src/sqlflite_client.cpp 203 | ) 204 | 205 | target_link_libraries(sqlflite_client PRIVATE 206 | Threads::Threads 207 | ArrowFlightSql::arrow_flight_sql_static 208 | ${Boost_LIBRARIES} 209 | "-lresolv" 210 | ) 211 | 212 | if (APPLE) 213 | # macOS-specific libraries and options 214 | target_link_libraries(sqlflite_client PRIVATE "-framework CoreFoundation") 215 | elseif (UNIX AND NOT APPLE) 216 | target_link_libraries(sqlflite_client PRIVATE "-lssl -lcrypto") 217 | endif () 218 | 219 | target_compile_options(sqlflite_client PRIVATE "-static") 220 | 221 | install(TARGETS sqlflite_client 222 | DESTINATION bin 223 | ) 224 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12.6 2 | 3 | ARG 
TARGETPLATFORM 4 | ARG TARGETARCH 5 | ARG TARGETVARIANT 6 | RUN printf "I'm building for TARGETPLATFORM=${TARGETPLATFORM}" \ 7 | && printf ", TARGETARCH=${TARGETARCH}" \ 8 | && printf ", TARGETVARIANT=${TARGETVARIANT} \n" \ 9 | && printf "With uname -s : " && uname -s \ 10 | && printf "and uname -m : " && uname -m 11 | 12 | RUN apt-get update && \ 13 | apt-get dist-upgrade --yes && \ 14 | apt-get install -y \ 15 | build-essential \ 16 | automake \ 17 | cmake \ 18 | wget \ 19 | gcc \ 20 | git \ 21 | ninja-build \ 22 | libboost-all-dev \ 23 | libgflags-dev \ 24 | sqlite3 \ 25 | vim && \ 26 | apt-get clean && \ 27 | rm -rf /var/lib/apt/lists/* 28 | 29 | # Setup the AWS Client (so we can copy S3 files to the container if needed) 30 | RUN case ${TARGETPLATFORM} in \ 31 | "linux/amd64") AWSCLI_FILE=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip ;; \ 32 | "linux/arm64") AWSCLI_FILE=https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip ;; \ 33 | esac && \ 34 | curl "${AWSCLI_FILE}" -o "awscliv2.zip" && \ 35 | unzip awscliv2.zip && \ 36 | ./aws/install && \ 37 | rm -f awscliv2.zip 38 | 39 | # Create an application user 40 | RUN useradd app_user --create-home 41 | 42 | ARG APP_DIR=/opt/sqlflite 43 | 44 | RUN mkdir --parents ${APP_DIR} && \ 45 | chown app_user:app_user ${APP_DIR} && \ 46 | chown --recursive app_user:app_user /usr/local 47 | 48 | # Switch to a less privileged user... 49 | USER app_user 50 | 51 | WORKDIR ${APP_DIR} 52 | 53 | RUN python3 -m venv ${APP_DIR}/venv && \ 54 | echo ". ${APP_DIR}/venv/bin/activate" >> ~/.bashrc && \ 55 | . ~/.bashrc && \ 56 | pip install --upgrade pip setuptools wheel 57 | 58 | # Set the PATH so that the Python Virtual environment is referenced for subsequent RUN steps (hat tip: https://pythonspeed.com/articles/activate-virtualenv-dockerfile/) 59 | ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" 60 | 61 | # Copy the scripts directory into the image (we copy directory-by-directory in order to maximize Docker caching) 62 | COPY --chown=app_user:app_user ./scripts ./scripts 63 | 64 | # Get the SQLite3 database file 65 | RUN mkdir data && \ 66 | wget https://github.com/lovasoa/TPCH-sqlite/releases/download/v1.0/TPC-H-small.db -O data/TPC-H-small.db 67 | 68 | # Install Python requirements 69 | COPY --chown=app_user:app_user ./requirements.txt ./ 70 | RUN pip install --requirement ./requirements.txt 71 | 72 | # Create DuckDB database file 73 | RUN python "scripts/create_duckdb_database_file.py" \ 74 | --file-name="TPC-H-small.duckdb" \ 75 | --file-path="data" \ 76 | --overwrite-file=true \ 77 | --scale-factor=0.01 78 | 79 | COPY --chown=app_user:app_user ./CMakeLists.txt ./ 80 | COPY --chown=app_user:app_user ./third_party ./third_party 81 | COPY --chown=app_user:app_user ./src ./src 82 | 83 | # Run the CMake build (then cleanup) 84 | RUN cmake -S . -B build -G Ninja \ 85 | -DCMAKE_BUILD_TYPE=Release \ 86 | -DCMAKE_INSTALL_PREFIX=/usr/local && \ 87 | cmake --build build --target install && \ 88 | rm -rf build src third_party CMakeLists.txt 89 | 90 | COPY --chown=app_user:app_user ./tls ./tls 91 | 92 | # Install DuckDB CLI for troubleshooting, etc. 
93 | ARG DUCKDB_VERSION="1.1.1" 94 | 95 | RUN case ${TARGETPLATFORM} in \ 96 | "linux/amd64") DUCKDB_FILE=https://github.com/duckdb/duckdb/releases/download/v${DUCKDB_VERSION}/duckdb_cli-linux-amd64.zip ;; \ 97 | "linux/arm64") DUCKDB_FILE=https://github.com/duckdb/duckdb/releases/download/v${DUCKDB_VERSION}/duckdb_cli-linux-aarch64.zip ;; \ 98 | esac && \ 99 | curl --output /tmp/duckdb.zip --location ${DUCKDB_FILE} && \ 100 | unzip /tmp/duckdb.zip -d /usr/local/bin && \ 101 | rm /tmp/duckdb.zip 102 | 103 | EXPOSE 31337 104 | 105 | # Run a test to ensure that the server works... 106 | RUN scripts/test_sqlflite.sh 107 | 108 | ENTRYPOINT scripts/start_sqlflite.sh 109 | -------------------------------------------------------------------------------- /Dockerfile-slim.ci: -------------------------------------------------------------------------------- 1 | FROM ubuntu:24.04 2 | 3 | ARG TARGETPLATFORM 4 | ARG TARGETARCH 5 | ARG TARGETVARIANT 6 | RUN printf "I'm building for TARGETPLATFORM=${TARGETPLATFORM}" \ 7 | && printf ", TARGETARCH=${TARGETARCH}" \ 8 | && printf ", TARGETVARIANT=${TARGETVARIANT} \n" \ 9 | && printf "With uname -s : " && uname -s \ 10 | && printf "and uname -m : " && uname -m 11 | 12 | RUN apt-get update && \ 13 | apt-get dist-upgrade --yes && \ 14 | apt-get install -y \ 15 | curl \ 16 | zip && \ 17 | apt-get clean && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | # Create an application user 21 | RUN useradd app_user --create-home 22 | 23 | ARG APP_DIR=/opt/sqlflite 24 | 25 | RUN mkdir --parents ${APP_DIR} && \ 26 | chown app_user:app_user ${APP_DIR} && \ 27 | chown --recursive app_user:app_user /usr/local 28 | 29 | # Switch to a less privileged user... 30 | USER app_user 31 | 32 | WORKDIR ${APP_DIR} 33 | 34 | # Copy the scripts directory into the image (we copy directory-by-directory in order to maximize Docker caching) 35 | COPY --chown=app_user:app_user scripts scripts 36 | 37 | COPY --chown=app_user:app_user sqlflite_server /usr/local/bin/sqlflite_server 38 | RUN chmod +x /usr/local/bin/sqlflite_server 39 | 40 | COPY --chown=app_user:app_user sqlflite_client /usr/local/bin/sqlflite_client 41 | RUN chmod +x /usr/local/bin/sqlflite_client 42 | 43 | EXPOSE 31337 44 | 45 | ENTRYPOINT scripts/start_sqlflite_slim.sh 46 | -------------------------------------------------------------------------------- /Dockerfile.ci: -------------------------------------------------------------------------------- 1 | FROM python:3.12.6 2 | 3 | ARG TARGETPLATFORM 4 | ARG TARGETARCH 5 | ARG TARGETVARIANT 6 | RUN printf "I'm building for TARGETPLATFORM=${TARGETPLATFORM}" \ 7 | && printf ", TARGETARCH=${TARGETARCH}" \ 8 | && printf ", TARGETVARIANT=${TARGETVARIANT} \n" \ 9 | && printf "With uname -s : " && uname -s \ 10 | && printf "and uname -m : " && uname -m 11 | 12 | RUN apt-get update && \ 13 | apt-get dist-upgrade --yes && \ 14 | apt-get install -y \ 15 | build-essential \ 16 | wget \ 17 | gcc \ 18 | git \ 19 | sqlite3 \ 20 | vim \ 21 | zip && \ 22 | apt-get clean && \ 23 | rm -rf /var/lib/apt/lists/* 24 | 25 | # Setup the AWS Client (so we can copy S3 files to the container if needed) 26 | RUN case ${TARGETPLATFORM} in \ 27 | "linux/amd64") AWSCLI_FILE=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip ;; \ 28 | "linux/arm64") AWSCLI_FILE=https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip ;; \ 29 | esac && \ 30 | curl "${AWSCLI_FILE}" -o "awscliv2.zip" && \ 31 | unzip awscliv2.zip && \ 32 | aws/install && \ 33 | rm -f awscliv2.zip 34 | 35 | # Create an application user 36 | RUN 
useradd app_user --create-home 37 | 38 | ARG APP_DIR=/opt/sqlflite 39 | 40 | RUN mkdir --parents ${APP_DIR} && \ 41 | chown app_user:app_user ${APP_DIR} && \ 42 | chown --recursive app_user:app_user /usr/local 43 | 44 | # Switch to a less privileged user... 45 | USER app_user 46 | 47 | WORKDIR ${APP_DIR} 48 | 49 | RUN python3 -m venv ${APP_DIR}/venv && \ 50 | echo ". ${APP_DIR}/venv/bin/activate" >> ~/.bashrc && \ 51 | . ~/.bashrc && \ 52 | pip install --upgrade pip setuptools wheel 53 | 54 | # Set the PATH so that the Python Virtual environment is referenced for subsequent RUN steps (hat tip: https://pythonspeed.com/articles/activate-virtualenv-dockerfile/) 55 | ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" 56 | 57 | # Copy the scripts directory into the image (we copy directory-by-directory in order to maximize Docker caching) 58 | COPY --chown=app_user:app_user scripts scripts 59 | 60 | # Get the SQLite3 database file 61 | RUN mkdir data && \ 62 | wget https://github.com/lovasoa/TPCH-sqlite/releases/download/v1.0/TPC-H-small.db -O data/TPC-H-small.db 63 | 64 | # Install Python requirements 65 | COPY --chown=app_user:app_user requirements.txt . 66 | RUN pip install --requirement requirements.txt 67 | 68 | # Create DuckDB database file 69 | RUN python "scripts/create_duckdb_database_file.py" \ 70 | --file-name="TPC-H-small.duckdb" \ 71 | --file-path="data" \ 72 | --overwrite-file=true \ 73 | --scale-factor=0.01 74 | 75 | COPY --chown=app_user:app_user sqlflite_server /usr/local/bin/sqlflite_server 76 | RUN chmod +x /usr/local/bin/sqlflite_server 77 | 78 | COPY --chown=app_user:app_user sqlflite_client /usr/local/bin/sqlflite_client 79 | RUN chmod +x /usr/local/bin/sqlflite_client 80 | 81 | COPY --chown=app_user:app_user tls tls 82 | 83 | # Install DuckDB CLI for troubleshooting, etc. 84 | ARG DUCKDB_VERSION="1.1.1" 85 | 86 | RUN case ${TARGETPLATFORM} in \ 87 | "linux/amd64") DUCKDB_FILE=https://github.com/duckdb/duckdb/releases/download/v${DUCKDB_VERSION}/duckdb_cli-linux-amd64.zip ;; \ 88 | "linux/arm64") DUCKDB_FILE=https://github.com/duckdb/duckdb/releases/download/v${DUCKDB_VERSION}/duckdb_cli-linux-aarch64.zip ;; \ 89 | esac && \ 90 | curl --output /tmp/duckdb.zip --location ${DUCKDB_FILE} && \ 91 | unzip /tmp/duckdb.zip -d /usr/local/bin && \ 92 | rm /tmp/duckdb.zip 93 | 94 | EXPOSE 31337 95 | 96 | # Run a test to ensure that the server works... 97 | RUN scripts/test_sqlflite.sh 98 | 99 | ENTRYPOINT scripts/start_sqlflite.sh 100 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2023 Voltron Data 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /data/.gitignore: -------------------------------------------------------------------------------- 1 | * -------------------------------------------------------------------------------- /helm-chart/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *.orig 18 | *~ 19 | # Various IDEs 20 | .project 21 | .idea/ 22 | *.tmproj 23 | .vscode/ 24 | -------------------------------------------------------------------------------- /helm-chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: sqlflite 3 | description: A Helm chart for Kubernetes 4 | 5 | # A chart can be either an 'application' or a 'library' chart. 6 | # 7 | # Application charts are a collection of templates that can be packaged into versioned archives 8 | # to be deployed. 9 | # 10 | # Library charts provide useful utilities or functions for the chart developer. They're included as 11 | # a dependency of application charts to inject those utilities and functions into the rendering 12 | # pipeline. Library charts do not define any templates and therefore cannot be deployed. 13 | type: application 14 | 15 | # This is the chart version. This version number should be incremented each time you make changes 16 | # to the chart and its templates, including the app version. 17 | # Versions are expected to follow Semantic Versioning (https://semver.org/) 18 | version: 0.1.0 19 | 20 | # This is the version number of the application being deployed. This version number should be 21 | # incremented each time you make changes to the application. Versions are not expected to 22 | # follow Semantic Versioning. They should reflect the version the application is using. 23 | # It is recommended to use it with quotes. 24 | appVersion: "1.5.0" 25 | -------------------------------------------------------------------------------- /helm-chart/install_helm_chart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | kubectl config set-context --current --namespace=sqlflite 6 | 7 | helm upgrade demo \ 8 | --install . \ 9 | --namespace sqlflite \ 10 | --create-namespace \ 11 | --values values.yaml 12 | -------------------------------------------------------------------------------- /helm-chart/secrets/README.md: -------------------------------------------------------------------------------- 1 | # Needed files to support the helm chart 2 | Use the "opaque" folder to create opaque secrets. 
3 | 4 | Use the "tls" folder to create tls secrets (cert0.pem, cert0.key, ca.crt) 5 | 6 | The files are git ignored for security reasons. 7 | -------------------------------------------------------------------------------- /helm-chart/secrets/opaque/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | -------------------------------------------------------------------------------- /helm-chart/secrets/opaque/README.md: -------------------------------------------------------------------------------- 1 | # Needed files to support the helm chart 2 | Create the following files in this folder with the EXACT names shown (no extensions): 3 | 1. SQLFLITE_PASSWORD 4 | 5 | You may set the contents of the files however you choose. 6 | 7 | The files are git ignored for security reasons. 8 | 9 | # AWS variables (optional) 10 | Authenticate to AWS S3, choose your project, then choose: "Command line or programmatic access" to get the values for the AWS_* variables. 11 | 12 | Create files with these EXACT names for the AWS variables - and put the values provided by AWS into the corresponding files: 13 | 1. AWS_ACCESS_KEY_ID 14 | 2. AWS_DEFAULT_REGION 15 | 3. AWS_SECRET_ACCESS_KEY 16 | 4. AWS_SESSION_TOKEN 17 | -------------------------------------------------------------------------------- /helm-chart/secrets/tls/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | -------------------------------------------------------------------------------- /helm-chart/secrets/tls/README.md: -------------------------------------------------------------------------------- 1 | # Needed files to support the helm chart 2 | Create the following files in this folder with the EXACT names shown: 3 | 1. cert0.pem 4 | 2. cert0.key 5 | 6 | Descriptions: 7 | cert0.pem - Full-chain TLS Public Certificate 8 | cert0.key - TLS Private Key 9 | 10 | The files are git ignored for security reasons. 11 | -------------------------------------------------------------------------------- /helm-chart/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | SQLFlite has been installed. 2 | 3 | 1. Get the JDBC URL and ADBC URI by running these commands: 4 | 5 | ```bash 6 | export SERVICE_NAME={{ include "sqlflite.fullname" . }} 7 | export NAMESPACE={{ .Release.Namespace }} 8 | 9 | # Wait for the external IP to be allocated 10 | unset EXTERNAL_IP 11 | while [ -z "$EXTERNAL_IP" ]; do 12 | EXTERNAL_IP=$(kubectl get svc $SERVICE_NAME -n $NAMESPACE -o jsonpath='{.status.loadBalancer.ingress[0].hostname}') 13 | [ -z "$EXTERNAL_IP" ] && echo "Waiting for external IP..." && sleep 10 14 | done 15 | 16 | PORT=$(kubectl get svc $SERVICE_NAME -n $NAMESPACE -o jsonpath='{.spec.ports[0].port}') 17 | echo -e "\nJDBC URL:\njdbc:arrow-flight-sql://${EXTERNAL_IP}:${PORT}?useEncryption=true&disableCertificateVerification=true" 18 | 19 | echo -e "\nADBC URI:\ngrpc+tls://${EXTERNAL_IP}:${PORT}" 20 | ``` 21 | -------------------------------------------------------------------------------- /helm-chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "sqlflite.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 
10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "sqlflite.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "sqlflite.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Common labels 35 | */}} 36 | {{- define "sqlflite.labels" -}} 37 | helm.sh/chart: {{ include "sqlflite.chart" . }} 38 | {{ include "sqlflite.selectorLabels" . }} 39 | {{- if .Chart.AppVersion }} 40 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 41 | {{- end }} 42 | app.kubernetes.io/managed-by: {{ .Release.Service }} 43 | {{- end }} 44 | 45 | {{/* 46 | Selector labels 47 | */}} 48 | {{- define "sqlflite.selectorLabels" -}} 49 | app.kubernetes.io/name: {{ include "sqlflite.name" . }} 50 | app.kubernetes.io/instance: {{ .Release.Name }} 51 | {{- end }} 52 | -------------------------------------------------------------------------------- /helm-chart/templates/global-secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: {{ include "sqlflite.fullname" . }}-secret 5 | labels: 6 | {{- include "sqlflite.labels" . | nindent 4 }} 7 | type: Opaque 8 | data: 9 | {{ (.Files.Glob "secrets/opaque/*").AsSecrets | indent 2 }} 10 | -------------------------------------------------------------------------------- /helm-chart/templates/server-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "sqlflite.fullname" . }} 5 | labels: 6 | {{- include "sqlflite.labels" . | nindent 4 }} 7 | spec: 8 | type: {{ .Values.sqlFliteServer.service.type }} 9 | ports: 10 | - port: {{ .Values.sqlFliteServer.service.port }} 11 | targetPort: sqlflite-port 12 | protocol: TCP 13 | name: sqlflite-port 14 | selector: 15 | {{- include "sqlflite.selectorLabels" . | nindent 4 }} 16 | app: sqlflite-server 17 | -------------------------------------------------------------------------------- /helm-chart/templates/server-statefulset.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: StatefulSet 3 | metadata: 4 | name: {{ include "sqlflite.fullname" . }}-server 5 | labels: 6 | {{- include "sqlflite.labels" . | nindent 4 }} 7 | app: sqlflite-server 8 | spec: 9 | replicas: 1 10 | selector: 11 | matchLabels: 12 | {{- include "sqlflite.selectorLabels" . | nindent 6 }} 13 | app: sqlflite-server 14 | serviceName: sqlflite 15 | template: 16 | metadata: 17 | annotations: 18 | checksum/config: {{ include (print $.Template.BasePath "/global-secret.yaml") . | sha256sum }} 19 | {{- with .Values.sqlFliteServer.podAnnotations }} 20 | {{- toYaml . | nindent 8 }} 21 | {{- end }} 22 | labels: 23 | {{- include "sqlflite.selectorLabels" . 
| nindent 8 }} 24 | app: sqlflite-server 25 | spec: 26 | {{- with .Values.sqlFliteServer.imagePullSecrets }} 27 | imagePullSecrets: 28 | {{- toYaml . | nindent 8 }} 29 | {{- end }} 30 | securityContext: 31 | {{- toYaml .Values.sqlFliteServer.podSecurityContext | nindent 8 }} 32 | volumes: 33 | # See: https://aws.amazon.com/blogs/containers/eks-persistent-volumes-for-instance-store/ for details on using local NVMe for supported nodes 34 | # Uncomment the following lines to mount the local NVMe disk as a data volume 35 | # - name: nvme-volume 36 | # hostPath: 37 | # # directory location on host 38 | # path: /nvme/disk 39 | # # this field is optional 40 | # type: Directory 41 | - name: tls-volume 42 | secret: 43 | secretName: {{ include "sqlflite.fullname" . }}-tls-secret 44 | optional: true 45 | # Uncomment the following lines to mount the local NVMe disk as a data volume 46 | # initContainers: 47 | # - name: volume-mount-chmod 48 | # image: busybox 49 | # command: [ "sh", "-c", "chmod 777 /data" ] 50 | # volumeMounts: 51 | # - name: nvme-volume 52 | # mountPath: /data 53 | containers: 54 | - name: sqlflite 55 | volumeMounts: 56 | # Uncomment the following lines to mount the local NVMe disk as a data volume 57 | # - name: nvme-volume 58 | # mountPath: /opt/sqlflite/data 59 | - name: tls-volume 60 | mountPath: /opt/sqlflite/tls 61 | readOnly: true 62 | env: 63 | - name: DATABASE_BACKEND 64 | value: duckdb 65 | - name: DATABASE_FILENAME 66 | value: {{ .Values.sqlFliteServer.config.DATABASE_FILENAME }} 67 | - name: SQLFLITE_PASSWORD 68 | valueFrom: 69 | secretKeyRef: 70 | name: {{ include "sqlflite.fullname" . }}-secret 71 | key: SQLFLITE_PASSWORD 72 | optional: false 73 | - name: AWS_ACCESS_KEY_ID 74 | valueFrom: 75 | secretKeyRef: 76 | name: {{ include "sqlflite.fullname" . }}-secret 77 | key: AWS_ACCESS_KEY_ID 78 | optional: true 79 | - name: AWS_SECRET_ACCESS_KEY 80 | valueFrom: 81 | secretKeyRef: 82 | name: {{ include "sqlflite.fullname" . }}-secret 83 | key: AWS_SECRET_ACCESS_KEY 84 | optional: true 85 | - name: AWS_SESSION_TOKEN 86 | valueFrom: 87 | secretKeyRef: 88 | name: {{ include "sqlflite.fullname" . }}-secret 89 | key: AWS_SESSION_TOKEN 90 | optional: true 91 | - name: AWS_DEFAULT_REGION 92 | valueFrom: 93 | secretKeyRef: 94 | name: {{ include "sqlflite.fullname" . }}-secret 95 | key: AWS_DEFAULT_REGION 96 | optional: true 97 | securityContext: 98 | {{- toYaml .Values.sqlFliteServer.securityContext | nindent 12 }} 99 | image: "{{ .Values.sqlFliteServer.image.repository }}:{{ .Values.sqlFliteServer.image.tag | default .Chart.AppVersion }}" 100 | imagePullPolicy: {{ .Values.sqlFliteServer.image.pullPolicy }} 101 | ports: 102 | - name: sqlflite-port 103 | containerPort: {{ .Values.sqlFliteServer.service.port }} 104 | protocol: TCP 105 | # livenessProbe: 106 | # httpGet: 107 | # path: / 108 | # port: http 109 | # readinessProbe: 110 | # httpGet: 111 | # path: / 112 | # port: http 113 | resources: 114 | {{- toYaml .Values.sqlFliteServer.resources | nindent 12 }} 115 | {{- with .Values.sqlFliteServer.nodeSelector }} 116 | nodeSelector: 117 | {{- toYaml . | nindent 8 }} 118 | {{- end }} 119 | {{- with .Values.sqlFliteServer.affinity }} 120 | affinity: 121 | {{- toYaml . | nindent 8 }} 122 | {{- end }} 123 | {{- with .Values.sqlFliteServer.tolerations }} 124 | tolerations: 125 | {{- toYaml . 
| nindent 8 }} 126 | {{- end }} 127 | -------------------------------------------------------------------------------- /helm-chart/templates/tls-secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: {{ include "sqlflite.fullname" . }}-tls-secret 5 | labels: 6 | {{- include "sqlflite.labels" . | nindent 4 }} 7 | type: Opaque 8 | data: 9 | {{ (.Files.Glob "secrets/tls/*").AsSecrets | indent 2 }} 10 | -------------------------------------------------------------------------------- /helm-chart/values.yaml: -------------------------------------------------------------------------------- 1 | sqlFliteServer: 2 | hostName: sqlflite.example.com 3 | replicaCount: 1 4 | 5 | config: 6 | DATABASE_BACKEND: duckdb 7 | DATABASE_FILENAME: data/TPC-H-small.duckdb 8 | 9 | image: 10 | repository: voltrondata/sqlflite 11 | pullPolicy: Always 12 | # Overrides the image tag whose default is the chart appVersion. 13 | tag: latest 14 | 15 | imagePullSecrets: [] 16 | nameOverride: "" 17 | fullnameOverride: "" 18 | 19 | podAnnotations: {} 20 | 21 | podSecurityContext: 22 | fsGroup: 1000 23 | 24 | securityContext: {} 25 | 26 | service: 27 | type: LoadBalancer 28 | port: 31337 29 | 30 | resources: 31 | limits: 32 | cpu: 500m 33 | memory: 6Gi 34 | requests: 35 | cpu: 500m 36 | memory: 6Gi 37 | 38 | nodeSelector: 39 | instanceType: r7gd.medium # Change this to match your node's label(s) 40 | 41 | # tolerations: 42 | # - key: sidewinder 43 | # operator: Equal 44 | # value: "true" 45 | # effect: NoSchedule 46 | 47 | affinity: {} 48 | -------------------------------------------------------------------------------- /macos/entitlements.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | com.apple.security.cs.disable-library-validation 7 | 8 | 9 | -------------------------------------------------------------------------------- /queries/h01.sql: -------------------------------------------------------------------------------- 1 | SELECT l_returnflag, 2 | l_linestatus, 3 | sum(l_quantity) AS sum_qty, 4 | sum(l_extendedprice) AS sum_base_price, 5 | sum(l_extendedprice*(1-l_discount)) AS sum_disc_price, 6 | sum(l_extendedprice*(1-l_discount)*(1+l_tax)) AS sum_charge, 7 | avg(l_quantity) AS avg_qty, 8 | avg(l_extendedprice) AS avg_price, 9 | avg(l_discount) AS avg_disc, 10 | count(*) AS count_order 11 | FROM lineitem 12 | WHERE l_shipdate <= '1998-09-01' -- date '1998-12-01' - interval '[DELTA=90]' DAY 13 | GROUP BY l_returnflag, 14 | l_linestatus 15 | ORDER BY l_returnflag, 16 | l_linestatus; 17 | -------------------------------------------------------------------------------- /queries/h02.sql: -------------------------------------------------------------------------------- 1 | SELECT s_acctbal, 2 | s_name, 3 | n_name, 4 | p_partkey, 5 | p_mfgr, 6 | s_address, 7 | s_phone, 8 | s_comment 9 | FROM part, 10 | supplier, 11 | partsupp, 12 | nation, 13 | region 14 | WHERE p_partkey = ps_partkey 15 | AND s_suppkey = ps_suppkey 16 | AND p_size = 25 -- [SIZE] 17 | AND p_type like '%BRASS' -- '%[TYPE]' 18 | AND s_nationkey = n_nationkey 19 | AND n_regionkey = r_regionkey 20 | AND r_name = 'EUROPE' -- '[REGION]' 21 | AND ps_supplycost = 22 | (SELECT min(ps_supplycost) 23 | FROM partsupp, 24 | supplier, 25 | nation, 26 | region 27 | WHERE p_partkey = ps_partkey 28 | AND s_suppkey = ps_suppkey 29 | AND s_nationkey = n_nationkey 30 | AND n_regionkey = r_regionkey 31 | AND r_name = 
'EUROPE') -- '[REGION]' ) 32 | ORDER BY s_acctbal DESC, 33 | n_name, 34 | s_name, 35 | p_partkey 36 | LIMIT 100 37 | ; 38 | -------------------------------------------------------------------------------- /queries/h03.sql: -------------------------------------------------------------------------------- 1 | SELECT l_orderkey, 2 | sum(l_extendedprice * (1 - l_discount)) AS revenue, 3 | o_orderdate, 4 | o_shippriority 5 | FROM customer, 6 | orders, 7 | lineitem 8 | WHERE c_mktsegment = 'BUILDING' 9 | AND c_custkey = o_custkey 10 | AND l_orderkey = o_orderkey 11 | AND o_orderdate < '1995-03-15' 12 | AND l_shipdate > '1995-03-15' 13 | GROUP BY l_orderkey, 14 | o_orderdate, 15 | o_shippriority 16 | ORDER BY revenue DESC, 17 | o_orderdate 18 | LIMIT 10 19 | ; 20 | -------------------------------------------------------------------------------- /queries/h04.sql: -------------------------------------------------------------------------------- 1 | SELECT o_orderpriority, 2 | count(*) AS order_count 3 | FROM orders 4 | WHERE o_orderdate >= '1993-07-01' 5 | AND o_orderdate < '1993-10-01' 6 | AND EXISTS 7 | (SELECT * 8 | FROM lineitem 9 | WHERE l_orderkey = o_orderkey 10 | AND l_commitdate < l_receiptdate ) 11 | GROUP BY o_orderpriority 12 | ORDER BY o_orderpriority 13 | ; 14 | -------------------------------------------------------------------------------- /queries/h05.sql: -------------------------------------------------------------------------------- 1 | SELECT n_name, 2 | sum(l_extendedprice * (1 - l_discount)) AS revenue 3 | FROM customer, 4 | orders, 5 | lineitem, 6 | supplier, 7 | nation, 8 | region 9 | WHERE c_custkey = o_custkey 10 | AND l_orderkey = o_orderkey 11 | AND l_suppkey = s_suppkey 12 | AND c_nationkey = s_nationkey 13 | AND s_nationkey = n_nationkey 14 | AND n_regionkey = r_regionkey 15 | AND r_name = 'ASIA' 16 | AND o_orderdate >= '1994-01-01' 17 | AND o_orderdate < '1995-01-01' 18 | GROUP BY n_name 19 | ORDER BY revenue DESC 20 | ; 21 | -------------------------------------------------------------------------------- /queries/h06.sql: -------------------------------------------------------------------------------- 1 | SELECT sum(l_extendedprice * l_discount) AS revenue 2 | FROM lineitem 3 | WHERE l_shipdate >= '1994-01-01' 4 | AND l_shipdate < '1995-01-01' 5 | AND l_discount BETWEEN 0.05 AND 0.07 6 | AND l_quantity < 24 7 | ; 8 | -------------------------------------------------------------------------------- /queries/h07.sql: -------------------------------------------------------------------------------- 1 | SELECT supp_nation, 2 | cust_nation, 3 | l_year, 4 | sum(volume) AS revenue 5 | FROM 6 | (SELECT n1.n_name AS supp_nation, 7 | n2.n_name AS cust_nation, 8 | strftime('%Y', l_shipdate) AS l_year, 9 | l_extendedprice * (1 - l_discount) AS volume 10 | FROM supplier, 11 | lineitem, 12 | orders, 13 | customer, 14 | nation n1, 15 | nation n2 16 | WHERE s_suppkey = l_suppkey 17 | AND o_orderkey = l_orderkey 18 | AND c_custkey = o_custkey 19 | AND s_nationkey = n1.n_nationkey 20 | AND c_nationkey = n2.n_nationkey 21 | AND ((n1.n_name = 'FRANCE' 22 | AND n2.n_name = 'GERMANY') 23 | OR (n1.n_name = 'GERMANY' 24 | AND n2.n_name = 'FRANCE')) 25 | AND l_shipdate BETWEEN '1995-01-01' AND '1996-12-31' ) AS shipping 26 | GROUP BY supp_nation, 27 | cust_nation, 28 | l_year 29 | ORDER BY supp_nation, 30 | cust_nation, 31 | l_year 32 | ; 33 | -------------------------------------------------------------------------------- /queries/h08.sql: 
-------------------------------------------------------------------------------- 1 | SELECT o_year, 2 | sum(CASE 3 | WHEN nation = 'BRAZIL' THEN volume 4 | ELSE 0 5 | END) / sum(volume) AS mkt_share 6 | FROM 7 | (SELECT strftime('%Y', o_orderdate) AS o_year, 8 | l_extendedprice * (1 - l_discount) AS volume, 9 | n2.n_name AS nation 10 | FROM part, 11 | supplier, 12 | lineitem, 13 | orders, 14 | customer, 15 | nation n1, 16 | nation n2, 17 | region 18 | WHERE p_partkey = l_partkey 19 | AND s_suppkey = l_suppkey 20 | AND l_orderkey = o_orderkey 21 | AND o_custkey = c_custkey 22 | AND c_nationkey = n1.n_nationkey 23 | AND n1.n_regionkey = r_regionkey 24 | AND r_name = 'AMERICA' 25 | AND s_nationkey = n2.n_nationkey 26 | AND o_orderdate BETWEEN '1995-01-01' AND '1996-12-31' 27 | AND p_type = 'ECONOMY ANODIZED STEEL' ) AS all_nations 28 | GROUP BY o_year 29 | ORDER BY o_year 30 | ; 31 | -------------------------------------------------------------------------------- /queries/h09.sql: -------------------------------------------------------------------------------- 1 | SELECT nation, 2 | o_year, 3 | sum(amount) AS sum_profit 4 | FROM 5 | (SELECT n_name AS nation, 6 | strftime('%Y', o_orderdate) AS o_year, 7 | l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity AS amount 8 | FROM part, 9 | supplier, 10 | lineitem, 11 | partsupp, 12 | orders, 13 | nation 14 | WHERE s_suppkey = l_suppkey 15 | AND ps_suppkey = l_suppkey 16 | AND ps_partkey = l_partkey 17 | AND p_partkey = l_partkey 18 | AND o_orderkey = l_orderkey 19 | AND s_nationkey = n_nationkey 20 | AND p_name like '%green%' ) AS profit 21 | GROUP BY nation, 22 | o_year 23 | ORDER BY nation, 24 | o_year DESC ; 25 | -------------------------------------------------------------------------------- /queries/h10.sql: -------------------------------------------------------------------------------- 1 | SELECT c_custkey, 2 | c_name, 3 | sum(l_extendedprice * (1 - l_discount)) AS revenue, 4 | c_acctbal, 5 | n_name, 6 | c_address, 7 | c_phone, 8 | c_comment 9 | FROM customer, 10 | orders, 11 | lineitem, 12 | nation 13 | WHERE c_custkey = o_custkey 14 | AND l_orderkey = o_orderkey 15 | AND o_orderdate >= '1993-10-01' 16 | AND o_orderdate < '1994-01-01' 17 | AND l_returnflag = 'R' 18 | AND c_nationkey = n_nationkey 19 | GROUP BY c_custkey, 20 | c_name, 21 | c_acctbal, 22 | c_phone, 23 | n_name, 24 | c_address, 25 | c_comment 26 | ORDER BY revenue DESC 27 | LIMIT 20 ; 28 | -------------------------------------------------------------------------------- /queries/h11.sql: -------------------------------------------------------------------------------- 1 | SELECT ps_partkey, 2 | sum(ps_supplycost * ps_availqty) AS value 3 | FROM partsupp, 4 | supplier, 5 | nation 6 | WHERE ps_suppkey = s_suppkey 7 | AND s_nationkey = n_nationkey 8 | AND n_name = 'GERMANY' 9 | GROUP BY ps_partkey 10 | HAVING sum(ps_supplycost * ps_availqty) > 11 | (SELECT sum(ps_supplycost * ps_availqty) * .0001 -- FRACTION = .0001/SF 12 | FROM partsupp, 13 | supplier, 14 | nation 15 | WHERE ps_suppkey = s_suppkey 16 | AND s_nationkey = n_nationkey 17 | AND n_name = 'GERMANY' ) 18 | ORDER BY value DESC 19 | ; 20 | -------------------------------------------------------------------------------- /queries/h12.sql: -------------------------------------------------------------------------------- 1 | SELECT l_shipmode, 2 | sum(CASE 3 | WHEN o_orderpriority ='1-URGENT' 4 | OR o_orderpriority ='2-HIGH' THEN 1 5 | ELSE 0 6 | END) AS high_line_count, 7 | sum(CASE 8 | WHEN o_orderpriority <> 
'1-URGENT' 9 | AND o_orderpriority <> '2-HIGH' THEN 1 10 | ELSE 0 11 | END) AS low_line_count 12 | FROM orders, 13 | lineitem 14 | WHERE o_orderkey = l_orderkey 15 | AND l_shipmode in ('MAIL', 16 | 'SHIP') 17 | AND l_commitdate < l_receiptdate 18 | AND l_shipdate < l_commitdate 19 | AND l_receiptdate >= '1994-01-01' 20 | AND l_receiptdate < '1995-01-01' 21 | GROUP BY l_shipmode 22 | ORDER BY l_shipmode 23 | ; 24 | -------------------------------------------------------------------------------- /queries/h13.sql: -------------------------------------------------------------------------------- 1 | SELECT c_count, 2 | count(*) AS custdist 3 | FROM 4 | (SELECT c_custkey, 5 | count(o_orderkey) AS c_count 6 | FROM customer 7 | LEFT OUTER JOIN orders ON c_custkey = o_custkey 8 | AND o_comment NOT LIKE '%special%requests%' 9 | GROUP BY c_custkey) 10 | GROUP BY c_count 11 | ORDER BY custdist DESC, 12 | c_count DESC 13 | ; 14 | -------------------------------------------------------------------------------- /queries/h14.sql: -------------------------------------------------------------------------------- 1 | SELECT 100.00 * sum(CASE 2 | WHEN p_type like 'PROMO%' THEN l_extendedprice*(1-l_discount) 3 | ELSE 0 4 | END) / sum(l_extendedprice * (1 - l_discount)) AS promo_revenue 5 | FROM lineitem, 6 | part 7 | WHERE l_partkey = p_partkey 8 | AND l_shipdate >= '1995-09-01' 9 | AND l_shipdate < '1995-10-01' 10 | ; 11 | -------------------------------------------------------------------------------- /queries/h15.sql: -------------------------------------------------------------------------------- 1 | SELECT s_suppkey, 2 | s_name, 3 | s_address, 4 | s_phone, 5 | total_revenue 6 | FROM supplier, 7 | 8 | (SELECT l_suppkey AS supplier_no, 9 | sum(l_extendedprice * (1 - l_discount)) AS total_revenue 10 | FROM lineitem 11 | WHERE l_shipdate >= '1996-01-01' 12 | AND l_shipdate < '1996-04-01' 13 | GROUP BY supplier_no) revenue0 14 | WHERE s_suppkey = supplier_no 15 | AND total_revenue = 16 | (SELECT max(total_revenue) 17 | FROM 18 | (SELECT l_suppkey AS supplier_no, 19 | sum(l_extendedprice * (1 - l_discount)) AS total_revenue 20 | FROM lineitem 21 | WHERE l_shipdate >= '1996-01-01' 22 | AND l_shipdate < '1996-04-01' 23 | GROUP BY supplier_no) revenue1) 24 | ORDER BY s_suppkey 25 | ; 26 | -------------------------------------------------------------------------------- /queries/h16.sql: -------------------------------------------------------------------------------- 1 | SELECT p_brand, 2 | p_type, 3 | p_size, 4 | count(DISTINCT ps_suppkey) AS supplier_cnt 5 | FROM partsupp, 6 | part 7 | WHERE p_partkey = ps_partkey 8 | AND p_brand <> 'Brand#45' 9 | AND p_type not like 'MEDIUM POLISHED%' 10 | AND p_size in (49, 14, 23, 45, 19, 3, 36, 9) 11 | AND ps_suppkey not in 12 | (SELECT s_suppkey 13 | FROM supplier 14 | WHERE s_comment like '%Customer%Complaints%' ) 15 | GROUP BY p_brand, 16 | p_type, 17 | p_size 18 | ORDER BY supplier_cnt DESC, 19 | p_brand, 20 | p_type, 21 | p_size 22 | ; 23 | -------------------------------------------------------------------------------- /queries/h17.sql: -------------------------------------------------------------------------------- 1 | SELECT sum(l_extendedprice) / 7.0 AS avg_yearly 2 | FROM lineitem, 3 | part 4 | WHERE p_partkey = l_partkey 5 | AND p_brand = 'Brand#23' 6 | AND p_container = 'MED BOX' 7 | AND l_quantity < 8 | (SELECT 0.2 * avg(l_quantity) 9 | FROM lineitem 10 | WHERE l_partkey = p_partkey ) 11 | ; 12 | 
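Editor's note: the statements in queries/ are the stock TPC-H queries shipped with the repo, and they can be run against a live sqlflite server through the ADBC Flight SQL driver pinned in requirements.txt. The sketch below is illustrative only and mirrors scripts/test_sqlflite.py; it assumes a server already listening on grpc+tls://localhost:31337 with the TPC-H DuckDB database loaded and SQLFLITE_PASSWORD exported, and the read_query helper plus the choice of queries/h17.sql are hypothetical, not part of the repository.

import os
from pathlib import Path

from adbc_driver_flightsql import dbapi as sqlflite, DatabaseOptions


def read_query(path: str) -> str:
    # Load one of the bundled TPC-H statements from the queries/ directory (assumed path).
    return Path(path).read_text()


with sqlflite.connect(
    uri="grpc+tls://localhost:31337",
    db_kwargs={
        "username": "sqlflite_username",
        "password": os.environ["SQLFLITE_PASSWORD"],
        # Only needed for the self-signed certs produced by tls/gen-certs.sh
        DatabaseOptions.TLS_SKIP_VERIFY.value: "true",
    },
) as conn:
    with conn.cursor() as cur:
        cur.execute(read_query("queries/h17.sql"))
        print(cur.fetch_arrow_table())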
-------------------------------------------------------------------------------- /queries/h18.sql: -------------------------------------------------------------------------------- 1 | SELECT c_name, 2 | c_custkey, 3 | o_orderkey, 4 | o_orderdate, 5 | o_totalprice, 6 | sum(l_quantity) AS sum_qty 7 | FROM customer, 8 | orders, 9 | lineitem 10 | WHERE o_orderkey in 11 | (SELECT l_orderkey 12 | FROM lineitem 13 | GROUP BY l_orderkey 14 | HAVING sum(l_quantity) > 300) 15 | AND c_custkey = o_custkey 16 | AND o_orderkey = l_orderkey 17 | GROUP BY c_name, 18 | c_custkey, 19 | o_orderkey, 20 | o_orderdate, 21 | o_totalprice 22 | ORDER BY o_totalprice DESC, 23 | o_orderdate 24 | LIMIT 100 25 | ; 26 | -------------------------------------------------------------------------------- /queries/h19.sql: -------------------------------------------------------------------------------- 1 | SELECT sum(l_extendedprice * (1 - l_discount)) AS revenue 2 | FROM lineitem, 3 | part 4 | WHERE (p_partkey = l_partkey 5 | AND p_brand = 'Brand#12' 6 | AND p_container in ('SM CASE', 7 | 'SM BOX', 8 | 'SM PACK', 9 | 'SM PKG') 10 | AND l_quantity >= 1 11 | AND l_quantity <= 11 12 | AND p_size BETWEEN 1 AND 5 13 | AND l_shipmode in ('AIR', 14 | 'AIR REG') 15 | AND l_shipinstruct = 'DELIVER IN PERSON') 16 | OR (p_partkey = l_partkey 17 | AND p_brand = 'Brand#23' 18 | AND p_container in ('MED BAG', 19 | 'MED BOX', 20 | 'MED PKG', 21 | 'MED PACK') 22 | AND l_quantity >= 10 23 | AND l_quantity <= 20 24 | AND p_size BETWEEN 1 AND 10 25 | AND l_shipmode in ('AIR', 26 | 'AIR REG') 27 | AND l_shipinstruct = 'DELIVER IN PERSON') 28 | OR (p_partkey = l_partkey 29 | AND p_brand = 'Brand#34' 30 | AND p_container in ('LG CASE', 31 | 'LG BOX', 32 | 'LG PACK', 33 | 'LG PKG') 34 | AND l_quantity >= 20 35 | AND l_quantity <= 30 36 | AND p_size BETWEEN 1 AND 15 37 | AND l_shipmode in ('AIR', 38 | 'AIR REG') 39 | AND l_shipinstruct = 'DELIVER IN PERSON') 40 | ; 41 | -------------------------------------------------------------------------------- /queries/h20.sql: -------------------------------------------------------------------------------- 1 | SELECT s_name, 2 | s_address 3 | FROM supplier, 4 | nation 5 | WHERE s_suppkey in 6 | (SELECT ps_suppkey 7 | FROM partsupp 8 | WHERE ps_partkey in 9 | (SELECT p_partkey 10 | FROM part 11 | WHERE p_name like 'forest%' ) 12 | AND ps_availqty > 13 | (SELECT 0.5 * sum(l_quantity) 14 | FROM lineitem 15 | WHERE l_partkey = ps_partkey 16 | AND l_suppkey = ps_suppkey 17 | AND l_shipdate >= '1994-01-01' 18 | AND l_shipdate < '1995-01-01' ) ) 19 | AND s_nationkey = n_nationkey 20 | AND n_name = 'CANADA' 21 | ORDER BY s_name 22 | ; 23 | -------------------------------------------------------------------------------- /queries/h21.sql: -------------------------------------------------------------------------------- 1 | SELECT s_name, 2 | count(*) AS numwait 3 | FROM supplier, 4 | lineitem l1, 5 | orders, 6 | nation 7 | WHERE s_suppkey = l1.l_suppkey 8 | AND o_orderkey = l1.l_orderkey 9 | AND o_orderstatus = 'F' 10 | AND l1.l_receiptdate > l1.l_commitdate 11 | AND EXISTS 12 | (SELECT * 13 | FROM lineitem l2 14 | WHERE l2.l_orderkey = l1.l_orderkey 15 | AND l2.l_suppkey <> l1.l_suppkey ) 16 | AND NOT EXISTS 17 | (SELECT * 18 | FROM lineitem l3 19 | WHERE l3.l_orderkey = l1.l_orderkey 20 | AND l3.l_suppkey <> l1.l_suppkey 21 | AND l3.l_receiptdate > l3.l_commitdate ) 22 | AND s_nationkey = n_nationkey 23 | AND n_name = 'SAUDI ARABIA' 24 | GROUP BY s_name 25 | ORDER BY numwait DESC, 26 | s_name 27 | LIMIT 100 28 | ; 
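Editor's note: several of these queries keep the original TPC-H substitution parameters as comments beside a hard-coded literal (for example '-- [REGION]' and '-- [SIZE]' in h02.sql). If you want to vary such values at run time, the same DB-API cursor used in scripts/test_sqlflite.py accepts qmark-style bind parameters. The snippet below is a minimal, hypothetical sketch under the same connection assumptions as above; replacing the literal with '?' is an illustration, not how the bundled query files are written.

import os

from adbc_driver_flightsql import dbapi as sqlflite, DatabaseOptions

with sqlflite.connect(
    uri="grpc+tls://localhost:31337",
    db_kwargs={
        "username": "sqlflite_username",
        "password": os.environ["SQLFLITE_PASSWORD"],
        DatabaseOptions.TLS_SKIP_VERIFY.value: "true",
    },
) as conn, conn.cursor() as cur:
    # Bind the region name instead of hard-coding 'EUROPE' the way queries/h02.sql does.
    cur.execute(
        "SELECT r_regionkey, r_name FROM region WHERE r_name = ?",
        parameters=["EUROPE"],
    )
    print(cur.fetch_arrow_table())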
29 | -------------------------------------------------------------------------------- /queries/h22.sql: -------------------------------------------------------------------------------- 1 | SELECT cntrycode, 2 | COUNT(*) AS numcust, 3 | SUM(c_acctbal) AS totacctbal 4 | FROM 5 | (SELECT SUBSTR(c_phone, 1, 2) AS cntrycode, 6 | c_acctbal 7 | FROM customer 8 | WHERE SUBSTR(c_phone, 1, 2) 9 | IN ('13', '31', '23', '29', '30', '18', '17') 10 | AND c_acctbal > 11 | (SELECT AVG(c_acctbal) 12 | FROM customer 13 | WHERE c_acctbal > 0.00 14 | AND SUBSTR(c_phone, 1, 2) 15 | IN ('13', '31', '23', '29', '30', '18', '17') ) 16 | AND NOT EXISTS 17 | (SELECT * 18 | FROM orders 19 | WHERE o_custkey = c_custkey ) ) AS custsale 20 | GROUP BY cntrycode 21 | ORDER BY cntrycode 22 | ; 23 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas==2.2.* 2 | duckdb==1.1.1 3 | click==8.1.* 4 | pyarrow==17.0.0 5 | adbc-driver-flightsql==1.2.* 6 | adbc-driver-manager==1.2.* 7 | -------------------------------------------------------------------------------- /scripts/create_duckdb_database_file.py: -------------------------------------------------------------------------------- 1 | import duckdb 2 | import os 3 | from pathlib import Path 4 | import click 5 | 6 | 7 | DIR_PATH = os.path.dirname(os.path.realpath(__file__)) 8 | 9 | 10 | @click.command() 11 | @click.option( 12 | "--file-name", 13 | type=str, 14 | required=True, 15 | help="The name of the DuckDB database file to create." 16 | ) 17 | @click.option( 18 | "--file-path", 19 | type=str, 20 | default=f"{DIR_PATH}/../data", 21 | show_default=True, 22 | help="The target directory path for the DuckDB database file" 23 | ) 24 | @click.option( 25 | "--overwrite-file", 26 | type=bool, 27 | default=False, 28 | required=True, 29 | help="Overwrite the DuckDB database file if it exists..." 30 | ) 31 | @click.option( 32 | "--scale-factor", 33 | type=float, 34 | default=0.01, 35 | show_default=True, 36 | required=True, 37 | help="The TPC-H Scale factor used to create the DuckDB database file." 38 | ) 39 | def main(file_name: str, 40 | file_path: str, 41 | overwrite_file: bool, 42 | scale_factor: float 43 | ): 44 | data_dir_path = Path(file_path) 45 | duckdb_db_file = data_dir_path / file_name 46 | 47 | if os.path.exists(path=duckdb_db_file): 48 | if overwrite_file: 49 | os.remove(path=duckdb_db_file) 50 | else: 51 | raise(Exception(f"DuckDB database file: '{duckdb_db_file.as_posix()}' already exists. 
Aborting")) 52 | 53 | # establish all connections to database 54 | con = duckdb.connect(database=duckdb_db_file.as_posix(), read_only=False) 55 | 56 | con.execute(f"CALL dbgen(sf={scale_factor})") 57 | con.execute(f"VACUUM ANALYZE") 58 | 59 | # close the connection 60 | con.close() 61 | 62 | print(f"Successfully created DuckDB database file: '{duckdb_db_file.as_posix()}' - with TPC-H Scale Factor: {scale_factor}") 63 | 64 | 65 | if __name__ == "__main__": 66 | main() 67 | -------------------------------------------------------------------------------- /scripts/start_sqlflite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | SCRIPT_DIR=$(dirname ${0}) 6 | TLS_DIR=${SCRIPT_DIR}/../tls 7 | 8 | L_DATABASE_BACKEND=${1:-${DATABASE_BACKEND:-"duckdb"}} 9 | L_DATABASE_FILENAME=${2:-${DATABASE_FILENAME:-"data/TPC-H-small.duckdb"}} 10 | L_TLS_ENABLED=${3:-${TLS_ENABLED:-"1"}} 11 | L_PRINT_QUERIES=${4:-${PRINT_QUERIES:-"1"}} 12 | 13 | TLS_ARG="" 14 | if [ "${L_TLS_ENABLED}" == "1" ] 15 | then 16 | pushd ${TLS_DIR} 17 | if [ ! -f ./cert0.pem ] 18 | then 19 | echo -n "Generating TLS certs...\n" 20 | ./gen-certs.sh 21 | fi 22 | TLS_ARG="--tls tls/cert0.pem tls/cert0.key" 23 | popd 24 | fi 25 | 26 | # Setup the print_queries option 27 | PRINT_QUERIES_FLAG="" 28 | if [ "${L_PRINT_QUERIES}" == "1" ] 29 | then 30 | PRINT_QUERIES_FLAG="--print-queries" 31 | fi 32 | 33 | sqlflite_server --backend="${L_DATABASE_BACKEND}" --database-filename="${L_DATABASE_FILENAME}" ${TLS_ARG} ${PRINT_QUERIES_FLAG} 34 | -------------------------------------------------------------------------------- /scripts/start_sqlflite_slim.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | L_DATABASE_FILENAME=${1:-${DATABASE_FILENAME?"You must specify a database filename."}} 6 | L_DATABASE_BACKEND=${2:-${DATABASE_BACKEND:-"duckdb"}} 7 | L_PRINT_QUERIES=${3:-${PRINT_QUERIES:-"1"}} 8 | L_TLS_ENABLED=${4:-${TLS_ENABLED:-"0"}} 9 | L_TLS_CERT=${5:-${TLS_CERT}} 10 | L_TLS_KEY=${6:-${TLS_KEY}} 11 | 12 | TLS_ARG="" 13 | if [ "${L_TLS_ENABLED}" == "1" ] 14 | then 15 | # Make sure L_TLS_CERT and L_TLS_KEY were provided 16 | if [ -z "${L_TLS_CERT}" ] || [ -z "${L_TLS_KEY}" ] 17 | then 18 | echo "TLS_CERT and TLS_KEY must be passed when TLS is enabled." 
19 | exit 1 20 | fi 21 | 22 | TLS_ARG="--tls ${L_TLS_CERT} ${L_TLS_KEY}" 23 | fi 24 | 25 | # Setup the print_queries option 26 | PRINT_QUERIES_FLAG="" 27 | if [ "${L_PRINT_QUERIES}" == "1" ] 28 | then 29 | PRINT_QUERIES_FLAG="--print-queries" 30 | fi 31 | 32 | sqlflite_server --backend="${L_DATABASE_BACKEND}" --database-filename="${L_DATABASE_FILENAME}" ${TLS_ARG} ${PRINT_QUERIES_FLAG} 33 | -------------------------------------------------------------------------------- /scripts/test_sqlflite.py: -------------------------------------------------------------------------------- 1 | import os 2 | from time import sleep 3 | import pyarrow 4 | from adbc_driver_flightsql import dbapi as sqlflite, DatabaseOptions 5 | 6 | 7 | # Setup variables 8 | max_attempts: int = 10 9 | sleep_interval: int = 10 10 | sqlflite_password = os.environ["SQLFLITE_PASSWORD"] 11 | 12 | def main(): 13 | for attempt in range(max_attempts): 14 | try: 15 | with sqlflite.connect(uri="grpc+tls://localhost:31337", 16 | db_kwargs={"username": "sqlflite_username", 17 | "password": sqlflite_password, 18 | # Not needed if you use a trusted CA-signed TLS cert 19 | DatabaseOptions.TLS_SKIP_VERIFY.value: "true" 20 | } 21 | ) as conn: 22 | with conn.cursor() as cur: 23 | cur.execute("SELECT n_nationkey, n_name FROM nation WHERE n_nationkey = ?", 24 | parameters=[24] 25 | ) 26 | x = cur.fetch_arrow_table() 27 | print(x) 28 | except Exception as e: 29 | if attempt == max_attempts - 1: 30 | raise e 31 | else: 32 | print(f"Attempt {attempt + 1} failed: {e}, sleeping for {sleep_interval} seconds") 33 | sleep(sleep_interval) 34 | else: 35 | print("Success!") 36 | break 37 | 38 | 39 | if __name__ == "__main__": 40 | main() 41 | -------------------------------------------------------------------------------- /scripts/test_sqlflite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT_DIR=$(dirname ${0}) 4 | TLS_DIR=${SCRIPT_DIR}/../tls 5 | 6 | # Set a dummy password for the test... 7 | export SQLFLITE_PASSWORD="testing123" 8 | 9 | # Start the Flight SQL Server - in the background... 10 | ${SCRIPT_DIR}/start_sqlflite.sh & 11 | 12 | # Set a timeout limit for waiting 13 | timeout_limit=300 14 | elapsed_time=0 15 | interval=1 # seconds 16 | started="0" 17 | 18 | # Check if the process is running 19 | while [ $elapsed_time -lt $timeout_limit ]; do 20 | # Check if the process is running 21 | if pgrep "sqlflite" > /dev/null; then 22 | echo "Flight SQL Server process started successfully!" 23 | started="1" 24 | # Sleep for a few more seconds... 25 | sleep 10 26 | break 27 | fi 28 | 29 | # Wait for a short interval before checking again 30 | sleep $interval 31 | elapsed_time=$((elapsed_time + interval)) 32 | done 33 | 34 | # If the process didn't start within the timeout, exit 35 | if [ "${started}" != "1" ]; then 36 | echo "The Flight SQL Server process did not start within the timeout period. Exiting." 37 | exit 1 38 | fi 39 | 40 | python "${SCRIPT_DIR}/test_sqlflite.py" 41 | 42 | RC=$? 43 | 44 | # Stop the server... 45 | kill %1 46 | 47 | # Remove temporary TLS cert files 48 | pushd ${TLS_DIR} 49 | rm -f ./*.csr \ 50 | ./*.key \ 51 | ./*.pkcs1 \ 52 | ./*.pem \ 53 | ./*.srl 54 | 55 | popd 56 | 57 | # Exit with the code of the python test... 
58 | exit ${RC} 59 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_server.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | #include "flight_sql_fwd.h" 28 | 29 | namespace sqlflite::ddb { 30 | 31 | /// \brief Convert a column type to a ArrowType. 32 | /// \param duckdb_type the duckdb type. 33 | /// \return The equivalent ArrowType. 34 | std::shared_ptr GetArrowType(const char *duckdb_type); 35 | 36 | /// \brief Example implementation of FlightSqlServerBase backed by an in-memory DuckDB 37 | /// database. 38 | class DuckDBFlightSqlServer : public flight::sql::FlightSqlServerBase { 39 | public: 40 | ~DuckDBFlightSqlServer() override; 41 | 42 | static arrow::Result> Create( 43 | const std::string &path, const duckdb::DBConfig &config, const bool &print_queries); 44 | 45 | /// \brief Auxiliary method used to execute an arbitrary SQL statement on the underlying 46 | /// SQLite database. 
47 | arrow::Status ExecuteSql(const std::string &sql); 48 | 49 | arrow::Result> GetFlightInfoStatement( 50 | const flight::ServerCallContext &context, 51 | const flight::sql::StatementQuery &command, 52 | const flight::FlightDescriptor &descriptor) override; 53 | 54 | arrow::Result> DoGetStatement( 55 | const flight::ServerCallContext &context, 56 | const flight::sql::StatementQueryTicket &command) override; 57 | 58 | arrow::Result> GetFlightInfoCatalogs( 59 | const flight::ServerCallContext &context, 60 | const flight::FlightDescriptor &descriptor) override; 61 | 62 | arrow::Result> DoGetCatalogs( 63 | const flight::ServerCallContext &context) override; 64 | 65 | arrow::Result> GetFlightInfoSchemas( 66 | const flight::ServerCallContext &context, const flight::sql::GetDbSchemas &command, 67 | const flight::FlightDescriptor &descriptor) override; 68 | 69 | arrow::Result> DoGetDbSchemas( 70 | const flight::ServerCallContext &context, 71 | const flight::sql::GetDbSchemas &command) override; 72 | 73 | arrow::Result DoPutCommandStatementUpdate( 74 | const flight::ServerCallContext &context, 75 | const flight::sql::StatementUpdate &update) override; 76 | 77 | arrow::Result CreatePreparedStatement( 78 | const flight::ServerCallContext &context, 79 | const flight::sql::ActionCreatePreparedStatementRequest &request) override; 80 | 81 | arrow::Status ClosePreparedStatement( 82 | const flight::ServerCallContext &context, 83 | const flight::sql::ActionClosePreparedStatementRequest &request) override; 84 | 85 | arrow::Result> GetFlightInfoPreparedStatement( 86 | const flight::ServerCallContext &context, 87 | const flight::sql::PreparedStatementQuery &command, 88 | const flight::FlightDescriptor &descriptor) override; 89 | 90 | arrow::Result> DoGetPreparedStatement( 91 | const flight::ServerCallContext &context, 92 | const flight::sql::PreparedStatementQuery &command) override; 93 | 94 | arrow::Status DoPutPreparedStatementQuery( 95 | const flight::ServerCallContext &context, 96 | const flight::sql::PreparedStatementQuery &command, 97 | flight::FlightMessageReader *reader, flight::FlightMetadataWriter *writer) override; 98 | 99 | arrow::Result DoPutPreparedStatementUpdate( 100 | const flight::ServerCallContext &context, 101 | const flight::sql::PreparedStatementUpdate &command, 102 | flight::FlightMessageReader *reader) override; 103 | 104 | arrow::Result> GetFlightInfoTables( 105 | const flight::ServerCallContext &context, const flight::sql::GetTables &command, 106 | const flight::FlightDescriptor &descriptor) override; 107 | 108 | arrow::Result> DoGetTables( 109 | const flight::ServerCallContext &context, 110 | const flight::sql::GetTables &command) override; 111 | 112 | arrow::Result> GetFlightInfoTableTypes( 113 | const flight::ServerCallContext &context, 114 | const flight::FlightDescriptor &descriptor) override; 115 | 116 | arrow::Result> DoGetTableTypes( 117 | const flight::ServerCallContext &context) override; 118 | 119 | arrow::Result> GetFlightInfoImportedKeys( 120 | const flight::ServerCallContext &context, 121 | const flight::sql::GetImportedKeys &command, 122 | const flight::FlightDescriptor &descriptor) override; 123 | 124 | arrow::Result> DoGetImportedKeys( 125 | const flight::ServerCallContext &context, 126 | const flight::sql::GetImportedKeys &command) override; 127 | 128 | arrow::Result> GetFlightInfoExportedKeys( 129 | const flight::ServerCallContext &context, 130 | const flight::sql::GetExportedKeys &command, 131 | const flight::FlightDescriptor &descriptor) override; 132 | 133 | 
arrow::Result> DoGetExportedKeys( 134 | const flight::ServerCallContext &context, 135 | const flight::sql::GetExportedKeys &command) override; 136 | 137 | arrow::Result> GetFlightInfoCrossReference( 138 | const flight::ServerCallContext &context, 139 | const flight::sql::GetCrossReference &command, 140 | const flight::FlightDescriptor &descriptor) override; 141 | 142 | arrow::Result> DoGetCrossReference( 143 | const flight::ServerCallContext &context, 144 | const flight::sql::GetCrossReference &command) override; 145 | 146 | arrow::Result> GetFlightInfoPrimaryKeys( 147 | const flight::ServerCallContext &context, 148 | const flight::sql::GetPrimaryKeys &command, 149 | const flight::FlightDescriptor &descriptor) override; 150 | 151 | arrow::Result> DoGetPrimaryKeys( 152 | const flight::ServerCallContext &context, 153 | const flight::sql::GetPrimaryKeys &command) override; 154 | 155 | arrow::Result BeginTransaction( 156 | const flight::ServerCallContext &context, 157 | const flight::sql::ActionBeginTransactionRequest &request) override; 158 | 159 | arrow::Status EndTransaction( 160 | const flight::ServerCallContext &context, 161 | const flight::sql::ActionEndTransactionRequest &request) override; 162 | 163 | private: 164 | class Impl; 165 | 166 | std::shared_ptr impl_; 167 | 168 | explicit DuckDBFlightSqlServer(std::shared_ptr impl); 169 | }; 170 | 171 | } // namespace sqlflite::ddb 172 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_sql_info.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | #include "flight_sql_fwd.h" 22 | 23 | namespace sqlflite::ddb { 24 | 25 | /// \brief Gets the mapping from SQL info ids to SqlInfoResult instances. 26 | /// \return the cache. 27 | flight::sql::SqlInfoResultMap GetSqlInfoResultMap(); 28 | 29 | } // namespace sqlflite::ddb 30 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_statement.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. 
You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include "duckdb_statement.h" 19 | 20 | #include 21 | #include 22 | #include 23 | 24 | #include 25 | 26 | #include 27 | #include 28 | #include "duckdb_server.h" 29 | 30 | using arrow::Status; 31 | using duckdb::QueryResult; 32 | 33 | namespace sqlflite::ddb { 34 | 35 | std::shared_ptr GetDataTypeFromDuckDbType( 36 | const duckdb::LogicalType duckdb_type) { 37 | const duckdb::LogicalTypeId column_type_id = duckdb_type.id(); 38 | switch (column_type_id) { 39 | case duckdb::LogicalTypeId::INTEGER: 40 | return arrow::int32(); 41 | case duckdb::LogicalTypeId::DECIMAL: { 42 | uint8_t width = 0; 43 | uint8_t scale = 0; 44 | bool dec_properties = duckdb_type.GetDecimalProperties(width, scale); 45 | return arrow::decimal(scale, width); 46 | } 47 | case duckdb::LogicalTypeId::FLOAT: 48 | return arrow::float32(); 49 | case duckdb::LogicalTypeId::DOUBLE: 50 | return arrow::float64(); 51 | case duckdb::LogicalTypeId::CHAR: 52 | case duckdb::LogicalTypeId::VARCHAR: 53 | return arrow::utf8(); 54 | case duckdb::LogicalTypeId::BLOB: 55 | return arrow::binary(); 56 | case duckdb::LogicalTypeId::TINYINT: 57 | return arrow::int8(); 58 | case duckdb::LogicalTypeId::SMALLINT: 59 | return arrow::int16(); 60 | case duckdb::LogicalTypeId::BIGINT: 61 | return arrow::int64(); 62 | case duckdb::LogicalTypeId::BOOLEAN: 63 | return arrow::boolean(); 64 | case duckdb::LogicalTypeId::DATE: 65 | return arrow::date32(); 66 | case duckdb::LogicalTypeId::TIME: 67 | case duckdb::LogicalTypeId::TIMESTAMP_MS: 68 | return timestamp(arrow::TimeUnit::MILLI); 69 | case duckdb::LogicalTypeId::TIMESTAMP: 70 | return timestamp(arrow::TimeUnit::MICRO); 71 | case duckdb::LogicalTypeId::TIMESTAMP_SEC: 72 | return timestamp(arrow::TimeUnit::SECOND); 73 | case duckdb::LogicalTypeId::TIMESTAMP_NS: 74 | return timestamp(arrow::TimeUnit::NANO); 75 | case duckdb::LogicalTypeId::INTERVAL: 76 | return duration( 77 | arrow::TimeUnit::MICRO); // ASSUMING MICRO AS DUCKDB's DOCS DOES NOT SPECIFY 78 | case duckdb::LogicalTypeId::UTINYINT: 79 | return arrow::uint8(); 80 | case duckdb::LogicalTypeId::USMALLINT: 81 | return arrow::uint16(); 82 | case duckdb::LogicalTypeId::UINTEGER: 83 | return arrow::uint32(); 84 | case duckdb::LogicalTypeId::UBIGINT: 85 | return arrow::int64(); 86 | case duckdb::LogicalTypeId::INVALID: 87 | case duckdb::LogicalTypeId::SQLNULL: 88 | case duckdb::LogicalTypeId::UNKNOWN: 89 | case duckdb::LogicalTypeId::ANY: 90 | case duckdb::LogicalTypeId::USER: 91 | case duckdb::LogicalTypeId::TIMESTAMP_TZ: 92 | case duckdb::LogicalTypeId::TIME_TZ: 93 | case duckdb::LogicalTypeId::HUGEINT: 94 | return arrow::decimal128(38, 0); 95 | case duckdb::LogicalTypeId::POINTER: 96 | case duckdb::LogicalTypeId::VALIDITY: 97 | case duckdb::LogicalTypeId::UUID: 98 | case duckdb::LogicalTypeId::STRUCT: 99 | case duckdb::LogicalTypeId::LIST: 100 | case duckdb::LogicalTypeId::MAP: 101 | case duckdb::LogicalTypeId::TABLE: 102 | case duckdb::LogicalTypeId::ENUM: 103 | default: 104 | return arrow::null(); 105 | } 106 | } 107 | 108 | arrow::Result> DuckDBStatement::Create( 109 | std::shared_ptr 
con, const std::string &sql) { 110 | std::shared_ptr stmt = con->Prepare(sql); 111 | 112 | if (not stmt->success) { 113 | std::string err_msg = 114 | "Can't prepare statement: '" + sql + "' - Error: " + stmt->error.Message(); 115 | return Status::Invalid(err_msg); 116 | } 117 | 118 | std::shared_ptr result(new DuckDBStatement(con, stmt)); 119 | 120 | return result; 121 | } 122 | 123 | DuckDBStatement::~DuckDBStatement() {} 124 | 125 | arrow::Result DuckDBStatement::Execute() { 126 | query_result_ = stmt_->Execute(bind_parameters); 127 | 128 | return 0; 129 | } 130 | 131 | arrow::Result> DuckDBStatement::FetchResult() { 132 | std::shared_ptr record_batch; 133 | ArrowArray res_arr; 134 | ArrowSchema res_schema; 135 | duckdb::ClientProperties res_options; 136 | res_options.time_zone = query_result_->client_properties.time_zone; 137 | 138 | duckdb::ArrowConverter::ToArrowSchema(&res_schema, query_result_->types, 139 | query_result_->names, res_options); 140 | 141 | duckdb::unique_ptr data_chunk; 142 | duckdb::ErrorData fetch_error; 143 | auto fetch_success = query_result_->TryFetch(data_chunk, fetch_error); 144 | if (!fetch_success) { 145 | ARROW_RETURN_NOT_OK(arrow::Status::ExecutionError(fetch_error.Message())); 146 | } 147 | 148 | if (data_chunk != nullptr) { 149 | duckdb::ArrowConverter::ToArrowArray(*data_chunk, &res_arr, res_options); 150 | ARROW_ASSIGN_OR_RAISE(record_batch, arrow::ImportRecordBatch(&res_arr, &res_schema)); 151 | } 152 | 153 | return record_batch; 154 | } 155 | 156 | std::shared_ptr DuckDBStatement::GetDuckDBStmt() const { 157 | return stmt_; 158 | } 159 | 160 | arrow::Result DuckDBStatement::ExecuteUpdate() { 161 | ARROW_RETURN_NOT_OK(Execute()); 162 | auto result = FetchResult(); 163 | return result->get()->num_rows(); 164 | } 165 | 166 | arrow::Result> DuckDBStatement::GetSchema() const { 167 | // get the names and types of the result schema 168 | auto names = stmt_->GetNames(); 169 | auto types = stmt_->GetTypes(); 170 | 171 | auto &context = stmt_->context; 172 | auto client_properties = context->GetClientProperties(); 173 | 174 | ArrowSchema arrow_schema; 175 | duckdb::ArrowConverter::ToArrowSchema(&arrow_schema, types, names, client_properties); 176 | 177 | auto return_value = arrow::ImportSchema(&arrow_schema); 178 | 179 | return return_value; 180 | } 181 | 182 | } // namespace sqlflite::ddb 183 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_statement.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
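// Overview (editor's note, inferred from duckdb_statement.cpp above): DuckDBStatement
// wraps a duckdb prepared statement created via Connection::Prepare(); Execute() runs it
// with any bound parameters, while FetchResult() and GetSchema() convert the DuckDB
// result chunks and types into Arrow record batches and schemas through
// duckdb::ArrowConverter.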
17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | 25 | #include 26 | #include 27 | 28 | #include "flight_sql_fwd.h" 29 | 30 | namespace sqlflite::ddb { 31 | 32 | std::shared_ptr GetDataTypeFromDuckDbType( 33 | const duckdb::LogicalType duckdb_type); 34 | 35 | /// \brief Create an object ColumnMetadata using the column type and 36 | /// table name. 37 | /// \param column_type The DuckDB type. 38 | /// \param table The table name. 39 | /// \return A Column Metadata object. 40 | flight::sql::ColumnMetadata GetColumnMetadata(int column_type, const char* table); 41 | 42 | class DuckDBStatement { 43 | public: 44 | /// \brief Creates a duckdb statement. 45 | /// \param[in] db duckdb database instance. 46 | /// \param[in] sql SQL statement. 47 | /// \return A DuckDBStatement object. 48 | static arrow::Result> Create( 49 | std::shared_ptr con, const std::string& sql); 50 | 51 | ~DuckDBStatement(); 52 | 53 | /// \brief Creates an Arrow Schema based on the results of this statement. 54 | /// \return The resulting Schema. 55 | arrow::Result> GetSchema() const; 56 | 57 | arrow::Result Execute(); 58 | arrow::Result> FetchResult(); 59 | // arrow::Result> GetArrowSchema(); 60 | 61 | std::shared_ptr GetDuckDBStmt() const; 62 | 63 | /// \brief Executes an UPDATE, INSERT or DELETE statement. 64 | /// \return The number of rows changed by execution. 65 | arrow::Result ExecuteUpdate(); 66 | 67 | duckdb::vector bind_parameters; 68 | 69 | private: 70 | std::shared_ptr con_; 71 | std::shared_ptr stmt_; 72 | duckdb::unique_ptr query_result_; 73 | 74 | DuckDBStatement(std::shared_ptr con, 75 | std::shared_ptr stmt) { 76 | con_ = con; 77 | stmt_ = stmt; 78 | } 79 | }; 80 | 81 | } // namespace sqlflite::ddb 82 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_statement_batch_reader.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
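// Overview (editor's note): this RecordBatchReader executes the wrapped DuckDBStatement
// lazily on the first ReadNext() call and then streams each fetched DuckDB data chunk to
// the caller as an Arrow RecordBatch; a null batch signals end-of-stream to Flight SQL.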
17 | 18 | #include "duckdb_statement_batch_reader.h" 19 | 20 | #include 21 | 22 | #include 23 | 24 | #include "arrow/builder.h" 25 | #include 26 | 27 | #include "duckdb_statement.h" 28 | 29 | namespace sqlflite::ddb { 30 | 31 | // Batch size for SQLite statement results 32 | static constexpr int kMaxBatchSize = 1024; 33 | 34 | std::shared_ptr DuckDBStatementBatchReader::schema() const { 35 | return schema_; 36 | } 37 | 38 | DuckDBStatementBatchReader::DuckDBStatementBatchReader( 39 | std::shared_ptr statement, std::shared_ptr schema) 40 | : statement_(std::move(statement)), 41 | schema_(std::move(schema)), 42 | rc_(DuckDBSuccess), 43 | already_executed_(false), 44 | results_read_(false) {} 45 | 46 | arrow::Result> 47 | DuckDBStatementBatchReader::Create(const std::shared_ptr& statement_) { 48 | ARROW_ASSIGN_OR_RAISE(auto schema, statement_->GetSchema()); 49 | 50 | std::shared_ptr result( 51 | new DuckDBStatementBatchReader(statement_, schema)); 52 | 53 | return result; 54 | } 55 | 56 | arrow::Result> 57 | DuckDBStatementBatchReader::Create(const std::shared_ptr& statement, 58 | const std::shared_ptr& schema) { 59 | std::shared_ptr result( 60 | new DuckDBStatementBatchReader(statement, schema)); 61 | 62 | return result; 63 | } 64 | 65 | arrow::Status DuckDBStatementBatchReader::ReadNext( 66 | std::shared_ptr* out) { 67 | if (!already_executed_) { 68 | ARROW_ASSIGN_OR_RAISE(rc_, statement_->Execute()); 69 | already_executed_ = true; 70 | } 71 | ARROW_ASSIGN_OR_RAISE(*out, statement_->FetchResult()); 72 | 73 | return arrow::Status::OK(); 74 | } 75 | 76 | } // namespace sqlflite::ddb 77 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_statement_batch_reader.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | #include 22 | #include 23 | #include "duckdb_statement.h" 24 | #include "flight_sql_fwd.h" 25 | 26 | namespace sqlflite::ddb { 27 | 28 | class DuckDBStatementBatchReader : public arrow::RecordBatchReader { 29 | public: 30 | /// \brief Creates a RecordBatchReader backed by a duckdb statement. 31 | /// \param[in] statement duckdb statement to be read. 32 | /// \return A DuckDBStatementBatchReader. 33 | static arrow::Result> Create( 34 | const std::shared_ptr& statement); 35 | 36 | /// \brief Creates a RecordBatchReader backed by a duckdb statement. 37 | /// \param[in] statement duckdb statement to be read. 38 | /// \param[in] schema Schema to be used on results. 39 | /// \return A DuckDBStatementBatchReader.. 
40 | static arrow::Result> Create( 41 | const std::shared_ptr& statement, 42 | const std::shared_ptr& schema); 43 | 44 | std::shared_ptr schema() const override; 45 | 46 | arrow::Status ReadNext(std::shared_ptr* out) override; 47 | 48 | private: 49 | std::shared_ptr statement_; 50 | std::shared_ptr schema_; 51 | int rc_; 52 | bool already_executed_; 53 | bool results_read_; 54 | 55 | DuckDBStatementBatchReader(std::shared_ptr statement, 56 | std::shared_ptr schema); 57 | }; 58 | 59 | } // namespace sqlflite::ddb 60 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_tables_schema_batch_reader.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include "duckdb_tables_schema_batch_reader.h" 19 | 20 | #include 21 | 22 | #include 23 | 24 | #include "arrow/array/builder_binary.h" 25 | #include "arrow/flight/sql/column_metadata.h" 26 | #include "arrow/flight/sql/server.h" 27 | #include "arrow/ipc/writer.h" 28 | #include "arrow/record_batch.h" 29 | 30 | #include "flight_sql_fwd.h" 31 | 32 | using arrow::Status; 33 | 34 | namespace sqlflite::ddb { 35 | 36 | std::shared_ptr DuckDBTablesWithSchemaBatchReader::schema() const { 37 | return flight::sql::SqlSchema::GetTablesSchemaWithIncludedSchema(); 38 | } 39 | 40 | Status DuckDBTablesWithSchemaBatchReader::ReadNext( 41 | std::shared_ptr *batch) { 42 | if (already_executed_) { 43 | *batch = NULLPTR; 44 | return Status::OK(); 45 | } else { 46 | std::shared_ptr schema_statement; 47 | ARROW_ASSIGN_OR_RAISE(schema_statement, 48 | DuckDBStatement::Create(db_conn_, main_query_)); 49 | 50 | std::shared_ptr first_batch; 51 | 52 | ARROW_RETURN_NOT_OK(reader_->ReadNext(&first_batch)); 53 | 54 | if (!first_batch) { 55 | *batch = NULLPTR; 56 | return Status::OK(); 57 | } 58 | 59 | const std::shared_ptr table_name_array = 60 | first_batch->GetColumnByName("table_name"); 61 | 62 | arrow::BinaryBuilder schema_builder; 63 | 64 | auto *string_array = reinterpret_cast(table_name_array.get()); 65 | 66 | for (int table_name_index = 0; table_name_index < table_name_array->length(); 67 | table_name_index++) { 68 | const std::string &table_name = string_array->GetString(table_name_index); 69 | 70 | // Just get the schema from a prepared statement 71 | std::shared_ptr table_schema_statement; 72 | ARROW_ASSIGN_OR_RAISE( 73 | table_schema_statement, 74 | DuckDBStatement::Create(db_conn_, 75 | "SELECT * FROM " + table_name + " WHERE 1 = 0")); 76 | 77 | ARROW_ASSIGN_OR_RAISE(auto table_schema, table_schema_statement->GetSchema()); 78 | 79 | const arrow::Result> &value = 80 | arrow::ipc::SerializeSchema(*table_schema); 81 | 82 | 
std::shared_ptr schema_buffer; 83 | ARROW_ASSIGN_OR_RAISE(schema_buffer, value); 84 | 85 | ARROW_RETURN_NOT_OK(schema_builder.Append(::std::string_view(*schema_buffer))); 86 | } 87 | 88 | std::shared_ptr schema_array; 89 | ARROW_RETURN_NOT_OK(schema_builder.Finish(&schema_array)); 90 | 91 | ARROW_ASSIGN_OR_RAISE(*batch, 92 | first_batch->AddColumn(4, "table_schema", schema_array)); 93 | already_executed_ = true; 94 | 95 | return Status::OK(); 96 | } 97 | } 98 | 99 | } // namespace sqlflite::ddb 100 | -------------------------------------------------------------------------------- /src/duckdb/duckdb_tables_schema_batch_reader.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | 25 | #include "duckdb_statement.h" 26 | #include "duckdb_statement_batch_reader.h" 27 | #include "arrow/record_batch.h" 28 | 29 | namespace sqlflite::ddb { 30 | 31 | class DuckDBTablesWithSchemaBatchReader : public arrow::RecordBatchReader { 32 | private: 33 | std::shared_ptr reader_; 34 | std::string main_query_; 35 | std::shared_ptr db_conn_; 36 | bool already_executed_; 37 | 38 | public: 39 | /// Constructor for DuckDBTablesWithSchemaBatchReader class 40 | /// \param reader an shared_ptr from a DuckDBStatementBatchReader. 41 | /// \param main_query SQL query that originated reader's data. 42 | /// \param db a pointer to the sqlite3 db. 43 | DuckDBTablesWithSchemaBatchReader(std::shared_ptr reader, 44 | std::string main_query, 45 | std::shared_ptr db_conn) 46 | : reader_(std::move(reader)), 47 | main_query_(std::move(main_query)), 48 | db_conn_(db_conn), 49 | already_executed_(false) {} 50 | 51 | std::shared_ptr schema() const override; 52 | 53 | arrow::Status ReadNext(std::shared_ptr* batch) override; 54 | }; 55 | 56 | } // namespace sqlflite::ddb 57 | -------------------------------------------------------------------------------- /src/library/include/.gitignore: -------------------------------------------------------------------------------- 1 | version.h 2 | -------------------------------------------------------------------------------- /src/library/include/flight_sql_fwd.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. 
The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | namespace arrow::flight {} 21 | 22 | namespace flight = arrow::flight; 23 | -------------------------------------------------------------------------------- /src/library/include/sqlflite_library.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | #include "version.h" 22 | 23 | // Constants 24 | const std::string SQLFLITE_SERVER_VERSION = PROJECT_VERSION; 25 | const std::string DEFAULT_SQLFLITE_HOSTNAME = "0.0.0.0"; 26 | const std::string DEFAULT_SQLFLITE_USERNAME = "sqlflite_username"; 27 | const int DEFAULT_FLIGHT_PORT = 31337; 28 | 29 | enum class BackendType { duckdb, sqlite }; 30 | 31 | /** 32 | * @brief Run a SQLFlite Server with the specified configuration. 33 | * 34 | * This function initializes and runs a SQLFlite Server with the given parameters. 35 | * 36 | * @param backend The backend to use (duckdb or sqlite). 37 | * @param database_filename The path to the database file. 38 | * @param hostname The hostname for the SQLFlite Server. Default is "" - if so, we use environment variable: "SQLFLITE_HOSTNAME", 39 | * and fallback to: DEFAULT_SQLFLITE_HOSTNAME if that is not set. 40 | * @param port The port to listen on for the SQLFlite Server. Default is DEFAULT_FLIGHT_PORT 41 | * @param username The username to use for authentication. Default is now "" - if not set, we use environment variable: "SQLFLITE_USERNAME", 42 | * if this is not defined we set this to "sqlflite_username" again in sqlflite_library. 43 | * @param password The password for authentication. Default is "" - if so, we use environment variable: "SQLFLITE_PASSWORD", 44 | * if both are not set, we exit with an error. 45 | * @param secret_key The secret key for authentication. Default is "", if so, we use environment variable: "SECRET_KEY", 46 | and fallback to a random string if both are not set. 47 | * @param tls_cert_path The path to the TLS certificate file (PEM format). Default is an empty path. 
48 | * @param tls_key_path The path to the TLS private key file (PEM format). Default is an empty path. 49 | * @param mtls_ca_cert_path The path to the mTLS CA certificate file used to verify clients (in PEM format). Default is an empty path. 50 | * @param init_sql_commands The initial SQL commands to execute. Default is "" - if not set, we use environment variable: "INIT_SQL_COMMANDS". 51 | * @param init_sql_commands_file The path to a file containing initial SQL commands. Default is an empty path - if not set, we use environment variable: "INIT_SQL_COMMANDS_FILE" 52 | * @param print_queries Set to true if SQL queries should be printed; false otherwise. Default is false. 53 | * 54 | * @return Returns an integer status code. 0 indicates success, and non-zero values indicate errors. 55 | */ 56 | 57 | extern "C" { 58 | int RunFlightSQLServer( 59 | const BackendType backend, std::filesystem::path &database_filename, 60 | std::string hostname = "", const int &port = DEFAULT_FLIGHT_PORT, 61 | std::string username = "", std::string password = "", std::string secret_key = "", 62 | std::filesystem::path tls_cert_path = std::filesystem::path(), 63 | std::filesystem::path tls_key_path = std::filesystem::path(), 64 | std::filesystem::path mtls_ca_cert_path = std::filesystem::path(), 65 | std::string init_sql_commands = "", 66 | std::filesystem::path init_sql_commands_file = std::filesystem::path(), 67 | const bool &print_queries = false); 68 | } 69 | -------------------------------------------------------------------------------- /src/library/include/sqlflite_security.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
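// Overview (editor's note, based on the declarations below and their use in
// sqlflite_library.cpp): HeaderAuthServerMiddlewareFactory validates the Basic-auth
// username/password on the initial call and HeaderAuthServerMiddleware then issues a JWT
// signed with the configured secret key; BearerAuthServerMiddleware verifies that bearer
// token (via jwt-cpp) on subsequent calls. SecurityUtilities groups the helpers for
// loading TLS/mTLS certificates and parsing the incoming authorization headers.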
17 | 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include "jwt-cpp/jwt.h" 28 | #include 29 | #include 30 | #include 31 | #include "flight_sql_fwd.h" 32 | 33 | namespace sqlflite { 34 | 35 | class SecurityUtilities { 36 | public: 37 | static arrow::Status FlightServerTlsCertificates(const std::filesystem::path &cert_path, 38 | const std::filesystem::path &key_path, 39 | std::vector *out); 40 | 41 | static arrow::Status FlightServerMtlsCACertificate(const std::string &cert_path, 42 | std::string *out); 43 | 44 | static std::string FindKeyValPrefixInCallHeaders( 45 | const flight::CallHeaders &incoming_headers, const std::string &key, 46 | const std::string &prefix); 47 | 48 | static arrow::Status GetAuthHeaderType(const flight::CallHeaders &incoming_headers, 49 | std::string *out); 50 | 51 | static void ParseBasicHeader(const flight::CallHeaders &incoming_headers, 52 | std::string &username, std::string &password); 53 | }; 54 | 55 | class HeaderAuthServerMiddleware : public flight::ServerMiddleware { 56 | public: 57 | HeaderAuthServerMiddleware(const std::string &username, const std::string &secret_key); 58 | 59 | void SendingHeaders(flight::AddCallHeaders *outgoing_headers) override; 60 | 61 | void CallCompleted(const arrow::Status &status) override; 62 | 63 | std::string name() const override; 64 | 65 | private: 66 | std::string username_; 67 | std::string secret_key_; 68 | 69 | std::string CreateJWTToken() const; 70 | }; 71 | 72 | class HeaderAuthServerMiddlewareFactory : public flight::ServerMiddlewareFactory { 73 | public: 74 | HeaderAuthServerMiddlewareFactory(const std::string &username, 75 | const std::string &password, 76 | const std::string &secret_key); 77 | 78 | arrow::Status StartCall(const flight::CallInfo &info, 79 | const flight::CallHeaders &incoming_headers, 80 | std::shared_ptr *middleware) override; 81 | 82 | private: 83 | std::string username_; 84 | std::string password_; 85 | std::string secret_key_; 86 | }; 87 | 88 | class BearerAuthServerMiddleware : public flight::ServerMiddleware { 89 | public: 90 | explicit BearerAuthServerMiddleware(const std::string &secret_key, 91 | const flight::CallHeaders &incoming_headers, 92 | std::optional *isValid); 93 | 94 | void SendingHeaders(flight::AddCallHeaders *outgoing_headers) override; 95 | 96 | void CallCompleted(const arrow::Status &status) override; 97 | 98 | std::string name() const override; 99 | 100 | private: 101 | std::string secret_key_; 102 | flight::CallHeaders incoming_headers_; 103 | std::optional *isValid_; 104 | 105 | bool VerifyToken(const std::string &token) const; 106 | }; 107 | 108 | class BearerAuthServerMiddlewareFactory : public flight::ServerMiddlewareFactory { 109 | public: 110 | explicit BearerAuthServerMiddlewareFactory(const std::string &secret_key); 111 | 112 | arrow::Status StartCall(const flight::CallInfo &info, 113 | const flight::CallHeaders &incoming_headers, 114 | std::shared_ptr *middleware) override; 115 | 116 | std::optional GetIsValid(); 117 | 118 | private: 119 | std::optional isValid_; 120 | std::string secret_key_; 121 | }; 122 | 123 | } // namespace sqlflite 124 | -------------------------------------------------------------------------------- /src/library/include/version.h.in: -------------------------------------------------------------------------------- 1 | // version.h.in 2 | // This file is used by CMake to generate version.h 3 | #ifndef PROJECT_VERSION_H 4 | #define PROJECT_VERSION_H 5 | 6 | 
#define PROJECT_VERSION "@PROJECT_VERSION@" 7 | 8 | #endif // PROJECT_VERSION_H 9 | -------------------------------------------------------------------------------- /src/library/sqlflite_library.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include "include/sqlflite_library.h" 19 | 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | #include 33 | 34 | #include "sqlite_server.h" 35 | #include "duckdb_server.h" 36 | #include "include/flight_sql_fwd.h" 37 | #include "include/sqlflite_security.h" 38 | 39 | namespace fs = std::filesystem; 40 | 41 | namespace sqlflite { 42 | 43 | const int port = 31337; 44 | 45 | #define RUN_INIT_COMMANDS(serverType, init_sql_commands) \ 46 | do { \ 47 | if (init_sql_commands != "") { \ 48 | std::regex regex_pattern(";(?=(?:[^']*'[^']*')*[^']*$)"); \ 49 | std::sregex_token_iterator iter(init_sql_commands.begin(), \ 50 | init_sql_commands.end(), regex_pattern, -1); \ 51 | std::sregex_token_iterator end; \ 52 | while (iter != end) { \ 53 | std::string init_sql_command = *iter; \ 54 | if (init_sql_command.empty()) continue; \ 55 | std::cout << "Running Init SQL command: " << std::endl \ 56 | << init_sql_command << ";" << std::endl; \ 57 | ARROW_RETURN_NOT_OK(serverType->ExecuteSql(init_sql_command)); \ 58 | ++iter; \ 59 | } \ 60 | } \ 61 | } while (false) 62 | 63 | arrow::Result> FlightSQLServerBuilder( 64 | const BackendType backend, const fs::path &database_filename, 65 | const std::string &hostname, const int &port, const std::string &username, 66 | const std::string &password, const std::string &secret_key, 67 | const fs::path &tls_cert_path, const fs::path &tls_key_path, 68 | const fs::path &mtls_ca_cert_path, const std::string &init_sql_commands, 69 | const bool &print_queries) { 70 | ARROW_ASSIGN_OR_RAISE(auto location, 71 | (!tls_cert_path.empty()) 72 | ? flight::Location::ForGrpcTls(hostname, port) 73 | : flight::Location::ForGrpcTcp(hostname, port)); 74 | 75 | std::cout << "Apache Arrow version: " << ARROW_VERSION_STRING << std::endl; 76 | 77 | flight::FlightServerOptions options(location); 78 | 79 | if (!tls_cert_path.empty() && !tls_key_path.empty()) { 80 | ARROW_CHECK_OK(sqlflite::SecurityUtilities::FlightServerTlsCertificates( 81 | tls_cert_path, tls_key_path, &options.tls_certificates)); 82 | } else { 83 | std::cout << "WARNING - TLS is disabled for the SQLFlite server - this is insecure." 
84 | << std::endl; 85 | } 86 | 87 | // Setup authentication middleware (using the same TLS certificate keypair) 88 | auto header_middleware = std::make_shared( 89 | username, password, secret_key); 90 | auto bearer_middleware = 91 | std::make_shared(secret_key); 92 | 93 | options.auth_handler = std::make_unique(); 94 | options.middleware.push_back({"header-auth-server", header_middleware}); 95 | options.middleware.push_back({"bearer-auth-server", bearer_middleware}); 96 | 97 | if (!mtls_ca_cert_path.empty()) { 98 | std::cout << "Using mTLS CA certificate: " << mtls_ca_cert_path << std::endl; 99 | ARROW_CHECK_OK(sqlflite::SecurityUtilities::FlightServerMtlsCACertificate( 100 | mtls_ca_cert_path, &options.root_certificates)); 101 | options.verify_client = true; 102 | } 103 | 104 | std::shared_ptr server = nullptr; 105 | 106 | std::string db_type = ""; 107 | if (backend == BackendType::sqlite) { 108 | db_type = "SQLite"; 109 | std::shared_ptr sqlite_server = nullptr; 110 | ARROW_ASSIGN_OR_RAISE(sqlite_server, sqlflite::sqlite::SQLiteFlightSqlServer::Create( 111 | database_filename)); 112 | RUN_INIT_COMMANDS(sqlite_server, init_sql_commands); 113 | server = sqlite_server; 114 | } else if (backend == BackendType::duckdb) { 115 | db_type = "DuckDB"; 116 | std::shared_ptr duckdb_server = nullptr; 117 | duckdb::DBConfig config; 118 | ARROW_ASSIGN_OR_RAISE(duckdb_server, sqlflite::ddb::DuckDBFlightSqlServer::Create( 119 | database_filename, config, print_queries)) 120 | // Run additional commands (first) for the DuckDB back-end... 121 | auto duckdb_init_sql_commands = 122 | "SET autoinstall_known_extensions = true; SET autoload_known_extensions = true;" + 123 | init_sql_commands; 124 | RUN_INIT_COMMANDS(duckdb_server, duckdb_init_sql_commands); 125 | server = duckdb_server; 126 | } 127 | 128 | std::cout << "Using database file: " << database_filename << std::endl; 129 | 130 | std::cout << "Print Queries option is set to: " << std::boolalpha << print_queries 131 | << std::endl; 132 | 133 | if (server != nullptr) { 134 | ARROW_CHECK_OK(server->Init(options)); 135 | 136 | // Exit with a clean error code (0) on SIGTERM 137 | ARROW_CHECK_OK(server->SetShutdownOnSignals({SIGTERM})); 138 | 139 | std::cout << "SQLFlite server version: " << SQLFLITE_SERVER_VERSION 140 | << " - with engine: " << db_type << " - will listen on " 141 | << server->location().ToString() << std::endl; 142 | 143 | return server; 144 | } else { 145 | std::string err_msg = "Unable to create the SQLFlite Server"; 146 | return arrow::Status::Invalid(err_msg); 147 | } 148 | } 149 | 150 | std::string SafeGetEnvVarValue(const std::string &env_var_name) { 151 | auto env_var_value = std::getenv(env_var_name.c_str()); 152 | if (env_var_value) { 153 | return std::string(env_var_value); 154 | } else { 155 | return ""; 156 | } 157 | } 158 | 159 | arrow::Result> CreateFlightSQLServer( 160 | const BackendType backend, fs::path &database_filename, std::string hostname, 161 | const int &port, std::string username, std::string password, std::string secret_key, 162 | fs::path tls_cert_path, fs::path tls_key_path, fs::path mtls_ca_cert_path, 163 | std::string init_sql_commands, fs::path init_sql_commands_file, 164 | const bool &print_queries) { 165 | // Validate and default the arguments to env var values where applicable 166 | if (database_filename.empty()) { 167 | return arrow::Status::Invalid("The database filename was not provided!"); 168 | } else { 169 | // We do not check for existence of the database file, b/c they may want to create a new one 
170 | database_filename = fs::absolute(database_filename); 171 | } 172 | 173 | if (hostname.empty()) { 174 | hostname = SafeGetEnvVarValue("SQLFLITE_HOSTNAME"); 175 | if (hostname.empty()) { 176 | hostname = DEFAULT_SQLFLITE_HOSTNAME; 177 | } 178 | } 179 | 180 | if (username.empty()) { 181 | username = SafeGetEnvVarValue("SQLFLITE_USERNAME"); 182 | if (username.empty()) { 183 | username = DEFAULT_SQLFLITE_USERNAME; 184 | } 185 | } 186 | 187 | if (password.empty()) { 188 | password = SafeGetEnvVarValue("SQLFLITE_PASSWORD"); 189 | if (password.empty()) { 190 | return arrow::Status::Invalid( 191 | "The SQLFlite Server password is empty and env var: 'SQLFLITE_PASSWORD' is not " 192 | "set. Pass a value to this argument to secure the server."); 193 | } 194 | } 195 | 196 | if (secret_key.empty()) { 197 | secret_key = SafeGetEnvVarValue("SECRET_KEY"); 198 | if (secret_key.empty()) { 199 | // Generate a random secret key 200 | boost::uuids::uuid uuid = boost::uuids::random_generator()(); 201 | secret_key = "SECRET-" + boost::uuids::to_string(uuid); 202 | } 203 | } 204 | 205 | if (!tls_cert_path.empty()) { 206 | tls_cert_path = fs::absolute(tls_cert_path); 207 | if (!fs::exists(tls_cert_path)) { 208 | return arrow::Status::Invalid("TLS certificate file does not exist: " + 209 | tls_cert_path.string()); 210 | } 211 | 212 | if (tls_key_path.empty()) { 213 | return arrow::Status::Invalid( 214 | "tls_key_path was not specified (when tls_cert_path WAS specified)"); 215 | } else { 216 | tls_key_path = fs::absolute(tls_key_path); 217 | if (!fs::exists(tls_key_path)) { 218 | return arrow::Status::Invalid("TLS key file does not exist: " + 219 | tls_key_path.string()); 220 | } 221 | } 222 | } 223 | 224 | if (init_sql_commands.empty()) { 225 | init_sql_commands = SafeGetEnvVarValue("INIT_SQL_COMMANDS"); 226 | } 227 | 228 | if (init_sql_commands_file.empty()) { 229 | init_sql_commands_file = fs::path(SafeGetEnvVarValue("INIT_SQL_COMMANDS_FILE")); 230 | if (!init_sql_commands_file.empty()) { 231 | init_sql_commands_file = fs::absolute(init_sql_commands_file); 232 | if (!fs::exists(init_sql_commands_file)) { 233 | return arrow::Status::Invalid("INIT_SQL_COMMANDS_FILE does not exist: " + 234 | init_sql_commands_file.string()); 235 | } else { 236 | std::ifstream ifs(init_sql_commands_file); 237 | std::string init_sql_commands_file_contents((std::istreambuf_iterator(ifs)), 238 | (std::istreambuf_iterator())); 239 | init_sql_commands += init_sql_commands_file_contents; 240 | } 241 | } 242 | } 243 | 244 | if (!mtls_ca_cert_path.empty()) { 245 | mtls_ca_cert_path = fs::absolute(mtls_ca_cert_path); 246 | if (!fs::exists(mtls_ca_cert_path)) { 247 | return arrow::Status::Invalid("mTLS CA certificate file does not exist: " + 248 | mtls_ca_cert_path.string()); 249 | } 250 | } 251 | 252 | return FlightSQLServerBuilder(backend, database_filename, hostname, port, username, 253 | password, secret_key, tls_cert_path, tls_key_path, 254 | mtls_ca_cert_path, init_sql_commands, print_queries); 255 | } 256 | 257 | arrow::Status StartFlightSQLServer( 258 | std::shared_ptr server) { 259 | return arrow::Status::OK(); 260 | } 261 | 262 | } // namespace sqlflite 263 | 264 | extern "C" { 265 | 266 | int RunFlightSQLServer(const BackendType backend, fs::path &database_filename, 267 | std::string hostname, const int &port, std::string username, 268 | std::string password, std::string secret_key, 269 | fs::path tls_cert_path, fs::path tls_key_path, 270 | fs::path mtls_ca_cert_path, std::string init_sql_commands, 271 | fs::path 
init_sql_commands_file, const bool &print_queries) { 272 | auto create_server_result = sqlflite::CreateFlightSQLServer( 273 | backend, database_filename, hostname, port, username, password, secret_key, 274 | tls_cert_path, tls_key_path, mtls_ca_cert_path, init_sql_commands, 275 | init_sql_commands_file, print_queries); 276 | 277 | if (create_server_result.ok()) { 278 | auto server_ptr = create_server_result.ValueOrDie(); 279 | std::cout << "SQLFlite server - started" << std::endl; 280 | ARROW_CHECK_OK(server_ptr->Serve()); 281 | return EXIT_SUCCESS; 282 | } else { 283 | // Handle the error 284 | std::cerr << "Error: " << create_server_result.status().ToString() << std::endl; 285 | return EXIT_FAILURE; 286 | } 287 | } 288 | } 289 | -------------------------------------------------------------------------------- /src/library/sqlflite_security.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
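// ---------------------------------------------------------------------------
// Editor's note - standalone sketch, not part of this file. The
// RUN_INIT_COMMANDS macro in sqlflite_library.cpp (above) splits the init SQL
// string on semicolons that fall outside single-quoted literals. The same
// splitting logic, extracted into a small self-contained program:
//
//   #include <iostream>
//   #include <regex>
//   #include <string>
//
//   int main() {
//     std::string cmds =
//         "INSTALL httpfs; SELECT 'a;b' AS s; CREATE TABLE t (x INTEGER)";
//     std::regex pattern(";(?=(?:[^']*'[^']*')*[^']*$)");
//     std::sregex_token_iterator iter(cmds.begin(), cmds.end(), pattern, -1), end;
//     for (; iter != end; ++iter) {
//       std::cout << "command: " << *iter << std::endl;
//     }
//     return 0;
//   }
//
//   // Prints three commands; the semicolon inside 'a;b' is not a split point.
// ---------------------------------------------------------------------------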
17 | 18 | #include "include/sqlflite_security.h" 19 | 20 | namespace fs = std::filesystem; 21 | 22 | using arrow::Status; 23 | 24 | namespace sqlflite { 25 | 26 | const std::string kJWTIssuer = "sqlflite"; 27 | const int kJWTExpiration = 24 * 3600; 28 | const std::string kValidUsername = "sqlflite_username"; 29 | const std::string kBasicPrefix = "Basic "; 30 | const std::string kBearerPrefix = "Bearer "; 31 | const std::string kAuthHeader = "authorization"; 32 | 33 | // ---------------------------------------- 34 | Status SecurityUtilities::FlightServerTlsCertificates( 35 | const fs::path &cert_path, const fs::path &key_path, 36 | std::vector *out) { 37 | std::cout << "Using TLS Cert file: " << cert_path << std::endl; 38 | std::cout << "Using TLS Key file: " << key_path << std::endl; 39 | 40 | *out = std::vector(); 41 | try { 42 | std::ifstream cert_file(cert_path); 43 | if (!cert_file) { 44 | return Status::IOError("Could not open certificate: " + cert_path.string()); 45 | } 46 | std::stringstream cert; 47 | cert << cert_file.rdbuf(); 48 | 49 | std::ifstream key_file(key_path); 50 | if (!key_file) { 51 | return Status::IOError("Could not open key: " + key_path.string()); 52 | } 53 | std::stringstream key; 54 | key << key_file.rdbuf(); 55 | 56 | out->push_back(flight::CertKeyPair{cert.str(), key.str()}); 57 | } catch (const std::ifstream::failure &e) { 58 | return Status::IOError(e.what()); 59 | } 60 | return Status::OK(); 61 | } 62 | 63 | Status SecurityUtilities::FlightServerMtlsCACertificate(const std::string &cert_path, 64 | std::string *out) { 65 | try { 66 | std::ifstream cert_file(cert_path); 67 | if (!cert_file) { 68 | return Status::IOError("Could not open MTLS CA certificate: " + cert_path); 69 | } 70 | std::stringstream cert; 71 | cert << cert_file.rdbuf(); 72 | 73 | *out = cert.str(); 74 | } catch (const std::ifstream::failure &e) { 75 | return Status::IOError(e.what()); 76 | } 77 | return Status::OK(); 78 | } 79 | 80 | // Function to look in CallHeaders for a key that has a value starting with prefix and 81 | // return the rest of the value after the prefix. 82 | std::string SecurityUtilities::FindKeyValPrefixInCallHeaders( 83 | const flight::CallHeaders &incoming_headers, const std::string &key, 84 | const std::string &prefix) { 85 | // Lambda function to compare characters without case sensitivity. 
86 | auto char_compare = [](const char &char1, const char &char2) { 87 | return (::toupper(char1) == ::toupper(char2)); 88 | }; 89 | 90 | auto iter = incoming_headers.find(key); 91 | if (iter == incoming_headers.end()) { 92 | return ""; 93 | } 94 | const std::string val(iter->second); 95 | if (val.size() > prefix.length()) { 96 | if (std::equal(val.begin(), val.begin() + prefix.length(), prefix.begin(), 97 | char_compare)) { 98 | return val.substr(prefix.length()); 99 | } 100 | } 101 | return ""; 102 | } 103 | 104 | Status SecurityUtilities::GetAuthHeaderType(const flight::CallHeaders &incoming_headers, 105 | std::string *out) { 106 | if (!FindKeyValPrefixInCallHeaders(incoming_headers, kAuthHeader, kBasicPrefix) 107 | .empty()) { 108 | *out = "Basic"; 109 | } else if (!FindKeyValPrefixInCallHeaders(incoming_headers, kAuthHeader, kBearerPrefix) 110 | .empty()) { 111 | *out = "Bearer"; 112 | } else { 113 | return Status::IOError("Invalid Authorization Header type!"); 114 | } 115 | return Status::OK(); 116 | } 117 | 118 | void SecurityUtilities::ParseBasicHeader(const flight::CallHeaders &incoming_headers, 119 | std::string &username, std::string &password) { 120 | std::string encoded_credentials = 121 | FindKeyValPrefixInCallHeaders(incoming_headers, kAuthHeader, kBasicPrefix); 122 | std::stringstream decoded_stream(arrow::util::base64_decode(encoded_credentials)); 123 | std::getline(decoded_stream, username, ':'); 124 | std::getline(decoded_stream, password, ':'); 125 | } 126 | 127 | // ---------------------------------------- 128 | HeaderAuthServerMiddleware::HeaderAuthServerMiddleware(const std::string &username, 129 | const std::string &secret_key) 130 | : username_(username), secret_key_(secret_key) {} 131 | 132 | void HeaderAuthServerMiddleware::SendingHeaders( 133 | flight::AddCallHeaders *outgoing_headers) { 134 | auto token = CreateJWTToken(); 135 | outgoing_headers->AddHeader(kAuthHeader, std::string(kBearerPrefix) + token); 136 | } 137 | 138 | void HeaderAuthServerMiddleware::CallCompleted(const Status &status) {} 139 | 140 | std::string HeaderAuthServerMiddleware::name() const { 141 | return "HeaderAuthServerMiddleware"; 142 | } 143 | 144 | std::string HeaderAuthServerMiddleware::CreateJWTToken() const { 145 | auto token = jwt::create() 146 | .set_issuer(std::string(kJWTIssuer)) 147 | .set_type("JWT") 148 | .set_id("sqlflite-server-" + 149 | boost::uuids::to_string(boost::uuids::random_generator()())) 150 | .set_issued_at(std::chrono::system_clock::now()) 151 | .set_expires_at(std::chrono::system_clock::now() + 152 | std::chrono::seconds{kJWTExpiration}) 153 | .set_payload_claim("username", jwt::claim(username_)) 154 | .sign(jwt::algorithm::hs256{secret_key_}); 155 | 156 | return token; 157 | } 158 | 159 | // ---------------------------------------- 160 | HeaderAuthServerMiddlewareFactory::HeaderAuthServerMiddlewareFactory( 161 | const std::string &username, const std::string &password, 162 | const std::string &secret_key) 163 | : username_(username), password_(password), secret_key_(secret_key) {} 164 | 165 | Status HeaderAuthServerMiddlewareFactory::StartCall( 166 | const flight::CallInfo &info, const flight::CallHeaders &incoming_headers, 167 | std::shared_ptr *middleware) { 168 | std::string auth_header_type; 169 | ARROW_RETURN_NOT_OK( 170 | SecurityUtilities::GetAuthHeaderType(incoming_headers, &auth_header_type)); 171 | if (auth_header_type == "Basic") { 172 | std::string username; 173 | std::string password; 174 | 175 | 
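    // Decode the base64-encoded "Basic" credentials from the authorization
    // header; if they match the configured username/password, attach a
    // HeaderAuthServerMiddleware, whose SendingHeaders() later hands the
    // client a signed JWT bearer token for subsequent calls.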
SecurityUtilities::ParseBasicHeader(incoming_headers, username, password); 176 | 177 | if ((username == username_) && (password == password_)) { 178 | *middleware = std::make_shared(username, secret_key_); 179 | } else { 180 | return MakeFlightError(flight::FlightStatusCode::Unauthenticated, 181 | "Invalid credentials"); 182 | } 183 | } 184 | return Status::OK(); 185 | } 186 | 187 | // ---------------------------------------- 188 | BearerAuthServerMiddleware::BearerAuthServerMiddleware( 189 | const std::string &secret_key, const flight::CallHeaders &incoming_headers, 190 | std::optional *isValid) 191 | : secret_key_(secret_key), incoming_headers_(incoming_headers), isValid_(isValid) {} 192 | 193 | void BearerAuthServerMiddleware::SendingHeaders( 194 | flight::AddCallHeaders *outgoing_headers) { 195 | std::string bearer_token = SecurityUtilities::FindKeyValPrefixInCallHeaders( 196 | incoming_headers_, kAuthHeader, kBearerPrefix); 197 | *isValid_ = (VerifyToken(bearer_token)); 198 | } 199 | 200 | void BearerAuthServerMiddleware::CallCompleted(const Status &status) {} 201 | 202 | std::string BearerAuthServerMiddleware::name() const { 203 | return "BearerAuthServerMiddleware"; 204 | } 205 | 206 | bool BearerAuthServerMiddleware::VerifyToken(const std::string &token) const { 207 | if (token.empty()) { 208 | return false; 209 | } 210 | auto verify = jwt::verify() 211 | .allow_algorithm(jwt::algorithm::hs256{secret_key_}) 212 | .with_issuer(std::string(kJWTIssuer)); 213 | 214 | try { 215 | auto decoded = jwt::decode(token); 216 | verify.verify(decoded); 217 | // If we got this far, the token verified successfully... 218 | return true; 219 | } catch (const std::exception &e) { 220 | std::cout << "Bearer Token verification failed with exception: " << e.what() 221 | << std::endl; 222 | return false; 223 | } 224 | } 225 | 226 | // ---------------------------------------- 227 | BearerAuthServerMiddlewareFactory::BearerAuthServerMiddlewareFactory( 228 | const std::string &secret_key) 229 | : secret_key_(secret_key) {} 230 | 231 | Status BearerAuthServerMiddlewareFactory::StartCall( 232 | const flight::CallInfo &info, const flight::CallHeaders &incoming_headers, 233 | std::shared_ptr *middleware) { 234 | if (const std::pair &iter_pair = 236 | incoming_headers.equal_range(kAuthHeader); 237 | iter_pair.first != iter_pair.second) { 238 | std::string auth_header_type; 239 | ARROW_RETURN_NOT_OK( 240 | SecurityUtilities::GetAuthHeaderType(incoming_headers, &auth_header_type)); 241 | if (auth_header_type == "Bearer") { 242 | *middleware = std::make_shared( 243 | secret_key_, incoming_headers, &isValid_); 244 | } 245 | } 246 | if (isValid_.has_value() && !*isValid_) { 247 | isValid_.reset(); 248 | 249 | return MakeFlightError(flight::FlightStatusCode::Unauthenticated, 250 | "Invalid bearer token provided"); 251 | } 252 | 253 | return Status::OK(); 254 | } 255 | 256 | std::optional BearerAuthServerMiddlewareFactory::GetIsValid() { return isValid_; } 257 | 258 | } // namespace sqlflite 259 | -------------------------------------------------------------------------------- /src/sqlflite_client.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. 
The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include 19 | #define BOOST_NO_CXX98_FUNCTION_BASE // ARROW-17805 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | 27 | #include "arrow/array/builder_binary.h" 28 | #include "arrow/array/builder_primitive.h" 29 | #include "arrow/flight/api.h" 30 | #include "arrow/flight/sql/api.h" 31 | #include "arrow/io/memory.h" 32 | #include "arrow/pretty_print.h" 33 | #include "arrow/status.h" 34 | #include "arrow/table.h" 35 | 36 | #include "library/include/flight_sql_fwd.h" 37 | 38 | using arrow::Status; 39 | 40 | namespace sqlflite { 41 | 42 | DEFINE_string(host, "localhost", "Host to connect to"); 43 | DEFINE_int32(port, 31337, "Port to connect to"); 44 | DEFINE_string(username, "", "Username"); 45 | DEFINE_string(password, "", "Password"); 46 | DEFINE_bool(use_tls, false, "Use TLS for connection"); 47 | DEFINE_string(tls_roots, "", "Path to Root certificates for TLS (in PEM format)"); 48 | DEFINE_bool(tls_skip_verify, false, "Skip TLS server certificate verification"); 49 | DEFINE_string(mtls_cert_chain, "", 50 | "Path to Certificate chain (in PEM format) used for mTLS authentication - " 51 | "if server requires it, must be accompanied by mtls_private_key"); 52 | DEFINE_string(mtls_private_key, "", 53 | "Path to Private key (in PEM format) used for mTLS authentication - if " 54 | "server requires it"); 55 | 56 | DEFINE_string(command, "", "Method to run"); 57 | DEFINE_string(query, "", "Query"); 58 | DEFINE_string(catalog, "", "Catalog"); 59 | DEFINE_string(schema, "", "Schema"); 60 | DEFINE_string(table, "", "Table"); 61 | 62 | Status PrintResultsForEndpoint(flight::sql::FlightSqlClient &client, 63 | const flight::FlightCallOptions &call_options, 64 | const flight::FlightEndpoint &endpoint) { 65 | ARROW_ASSIGN_OR_RAISE(auto stream, client.DoGet(call_options, endpoint.ticket)); 66 | 67 | const arrow::Result> &schema = stream->GetSchema(); 68 | ARROW_RETURN_NOT_OK(schema); 69 | 70 | std::cout << "Schema:" << std::endl; 71 | std::cout << schema->get()->ToString() << std::endl << std::endl; 72 | 73 | std::cout << "Results:" << std::endl; 74 | 75 | int64_t num_rows = 0; 76 | 77 | while (true) { 78 | ARROW_ASSIGN_OR_RAISE(flight::FlightStreamChunk chunk, stream->Next()); 79 | if (chunk.data == nullptr) { 80 | break; 81 | } 82 | std::cout << chunk.data->ToString() << std::endl; 83 | num_rows += chunk.data->num_rows(); 84 | } 85 | 86 | std::cout << "Total: " << num_rows << std::endl; 87 | 88 | return Status::OK(); 89 | } 90 | 91 | Status PrintResults(flight::sql::FlightSqlClient &client, 92 | const flight::FlightCallOptions &call_options, 93 | const std::unique_ptr &info) { 94 | const std::vector &endpoints = info->endpoints(); 95 | 96 | for (size_t i = 0; i < endpoints.size(); i++) { 97 | std::cout << "Results from endpoint " << i + 1 << " of " << endpoints.size() 98 | << std::endl; 99 | 
ARROW_RETURN_NOT_OK(PrintResultsForEndpoint(client, call_options, endpoints[i])); 100 | } 101 | 102 | return Status::OK(); 103 | } 104 | 105 | Status getPEMCertFileContents(const std::string &cert_file_path, 106 | std::string &cert_contents) { 107 | std::ifstream cert_file(cert_file_path); 108 | if (!cert_file.is_open()) { 109 | return Status::IOError("Could not open file: " + cert_file_path); 110 | } 111 | 112 | std::stringstream cert_stream; 113 | cert_stream << cert_file.rdbuf(); 114 | cert_contents = cert_stream.str(); 115 | 116 | return Status::OK(); 117 | } 118 | 119 | Status RunMain() { 120 | ARROW_ASSIGN_OR_RAISE(auto location, 121 | (FLAGS_use_tls) 122 | ? flight::Location::ForGrpcTls(FLAGS_host, FLAGS_port) 123 | : flight::Location::ForGrpcTcp(FLAGS_host, FLAGS_port)); 124 | 125 | // Setup our options 126 | flight::FlightClientOptions options; 127 | 128 | if (!FLAGS_tls_roots.empty()) { 129 | ARROW_RETURN_NOT_OK(getPEMCertFileContents(FLAGS_tls_roots, options.tls_root_certs)); 130 | } 131 | 132 | options.disable_server_verification = FLAGS_tls_skip_verify; 133 | 134 | if (!FLAGS_mtls_cert_chain.empty()) { 135 | ARROW_RETURN_NOT_OK( 136 | getPEMCertFileContents(FLAGS_mtls_cert_chain, options.cert_chain)); 137 | 138 | if (!FLAGS_mtls_private_key.empty()) { 139 | ARROW_RETURN_NOT_OK( 140 | getPEMCertFileContents(FLAGS_mtls_private_key, options.private_key)); 141 | } else { 142 | return Status::Invalid( 143 | "mTLS private key file must be provided if mTLS certificate chain is provided"); 144 | } 145 | } 146 | 147 | ARROW_ASSIGN_OR_RAISE(auto client, flight::FlightClient::Connect(location, options)); 148 | 149 | flight::FlightCallOptions call_options; 150 | 151 | if (!FLAGS_username.empty() || !FLAGS_password.empty()) { 152 | arrow::Result> bearer_result = 153 | client->AuthenticateBasicToken({}, FLAGS_username, FLAGS_password); 154 | ARROW_RETURN_NOT_OK(bearer_result); 155 | 156 | call_options.headers.push_back(bearer_result.ValueOrDie()); 157 | } 158 | 159 | flight::sql::FlightSqlClient sql_client(std::move(client)); 160 | 161 | if (FLAGS_command == "ExecuteUpdate") { 162 | ARROW_ASSIGN_OR_RAISE(auto rows, sql_client.ExecuteUpdate(call_options, FLAGS_query)); 163 | 164 | std::cout << "Result: " << rows << std::endl; 165 | 166 | return Status::OK(); 167 | } 168 | 169 | std::unique_ptr info; 170 | 171 | std::shared_ptr prepared_statement; 172 | 173 | if (FLAGS_command == "Execute") { 174 | ARROW_ASSIGN_OR_RAISE(info, sql_client.Execute(call_options, FLAGS_query)); 175 | } else if (FLAGS_command == "GetCatalogs") { 176 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetCatalogs(call_options)); 177 | } else if (FLAGS_command == "PreparedStatementExecute") { 178 | ARROW_ASSIGN_OR_RAISE(prepared_statement, 179 | sql_client.Prepare(call_options, FLAGS_query)); 180 | ARROW_ASSIGN_OR_RAISE(info, prepared_statement->Execute(call_options)); 181 | } else if (FLAGS_command == "PreparedStatementExecuteParameterBinding") { 182 | ARROW_ASSIGN_OR_RAISE(prepared_statement, sql_client.Prepare({}, FLAGS_query)); 183 | auto parameter_schema = prepared_statement->parameter_schema(); 184 | auto result_set_schema = prepared_statement->dataset_schema(); 185 | 186 | std::cout << result_set_schema->ToString(false) << std::endl; 187 | arrow::Int64Builder int_builder; 188 | ARROW_RETURN_NOT_OK(int_builder.Append(1)); 189 | std::shared_ptr int_array; 190 | ARROW_RETURN_NOT_OK(int_builder.Finish(&int_array)); 191 | std::shared_ptr result; 192 | result = arrow::RecordBatch::Make(parameter_schema, 1, {int_array}); 193 | 
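    // Bind the single-row parameter batch built above to the prepared
    // statement, then execute it; the bound value (1) is substituted for the
    // query's parameter placeholder.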
194 | ARROW_RETURN_NOT_OK(prepared_statement->SetParameters(result)); 195 | ARROW_ASSIGN_OR_RAISE(info, prepared_statement->Execute(call_options)); 196 | } else if (FLAGS_command == "GetDbSchemas") { 197 | ARROW_ASSIGN_OR_RAISE( 198 | info, sql_client.GetDbSchemas(call_options, &FLAGS_catalog, &FLAGS_schema)); 199 | } else if (FLAGS_command == "GetTableTypes") { 200 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetTableTypes(call_options)); 201 | } else if (FLAGS_command == "GetTables") { 202 | ARROW_ASSIGN_OR_RAISE( 203 | info, sql_client.GetTables(call_options, &FLAGS_catalog, &FLAGS_schema, 204 | &FLAGS_table, false, nullptr)); 205 | } else if (FLAGS_command == "GetExportedKeys") { 206 | flight::sql::TableRef table_ref = {std::make_optional(FLAGS_catalog), 207 | std::make_optional(FLAGS_schema), FLAGS_table}; 208 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetExportedKeys(call_options, table_ref)); 209 | } else if (FLAGS_command == "GetImportedKeys") { 210 | flight::sql::TableRef table_ref = {std::make_optional(FLAGS_catalog), 211 | std::make_optional(FLAGS_schema), FLAGS_table}; 212 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetImportedKeys(call_options, table_ref)); 213 | } else if (FLAGS_command == "GetPrimaryKeys") { 214 | flight::sql::TableRef table_ref = {std::make_optional(FLAGS_catalog), 215 | std::make_optional(FLAGS_schema), FLAGS_table}; 216 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetPrimaryKeys(call_options, table_ref)); 217 | } else if (FLAGS_command == "GetSqlInfo") { 218 | ARROW_ASSIGN_OR_RAISE(info, sql_client.GetSqlInfo(call_options, {})); 219 | } 220 | 221 | arrow::Status print_status; 222 | if (info != NULLPTR) { 223 | print_status = PrintResults(sql_client, call_options, info); 224 | 225 | if (prepared_statement != NULLPTR) { 226 | ARROW_RETURN_NOT_OK(prepared_statement->Close(call_options)); 227 | } 228 | } 229 | 230 | return print_status; 231 | } 232 | 233 | } // namespace sqlflite 234 | 235 | int main(int argc, char **argv) { 236 | gflags::ParseCommandLineFlags(&argc, &argv, true); 237 | 238 | Status st = sqlflite::RunMain(); 239 | if (!st.ok()) { 240 | std::cerr << st << std::endl; 241 | return 1; 242 | } 243 | return 0; 244 | } 245 | -------------------------------------------------------------------------------- /src/sqlflite_server.cpp: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include "library/include/sqlflite_library.h" 19 | #include 20 | #include 21 | 22 | namespace po = boost::program_options; 23 | namespace fs = std::filesystem; 24 | 25 | int main(int argc, char **argv) { 26 | std::vector tls_token_values; 27 | 28 | // Declare the supported options. 
29 | po::options_description desc("Allowed options"); 30 | // clang-format off 31 | desc.add_options() 32 | ("help", "produce this help message") 33 | ("version", "Print the version and exit") 34 | ("backend,B", po::value()->default_value("duckdb"), 35 | "Specify the database backend. Allowed options: duckdb, sqlite.") 36 | ("hostname,H", po::value()->default_value(""), 37 | "Specify the hostname to listen on for the SQLFlite Server. If not set, we will use env var: 'SQLFLITE_HOSTNAME'. " 38 | "If that isn't set, we will use the default of: '0.0.0.0'.") 39 | ("port,R", po::value()->default_value(DEFAULT_FLIGHT_PORT), 40 | "Specify the port to listen on for the SQLFlite Server.") 41 | ("database-filename,D", po::value()->default_value(""), 42 | "Specify the database filename (absolute or relative to the current working directory)") 43 | ("username,U", po::value()->default_value(""), 44 | "Specify the username to allow to connect to the SQLFlite Server for clients. If not set, we will use env var: 'SQLFLITE_USERNAME'. " 45 | "If that isn't set, we will use the default of: 'sqlflite_username'.") 46 | ("password,P", po::value()->default_value(""), 47 | "Specify the password to set on the SQLFlite Server for clients to connect with. If not set, we will use env var: 'SQLFLITE_PASSWORD'. " 48 | "If that isn't set, the server will exit with failure.") 49 | ("secret-key,S", po::value()->default_value(""), 50 | "Specify the secret key used to sign JWTs issued by the SQLFlite Server. " 51 | "If it isn't set, we use env var: 'SECRET_KEY'. If that isn't set, the server will create a random secret key.") 52 | ("tls,T", po::value>(&tls_token_values)->multitoken()->default_value( 53 | std::vector{"", ""}, ""), 54 | "Specify the TLS certificate and key file paths.") 55 | ("init-sql-commands,I", po::value()->default_value(""), 56 | "Specify the SQL commands to run on server startup. " 57 | "If not set, we will use env var: 'INIT_SQL_COMMANDS'.") 58 | ("init-sql-commands-file,F", po::value()->default_value(""), 59 | "Specify a file containing SQL commands to run on server startup. " 60 | "If not set, we will use env var: 'INIT_SQL_COMMANDS_FILE'.") 61 | ("mtls-ca-cert-filename,M", po::value()->default_value(""), 62 | "Specify an optional mTLS CA certificate path used to verify clients. 
The certificate MUST be in PEM format.") 63 | ("print-queries,Q", po::bool_switch()->default_value(false), "Print queries run by clients to stdout"); 64 | // clang-format on 65 | 66 | po::variables_map vm; 67 | po::store(po::parse_command_line(argc, argv, desc), vm); 68 | po::notify(vm); 69 | 70 | if (vm.count("help")) { 71 | std::cout << desc << "\n"; 72 | return 0; 73 | } 74 | 75 | if (vm.count("version")) { 76 | std::cout << "SQLFlite Server CLI: " << SQLFLITE_SERVER_VERSION << std::endl; 77 | return 0; 78 | } 79 | 80 | std::string backend_str = vm["backend"].as(); 81 | BackendType backend; 82 | if (backend_str == "duckdb") { 83 | backend = BackendType::duckdb; 84 | } else if (backend_str == "sqlite") { 85 | backend = BackendType::sqlite; 86 | } else { 87 | std::cout << "Invalid backend: " << backend_str << std::endl; 88 | return 1; 89 | } 90 | 91 | auto database_filename = fs::path(vm["database-filename"].as()); 92 | 93 | std::string hostname = ""; 94 | if (vm.count("hostname")) { 95 | hostname = vm["hostname"].as(); 96 | } 97 | 98 | int port = vm["port"].as(); 99 | 100 | std::string username = ""; 101 | if (vm.count("username")) { 102 | username = vm["username"].as(); 103 | } 104 | 105 | std::string password = ""; 106 | if (vm.count("password")) { 107 | password = vm["password"].as(); 108 | } 109 | 110 | std::string secret_key = ""; 111 | if (vm.count("secret-key")) { 112 | secret_key = vm["secret-key"].as(); 113 | } 114 | 115 | auto tls_cert_path = fs::path(); 116 | auto tls_key_path = fs::path(); 117 | if (vm.count("tls")) { 118 | std::vector tls_tokens = tls_token_values; 119 | if (tls_tokens.size() != 2) { 120 | std::cout << "--tls requires 2 entries - separated by a space!" << std::endl; 121 | return 1; 122 | } 123 | tls_cert_path = fs::path(tls_tokens[0]); 124 | tls_key_path = fs::path(tls_tokens[1]); 125 | } 126 | 127 | std::string init_sql_commands = ""; 128 | if (vm.count("init-sql-commands")) { 129 | init_sql_commands = vm["init-sql-commands"].as(); 130 | } 131 | 132 | std::string init_sql_commands_file = ""; 133 | if (vm.count("init-sql-commands-file")) { 134 | init_sql_commands_file = fs::path(vm["init-sql-commands-file"].as()); 135 | } 136 | 137 | fs::path mtls_ca_cert_path; 138 | if (vm.count("mtls-ca-cert-filename")) { 139 | mtls_ca_cert_path = fs::path(vm["mtls-ca-cert-filename"].as()); 140 | } 141 | 142 | bool print_queries = vm["print-queries"].as(); 143 | 144 | return RunFlightSQLServer(backend, database_filename, hostname, port, username, 145 | password, secret_key, tls_cert_path, tls_key_path, 146 | mtls_ca_cert_path, init_sql_commands, init_sql_commands_file, 147 | print_queries); 148 | } 149 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_server.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. 
You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | #include 25 | 26 | #include "sqlite_statement.h" 27 | #include "sqlite_statement_batch_reader.h" 28 | #include "arrow/flight/sql/server.h" 29 | #include "arrow/flight/types.h" 30 | #include "arrow/result.h" 31 | #include "flight_sql_fwd.h" 32 | 33 | namespace sqlflite::sqlite { 34 | 35 | /// \brief Convert a column type to a ArrowType. 36 | /// \param sqlite_type the sqlite type. 37 | /// \return The equivalent ArrowType. 38 | arrow::Result> GetArrowType(const char* sqlite_type); 39 | 40 | /// \brief Convert a column type name to SQLite type. 41 | /// \param type_name the type name. 42 | /// \return The equivalent SQLite type. 43 | int32_t GetSqlTypeFromTypeName(const char* type_name); 44 | 45 | /// \brief Get the DataType used when parameter type is not known. 46 | /// \return DataType used when parameter type is not known. 47 | inline std::shared_ptr GetUnknownColumnDataType() { 48 | return arrow::dense_union({ 49 | field("string", arrow::utf8()), 50 | field("bytes", arrow::binary()), 51 | field("bigint", arrow::int64()), 52 | field("double", arrow::float64()), 53 | }); 54 | } 55 | 56 | /// \brief Example implementation of FlightSqlServerBase backed by an in-memory SQLite3 57 | /// database. 58 | class SQLiteFlightSqlServer : public flight::sql::FlightSqlServerBase { 59 | public: 60 | ~SQLiteFlightSqlServer() override; 61 | 62 | static arrow::Result> Create(std::string path); 63 | 64 | /// \brief Auxiliary method used to execute an arbitrary SQL statement on the underlying 65 | /// SQLite database. 
66 | arrow::Status ExecuteSql(const std::string& sql); 67 | 68 | arrow::Result> GetFlightInfoStatement( 69 | const flight::ServerCallContext& context, 70 | const flight::sql::StatementQuery& command, 71 | const flight::FlightDescriptor& descriptor) override; 72 | 73 | arrow::Result> DoGetStatement( 74 | const flight::ServerCallContext& context, 75 | const flight::sql::StatementQueryTicket& command) override; 76 | arrow::Result> GetFlightInfoCatalogs( 77 | const flight::ServerCallContext& context, 78 | const flight::FlightDescriptor& descriptor) override; 79 | arrow::Result> DoGetCatalogs( 80 | const flight::ServerCallContext& context) override; 81 | arrow::Result> GetFlightInfoSchemas( 82 | const flight::ServerCallContext& context, const flight::sql::GetDbSchemas& command, 83 | const flight::FlightDescriptor& descriptor) override; 84 | arrow::Result> DoGetDbSchemas( 85 | const flight::ServerCallContext& context, 86 | const flight::sql::GetDbSchemas& command) override; 87 | arrow::Result DoPutCommandStatementUpdate( 88 | const flight::ServerCallContext& context, 89 | const flight::sql::StatementUpdate& update) override; 90 | arrow::Result CreatePreparedStatement( 91 | const flight::ServerCallContext& context, 92 | const flight::sql::ActionCreatePreparedStatementRequest& request) override; 93 | arrow::Status ClosePreparedStatement( 94 | const flight::ServerCallContext& context, 95 | const flight::sql::ActionClosePreparedStatementRequest& request) override; 96 | arrow::Result> GetFlightInfoPreparedStatement( 97 | const flight::ServerCallContext& context, 98 | const flight::sql::PreparedStatementQuery& command, 99 | const flight::FlightDescriptor& descriptor) override; 100 | arrow::Result> DoGetPreparedStatement( 101 | const flight::ServerCallContext& context, 102 | const flight::sql::PreparedStatementQuery& command) override; 103 | arrow::Status DoPutPreparedStatementQuery( 104 | const flight::ServerCallContext& context, 105 | const flight::sql::PreparedStatementQuery& command, 106 | flight::FlightMessageReader* reader, flight::FlightMetadataWriter* writer) override; 107 | arrow::Result DoPutPreparedStatementUpdate( 108 | const flight::ServerCallContext& context, 109 | const flight::sql::PreparedStatementUpdate& command, 110 | flight::FlightMessageReader* reader) override; 111 | 112 | arrow::Result> GetFlightInfoTables( 113 | const flight::ServerCallContext& context, const flight::sql::GetTables& command, 114 | const flight::FlightDescriptor& descriptor) override; 115 | 116 | arrow::Result> DoGetTables( 117 | const flight::ServerCallContext& context, 118 | const flight::sql::GetTables& command) override; 119 | arrow::Result> GetFlightInfoXdbcTypeInfo( 120 | const flight::ServerCallContext& context, 121 | const flight::sql::GetXdbcTypeInfo& command, 122 | const flight::FlightDescriptor& descriptor) override; 123 | arrow::Result> DoGetXdbcTypeInfo( 124 | const flight::ServerCallContext& context, 125 | const flight::sql::GetXdbcTypeInfo& command) override; 126 | arrow::Result> GetFlightInfoTableTypes( 127 | const flight::ServerCallContext& context, 128 | const flight::FlightDescriptor& descriptor) override; 129 | arrow::Result> DoGetTableTypes( 130 | const flight::ServerCallContext& context) override; 131 | arrow::Result> GetFlightInfoImportedKeys( 132 | const flight::ServerCallContext& context, 133 | const flight::sql::GetImportedKeys& command, 134 | const flight::FlightDescriptor& descriptor) override; 135 | arrow::Result> DoGetImportedKeys( 136 | const flight::ServerCallContext& context, 137 
| const flight::sql::GetImportedKeys& command) override; 138 | arrow::Result> GetFlightInfoExportedKeys( 139 | const flight::ServerCallContext& context, 140 | const flight::sql::GetExportedKeys& command, 141 | const flight::FlightDescriptor& descriptor) override; 142 | arrow::Result> DoGetExportedKeys( 143 | const flight::ServerCallContext& context, 144 | const flight::sql::GetExportedKeys& command) override; 145 | arrow::Result> GetFlightInfoCrossReference( 146 | const flight::ServerCallContext& context, 147 | const flight::sql::GetCrossReference& command, 148 | const flight::FlightDescriptor& descriptor) override; 149 | arrow::Result> DoGetCrossReference( 150 | const flight::ServerCallContext& context, 151 | const flight::sql::GetCrossReference& command) override; 152 | 153 | arrow::Result> GetFlightInfoPrimaryKeys( 154 | const flight::ServerCallContext& context, 155 | const flight::sql::GetPrimaryKeys& command, 156 | const flight::FlightDescriptor& descriptor) override; 157 | 158 | arrow::Result> DoGetPrimaryKeys( 159 | const flight::ServerCallContext& context, 160 | const flight::sql::GetPrimaryKeys& command) override; 161 | 162 | arrow::Result BeginTransaction( 163 | const flight::ServerCallContext& context, 164 | const flight::sql::ActionBeginTransactionRequest& request) override; 165 | arrow::Status EndTransaction( 166 | const flight::ServerCallContext& context, 167 | const flight::sql::ActionEndTransactionRequest& request) override; 168 | 169 | private: 170 | class Impl; 171 | std::shared_ptr impl_; 172 | 173 | explicit SQLiteFlightSqlServer(std::shared_ptr impl); 174 | }; 175 | 176 | } // namespace sqlflite::sqlite 177 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_sql_info.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include "arrow/flight/sql/types.h" 21 | 22 | namespace sqlflite::sqlite { 23 | 24 | /// \brief Gets the mapping from SQL info ids to SqlInfoResult instances. 25 | /// \return the cache. 26 | arrow::flight::sql::SqlInfoResultMap GetSqlInfoResultMap(); 27 | 28 | } // namespace sqlflite::sqlite 29 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_statement.cc: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. 
The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #include "sqlite_statement.h" 19 | 20 | #include 21 | 22 | #include 23 | 24 | #include "arrow/array/array_base.h" 25 | #include "arrow/array/array_binary.h" 26 | #include "arrow/array/array_nested.h" 27 | #include "arrow/array/array_primitive.h" 28 | #include "arrow/flight/sql/column_metadata.h" 29 | #include "sqlite_server.h" 30 | #include "arrow/scalar.h" 31 | #include "arrow/table.h" 32 | #include "arrow/type.h" 33 | #include "arrow/util/checked_cast.h" 34 | #include "flight_sql_fwd.h" 35 | 36 | namespace sqlflite::sqlite { 37 | 38 | using arrow::internal::checked_cast; 39 | 40 | std::shared_ptr GetDataTypeFromSqliteType(const int column_type) { 41 | switch (column_type) { 42 | case SQLITE_INTEGER: 43 | return arrow::int64(); 44 | case SQLITE_FLOAT: 45 | return arrow::float64(); 46 | case SQLITE_BLOB: 47 | return arrow::binary(); 48 | case SQLITE_TEXT: 49 | return arrow::utf8(); 50 | case SQLITE_NULL: 51 | default: 52 | return arrow::null(); 53 | } 54 | } 55 | 56 | int32_t GetPrecisionFromColumn(int column_type) { 57 | switch (column_type) { 58 | case SQLITE_INTEGER: 59 | return 10; 60 | case SQLITE_FLOAT: 61 | return 15; 62 | case SQLITE_NULL: 63 | default: 64 | return 0; 65 | } 66 | } 67 | 68 | flight::sql::ColumnMetadata GetColumnMetadata(int column_type, const char* table) { 69 | flight::sql::ColumnMetadata::ColumnMetadataBuilder builder = 70 | flight::sql::ColumnMetadata::Builder(); 71 | 72 | builder.Scale(15).IsAutoIncrement(false).IsReadOnly(false); 73 | if (table == NULLPTR) { 74 | return builder.Build(); 75 | } else if (column_type == SQLITE_TEXT || column_type == SQLITE_BLOB) { 76 | std::string table_name(table); 77 | builder.TableName(table_name); 78 | } else { 79 | std::string table_name(table); 80 | builder.TableName(table_name).Precision(GetPrecisionFromColumn(column_type)); 81 | } 82 | return builder.Build(); 83 | } 84 | 85 | arrow::Result> SqliteStatement::Create( 86 | sqlite3* db, const std::string& sql) { 87 | sqlite3_stmt* stmt = nullptr; 88 | int rc = 89 | sqlite3_prepare_v2(db, sql.c_str(), static_cast(sql.size()), &stmt, NULLPTR); 90 | 91 | if (rc != SQLITE_OK) { 92 | std::string err_msg = "Can't prepare statement: " + std::string(sqlite3_errmsg(db)); 93 | if (stmt != nullptr) { 94 | rc = sqlite3_finalize(stmt); 95 | if (rc != SQLITE_OK) { 96 | err_msg += "; Failed to finalize SQLite statement: "; 97 | err_msg += std::string(sqlite3_errmsg(db)); 98 | } 99 | } 100 | return arrow::Status::Invalid(err_msg); 101 | } 102 | 103 | std::shared_ptr result(new SqliteStatement(db, stmt)); 104 | return result; 105 | } 106 | 107 | arrow::Result> SqliteStatement::GetSchema() const { 108 | std::vector> fields; 109 | int column_count = sqlite3_column_count(stmt_); 110 | for (int i = 0; i < column_count; i++) { 111 | const char* column_name = sqlite3_column_name(stmt_, i); 112 | 113 | // SQLite does not always provide column types, especially when 
the statement has not 114 | // been executed yet. Because of this behaviour this method tries to get the column 115 | // types in two attempts: 116 | // 1. Use sqlite3_column_type(), which return SQLITE_NULL if the statement has not 117 | // been executed yet 118 | // 2. Use sqlite3_column_decltype(), which returns correctly if given column is 119 | // declared in the table. 120 | // Because of this limitation, it is not possible to know the column types for some 121 | // prepared statements, in this case it returns a dense_union type covering any type 122 | // SQLite supports. 123 | const int column_type = sqlite3_column_type(stmt_, i); 124 | const char* table = sqlite3_column_table_name(stmt_, i); 125 | std::shared_ptr data_type = GetDataTypeFromSqliteType(column_type); 126 | if (data_type->id() == arrow::Type::NA) { 127 | // Try to retrieve column type from sqlite3_column_decltype 128 | const char* column_decltype = sqlite3_column_decltype(stmt_, i); 129 | if (column_decltype != NULLPTR) { 130 | ARROW_ASSIGN_OR_RAISE(data_type, sqlflite::sqlite::GetArrowType(column_decltype)); 131 | } else { 132 | // If it cannot determine the actual column type, return a dense_union type 133 | // covering any type SQLite supports. 134 | data_type = sqlflite::sqlite::GetUnknownColumnDataType(); 135 | } 136 | } 137 | flight::sql::ColumnMetadata column_metadata = GetColumnMetadata(column_type, table); 138 | 139 | fields.push_back( 140 | arrow::field(column_name, data_type, column_metadata.metadata_map())); 141 | } 142 | 143 | return arrow::schema(fields); 144 | } 145 | 146 | SqliteStatement::~SqliteStatement() { sqlite3_finalize(stmt_); } 147 | 148 | arrow::Result SqliteStatement::Step() { 149 | int rc = sqlite3_step(stmt_); 150 | if (rc == SQLITE_ERROR) { 151 | return arrow::Status::ExecutionError("A SQLite runtime error has occurred: ", 152 | sqlite3_errmsg(db_)); 153 | } 154 | 155 | return rc; 156 | } 157 | 158 | arrow::Result SqliteStatement::Reset() { 159 | int rc = sqlite3_reset(stmt_); 160 | if (rc == SQLITE_ERROR) { 161 | return arrow::Status::ExecutionError("A SQLite runtime error has occurred: ", 162 | sqlite3_errmsg(db_)); 163 | } 164 | 165 | return rc; 166 | } 167 | 168 | sqlite3_stmt* SqliteStatement::GetSqlite3Stmt() const { return stmt_; } 169 | 170 | arrow::Result SqliteStatement::ExecuteUpdate() { 171 | while (true) { 172 | ARROW_ASSIGN_OR_RAISE(int rc, Step()); 173 | if (rc == SQLITE_DONE) break; 174 | } 175 | return sqlite3_changes(db_); 176 | } 177 | 178 | arrow::Status SqliteStatement::SetParameters( 179 | std::vector> parameters) { 180 | const int num_params = sqlite3_bind_parameter_count(stmt_); 181 | for (const auto& batch : parameters) { 182 | if (batch->num_columns() != num_params) { 183 | return arrow::Status::Invalid("Expected ", num_params, " parameters, but got ", 184 | batch->num_columns()); 185 | } 186 | } 187 | parameters_ = std::move(parameters); 188 | auto end = std::remove_if(parameters_.begin(), parameters_.end(), 189 | [](const std::shared_ptr& batch) { 190 | return batch->num_rows() == 0; 191 | }); 192 | parameters_.erase(end, parameters_.end()); 193 | return arrow::Status::OK(); 194 | } 195 | 196 | arrow::Status SqliteStatement::Bind(size_t batch_index, int64_t row_index) { 197 | if (batch_index >= parameters_.size()) { 198 | return arrow::Status::IndexError("Cannot bind to batch ", batch_index); 199 | } 200 | const arrow::RecordBatch& batch = *parameters_[batch_index]; 201 | if (row_index < 0 || row_index >= batch.num_rows()) { 202 | return 
arrow::Status::IndexError("Cannot bind to row ", row_index, " in batch ", 203 | batch_index); 204 | } 205 | 206 | if (sqlite3_clear_bindings(stmt_) != SQLITE_OK) { 207 | return arrow::Status::Invalid("Failed to reset bindings: ", sqlite3_errmsg(db_)); 208 | } 209 | for (int c = 0; c < batch.num_columns(); ++c) { 210 | arrow::Array* column = batch.column(c).get(); 211 | int64_t column_index = row_index; 212 | if (column->type_id() == arrow::Type::DENSE_UNION) { 213 | // Allow polymorphic bindings via union 214 | const auto& u = checked_cast(*column); 215 | column_index = u.value_offset(column_index); 216 | column = u.field(u.child_id(row_index)).get(); 217 | } 218 | 219 | int rc = 0; 220 | if (column->IsNull(column_index)) { 221 | rc = sqlite3_bind_null(stmt_, c + 1); 222 | continue; 223 | } 224 | switch (column->type_id()) { 225 | case arrow::Type::INT32: { 226 | const int32_t value = 227 | checked_cast(*column).Value(column_index); 228 | rc = sqlite3_bind_int64(stmt_, c + 1, value); 229 | break; 230 | } 231 | case arrow::Type::INT64: { 232 | const int64_t value = 233 | checked_cast(*column).Value(column_index); 234 | rc = sqlite3_bind_int64(stmt_, c + 1, value); 235 | break; 236 | } 237 | case arrow::Type::FLOAT: { 238 | const float value = 239 | checked_cast(*column).Value(column_index); 240 | rc = sqlite3_bind_double(stmt_, c + 1, value); 241 | break; 242 | } 243 | case arrow::Type::DOUBLE: { 244 | const double value = 245 | checked_cast(*column).Value(column_index); 246 | rc = sqlite3_bind_double(stmt_, c + 1, value); 247 | break; 248 | } 249 | case arrow::Type::STRING: { 250 | const std::string_view value = 251 | checked_cast(*column).Value(column_index); 252 | rc = sqlite3_bind_text(stmt_, c + 1, value.data(), static_cast(value.size()), 253 | SQLITE_TRANSIENT); 254 | break; 255 | } 256 | default: 257 | return arrow::Status::TypeError("Received unsupported data type: ", 258 | *column->type()); 259 | } 260 | if (rc != SQLITE_OK) { 261 | return arrow::Status::UnknownError("Failed to bind parameter: ", 262 | sqlite3_errmsg(db_)); 263 | } 264 | } 265 | 266 | return arrow::Status::OK(); 267 | } 268 | 269 | } // namespace sqlflite::sqlite 270 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_statement.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | 25 | #include "arrow/flight/sql/column_metadata.h" 26 | #include "arrow/type_fwd.h" 27 | 28 | namespace sqlflite::sqlite { 29 | 30 | /// \brief Create an object ColumnMetadata using the column type and 31 | /// table name. 
32 | /// \param column_type The SQLite type. 33 | /// \param table The table name. 34 | /// \return A ColumnMetadata object. 35 | arrow::flight::sql::ColumnMetadata GetColumnMetadata(int column_type, const char* table); 36 | 37 | class SqliteStatement { 38 | public: 39 | /// \brief Creates a SQLite3 statement. 40 | /// \param[in] db SQLite3 database instance. 41 | /// \param[in] sql SQL statement. 42 | /// \return A SqliteStatement object. 43 | static arrow::Result<std::shared_ptr<SqliteStatement>> Create(sqlite3* db, 44 | const std::string& sql); 45 | 46 | ~SqliteStatement(); 47 | 48 | /// \brief Creates an Arrow Schema based on the results of this statement. 49 | /// \return The resulting Schema. 50 | arrow::Result<std::shared_ptr<arrow::Schema>> GetSchema() const; 51 | 52 | /// \brief Steps on the underlying sqlite3_stmt. 53 | /// \return The resulting return code from SQLite. 54 | arrow::Result<int> Step(); 55 | 56 | /// \brief Resets the state of the sqlite3_stmt. 57 | /// \return The resulting return code from SQLite. 58 | arrow::Result<int> Reset(); 59 | 60 | /// \brief Returns the underlying sqlite3_stmt. 61 | /// \return A SQLite statement. 62 | sqlite3_stmt* GetSqlite3Stmt() const; 63 | 64 | sqlite3* db() const { return db_; } 65 | 66 | /// \brief Executes an UPDATE, INSERT or DELETE statement. 67 | /// \return The number of rows changed by execution. 68 | arrow::Result<int64_t> ExecuteUpdate(); 69 | 70 | const std::vector<std::shared_ptr<arrow::RecordBatch>>& parameters() const { 71 | return parameters_; 72 | } 73 | arrow::Status SetParameters( 74 | std::vector<std::shared_ptr<arrow::RecordBatch>> parameters); 75 | arrow::Status Bind(size_t batch_index, int64_t row_index); 76 | 77 | private: 78 | sqlite3* db_; 79 | sqlite3_stmt* stmt_; 80 | std::vector<std::shared_ptr<arrow::RecordBatch>> parameters_; 81 | 82 | SqliteStatement(sqlite3* db, sqlite3_stmt* stmt) : db_(db), stmt_(stmt) {} 83 | }; 84 | 85 | } // namespace sqlflite::sqlite 86 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_statement_batch_reader.cc: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License.
17 | 18 | #include "sqlite_statement_batch_reader.h" 19 | 20 | #include <sqlite3.h> 21 | 22 | #include "arrow/builder.h" 23 | #include "sqlite_statement.h" 24 | 25 | #define STRING_BUILDER_CASE(TYPE_CLASS, STMT, COLUMN) \ 26 | case arrow::TYPE_CLASS##Type::type_id: { \ 27 | auto builder = reinterpret_cast<arrow::TYPE_CLASS##Builder*>(array_builder); \ 28 | const int bytes = sqlite3_column_bytes(STMT, COLUMN); \ 29 | const uint8_t* string = \ 30 | reinterpret_cast<const uint8_t*>(sqlite3_column_text(STMT, COLUMN)); \ 31 | if (string == nullptr) { \ 32 | ARROW_RETURN_NOT_OK(builder->AppendNull()); \ 33 | break; \ 34 | } \ 35 | ARROW_RETURN_NOT_OK(builder->Append(string, bytes)); \ 36 | break; \ 37 | } 38 | 39 | #define BINARY_BUILDER_CASE(TYPE_CLASS, STMT, COLUMN) \ 40 | case arrow::TYPE_CLASS##Type::type_id: { \ 41 | auto builder = reinterpret_cast<arrow::TYPE_CLASS##Builder*>(array_builder); \ 42 | const int bytes = sqlite3_column_bytes(STMT, COLUMN); \ 43 | const uint8_t* blob = \ 44 | reinterpret_cast<const uint8_t*>(sqlite3_column_blob(STMT, COLUMN)); \ 45 | if (blob == nullptr) { \ 46 | ARROW_RETURN_NOT_OK(builder->AppendNull()); \ 47 | break; \ 48 | } \ 49 | ARROW_RETURN_NOT_OK(builder->Append(blob, bytes)); \ 50 | break; \ 51 | } 52 | 53 | #define INT_BUILDER_CASE(TYPE_CLASS, STMT, COLUMN) \ 54 | case arrow::TYPE_CLASS##Type::type_id: { \ 55 | using c_type = typename arrow::TYPE_CLASS##Type::c_type; \ 56 | auto builder = reinterpret_cast<arrow::TYPE_CLASS##Builder*>(array_builder); \ 57 | const sqlite3_int64 value = sqlite3_column_int64(STMT, COLUMN); \ 58 | ARROW_RETURN_NOT_OK(builder->Append(static_cast<c_type>(value))); \ 59 | break; \ 60 | } 61 | 62 | #define FLOAT_BUILDER_CASE(TYPE_CLASS, STMT, COLUMN) \ 63 | case arrow::TYPE_CLASS##Type::type_id: { \ 64 | auto builder = reinterpret_cast<arrow::TYPE_CLASS##Builder*>(array_builder); \ 65 | const double value = sqlite3_column_double(STMT, COLUMN); \ 66 | ARROW_RETURN_NOT_OK( \ 67 | builder->Append(static_cast<arrow::TYPE_CLASS##Type::c_type>(value))); \ 68 | break; \ 69 | } 70 | 71 | namespace sqlflite::sqlite { 72 | 73 | // Batch size for SQLite statement results 74 | static constexpr int32_t kMaxBatchSize = 16384; 75 | 76 | std::shared_ptr<arrow::Schema> SqliteStatementBatchReader::schema() const { 77 | return schema_; 78 | } 79 | 80 | SqliteStatementBatchReader::SqliteStatementBatchReader( 81 | std::shared_ptr<SqliteStatement> statement, std::shared_ptr<arrow::Schema> schema) 82 | : statement_(std::move(statement)), 83 | schema_(std::move(schema)), 84 | rc_(SQLITE_OK), 85 | already_executed_(false) {} 86 | 87 | arrow::Result<std::shared_ptr<SqliteStatementBatchReader>> 88 | SqliteStatementBatchReader::Create(const std::shared_ptr<SqliteStatement>& statement_) { 89 | ARROW_RETURN_NOT_OK(statement_->Reset()); 90 | if (!statement_->parameters().empty()) { 91 | // If there are parameters, infer the schema after binding the first row 92 | ARROW_RETURN_NOT_OK(statement_->Bind(0, 0)); 93 | } 94 | ARROW_RETURN_NOT_OK(statement_->Step()); 95 | ARROW_ASSIGN_OR_RAISE(auto schema, statement_->GetSchema()); 96 | 97 | std::shared_ptr<SqliteStatementBatchReader> result( 98 | new SqliteStatementBatchReader(statement_, schema)); 99 | 100 | return result; 101 | } 102 | 103 | arrow::Result<std::shared_ptr<SqliteStatementBatchReader>> 104 | SqliteStatementBatchReader::Create(const std::shared_ptr<SqliteStatement>& statement, 105 | const std::shared_ptr<arrow::Schema>& schema) { 106 | return std::shared_ptr<SqliteStatementBatchReader>( 107 | new SqliteStatementBatchReader(statement, schema)); 108 | } 109 | 110 | arrow::Status SqliteStatementBatchReader::ReadNext( 111 | std::shared_ptr<arrow::RecordBatch>* out) { 112 | sqlite3_stmt* stmt_ = statement_->GetSqlite3Stmt(); 113 | 114 | const int num_fields = schema_->num_fields(); 115 | std::vector<std::unique_ptr<arrow::ArrayBuilder>> builders(num_fields); 116 | 117 | for (int i = 0; i < num_fields; i++) { 118 | const std::shared_ptr<arrow::Field>& field = schema_->field(i); 119 | const std::shared_ptr<arrow::DataType>&
field_type = field->type(); 120 | 121 | ARROW_RETURN_NOT_OK( 122 | MakeBuilder(arrow::default_memory_pool(), field_type, &builders[i])); 123 | } 124 | 125 | int64_t rows = 0; 126 | while (true) { 127 | if (!already_executed_) { 128 | ARROW_ASSIGN_OR_RAISE(rc_, statement_->Reset()); 129 | if (!statement_->parameters().empty()) { 130 | if (batch_index_ >= statement_->parameters().size()) { 131 | *out = nullptr; 132 | break; 133 | } 134 | ARROW_RETURN_NOT_OK(statement_->Bind(batch_index_, row_index_)); 135 | } 136 | ARROW_ASSIGN_OR_RAISE(rc_, statement_->Step()); 137 | already_executed_ = true; 138 | } 139 | 140 | while (rows < kMaxBatchSize && rc_ == SQLITE_ROW) { 141 | rows++; 142 | for (int i = 0; i < num_fields; i++) { 143 | const std::shared_ptr<arrow::Field>& field = schema_->field(i); 144 | const std::shared_ptr<arrow::DataType>& field_type = field->type(); 145 | arrow::ArrayBuilder* array_builder = builders[i].get(); 146 | 147 | if (sqlite3_column_type(stmt_, i) == SQLITE_NULL) { 148 | ARROW_RETURN_NOT_OK(array_builder->AppendNull()); 149 | continue; 150 | } 151 | 152 | switch (field_type->id()) { 153 | // XXX This doesn't handle overflows when converting to the target 154 | // integer type. 155 | INT_BUILDER_CASE(Int64, stmt_, i) 156 | INT_BUILDER_CASE(UInt64, stmt_, i) 157 | INT_BUILDER_CASE(Int32, stmt_, i) 158 | INT_BUILDER_CASE(UInt32, stmt_, i) 159 | INT_BUILDER_CASE(Int16, stmt_, i) 160 | INT_BUILDER_CASE(UInt16, stmt_, i) 161 | INT_BUILDER_CASE(Int8, stmt_, i) 162 | INT_BUILDER_CASE(UInt8, stmt_, i) 163 | FLOAT_BUILDER_CASE(Double, stmt_, i) 164 | FLOAT_BUILDER_CASE(Float, stmt_, i) 165 | FLOAT_BUILDER_CASE(HalfFloat, stmt_, i) 166 | BINARY_BUILDER_CASE(Binary, stmt_, i) 167 | BINARY_BUILDER_CASE(LargeBinary, stmt_, i) 168 | STRING_BUILDER_CASE(String, stmt_, i) 169 | STRING_BUILDER_CASE(LargeString, stmt_, i) 170 | default: 171 | return arrow::Status::NotImplemented( 172 | "Not implemented SQLite data conversion to ", field_type->name()); 173 | } 174 | } 175 | 176 | ARROW_ASSIGN_OR_RAISE(rc_, statement_->Step()); 177 | } 178 | 179 | // If we still have bind parameters, bind again and retry 180 | const std::vector<std::shared_ptr<arrow::RecordBatch>>& params = 181 | statement_->parameters(); 182 | if (!params.empty() && rc_ == SQLITE_DONE && batch_index_ < params.size()) { 183 | row_index_++; 184 | if (row_index_ < params[batch_index_]->num_rows()) { 185 | already_executed_ = false; 186 | } else { 187 | batch_index_++; 188 | row_index_ = 0; 189 | if (batch_index_ < params.size()) { 190 | already_executed_ = false; 191 | } 192 | } 193 | 194 | if (!already_executed_ && rows < kMaxBatchSize) continue; 195 | } 196 | 197 | if (rows > 0) { 198 | std::vector<std::shared_ptr<arrow::Array>> arrays(builders.size()); 199 | for (int i = 0; i < num_fields; i++) { 200 | ARROW_RETURN_NOT_OK(builders[i]->Finish(&arrays[i])); 201 | } 202 | 203 | *out = arrow::RecordBatch::Make(schema_, rows, arrays); 204 | } else { 205 | *out = nullptr; 206 | } 207 | break; 208 | } 209 | return arrow::Status::OK(); 210 | } 211 | 212 | #undef STRING_BUILDER_CASE 213 | #undef BINARY_BUILDER_CASE 214 | #undef INT_BUILDER_CASE 215 | #undef FLOAT_BUILDER_CASE 216 | 217 | } // namespace sqlflite::sqlite 218 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_statement_batch_reader.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements.
See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | 24 | #include "sqlite_statement.h" 25 | #include "arrow/record_batch.h" 26 | 27 | namespace sqlflite::sqlite { 28 | 29 | class SqliteStatementBatchReader : public arrow::RecordBatchReader { 30 | public: 31 | /// \brief Creates a RecordBatchReader backed by a SQLite statement. 32 | /// \param[in] statement SQLite statement to be read. 33 | /// \return A SqliteStatementBatchReader. 34 | static arrow::Result> Create( 35 | const std::shared_ptr& statement); 36 | 37 | /// \brief Creates a RecordBatchReader backed by a SQLite statement. 38 | /// \param[in] statement SQLite statement to be read. 39 | /// \param[in] schema Schema to be used on results. 40 | /// \return A SqliteStatementBatchReader.. 41 | static arrow::Result> Create( 42 | const std::shared_ptr& statement, 43 | const std::shared_ptr& schema); 44 | 45 | std::shared_ptr schema() const override; 46 | 47 | arrow::Status ReadNext(std::shared_ptr* out) override; 48 | 49 | private: 50 | std::shared_ptr statement_; 51 | std::shared_ptr schema_; 52 | int rc_; 53 | bool already_executed_; 54 | 55 | // State for parameter binding 56 | size_t batch_index_{0}; 57 | int64_t row_index_{0}; 58 | 59 | SqliteStatementBatchReader(std::shared_ptr statement, 60 | std::shared_ptr schema); 61 | }; 62 | 63 | } // namespace sqlflite::sqlite 64 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_tables_schema_batch_reader.cc: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
17 | 18 | #include "sqlite_tables_schema_batch_reader.h" 19 | 20 | #include 21 | 22 | #include 23 | 24 | #include "arrow/array/builder_binary.h" 25 | #include "arrow/flight/sql/column_metadata.h" 26 | #include "sqlite_server.h" 27 | #include "sqlite_statement.h" 28 | #include "arrow/flight/sql/server.h" 29 | #include "arrow/ipc/writer.h" 30 | #include "arrow/record_batch.h" 31 | 32 | #include "flight_sql_fwd.h" 33 | 34 | namespace sqlflite::sqlite { 35 | 36 | std::shared_ptr SqliteTablesWithSchemaBatchReader::schema() const { 37 | return flight::sql::SqlSchema::GetTablesSchemaWithIncludedSchema(); 38 | } 39 | 40 | arrow::Status SqliteTablesWithSchemaBatchReader::ReadNext( 41 | std::shared_ptr* batch) { 42 | std::stringstream schema_query; 43 | 44 | schema_query 45 | << "SELECT table_name, name, type, [notnull] FROM pragma_table_info(table_name)" 46 | << "JOIN(" << main_query_ << ") order by table_name"; 47 | 48 | std::shared_ptr schema_statement; 49 | ARROW_ASSIGN_OR_RAISE(schema_statement, 50 | sqlite::SqliteStatement::Create(db_, schema_query.str())) 51 | 52 | std::shared_ptr first_batch; 53 | 54 | ARROW_RETURN_NOT_OK(reader_->ReadNext(&first_batch)); 55 | 56 | if (!first_batch) { 57 | *batch = NULLPTR; 58 | return arrow::Status::OK(); 59 | } 60 | 61 | const std::shared_ptr table_name_array = 62 | first_batch->GetColumnByName("table_name"); 63 | 64 | arrow::BinaryBuilder schema_builder; 65 | 66 | auto* string_array = reinterpret_cast(table_name_array.get()); 67 | 68 | std::vector> column_fields; 69 | for (int i = 0; i < table_name_array->length(); i++) { 70 | const std::string& table_name = string_array->GetString(i); 71 | 72 | while (sqlite3_step(schema_statement->GetSqlite3Stmt()) == SQLITE_ROW) { 73 | std::string sqlite_table_name = std::string(reinterpret_cast( 74 | sqlite3_column_text(schema_statement->GetSqlite3Stmt(), 0))); 75 | if (sqlite_table_name == table_name) { 76 | const char* column_name = reinterpret_cast( 77 | sqlite3_column_text(schema_statement->GetSqlite3Stmt(), 1)); 78 | const char* column_type = reinterpret_cast( 79 | sqlite3_column_text(schema_statement->GetSqlite3Stmt(), 2)); 80 | int nullable = sqlite3_column_int(schema_statement->GetSqlite3Stmt(), 3); 81 | 82 | const flight::sql::ColumnMetadata& column_metadata = 83 | GetColumnMetadata(sqlflite::sqlite::GetSqlTypeFromTypeName(column_type), 84 | sqlite_table_name.c_str()); 85 | std::shared_ptr arrow_type; 86 | auto status = sqlflite::sqlite::GetArrowType(column_type).Value(&arrow_type); 87 | if (!status.ok()) { 88 | return arrow::Status::NotImplemented("Unknown SQLite type '", column_type, 89 | "' for column '", column_name, 90 | "' in table '", table_name, "': ", status); 91 | } 92 | column_fields.push_back(arrow::field(column_name, arrow_type, nullable == 0, 93 | column_metadata.metadata_map())); 94 | } 95 | } 96 | ARROW_ASSIGN_OR_RAISE(std::shared_ptr schema_buffer, 97 | arrow::ipc::SerializeSchema(*arrow::schema(column_fields))); 98 | 99 | column_fields.clear(); 100 | ARROW_RETURN_NOT_OK(schema_builder.Append(::std::string_view(*schema_buffer))); 101 | } 102 | 103 | std::shared_ptr schema_array; 104 | ARROW_RETURN_NOT_OK(schema_builder.Finish(&schema_array)); 105 | 106 | ARROW_ASSIGN_OR_RAISE(*batch, first_batch->AddColumn(4, "table_schema", schema_array)); 107 | 108 | return arrow::Status::OK(); 109 | } 110 | 111 | } // namespace sqlflite::sqlite 112 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_tables_schema_batch_reader.h: 
-------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include 21 | 22 | #include 23 | #include 24 | 25 | #include "sqlite_statement.h" 26 | #include "sqlite_statement_batch_reader.h" 27 | #include "arrow/record_batch.h" 28 | 29 | namespace sqlflite::sqlite { 30 | 31 | class SqliteTablesWithSchemaBatchReader : public arrow::RecordBatchReader { 32 | private: 33 | std::shared_ptr reader_; 34 | std::string main_query_; 35 | sqlite3* db_; 36 | 37 | public: 38 | /// Constructor for SqliteTablesWithSchemaBatchReader class 39 | /// \param reader an shared_ptr from a SqliteStatementBatchReader. 40 | /// \param main_query SQL query that originated reader's data. 41 | /// \param db a pointer to the sqlite3 db. 42 | SqliteTablesWithSchemaBatchReader( 43 | std::shared_ptr reader, std::string main_query, 44 | sqlite3* db) 45 | : reader_(std::move(reader)), main_query_(std::move(main_query)), db_(db) {} 46 | 47 | std::shared_ptr schema() const override; 48 | 49 | arrow::Status ReadNext(std::shared_ptr* batch) override; 50 | }; 51 | 52 | } // namespace sqlflite::sqlite 53 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_type_info.cc: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 
17 | 18 | #include "sqlite_type_info.h" 19 | 20 | #include "arrow/array/builder_binary.h" 21 | #include "arrow/array/builder_nested.h" 22 | #include "arrow/array/builder_primitive.h" 23 | #include "arrow/flight/sql/server.h" 24 | #include "arrow/flight/sql/types.h" 25 | #include "arrow/record_batch.h" 26 | #include "arrow/util/rows_to_batches.h" 27 | 28 | #include "flight_sql_fwd.h" 29 | 30 | namespace sql = flight::sql; 31 | 32 | namespace sqlflite::sqlite { 33 | 34 | arrow::Result> DoGetTypeInfoResult() { 35 | auto schema = sql::SqlSchema::GetXdbcTypeInfoSchema(); 36 | using ValueType = 37 | std::variant>; 38 | auto VariantConverter = [](arrow::ArrayBuilder& array_builder, const ValueType& value) { 39 | if (std::holds_alternative(value)) { 40 | return dynamic_cast(array_builder) 41 | .Append(std::get(value)); 42 | } else if (std::holds_alternative(value)) { 43 | return dynamic_cast(array_builder) 44 | .Append(std::get(value)); 45 | } else if (std::holds_alternative(value)) { 46 | return array_builder.AppendNull(); 47 | } else if (std::holds_alternative(value)) { 48 | return dynamic_cast(array_builder) 49 | .Append(std::get(value)); 50 | } else { 51 | auto& list_builder = dynamic_cast(array_builder); 52 | ARROW_RETURN_NOT_OK(list_builder.Append()); 53 | auto value_builder = 54 | dynamic_cast(list_builder.value_builder()); 55 | for (const auto& v : std::get>(value)) { 56 | ARROW_RETURN_NOT_OK(value_builder->Append(v)); 57 | } 58 | return arrow::Status::OK(); 59 | } 60 | }; 61 | std::vector> rows = { 62 | { 63 | "bit", -7, 1, nullptr, nullptr, std::vector({}), 64 | 1, false, 3, false, false, false, 65 | "bit", 0, 0, -7, 0, 0, 66 | 0, 67 | }, 68 | { 69 | "tinyint", -6, 3, nullptr, nullptr, std::vector({}), 70 | 1, false, 3, false, false, false, 71 | "tinyint", 0, 0, -6, 0, 0, 72 | 0, 73 | }, 74 | { 75 | "bigint", -5, 19, nullptr, nullptr, std::vector({}), 76 | 1, false, 3, false, false, false, 77 | "bigint", 0, 0, -5, 0, 0, 78 | 0, 79 | }, 80 | { 81 | "longvarbinary", 82 | -4, 83 | 65536, 84 | nullptr, 85 | nullptr, 86 | std::vector({}), 87 | 1, 88 | false, 89 | 3, 90 | false, 91 | false, 92 | false, 93 | "longvarbinary", 94 | 0, 95 | 0, 96 | -4, 97 | 0, 98 | 0, 99 | 0, 100 | }, 101 | { 102 | "varbinary", -3, 255, nullptr, nullptr, std::vector({}), 103 | 1, false, 3, false, false, false, 104 | 105 | "varbinary", 0, 0, -3, 0, 0, 106 | 0, 107 | }, 108 | { 109 | "text", -1, 65536, "'", "'", std::vector({"length"}), 110 | 1, false, 3, false, false, false, 111 | "text", 0, 0, -1, 0, 0, 112 | 0, 113 | }, 114 | { 115 | "longvarchar", 116 | -1, 117 | 65536, 118 | "'", 119 | "'", 120 | std::vector({"length"}), 121 | 1, 122 | false, 123 | 3, 124 | false, 125 | false, 126 | false, 127 | "longvarchar", 128 | 0, 129 | 0, 130 | -1, 131 | 0, 132 | 0, 133 | 0, 134 | }, 135 | { 136 | "char", 1, 255, "'", "'", std::vector({"length"}), 137 | 1, false, 3, false, false, false, 138 | "char", 0, 0, 1, 0, 0, 139 | 0, 140 | }, 141 | { 142 | "integer", 4, 9, nullptr, nullptr, std::vector({}), 143 | 1, false, 3, false, false, false, 144 | "integer", 0, 0, 4, 0, 0, 145 | 0, 146 | }, 147 | { 148 | "smallint", 5, 5, nullptr, nullptr, std::vector({}), 149 | 1, false, 3, false, false, false, 150 | "smallint", 0, 0, 5, 0, 0, 151 | 0, 152 | }, 153 | { 154 | "float", 6, 7, nullptr, nullptr, std::vector({}), 155 | 1, false, 3, false, false, false, 156 | "float", 0, 0, 6, 0, 0, 157 | 0, 158 | }, 159 | { 160 | "double", 8, 15, nullptr, nullptr, std::vector({}), 161 | 1, false, 3, false, false, false, 162 | "double", 0, 0, 8, 0, 
0, 163 | 0, 164 | }, 165 | { 166 | "numeric", 8, 15, nullptr, nullptr, std::vector({}), 167 | 1, false, 3, false, false, false, 168 | "numeric", 0, 0, 8, 0, 0, 169 | 0, 170 | }, 171 | { 172 | "varchar", 12, 255, "'", "'", std::vector({"length"}), 173 | 1, false, 3, false, false, false, 174 | "varchar", 0, 0, 12, 0, 0, 175 | 0, 176 | }, 177 | { 178 | "date", 91, 10, "'", "'", std::vector({}), 179 | 1, false, 3, false, false, false, 180 | "date", 0, 0, 91, 0, 0, 181 | 0, 182 | }, 183 | { 184 | "time", 92, 8, "'", "'", std::vector({}), 185 | 1, false, 3, false, false, false, 186 | "time", 0, 0, 92, 0, 0, 187 | 0, 188 | }, 189 | { 190 | "timestamp", 93, 32, "'", "'", std::vector({}), 191 | 1, false, 3, false, false, false, 192 | "timestamp", 0, 0, 93, 0, 0, 193 | 0, 194 | }, 195 | }; 196 | ARROW_ASSIGN_OR_RAISE(auto reader, RowsToBatches(schema, rows, VariantConverter)); 197 | return reader->Next(); 198 | } 199 | 200 | arrow::Result> DoGetTypeInfoResult( 201 | int data_type_filter) { 202 | ARROW_ASSIGN_OR_RAISE(auto record_batch, DoGetTypeInfoResult()); 203 | 204 | std::vector data_type_vector{-7, -6, -5, -4, -3, -1, -1, 1, 4, 205 | 5, 6, 8, 8, 12, 91, 92, 93}; 206 | 207 | // Checking if the data_type is in the vector with the sqlite3 data types 208 | // and returning a slice from the vector containing the filtered values. 209 | auto pair = std::equal_range(data_type_vector.begin(), data_type_vector.end(), 210 | data_type_filter); 211 | 212 | return record_batch->Slice(pair.first - data_type_vector.begin(), 213 | pair.second - pair.first); 214 | } 215 | 216 | } // namespace sqlflite::sqlite 217 | -------------------------------------------------------------------------------- /src/sqlite/sqlite_type_info.h: -------------------------------------------------------------------------------- 1 | // Licensed to the Apache Software Foundation (ASF) under one 2 | // or more contributor license agreements. See the NOTICE file 3 | // distributed with this work for additional information 4 | // regarding copyright ownership. The ASF licenses this file 5 | // to you under the Apache License, Version 2.0 (the 6 | // "License"); you may not use this file except in compliance 7 | // with the License. You may obtain a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, 12 | // software distributed under the License is distributed on an 13 | // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | // KIND, either express or implied. See the License for the 15 | // specific language governing permissions and limitations 16 | // under the License. 17 | 18 | #pragma once 19 | 20 | #include "arrow/record_batch.h" 21 | 22 | namespace sqlflite::sqlite { 23 | 24 | /// \brief Gets the hard-coded type info from Sqlite for all data types. 25 | /// \return A record batch. 26 | arrow::Result> DoGetTypeInfoResult(); 27 | 28 | /// \brief Gets the hard-coded type info from Sqlite filtering 29 | /// for a specific data type. 30 | /// \return A record batch. 
31 | arrow::Result> DoGetTypeInfoResult( 32 | int data_type_filter); 33 | 34 | } // namespace sqlflite::sqlite 35 | -------------------------------------------------------------------------------- /third_party/Arrow_CMakeLists.txt.in: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.25) 2 | project(arrow) 3 | 4 | set(CMAKE_CXX_STANDARD 17) 5 | 6 | include(ExternalProject) 7 | 8 | ExternalProject_Add( 9 | arrow_project 10 | PREFIX ${CMAKE_BINARY_DIR}/third_party 11 | GIT_REPOSITORY https://github.com/apache/arrow.git 12 | GIT_TAG apache-arrow-17.0.0 13 | SOURCE_SUBDIR cpp 14 | CMAKE_ARGS 15 | -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/third_party/arrow 16 | -DCMAKE_INSTALL_LIBDIR=lib 17 | -DCMAKE_INSTALL_INCLUDEDIR=include 18 | -DCMAKE_BUILD_TYPE=Release 19 | -DARROW_BUILD_TESTS=OFF 20 | -DARROW_BUILD_STATIC=ON 21 | -DARROW_BUILD_SHARED=OFF 22 | -DARROW_COMPUTE=ON 23 | -DARROW_CSV=ON 24 | -DARROW_JSON=ON 25 | -DARROW_DATASET=ON 26 | -DARROW_FILESYSTEM=ON 27 | -DARROW_FLIGHT=ON 28 | -DARROW_FLIGHT_SQL=ON 29 | -DARROW_WITH_ZLIB=ON 30 | -DARROW_DEPENDENCY_SOURCE=BUNDLED 31 | -DARROW_DEPENDENCY_USE_SHARED=OFF 32 | -Dxsimd_SOURCE=BUNDLED 33 | ) 34 | -------------------------------------------------------------------------------- /third_party/DuckDB_CMakeLists.txt.in: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.25) 2 | project(duckdb) 3 | 4 | set(CMAKE_CXX_STANDARD 17) 5 | 6 | include(ExternalProject) 7 | 8 | ExternalProject_Add( 9 | duckdb_project 10 | PREFIX ${CMAKE_BINARY_DIR}/third_party 11 | GIT_REPOSITORY https://github.com/duckdb/duckdb 12 | GIT_TAG v1.1.1 13 | CMAKE_ARGS 14 | -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/third_party/duckdb 15 | ) 16 | -------------------------------------------------------------------------------- /third_party/JWTCPP_CMakeLists.txt.in: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.25) 2 | project(jwt-cpp) 3 | 4 | set(CMAKE_CXX_STANDARD 17) 5 | 6 | include(ExternalProject) 7 | 8 | ExternalProject_Add( 9 | jwt_cpp_project 10 | PREFIX ${CMAKE_BINARY_DIR}/third_party 11 | GIT_REPOSITORY https://github.com/Thalhammer/jwt-cpp.git 12 | GIT_TAG v0.7.0 13 | CMAKE_ARGS 14 | -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/third_party/jwt-cpp 15 | -DJWT_BUILD_EXAMPLES=OFF 16 | BUILD_COMMAND "" # This is a header only library 17 | ) 18 | -------------------------------------------------------------------------------- /third_party/SQLite_CMakeLists.txt.in: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.25) 2 | project(sqlite) 3 | 4 | set(CMAKE_CXX_STANDARD 17) 5 | 6 | include(ExternalProject) 7 | 8 | # Download and configure SQLite during the CMake configuration step 9 | ExternalProject_Add( 10 | sqlite_project 11 | URL "https://www.sqlite.org/2024/sqlite-autoconf-3460100.tar.gz" 12 | DOWNLOAD_EXTRACT_TIMESTAMP TRUE 13 | PREFIX "${CMAKE_BINARY_DIR}/third_party/sqlite" 14 | CONFIGURE_COMMAND ../sqlite_project/configure "CFLAGS=-DSQLITE_ENABLE_COLUMN_METADATA=1" 15 | BUILD_COMMAND make 16 | INSTALL_COMMAND "" 17 | ) 18 | -------------------------------------------------------------------------------- /tls/.gitignore: -------------------------------------------------------------------------------- 1 | *.csr 2 | *.key 3 | *.pkcs1 4 | *.pem 5 | *.srl 6 | 
-------------------------------------------------------------------------------- /tls/README.md: -------------------------------------------------------------------------------- 1 | These are static resources for unit tests. 2 | 3 | The TLS certificates can be regenerated using `gen-certs.sh`. The 4 | script assumes OpenSSL is installed. 5 | -------------------------------------------------------------------------------- /tls/gen-certs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux 4 | 5 | openssl genrsa -out root-ca.key 4096 6 | openssl req -x509 -new -nodes \ 7 | -subj "/C=US/ST=CA/O=MyOrg, Inc./CN=test" \ 8 | -key root-ca.key -sha256 -days 10000 -out root-ca.pem -extensions v3_ca 9 | 10 | openssl genrsa -out cert0.key 4096 11 | openssl req -new -sha256 -key cert0.key \ 12 | -subj "/C=US/ST=CA/O=MyOrg, Inc./CN=localhost" \ 13 | -out cert0.csr 14 | 15 | openssl x509 -req -in cert0.csr -CA root-ca.pem -CAkey root-ca.key -CAcreateserial \ 16 | -out cert0.pem -days 10000 -sha256 -extfile v3_usr.cnf -extensions v3_usr_extensions 17 | 18 | openssl genrsa -out cert1.key 4096 19 | openssl req -new -sha256 -key cert1.key \ 20 | -subj "/C=US/ST=CA/O=MyOrg, Inc./CN=localhost" \ 21 | -out cert1.csr 22 | openssl x509 -req -in cert1.csr -CA root-ca.pem -CAkey root-ca.key -CAcreateserial \ 23 | -out cert1.pem -days 10000 -sha256 -extfile v3_usr.cnf -extensions v3_usr_extensions 24 | 25 | # Convert to PKCS#1 for Java 26 | openssl pkcs8 -in cert0.key -topk8 -nocrypt > cert0.pkcs1 27 | openssl pkcs8 -in cert1.key -topk8 -nocrypt > cert1.pkcs1 28 | -------------------------------------------------------------------------------- /tls/v3_usr.cnf: -------------------------------------------------------------------------------- 1 | [v3_usr_extensions] 2 | basicConstraints=CA:FALSE 3 | keyUsage = digitalSignature, keyEncipherment 4 | extendedKeyUsage = serverAuth, clientAuth 5 | --------------------------------------------------------------------------------
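A quick sanity check for the certificates produced by gen-certs.sh (a suggested verification, not part of the repository; it assumes the script has already been run in the tls/ directory and that OpenSSL is on the PATH):

    openssl verify -CAfile root-ca.pem cert0.pem cert1.pem
    openssl x509 -in cert0.pem -noout -subject -dates

Both leaf certificates should verify against root-ca.pem, and the printed subject should show CN=localhost, matching the subject set in gen-certs.sh.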