├── dart ├── .gitignore ├── dart_test.yaml ├── README.md ├── pubspec.yaml ├── test │ ├── goldens │ │ ├── starting_stream.json │ │ └── simple_iteration.json │ ├── update_hooks_test.dart │ ├── utils │ │ ├── test_utils.dart │ │ ├── fix_035_fixtures.dart │ │ ├── tracking_vfs.dart │ │ ├── schema.dart │ │ └── native_test_utils.dart │ ├── error_test.dart │ └── js_key_encoding_test.dart ├── benchmark │ └── apply_lines.dart └── tool │ └── download_sqlite3.dart ├── crates ├── sqlite │ ├── src │ │ └── main.rs │ ├── README.md │ ├── Cargo.toml │ └── build.rs ├── loadable │ ├── README.md │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── static │ ├── README.md │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── shell │ ├── README.md │ ├── Cargo.toml │ ├── build.rs │ └── src │ │ └── main.rs ├── sqlite_nostd │ ├── src │ │ ├── lib.rs │ │ └── allocator.rs │ ├── Cargo.toml │ ├── build.rs │ └── README.md └── core │ ├── src │ ├── sync │ │ ├── mod.rs │ │ ├── checkpoint.rs │ │ ├── bucket_priority.rs │ │ ├── subscriptions.rs │ │ ├── checksum.rs │ │ └── operations.rs │ ├── constants.rs │ ├── schema │ │ ├── mod.rs │ │ ├── inspection.rs │ │ └── table_info.rs │ ├── ext.rs │ ├── version.rs │ ├── uuid.rs │ ├── checkpoint.rs │ ├── operations.rs │ ├── vtab_util.rs │ ├── kv.rs │ ├── json_util.rs │ ├── macros.rs │ ├── lib.rs │ ├── bson │ │ └── error.rs │ ├── diff.rs │ ├── operations_vtab.rs │ ├── state.rs │ ├── util.rs │ ├── update_hooks.rs │ ├── view_admin.rs │ └── fix_data.rs │ ├── build.rs │ ├── README.md │ └── Cargo.toml ├── NOTICE ├── .gitattributes ├── rust-toolchain.toml ├── android ├── src │ ├── prefab │ │ ├── modules │ │ │ └── powersync │ │ │ │ ├── module.json │ │ │ │ ├── libs │ │ │ │ ├── android.x86 │ │ │ │ │ └── abi.json │ │ │ │ ├── android.x86_64 │ │ │ │ │ └── abi.json │ │ │ │ ├── android.arm64-v8a │ │ │ │ │ └── abi.json │ │ │ │ └── android.armeabi-v7a │ │ │ │ │ └── abi.json │ │ │ │ └── include │ │ │ │ └── powersync.h │ │ └── prefab.json │ └── AndroidManifest.xml ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── .gitignore ├── settings.gradle.kts ├── .gitattributes └── build.gradle.kts ├── .gitignore ├── wasm └── asyncify_imports.json ├── Package.swift ├── .github ├── actions │ ├── macos │ │ └── action.yml │ ├── upload │ │ └── action.yml │ ├── wasm │ │ └── action.yml │ ├── windows │ │ └── action.yml │ ├── linux │ │ └── action.yml │ ├── xcframework │ │ └── action.yml │ └── android │ │ └── action.yml └── workflows │ └── tests.yml ├── tool ├── build_windows.sh ├── build_macos.sh ├── build_linux.sh └── build_wasm.sh ├── Cargo.toml ├── powersync-sqlite-core.podspec ├── docs ├── RELEASING.md ├── schema.md └── sync.md ├── README.md ├── UUID.md └── .cargo └── config.toml /dart/.gitignore: -------------------------------------------------------------------------------- 1 | .dart_tool 2 | -------------------------------------------------------------------------------- /crates/sqlite/src/main.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2023 Journey Mobile, Inc. 
2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | crates/shell/sqlite/* linguist-vendored 2 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "nightly-2025-10-31" 3 | -------------------------------------------------------------------------------- /dart/dart_test.yaml: -------------------------------------------------------------------------------- 1 | tags: 2 | slow: 3 | 4 | presets: 5 | skip_slow: 6 | exclude_tags: slow 7 | -------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/module.json: -------------------------------------------------------------------------------- 1 | { 2 | "export_libraries": [], 3 | "android": {} 4 | } -------------------------------------------------------------------------------- /crates/loadable/README.md: -------------------------------------------------------------------------------- 1 | # powersync_loadable 2 | 3 | Builds the loadable extension as a dynamic library. 4 | -------------------------------------------------------------------------------- /crates/static/README.md: -------------------------------------------------------------------------------- 1 | Builds the core extension as a static library, exposing the `powersync_init_static` function to load it. 2 | -------------------------------------------------------------------------------- /android/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/powersync-ja/powersync-sqlite-core/HEAD/android/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /android/src/prefab/prefab.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "powersync_sqlite_core", 3 | "schema_version": 2, 4 | "dependencies": [], 5 | "version": "0.4.10" 6 | } 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .vscode 3 | dist/ 4 | *.db 5 | build/ 6 | target/ 7 | .idea 8 | *.xcframework 9 | *.tar.gz 10 | *.tar.xz 11 | *.zip 12 | .build 13 | -------------------------------------------------------------------------------- /android/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore Gradle project-specific cache directory 2 | .gradle 3 | 4 | # Ignore Gradle build output directory 5 | build 6 | 7 | local.properties 8 | -------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/libs/android.x86/abi.json: -------------------------------------------------------------------------------- 1 | { 2 | "abi": "x86", 3 | "api": 21, 4 | "ndk": 25, 5 | "stl": "none", 6 | "static": false 7 | } 8 | -------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/libs/android.x86_64/abi.json: -------------------------------------------------------------------------------- 1 | { 2 | "abi": "x86_64", 3 | "api": 21, 4 | "ndk": 25, 5 | "stl": "none", 6 | "static": false 7 | } 8 | 
-------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/libs/android.arm64-v8a/abi.json: -------------------------------------------------------------------------------- 1 | { 2 | "abi": "arm64-v8a", 3 | "api": 21, 4 | "ndk": 25, 5 | "stl": "none", 6 | "static": false 7 | } 8 | -------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/libs/android.armeabi-v7a/abi.json: -------------------------------------------------------------------------------- 1 | { 2 | "abi": "armeabi-v7a", 3 | "api": 21, 4 | "ndk": 25, 5 | "stl": "none", 6 | "static": false 7 | } 8 | -------------------------------------------------------------------------------- /crates/sqlite/README.md: -------------------------------------------------------------------------------- 1 | # sqlite 2 | 3 | This builds a plain sqlite3 shell. 4 | 5 | We could build this with plain gcc/clang, or download a pre-built binary, but it's simple enough with Rust tooling. 6 | -------------------------------------------------------------------------------- /android/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | 2 | pluginManagement { 3 | repositories { 4 | gradlePluginPortal() 5 | google() 6 | } 7 | } 8 | 9 | rootProject.name = "powersync-sqlite-core" 10 | 11 | -------------------------------------------------------------------------------- /android/src/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /crates/shell/README.md: -------------------------------------------------------------------------------- 1 | # powersync-sqlite 2 | 3 | Builds sqlite with powersync extension embedded. 4 | 5 | SQLite itself is built using [build.rs](./build.rs), and linked into the Rust binary. 6 | 7 | The main function is defined in SQLite, so we use `#![no_main]` here. 
8 | -------------------------------------------------------------------------------- /android/.gitattributes: -------------------------------------------------------------------------------- 1 | # 2 | # https://help.github.com/articles/dealing-with-line-endings/ 3 | # 4 | # Linux start script should use lf 5 | /gradlew text eol=lf 6 | 7 | # These are Windows script files and should use crlf 8 | *.bat text eol=crlf 9 | 10 | -------------------------------------------------------------------------------- /android/src/prefab/modules/powersync/include/powersync.h: -------------------------------------------------------------------------------- 1 | #ifndef POWERSYNC_H 2 | #define POWERSYNC_H 3 | 4 | #include "sqlite3.h" 5 | 6 | extern "C" int sqlite3_powersync_init(sqlite3 *db, char **pzErrMsg, 7 | const sqlite3_api_routines *pApi); 8 | 9 | #endif 10 | -------------------------------------------------------------------------------- /android/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-9.1.0-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /dart/README.md: -------------------------------------------------------------------------------- 1 | # Dart Tests 2 | 3 | This folder contains tests written in Dart, as a convenient higher-level language. 4 | 5 | The tests loads the compiled debug library. Before testing, build first using: 6 | 7 | ```sh 8 | cargo build -p powersync_loadable 9 | ``` 10 | 11 | Then test here: 12 | 13 | ```sh 14 | dart test 15 | ``` 16 | -------------------------------------------------------------------------------- /wasm/asyncify_imports.json: -------------------------------------------------------------------------------- 1 | [ 2 | "sqlite3_close", 3 | "sqlite3_finalize", 4 | "sqlite3_open_v2", 5 | "sqlite3_prepare", 6 | "sqlite3_prepare16", 7 | "sqlite3_prepare_v2", 8 | "sqlite3_prepare16_v2", 9 | "sqlite3_prepare_v3", 10 | "sqlite3_prepare16_v3", 11 | "sqlite3_reset", 12 | "sqlite3_step", 13 | "sqlite3_exec" 14 | ] 15 | -------------------------------------------------------------------------------- /crates/sqlite/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sqlite3" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | 11 | [dependencies] 12 | 13 | [features] 14 | 15 | [build-dependencies] 16 | cc = "1.0.46" 17 | -------------------------------------------------------------------------------- /crates/sqlite_nostd/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![allow(non_upper_case_globals)] 3 | #![allow(non_camel_case_types)] 4 | #![allow(non_snake_case)] 5 | 6 | pub mod bindings { 7 | include!(concat!(env!("OUT_DIR"), "/bindings.rs")); 8 | } 9 | 10 | mod allocator; 11 | mod capi; 12 | mod nostd; 13 | 14 | pub use allocator::SQLite3Allocator; 15 | pub use nostd::*; 16 | -------------------------------------------------------------------------------- /dart/pubspec.yaml: 
-------------------------------------------------------------------------------- 1 | name: powersync_sqlite_core_tests 2 | publish_to: "none" 3 | version: 0.0.1 4 | description: Tests for powersync-sqlite-core 5 | environment: 6 | sdk: ^3.4.0 7 | dependencies: 8 | sqlite3: ^2.7.6 9 | bson: ^5.0.5 10 | 11 | dev_dependencies: 12 | test: ^1.25.0 13 | test_descriptor: ^2.0.2 14 | file: ^7.0.1 15 | sqlite3_test: ^0.1.1 16 | fake_async: ^1.3.3 17 | convert: ^3.1.2 18 | meta: ^1.16.0 19 | path: ^1.9.1 20 | http: ^1.5.0 21 | archive: ^4.0.7 22 | -------------------------------------------------------------------------------- /crates/shell/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powersync_sqlite" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | 11 | [dependencies] 12 | powersync_core = { path="../core" } 13 | powersync_sqlite_nostd = { path = "../sqlite_nostd" } 14 | 15 | [features] 16 | default = ["powersync_core/static", "powersync_sqlite_nostd/static"] 17 | 18 | [build-dependencies] 19 | cc = "1.0.46" 20 | -------------------------------------------------------------------------------- /crates/static/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powersync_static" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | 11 | [lib] 12 | name = "powersync" 13 | crate-type = ["staticlib"] 14 | 15 | [dependencies] 16 | powersync_sqlite_nostd = { path = "../sqlite_nostd" } 17 | 18 | [dependencies.powersync_core] 19 | path = "../core" 20 | default-features = false 21 | features = [] 22 | 23 | [features] 24 | nightly = [] 25 | default = ["powersync_core/static"] 26 | -------------------------------------------------------------------------------- /crates/core/src/sync/mod.rs: -------------------------------------------------------------------------------- 1 | use alloc::rc::Rc; 2 | use powersync_sqlite_nostd::{self as sqlite, ResultCode}; 3 | 4 | mod bucket_priority; 5 | pub mod checkpoint; 6 | mod checksum; 7 | mod interface; 8 | pub mod line; 9 | pub mod operations; 10 | pub mod storage_adapter; 11 | mod streaming_sync; 12 | mod subscriptions; 13 | mod sync_status; 14 | 15 | pub use bucket_priority::BucketPriority; 16 | pub use checksum::Checksum; 17 | 18 | use crate::state::DatabaseState; 19 | 20 | pub fn register(db: *mut sqlite::sqlite3, state: Rc) -> Result<(), ResultCode> { 21 | interface::register(db, state) 22 | } 23 | -------------------------------------------------------------------------------- /crates/loadable/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powersync_loadable" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | 11 | [lib] 12 | name = "powersync" 13 | crate-type = ["cdylib", "staticlib"] 14 | 15 | [dependencies] 16 | powersync_sqlite_nostd = { path = "../sqlite_nostd" } 17 | 18 | [dependencies.powersync_core] 19 | path = "../core" 20 | default-features = false 21 | features = [] 22 | 
23 | [features] 24 | nightly = [] 25 | static = ["powersync_core/static"] 26 | default = ["powersync_core/getrandom"] 27 | -------------------------------------------------------------------------------- /crates/sqlite_nostd/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powersync_sqlite_nostd" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | description = "Lightweight, semi-unsafe, nostd bindings to sqlite3" 11 | readme = "README.md" 12 | 13 | [lib] 14 | name = "powersync_sqlite_nostd" 15 | crate-type = ["rlib"] 16 | 17 | 18 | [dependencies] 19 | num-derive = "0.4.2" 20 | num-traits = { version = "0.2.19", default-features = false } 21 | 22 | [features] 23 | static = [] 24 | 25 | [build-dependencies] 26 | bindgen = "0.72.1" 27 | -------------------------------------------------------------------------------- /Package.swift: -------------------------------------------------------------------------------- 1 | // swift-tools-version: 5.7 2 | 3 | // NOTE! This is never released, we're only using this to support local builds builds for the 4 | // Swift SDK. 5 | import PackageDescription 6 | let packageName = "PowerSyncSQLiteCore" 7 | 8 | let package = Package( 9 | name: packageName, 10 | platforms: [ 11 | .iOS(.v13), 12 | .macOS(.v10_15), 13 | .watchOS(.v9) 14 | ], 15 | products: [ 16 | .library( 17 | name: packageName, 18 | targets: [packageName]), 19 | ], 20 | targets: [ 21 | .binaryTarget( 22 | name: packageName, 23 | path: "powersync-sqlite-core.xcframework" 24 | ) 25 | ] 26 | ) 27 | -------------------------------------------------------------------------------- /crates/core/build.rs: -------------------------------------------------------------------------------- 1 | use std::process::Command; 2 | fn main() { 3 | let mut git_hash = Command::new("git") 4 | .args(&["rev-parse", "HEAD"]) 5 | .output() 6 | .ok() 7 | .and_then(|output| String::from_utf8(output.stdout).ok()) 8 | .unwrap_or_default(); 9 | 10 | if git_hash.is_empty() { 11 | // We can't compute the git hash for versions pushed to crates.io. That's fine, we'll use a 12 | // separate designator for that instead. The designator needs to be 8 chars in length since 13 | // that's the substring used in version numbers. 14 | git_hash = "cratesio".to_owned(); 15 | } 16 | 17 | println!("cargo:rustc-env=GIT_HASH={}", git_hash); 18 | } 19 | -------------------------------------------------------------------------------- /crates/core/src/constants.rs: -------------------------------------------------------------------------------- 1 | use core::ffi::c_int; 2 | 3 | pub const CORE_PKG_VERSION: &'static str = env!("CARGO_PKG_VERSION"); 4 | pub const FULL_GIT_HASH: &'static str = env!("GIT_HASH"); 5 | 6 | // We need 3.44 or later to use an `ORDER BY` in an aggregate function invocation. 7 | // 8 | // When raising the minimum version requirement, also change it in download_sqlite3.dart to ensure 9 | // we're testing with the minimum version we claim to support. 
10 | pub const MIN_SQLITE_VERSION_NUMBER: c_int = 3044000; 11 | 12 | pub const SUBTYPE_JSON: u32 = 'J' as u32; 13 | 14 | pub fn short_git_hash() -> &'static str { 15 | if FULL_GIT_HASH.len() >= 8 { 16 | &FULL_GIT_HASH[..8] 17 | } else { 18 | "no-git-unknown" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.github/actions/macos/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build macoS libraries" 2 | description: "Create artifact for macOS libraries" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Install Rust Nightly 8 | uses: dtolnay/rust-toolchain@stable 9 | with: 10 | toolchain: nightly-2025-10-31 11 | components: rust-src 12 | targets: x86_64-apple-darwin,aarch64-apple-darwin 13 | 14 | - name: Build binaries 15 | shell: bash 16 | run: | 17 | ./tool/build_macos.sh x64 18 | ./tool/build_macos.sh aarch64 19 | 20 | - uses: actions/upload-artifact@v4 21 | with: 22 | name: macos-library 23 | retention-days: 14 24 | path: | 25 | *.dylib 26 | *.a 27 | -------------------------------------------------------------------------------- /crates/core/src/schema/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod inspection; 2 | mod management; 3 | mod table_info; 4 | 5 | use alloc::{rc::Rc, vec::Vec}; 6 | use powersync_sqlite_nostd as sqlite; 7 | use serde::Deserialize; 8 | use sqlite::ResultCode; 9 | pub use table_info::{ 10 | Column, DiffIncludeOld, PendingStatement, PendingStatementValue, RawTable, Table, 11 | TableInfoFlags, 12 | }; 13 | 14 | use crate::state::DatabaseState; 15 | 16 | #[derive(Deserialize, Default)] 17 | pub struct Schema { 18 | pub tables: Vec, 19 | #[serde(default)] 20 | pub raw_tables: Vec, 21 | } 22 | 23 | pub fn register(db: *mut sqlite::sqlite3, state: Rc) -> Result<(), ResultCode> { 24 | management::register(db, state) 25 | } 26 | -------------------------------------------------------------------------------- /.github/actions/upload/action.yml: -------------------------------------------------------------------------------- 1 | name: "Upload binary file" 2 | description: "Upload binary file to GitHub releases" 3 | inputs: 4 | repo-token: 5 | required: true 6 | description: "The secret created for the workflow run" 7 | file-name: 8 | required: true 9 | description: "The file name to be uploaded" 10 | tag: 11 | required: false 12 | description: "The short ref name of the branch or tag that triggered the workflow run." 
13 | default: ${{ github.ref_name }} 14 | runs: 15 | using: "composite" 16 | steps: 17 | - name: Upload binary 18 | shell: bash 19 | env: 20 | GH_TOKEN: ${{ github.token }} 21 | GH_REPO: ${{ github.repository }} 22 | run: | 23 | gh release upload "${{ inputs.tag }}" "${{ inputs.file-name }}" 24 | -------------------------------------------------------------------------------- /.github/actions/wasm/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build wasm libraries" 2 | description: "Create artifact for wasm libraries" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Install Rust Nightly 8 | uses: dtolnay/rust-toolchain@stable 9 | with: 10 | toolchain: nightly-2025-10-31 11 | components: rust-src 12 | 13 | - name: Setup emsdk 14 | uses: mymindstorm/setup-emsdk@v14 15 | with: 16 | version: 4.0.10 17 | 18 | - name: Build WASM 19 | shell: bash 20 | run: ./tool/build_wasm.sh 21 | 22 | - uses: actions/upload-artifact@v4 23 | with: 24 | name: wasm-library 25 | retention-days: 14 26 | path: | 27 | libpowersync-async.wasm 28 | libpowersync.wasm 29 | libpowersync-wasm.a 30 | -------------------------------------------------------------------------------- /tool/build_windows.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | function compile() { 5 | local triple=$1 6 | local suffix=$2 7 | 8 | cargo build -p powersync_loadable -Z build-std=panic_abort,core,alloc --features=nightly --release --target $triple 9 | cargo build -p powersync_static -Z build-std=panic_abort,core,alloc --features=nightly --release --target $triple 10 | 11 | mv "target/$triple/release/powersync.dll" "powersync_$suffix.dll" 12 | mv "target/$triple/release/powersync.lib" "powersync_$suffix.lib" 13 | } 14 | 15 | case "$1" in 16 | x64) 17 | compile x86_64-pc-windows-msvc x64 18 | ;; 19 | x86) 20 | compile i686-pc-windows-msvc x86 21 | ;; 22 | aarch64) 23 | compile aarch64-pc-windows-msvc aarch64 24 | ;; 25 | *) 26 | echo "Unknown architecture" 27 | exit 1 28 | ;; 29 | esac 30 | -------------------------------------------------------------------------------- /.github/actions/windows/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build Windows libraries" 2 | description: "Create artifact for Windows libraries" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Install Rust Nightly 8 | uses: dtolnay/rust-toolchain@stable 9 | with: 10 | toolchain: nightly-2025-10-31 11 | components: rust-src 12 | targets: x86_64-pc-windows-msvc,aarch64-pc-windows-msvc,i686-pc-windows-msvc 13 | 14 | - name: Build binaries 15 | shell: bash 16 | run: | 17 | ./tool/build_windows.sh x64 18 | ./tool/build_windows.sh aarch64 19 | ./tool/build_windows.sh x86 20 | 21 | - uses: actions/upload-artifact@v4 22 | with: 23 | name: windows-library 24 | retention-days: 14 25 | path: | 26 | *.dll 27 | *.lib 28 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | 3 | members = [ 4 | "crates/*" 5 | ] 6 | resolver = "2" 7 | # We cannot build shell and lodable and the same time 8 | default-members = ["crates/shell", "crates/sqlite"] 9 | 10 | [profile.dev] 11 | panic = "abort" 12 | 13 | [profile.release] 14 | panic = "abort" 15 | strip = true 16 | opt-level = "z" 17 | lto = true 18 | 19 | [profile.release_apple] 20 | inherits = 
"release" 21 | strip = false 22 | debug = true 23 | 24 | [profile.wasm] 25 | inherits = "release" 26 | 27 | [profile.wasm_asyncify] 28 | inherits = "wasm" 29 | 30 | [workspace.package] 31 | version = "0.4.10" 32 | edition = "2024" 33 | authors = ["JourneyApps"] 34 | keywords = ["sqlite", "powersync"] 35 | license = "Apache-2.0" 36 | homepage = "https://powersync.com" 37 | repository = "https://github.com/powersync-ja/powersync-sqlite-core" 38 | -------------------------------------------------------------------------------- /crates/sqlite/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let mut cfg = cc::Build::new(); 3 | 4 | // Compile the SQLite source 5 | cfg.file("./sqlite/sqlite3.c"); 6 | cfg.file("./sqlite/shell.c"); 7 | cfg.include("./sqlite"); 8 | 9 | // General SQLite options 10 | cfg.define("SQLITE_THREADSAFE", Some("0")); 11 | cfg.define("SQLITE_ENABLE_BYTECODE_VTAB", Some("1")); 12 | 13 | // Compile with readline support (also requires -lreadline / cargo:rustc-link-lib=readline below) 14 | cfg.define("HAVE_READLINE", Some("1")); 15 | 16 | // Silence warnings generated for SQLite 17 | cfg.flag("-Wno-implicit-fallthrough"); 18 | cfg.flag("-Wno-unused-parameter"); 19 | cfg.flag("-Wno-null-pointer-subtraction"); 20 | 21 | cfg.compile("sqlite"); 22 | 23 | println!("cargo:rustc-link-lib=readline"); 24 | } 25 | -------------------------------------------------------------------------------- /crates/sqlite_nostd/src/allocator.rs: -------------------------------------------------------------------------------- 1 | use core::alloc::{GlobalAlloc, Layout}; 2 | 3 | /// A [GlobalAlloc] implementation forwarding allocations to the 4 | /// [memory allocation subsystem](https://sqlite.org/c3ref/free.html) in SQLite. 5 | /// 6 | /// Using this allocator allows moving allocated Rust values to SQLite. 7 | pub struct SQLite3Allocator {} 8 | 9 | unsafe impl GlobalAlloc for SQLite3Allocator { 10 | unsafe fn alloc(&self, layout: Layout) -> *mut u8 { 11 | crate::capi::malloc(layout.size()) 12 | } 13 | 14 | unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 15 | crate::capi::free(ptr as *mut core::ffi::c_void); 16 | } 17 | 18 | unsafe fn realloc(&self, ptr: *mut u8, _layout: Layout, new_size: usize) -> *mut u8 { 19 | crate::capi::realloc(ptr.cast(), new_size) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /powersync-sqlite-core.podspec: -------------------------------------------------------------------------------- 1 | Pod::Spec.new do |s| 2 | s.name = 'powersync-sqlite-core' 3 | s.version = '0.4.10' 4 | s.summary = 'PowerSync SQLite Extension' 5 | s.description = <<-DESC 6 | PowerSync extension for SQLite. 7 | DESC 8 | 9 | s.homepage = 'https://github.com/powersync-ja/powersync-sqlite-core' 10 | s.license = 'Apache License, Version 2.0' 11 | s.author = 'Journey Mobile, Inc.' 
12 | 13 | s.source = { :http => "https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v#{s.version}/powersync-sqlite-core.xcframework.zip" } 14 | s.vendored_frameworks = 'powersync-sqlite-core.xcframework' 15 | 16 | s.ios.deployment_target = '11.0' 17 | s.osx.deployment_target = '10.13' 18 | s.watchos.deployment_target = '9.0' 19 | end 20 | -------------------------------------------------------------------------------- /tool/build_macos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | function compile() { 5 | local triple=$1 6 | local suffix=$2 7 | local os=$3 8 | 9 | cargo build -p powersync_loadable -Z build-std=panic_abort,core,alloc --features nightly --release --target $triple 10 | cargo build -p powersync_static -Z build-std=panic_abort,core,alloc --features nightly --release --target $triple 11 | 12 | mv "target/$triple/release/libpowersync.dylib" "libpowersync_$suffix.$os.dylib" 13 | mv "target/$triple/release/libpowersync.a" "libpowersync_$suffix.$os.a" 14 | } 15 | 16 | case "$1" in 17 | x64) 18 | compile x86_64-apple-darwin x64 macos 19 | compile x86_64-apple-ios x64 ios-sim 20 | ;; 21 | aarch64) 22 | compile aarch64-apple-darwin aarch64 macos 23 | compile aarch64-apple-ios-sim aarch64 ios-sim 24 | compile aarch64-apple-ios aarch64 ios 25 | ;; 26 | *) 27 | echo "Unknown architecture" 28 | exit 1; 29 | ;; 30 | esac 31 | -------------------------------------------------------------------------------- /crates/shell/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let mut cfg = cc::Build::new(); 3 | 4 | // Compile the SQLite source 5 | cfg.file("../sqlite/sqlite/sqlite3.c"); 6 | cfg.file("../sqlite/sqlite/shell.c"); 7 | cfg.include("../sqlite/sqlite"); 8 | 9 | // General SQLite options 10 | cfg.define("SQLITE_THREADSAFE", Some("0")); 11 | cfg.define("SQLITE_ENABLE_BYTECODE_VTAB", Some("1")); 12 | 13 | // Call core_init() in main.rs 14 | cfg.define("SQLITE_EXTRA_INIT", Some("core_init")); 15 | 16 | // Compile with readline support (also requires -lreadline / cargo:rustc-link-lib=readline below) 17 | cfg.define("HAVE_READLINE", Some("1")); 18 | 19 | // Silence warnings generated for SQLite 20 | cfg.flag("-Wno-implicit-fallthrough"); 21 | cfg.flag("-Wno-unused-parameter"); 22 | cfg.flag("-Wno-null-pointer-subtraction"); 23 | 24 | cfg.compile("sqlite-ps"); 25 | 26 | println!("cargo:rustc-link-lib=readline"); 27 | } 28 | -------------------------------------------------------------------------------- /crates/static/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![allow(internal_features)] 3 | #![cfg_attr(feature = "nightly", feature(core_intrinsics))] 4 | 5 | extern crate alloc; 6 | 7 | // Defines sqlite3_powersync_init 8 | #[allow(unused_imports)] 9 | use powersync_core; 10 | 11 | // Use the SQLite allocator, allowing us to freely transfer memory between SQLite and Rust. 12 | #[cfg(not(test))] 13 | use powersync_sqlite_nostd::SQLite3Allocator; 14 | 15 | #[cfg(not(test))] 16 | #[global_allocator] 17 | static ALLOCATOR: SQLite3Allocator = SQLite3Allocator {}; 18 | 19 | // Custom Panic handler for WASM and other no_std builds 20 | #[cfg(not(test))] 21 | mod panic_handler { 22 | #[cfg(feature = "nightly")] 23 | #[panic_handler] 24 | fn panic(_info: &core::panic::PanicInfo) -> ! 
{ 25 | core::intrinsics::abort() 26 | } 27 | 28 | #[cfg(not(feature = "nightly"))] 29 | #[panic_handler] 30 | fn panic(_info: &core::panic::PanicInfo) -> ! { 31 | loop {} 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /crates/shell/src/main.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | #![no_std] 3 | #![allow(internal_features)] 4 | #![feature(lang_items)] 5 | #![feature(core_intrinsics)] 6 | 7 | use core::ffi::{c_char, c_int}; 8 | 9 | use powersync_core::powersync_init_static; 10 | 11 | // Use the SQLite allocator, allowing us to freely transfer memory between SQLite and Rust. 12 | #[cfg(not(test))] 13 | use powersync_sqlite_nostd::SQLite3Allocator; 14 | 15 | #[cfg(not(test))] 16 | #[global_allocator] 17 | static ALLOCATOR: SQLite3Allocator = SQLite3Allocator {}; 18 | 19 | // Custom Panic handler for WASM and other no_std builds 20 | #[cfg(not(test))] 21 | #[panic_handler] 22 | fn panic(_info: &core::panic::PanicInfo) -> ! { 23 | core::intrinsics::abort() 24 | } 25 | 26 | #[cfg(not(target_family = "wasm"))] 27 | #[cfg(not(test))] 28 | #[lang = "eh_personality"] 29 | extern "C" fn eh_personality() {} 30 | 31 | #[unsafe(no_mangle)] 32 | pub extern "C" fn core_init(_dummy: *mut c_char) -> c_int { 33 | powersync_init_static() 34 | } 35 | -------------------------------------------------------------------------------- /tool/build_linux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | function compile() { 5 | local triple=$1 6 | local suffix=$2 7 | 8 | cargo build -p powersync_loadable -Z build-std=panic_abort,core,alloc --features nightly --release --target $triple 9 | cargo build -p powersync_static -Z build-std=panic_abort,core,alloc --features nightly --release --target $triple 10 | 11 | mv "target/$triple/release/libpowersync.so" "libpowersync_$suffix.linux.so" 12 | mv "target/$triple/release/libpowersync.a" "libpowersync_$suffix.linux.a" 13 | } 14 | 15 | case "$1" in 16 | x64) 17 | compile x86_64-unknown-linux-gnu x64 18 | ;; 19 | x86) 20 | compile i686-unknown-linux-gnu x86 21 | ;; 22 | aarch64) 23 | compile aarch64-unknown-linux-gnu aarch64 24 | ;; 25 | armv7) 26 | compile armv7-unknown-linux-gnueabihf armv7 27 | ;; 28 | riscv64gc) 29 | compile riscv64gc-unknown-linux-gnu riscv64gc 30 | ;; 31 | *) 32 | echo "Unknown architecture" 33 | exit 1; 34 | ;; 35 | esac 36 | -------------------------------------------------------------------------------- /dart/test/goldens/starting_stream.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "operation": "start", 4 | "data": { 5 | "parameters": { 6 | "foo": "bar" 7 | } 8 | }, 9 | "output": [ 10 | { 11 | "UpdateSyncStatus": { 12 | "status": { 13 | "connected": false, 14 | "connecting": true, 15 | "priority_status": [], 16 | "downloading": null, 17 | "streams": [] 18 | } 19 | } 20 | }, 21 | { 22 | "EstablishSyncStream": { 23 | "request": { 24 | "buckets": [], 25 | "include_checksum": true, 26 | "raw_data": true, 27 | "binary_data": true, 28 | "client_id": "test-test-test-test", 29 | "parameters": { 30 | "foo": "bar" 31 | }, 32 | "streams": { 33 | "include_defaults": true, 34 | "subscriptions": [] 35 | } 36 | } 37 | } 38 | } 39 | ] 40 | } 41 | ] -------------------------------------------------------------------------------- /crates/core/src/ext.rs: 
-------------------------------------------------------------------------------- 1 | use powersync_sqlite_nostd::{Connection, Destructor, ManagedStmt, ResultCode, sqlite3}; 2 | 3 | pub trait SafeManagedStmt { 4 | fn exec(&self) -> Result<(), ResultCode>; 5 | } 6 | 7 | impl SafeManagedStmt for ManagedStmt { 8 | fn exec(&self) -> Result<(), ResultCode> { 9 | loop { 10 | let rs = self.step()?; 11 | if rs == ResultCode::ROW { 12 | continue; 13 | } 14 | 15 | self.reset()?; 16 | if rs == ResultCode::DONE { 17 | break; 18 | } else { 19 | return Err(rs); 20 | } 21 | } 22 | Ok(()) 23 | } 24 | } 25 | 26 | pub trait ExtendedDatabase { 27 | fn exec_text(&self, sql: &str, param: &str) -> Result<(), ResultCode>; 28 | } 29 | 30 | impl ExtendedDatabase for *mut sqlite3 { 31 | fn exec_text(&self, sql: &str, param: &str) -> Result<(), ResultCode> { 32 | let statement = self.prepare_v2(sql)?; 33 | statement.bind_text(1, param, Destructor::STATIC)?; 34 | 35 | statement.exec()?; 36 | Ok(()) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /crates/core/src/version.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::format; 4 | use alloc::string::String; 5 | use core::ffi::c_int; 6 | 7 | use powersync_sqlite_nostd as sqlite; 8 | use powersync_sqlite_nostd::{Connection, Context}; 9 | use sqlite::ResultCode; 10 | 11 | use crate::constants::{CORE_PKG_VERSION, short_git_hash}; 12 | use crate::create_sqlite_text_fn; 13 | use crate::error::PowerSyncError; 14 | 15 | fn powersync_rs_version_impl( 16 | _ctx: *mut sqlite::context, 17 | _args: &[*mut sqlite::value], 18 | ) -> Result { 19 | let version = format!("{}/{}", CORE_PKG_VERSION, short_git_hash()); 20 | Ok(version) 21 | } 22 | 23 | create_sqlite_text_fn!( 24 | powersync_rs_version, 25 | powersync_rs_version_impl, 26 | "powersync_rs_version" 27 | ); 28 | 29 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 30 | db.create_function_v2( 31 | "powersync_rs_version", 32 | 0, 33 | sqlite::UTF8, 34 | None, 35 | Some(powersync_rs_version), 36 | None, 37 | None, 38 | None, 39 | )?; 40 | 41 | Ok(()) 42 | } 43 | -------------------------------------------------------------------------------- /.github/actions/linux/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build Linux libraries" 2 | description: "Create artifact for Linux libraries" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Install Rust Nightly 8 | uses: dtolnay/rust-toolchain@stable 9 | with: 10 | toolchain: nightly-2025-10-31 11 | components: rust-src 12 | targets: aarch64-unknown-linux-gnu,x86_64-unknown-linux-gnu,i686-unknown-linux-gnu,riscv64gc-unknown-linux-gnu,armv7-unknown-linux-gnueabihf 13 | 14 | - name: Install cross-compiling GCC 15 | shell: bash 16 | run: | 17 | sudo apt update 18 | sudo apt install -y gcc-aarch64-linux-gnu gcc-riscv64-linux-gnu gcc-arm-linux-gnueabihf gcc-i686-linux-gnu 19 | 20 | - name: Build binaries 21 | shell: bash 22 | run: | 23 | ./tool/build_linux.sh x64 24 | ./tool/build_linux.sh aarch64 25 | ./tool/build_linux.sh x86 26 | ./tool/build_linux.sh armv7 27 | ./tool/build_linux.sh riscv64gc 28 | 29 | - uses: actions/upload-artifact@v4 30 | with: 31 | name: linux-library 32 | retention-days: 14 33 | path: | 34 | *.so 35 | *.linux.a 36 | -------------------------------------------------------------------------------- /crates/core/src/uuid.rs: 
-------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::String; 4 | use alloc::string::ToString; 5 | use core::ffi::c_int; 6 | 7 | use powersync_sqlite_nostd as sqlite; 8 | use powersync_sqlite_nostd::{Connection, Context}; 9 | use sqlite::ResultCode; 10 | 11 | use crate::create_sqlite_text_fn; 12 | use crate::error::PowerSyncError; 13 | use crate::util::*; 14 | 15 | fn uuid_v4_impl( 16 | _ctx: *mut sqlite::context, 17 | _args: &[*mut sqlite::value], 18 | ) -> Result { 19 | let id = gen_uuid(); 20 | Ok(id.hyphenated().to_string()) 21 | } 22 | 23 | create_sqlite_text_fn!(uuid_v4, uuid_v4_impl, "gen_random_uuid"); 24 | 25 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 26 | db.create_function_v2( 27 | "gen_random_uuid", 28 | 0, 29 | sqlite::UTF8, 30 | None, 31 | Some(uuid_v4), 32 | None, 33 | None, 34 | None, 35 | )?; 36 | 37 | db.create_function_v2( 38 | "uuid", 39 | 0, 40 | sqlite::UTF8, 41 | None, 42 | Some(uuid_v4), 43 | None, 44 | None, 45 | None, 46 | )?; 47 | 48 | Ok(()) 49 | } 50 | -------------------------------------------------------------------------------- /crates/sqlite_nostd/build.rs: -------------------------------------------------------------------------------- 1 | extern crate bindgen; 2 | 3 | use std::env; 4 | use std::path::PathBuf; 5 | 6 | fn main() { 7 | println!("cargo:rerun-if-changed=deps/sqlite3ext.h"); 8 | 9 | let bindings = bindgen::Builder::default() 10 | // The input header we would like to generate 11 | // bindings for. 12 | .header("deps/sqlite3ext.h") 13 | .clang_arg("-fvisibility=default") 14 | // ^-- to get functions exposed in wasm 15 | // https://github.com/rust-lang/rust-bindgen/issues/751 16 | .use_core() 17 | // Tell cargo to invalidate the built crate whenever any of the 18 | // included header files changed. 19 | .parse_callbacks(Box::new(bindgen::CargoCallbacks::new())) 20 | // Finish the builder and generate the bindings. 21 | .generate() 22 | // Unwrap the Result and panic on failure. 23 | .expect("Unable to generate bindings"); 24 | 25 | // Write the bindings to the $OUT_DIR/bindings.rs file. 26 | let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); 27 | bindings 28 | .write_to_file(out_path.join("bindings.rs")) 29 | .expect("Couldn't write bindings!"); 30 | } 31 | -------------------------------------------------------------------------------- /docs/RELEASING.md: -------------------------------------------------------------------------------- 1 | # Preparing Release 2 | 3 | First, bump the version number in these places: 4 | 5 | 1. Cargo.toml 6 | 2. powersync-sqlite-core.podspec. 7 | 3. android/build.gradle.kts 8 | 4. android/src/prefab/prefab.json 9 | 5. tool/build_xcframework.sh - `VERSION` variable. 10 | 6. Version dependency from `crates/core` to `crates/sqlite_nostd`. 11 | 7. `cargo build` to update Cargo.lock 12 | 13 | Next, open a PR with these changes and wait for it to get approved and merged. 14 | 15 | # Perform Release 16 | 17 | Create a tag, which will trigger a release workflow when pushed: 18 | 19 | ```sh 20 | git tag -am v1.2.3 v1.2.3 21 | git push --tags 22 | ``` 23 | 24 | The publishing workflow does the following: 25 | 26 | 1. Create a draft GitHub release. 27 | 2. Build the xcframework for iOS and macOS, and upload to GitHub (attached to the above release). 28 | 3. Build and publish an Android aar to Sonatype. 
Afterwards, you can monitor the status of the publishing step [here](https://central.sonatype.com/publishing/deployments). 29 | 30 | The cocoapod needs to be published manually: 31 | 32 | ```sh 33 | pod trunk push powersync-sqlite-core.podspec 34 | ``` 35 | 36 | # Updating SDKs 37 | 38 | The release workflow will create an issue with a list of items to update the individual SDKs and intermediate packages. 39 | -------------------------------------------------------------------------------- /tool/build_wasm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | emcc --version 4 | 5 | # Normal build 6 | # target/wasm32-unknown-emscripten/wasm/powersync.wasm 7 | RUSTFLAGS="-C link-arg=-sSIDE_MODULE=2" \ 8 | cargo build \ 9 | -p powersync_loadable \ 10 | --profile wasm \ 11 | --no-default-features \ 12 | --features "static nightly" \ 13 | -Z build-std=panic_abort,core,alloc \ 14 | --target wasm32-unknown-emscripten 15 | 16 | cp "target/wasm32-unknown-emscripten/wasm/powersync.wasm" "libpowersync.wasm" 17 | 18 | # Asyncify 19 | # target/wasm32-unknown-emscripten/wasm_asyncify/powersync.wasm 20 | RUSTFLAGS="-C link-arg=-sSIDE_MODULE=2 -C link-arg=-sASYNCIFY=1 -C link-arg=-sJSPI_IMPORTS=@wasm/asyncify_imports.json" \ 21 | cargo build \ 22 | -p powersync_loadable \ 23 | --profile wasm_asyncify \ 24 | --no-default-features \ 25 | --features "static nightly" \ 26 | -Z build-std=panic_abort,core,alloc \ 27 | --target wasm32-unknown-emscripten 28 | 29 | cp "target/wasm32-unknown-emscripten/wasm_asyncify/powersync.wasm" "libpowersync-async.wasm" 30 | 31 | 32 | # Static lib. 33 | # Works for both sync and asyncify builds. 34 | # Works for both emscripten and wasi. 35 | # target/wasm32-wasip1/wasm/libpowersync.a 36 | cargo build \ 37 | -p powersync_loadable \ 38 | --profile wasm \ 39 | --no-default-features \ 40 | --features "static nightly" \ 41 | -Z build-std=panic_abort,core,alloc \ 42 | --target wasm32-wasip1 43 | 44 | cp "target/wasm32-wasip1/wasm/libpowersync.a" "libpowersync-wasm.a" 45 | -------------------------------------------------------------------------------- /crates/core/README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 | 5 | _[PowerSync](https://www.powersync.com) is a sync engine for building local-first apps with instantly-responsive UI/UX and simplified state transfer. Syncs between SQLite on the client-side and Postgres, MongoDB or MySQL on the server-side._ 6 | 7 | # powersync_core 8 | 9 | This is the core SQLite extension, containing all the logic. This is used internally by PowerSync SDKs, 10 | and would typically not be used by users directly. 11 | 12 | The role of the extension is to create user-defined functions that higher-level SDKs would use to implement 13 | schema management and a PowerSync client. 14 | Not all of this is documented, but [this directory](https://github.com/powersync-ja/powersync-sqlite-core/tree/main/docs) 15 | provides some hints on how a custom PowerSync SDK could be implemented. 16 | 17 | For this reason, the crate doesn't have much of a public API. In the default build mode, it doesn't expect 18 | SQLite to be linked and exposes a single function: `sqlite3_powersync_init`, 19 | a [loadable extension](https://sqlite.org/loadext.html) entrypoint. 20 | 21 | For applications linking SQLite, the `static` feature of this crate can be enabled. 22 | With that feature, `powersync_init_static()` can be called to load the 23 | extension for all new connections. 24 | The application is responsible for linking SQLite in that case. 25 | -------------------------------------------------------------------------------- /crates/core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powersync_core" 3 | edition.workspace = true 4 | version.workspace = true 5 | homepage.workspace = true 6 | repository.workspace = true 7 | license.workspace = true 8 | authors.workspace = true 9 | keywords.workspace = true 10 | description = "The PowerSync SQLite extension" 11 | readme = "README.md" 12 | 13 | [lib] 14 | name = "powersync_core" 15 | crate-type = ["rlib"] 16 | 17 | [dependencies] 18 | powersync_sqlite_nostd = { version = "=0.4.10", path = "../sqlite_nostd" } 19 | bytes = { version = "1.4", default-features = false } 20 | num-traits = { version = "0.2.15", default-features = false } 21 | num-derive = "0.3" 22 | serde_json = { version = "1.0", default-features = false, features = ["alloc", "raw_value"] } 23 | serde = { version = "1.0", default-features = false, features = ["alloc", "derive", "rc"] } 24 | const_format = "0.2.34" 25 | futures-lite = { version = "2.6.0", default-features = false, features = ["alloc"] } 26 | rustc-hash = { version = "2.1", default-features = false } 27 | thiserror = { version = "2", default-features = false } 28 | serde_with = { version = "3.14.0", default-features = false, features = ["alloc", "macros"] } 29 | 30 | [dependencies.uuid] 31 | version = "1.4.1" 32 | default-features = false 33 | features = [] 34 | 35 | 36 | [dev-dependencies] 37 | 38 | 39 | [features] 40 | default = ["getrandom"] 41 | 42 | static = ["powersync_sqlite_nostd/static"] 43 | # Enable to use the getrandom crate instead of sqlite3_randomness 44 | # Enable for Windows builds; do not enable for WASM 45 | getrandom = ["uuid/v4"] 46 | 47 | -------------------------------------------------------------------------------- /crates/sqlite_nostd/README.md: -------------------------------------------------------------------------------- 1 | This is a fork of https://github.com/vlcn-io/sqlite-rs-embedded with adaptations for the PowerSync core extension. 
2 | 3 | # SQLite no_std 4 | 5 | > Note: these bindings are faithful to the base SQLite C-API as much as possible for minimum rust<->c overhead. This, however, means that the bindings are not entirely safe. E.g., the SQLite statement object will clear returned values out from under you if you step or finalize it while those references exist in your Rust program. 6 | 7 | SQLite is lite. Its bindings should be lite too. They should be able to be used _anywhere_ SQLite is used, _not_ incur any performance impact, _not_ include any extra dependencies, and be usable against _any_ SQLite version. 8 | 9 | Thus this repository was born. 10 | 11 | These bindings: 12 | 13 | - Do not require the rust standard library 14 | - Can use the SQLite memory subsystem if no allocator exists 15 | - Can be used to write SQLite extensions that compile to WASM and run in the browser 16 | - Does 0 copying. E.g., through some tricks, Rust strings are passed directly to SQLite with no conversion to or copying to CString. 17 | 18 | ## Features 19 | 20 | By default, this crate compiles to be used in a loadable SQLite extension: All calls are dispatched through 21 | the `sqlite3_api_routines` struct, and one needs to call `EXTENSION_INIT2()` from an entrypoint before using 22 | the library. 23 | 24 | Outside of loadable extensions, one can enable the `static` feature. When enabled, calls go to `sqlite3_` 25 | functions directly, SQLite needs to be linked for this library to work. 26 | -------------------------------------------------------------------------------- /.github/actions/xcframework/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build xcframework" 2 | description: "Create artifact with XCFramework for apple targets" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Setup 8 | shell: bash 9 | run: | 10 | rustup toolchain install nightly-2025-10-31-aarch64-apple-darwin 11 | rustup component add rust-src --toolchain nightly-2025-10-31-aarch64-apple-darwin 12 | rustup target add \ 13 | x86_64-apple-darwin \ 14 | aarch64-apple-darwin \ 15 | aarch64-apple-ios \ 16 | aarch64-apple-ios-sim \ 17 | x86_64-apple-ios 18 | 19 | - name: setup-cocoapods 20 | uses: maxim-lobanov/setup-cocoapods@v1 21 | with: 22 | version: 1.16.2 23 | 24 | - name: Set up XCode 25 | uses: maxim-lobanov/setup-xcode@v1 26 | with: 27 | # TODO: Update to latest-stable once GH installs iOS 26 simulators 28 | xcode-version: '^16.4.0' 29 | 30 | - name: List simulators 31 | shell: bash 32 | run: | 33 | xcrun xctrace list devices 34 | 35 | - name: Build iOS & macOS xcframework 36 | shell: bash 37 | run: | 38 | ./tool/build_xcframework.sh 39 | 40 | - name: Lint pod 41 | shell: bash 42 | run: | 43 | pod lib lint 44 | 45 | - uses: actions/upload-artifact@v4 46 | with: 47 | name: xcframework 48 | retention-days: 14 49 | compression-level: 0 # We're uploading a zip archive, no need to compress agan 50 | path: | 51 | powersync-sqlite-core.xcframework.zip 52 | -------------------------------------------------------------------------------- /crates/loadable/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![allow(internal_features)] 3 | #![cfg_attr(feature = "nightly", feature(core_intrinsics))] 4 | #![cfg_attr(feature = "nightly", feature(lang_items))] 5 | 6 | extern crate alloc; 7 | 8 | // Defines sqlite3_powersync_init 9 | #[allow(unused_imports)] 10 | use powersync_core; 11 | 12 | // Use the SQLite allocator, allowing us to 
freely transfer memory between SQLite and Rust. 13 | #[cfg(not(test))] 14 | use powersync_sqlite_nostd::SQLite3Allocator; 15 | 16 | #[cfg(not(test))] 17 | #[global_allocator] 18 | static ALLOCATOR: SQLite3Allocator = SQLite3Allocator {}; 19 | 20 | // Custom Panic handler for WASM and other no_std builds 21 | #[cfg(not(test))] 22 | mod panic_handler { 23 | #[cfg(feature = "nightly")] 24 | #[panic_handler] 25 | fn panic(_info: &core::panic::PanicInfo) -> ! { 26 | core::intrinsics::abort() 27 | } 28 | 29 | #[cfg(not(feature = "nightly"))] 30 | #[panic_handler] 31 | fn panic(_info: &core::panic::PanicInfo) -> ! { 32 | loop {} 33 | } 34 | 35 | #[cfg(not(target_family = "wasm"))] 36 | #[cfg(feature = "nightly")] 37 | #[lang = "eh_personality"] 38 | extern "C" fn eh_personality() {} 39 | 40 | #[cfg(not(target_family = "wasm"))] 41 | #[cfg(not(feature = "nightly"))] 42 | #[unsafe(no_mangle)] 43 | extern "C" fn rust_eh_personality() { 44 | // This avoids missing _rust_eh_personality symbol errors. 45 | // This isn't used for any builds we distribute, but it's heplful to compile the library 46 | // with stable Rust, which we do for testing. 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /dart/benchmark/apply_lines.dart: -------------------------------------------------------------------------------- 1 | import 'dart:io'; 2 | import 'dart:typed_data'; 3 | 4 | import '../test/utils/native_test_utils.dart'; 5 | 6 | /// Usage: dart run benchmark/apply_lines.dart path/to/lines.bin 7 | /// 8 | /// This creates a new in-memory database and applies concatenated BSON sync 9 | /// lines from a file. 10 | void main(List args) { 11 | if (args.length != 1) { 12 | throw 'Usage: dart run benchmark/apply_lines.dart path/to/lines.bin'; 13 | } 14 | 15 | final [path] = args; 16 | final file = File(path).openSync(); 17 | final db = openTestDatabase(); 18 | 19 | db 20 | ..execute('select powersync_init()') 21 | ..execute('select powersync_control(?, null)', ['start']); 22 | 23 | final stopwatch = Stopwatch()..start(); 24 | 25 | final lengthBuffer = Uint8List(4); 26 | while (file.positionSync() < file.lengthSync()) { 27 | // BSON document: 28 | final bytesRead = file.readIntoSync(lengthBuffer); 29 | if (bytesRead != 4) { 30 | throw 'short read, expected length'; 31 | } 32 | final length = lengthBuffer.buffer.asByteData().getInt32(0, Endian.little); 33 | file.setPositionSync(file.positionSync() - 4); 34 | 35 | final syncLineBson = file.readSync(length); 36 | if (syncLineBson.length != length) { 37 | throw 'short read for bson document'; 38 | } 39 | 40 | db 41 | ..execute('BEGIN') 42 | ..execute('SELECT powersync_control(?, ?)', ['line_binary', syncLineBson]) 43 | ..execute('COMMIT;'); 44 | } 45 | 46 | stopwatch.stop(); 47 | print('Applying $path took ${stopwatch.elapsed}'); 48 | } 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PowerSync SQLite Extension 2 | 3 | This extension is used by PowerSync client SDKs. 4 | 5 | The APIs here not currently stable, and may change in any release. The APIs are intended to be used by PowerSync SDKs only. 6 | 7 | # API 8 | 9 | Primary APIs: 10 | 11 | ```sql 12 | -- Load the extension 13 | -- Sets up functions and views, but does not touch the database itself. 14 | .load powersync 15 | 16 | -- Configure the schemas. 17 | -- Creates data tables, indexes and views. 
18 | SELECT powersync_replace_schema('{"tables": [{"name": "test", "columns": [{"name": "name", "type": "text"}]}]}'); 19 | 20 | ``` 21 | 22 | Other APIs: 23 | 24 | ```sql 25 | -- Initialize the extension data (creates internal tables). 26 | -- Optional - also called as part of powersync_replace_schema(). 27 | -- Only useful to ensure internal tables are configured without touching the schema. 28 | SELECT powersync_init(); 29 | 30 | ``` 31 | 32 | # Building and running 33 | 34 | Initialize submodules recursively 35 | 36 | ``` 37 | git submodule update --init --recursive 38 | ``` 39 | 40 | ```sh 41 | # Build the shell 42 | cargo build -p powersync_sqlite 43 | ./target/debug/powersync_sqlite test.db "select powersync_rs_version()" 44 | 45 | # Build the loadable extension 46 | cargo build -p powersync_loadable 47 | sqlite3 ":memory:" ".load ./target/debug/libpowersync" "select powersync_rs_version()" #This requires sqlite3 installed 48 | 49 | # Build the release loadable extension 50 | cargo build -p powersync_loadable --release 51 | 52 | # Build for iOS 53 | ./tool/build_xcframework.sh 54 | ``` 55 | 56 | # Acknowledgements 57 | 58 | Structure of the SQLite extension using Rust is inspired by [cr-sqlite](https://github.com/vlcn-io/cr-sqlite/). 59 | -------------------------------------------------------------------------------- /dart/test/update_hooks_test.dart: -------------------------------------------------------------------------------- 1 | import 'dart:convert'; 2 | 3 | import 'package:sqlite3/common.dart'; 4 | import 'package:test/test.dart'; 5 | 6 | import 'utils/native_test_utils.dart'; 7 | 8 | void main() { 9 | late CommonDatabase db; 10 | 11 | setUp(() async { 12 | db = openTestDatabase() 13 | ..select('select powersync_init()') 14 | ..execute('CREATE TABLE foo (bar INTEGER);') 15 | ..select("SELECT powersync_update_hooks('install')"); 16 | }); 17 | 18 | tearDown(() { 19 | db.dispose(); 20 | }); 21 | 22 | List collectUpdates() { 23 | final [row] = db.select("SELECT powersync_update_hooks('get')"); 24 | return (json.decode(row.values[0] as String) as List).cast(); 25 | } 26 | 27 | test('is empty initially', () { 28 | expect(collectUpdates(), isEmpty); 29 | }); 30 | 31 | test('reports changed tables', () { 32 | db.execute('INSERT INTO foo DEFAULT VALUES'); 33 | expect(collectUpdates(), ['foo']); 34 | }); 35 | 36 | test('deduplicates tables', () { 37 | final stmt = db.prepare('INSERT INTO foo (bar) VALUES (?)'); 38 | for (var i = 0; i < 1000; i++) { 39 | stmt.execute([i]); 40 | } 41 | stmt.dispose(); 42 | 43 | expect(collectUpdates(), ['foo']); 44 | }); 45 | 46 | test('does not report changes before end of transaction', () { 47 | db.execute('BEGIN'); 48 | db.execute('INSERT INTO foo DEFAULT VALUES'); 49 | expect(collectUpdates(), isEmpty); 50 | db.execute('COMMIT'); 51 | 52 | expect(collectUpdates(), ['foo']); 53 | }); 54 | 55 | test('does not report rollbacks', () { 56 | db.execute('BEGIN'); 57 | db.execute('INSERT INTO foo DEFAULT VALUES'); 58 | expect(collectUpdates(), isEmpty); 59 | db.execute('ROLLBACK'); 60 | 61 | expect(collectUpdates(), isEmpty); 62 | }); 63 | } 64 | -------------------------------------------------------------------------------- /dart/test/utils/test_utils.dart: -------------------------------------------------------------------------------- 1 | import 'package:sqlite3/common.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | /// Creates a `checkpoint` line. 5 | Object checkpoint({ 6 | required int lastOpId, 7 | List buckets = const [], 8 | String? 
writeCheckpoint, 9 | List streams = const [], 10 | }) { 11 | return { 12 | 'checkpoint': { 13 | 'last_op_id': '$lastOpId', 14 | 'write_checkpoint': writeCheckpoint, 15 | 'buckets': buckets, 16 | 'streams': streams, 17 | } 18 | }; 19 | } 20 | 21 | Object stream(String name, bool isDefault, {List errors = const []}) { 22 | return {'name': name, 'is_default': isDefault, 'errors': errors}; 23 | } 24 | 25 | /// Creates a `checkpoint_complete` or `partial_checkpoint_complete` line. 26 | Object checkpointComplete({int? priority, String lastOpId = '1'}) { 27 | return { 28 | priority == null ? 'checkpoint_complete' : 'partial_checkpoint_complete': { 29 | 'last_op_id': lastOpId, 30 | if (priority != null) 'priority': priority, 31 | }, 32 | }; 33 | } 34 | 35 | Object bucketDescription( 36 | String name, { 37 | int checksum = 0, 38 | int priority = 3, 39 | int count = 1, 40 | Object? subscriptions, 41 | }) { 42 | return { 43 | 'bucket': name, 44 | 'checksum': checksum, 45 | 'priority': priority, 46 | 'count': count, 47 | if (subscriptions != null) 'subscriptions': subscriptions, 48 | }; 49 | } 50 | 51 | Matcher isSqliteException(int code, dynamic message) { 52 | return isA<SqliteException>() 53 | .having((e) => e.extendedResultCode, 'extendedResultCode', code) 54 | .having((e) => e.message, 'message', message); 55 | } 56 | 57 | const testSchema = { 58 | 'tables': [ 59 | { 60 | 'name': 'items', 61 | 'columns': [ 62 | {'name': 'col', 'type': 'text'} 63 | ], 64 | } 65 | ] 66 | }; 67 | -------------------------------------------------------------------------------- /dart/test/error_test.dart: -------------------------------------------------------------------------------- 1 | import 'package:sqlite3/common.dart'; 2 | import 'package:test/test.dart'; 3 | 4 | import 'utils/native_test_utils.dart'; 5 | import 'utils/test_utils.dart'; 6 | 7 | void main() { 8 | group('error reporting', () { 9 | late CommonDatabase db; 10 | 11 | setUp(() async { 12 | db = openTestDatabase(); 13 | }); 14 | 15 | tearDown(() { 16 | db.dispose(); 17 | }); 18 | 19 | test('contain inner SQLite descriptions', () { 20 | // Create a wrong migrations table for the core extension to trip over. 21 | db.execute('CREATE TABLE IF NOT EXISTS ps_migration(foo TEXT)'); 22 | 23 | expect( 24 | () => db.execute('SELECT powersync_init()'), 25 | throwsA(isSqliteException( 26 | 1, 27 | 'powersync_init: internal SQLite call returned ERROR: no such column: id', 28 | )), 29 | ); 30 | }); 31 | 32 | test('missing client id', () { 33 | db 34 | ..execute('SELECT powersync_init()') 35 | ..execute('DELETE FROM ps_kv;'); 36 | 37 | expect( 38 | () => db.execute('SELECT powersync_client_id()'), 39 | throwsA(isSqliteException( 40 | 4, 41 | 'powersync_client_id: No client_id found in ps_kv', 42 | )), 43 | ); 44 | }); 45 | 46 | group('sync protocol', () { 47 | setUp(() => db.execute('SELECT powersync_init()')); 48 | 49 | test('invalid json', () { 50 | const stmt = 'SELECT powersync_control(?,?)'; 51 | db.execute('BEGIN'); 52 | final control = db.prepare(stmt); 53 | 54 | control.execute(['start', null]); 55 | expect( 56 | () => control.execute(['line_text', 'invalid sync line']), 57 | throwsA(isSqliteException( 58 | 4, 59 | 'powersync_control: Sync protocol error: invalid text line. 
cause: expected value at line 1 column 1', 60 | )), 61 | ); 62 | }); 63 | }); 64 | }); 65 | } 66 | -------------------------------------------------------------------------------- /crates/core/src/checkpoint.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::String; 4 | use alloc::vec::Vec; 5 | use core::ffi::c_int; 6 | 7 | use powersync_sqlite_nostd as sqlite; 8 | use powersync_sqlite_nostd::{Connection, Context, Value}; 9 | use serde::Serialize; 10 | use serde_json as json; 11 | use sqlite::ResultCode; 12 | 13 | use crate::create_sqlite_text_fn; 14 | use crate::error::PowerSyncError; 15 | use crate::sync::checkpoint::{OwnedBucketChecksum, validate_checkpoint}; 16 | use crate::sync::line::Checkpoint; 17 | 18 | #[derive(Serialize)] 19 | struct CheckpointResult { 20 | valid: bool, 21 | failed_buckets: Vec<String>, 22 | } 23 | 24 | fn powersync_validate_checkpoint_impl( 25 | ctx: *mut sqlite::context, 26 | args: &[*mut sqlite::value], 27 | ) -> Result<String, PowerSyncError> { 28 | let data = args[0].text(); 29 | let checkpoint: Checkpoint = 30 | serde_json::from_str(data).map_err(PowerSyncError::as_argument_error)?; 31 | let db = ctx.db_handle(); 32 | let buckets: Vec<OwnedBucketChecksum> = checkpoint 33 | .buckets 34 | .iter() 35 | .map(OwnedBucketChecksum::from) 36 | .collect(); 37 | 38 | let failures = validate_checkpoint(buckets.iter(), None, db)?; 39 | let mut failed_buckets = Vec::<String>::with_capacity(failures.len()); 40 | for failure in failures { 41 | failed_buckets.push(failure.bucket_name); 42 | } 43 | 44 | let result = CheckpointResult { 45 | valid: failed_buckets.is_empty(), 46 | failed_buckets: failed_buckets, 47 | }; 48 | 49 | Ok(json::to_string(&result).map_err(PowerSyncError::internal)?) 50 | } 51 | 52 | create_sqlite_text_fn!( 53 | powersync_validate_checkpoint, 54 | powersync_validate_checkpoint_impl, 55 | "powersync_validate_checkpoint" 56 | ); 57 | 58 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 59 | db.create_function_v2( 60 | "powersync_validate_checkpoint", 61 | 1, 62 | sqlite::UTF8 | sqlite::DETERMINISTIC, 63 | None, 64 | Some(powersync_validate_checkpoint), 65 | None, 66 | None, 67 | None, 68 | )?; 69 | 70 | Ok(()) 71 | } 72 | -------------------------------------------------------------------------------- /docs/schema.md: -------------------------------------------------------------------------------- 1 | # Internal PowerSync tables 2 | 3 | This document is intended as a reference when working on the core PowerSync extension itself. 4 | For information relevant to PowerSync users, see [client-architecture](https://docs.powersync.com/architecture/client-architecture#schema). 5 | The document is also incomplete at the moment. 6 | 7 | ## `ps_migration` 8 | 9 | __TODO__: Document 10 | 11 | ## `ps_buckets` 12 | 13 | `ps_buckets` stores information about [buckets](https://docs.powersync.com/architecture/powersync-protocol#buckets) relevant to clients. 14 | A bucket is instantiated for every row returned by a parameter query in a [bucket definition](https://docs.powersync.com/usage/sync-rules/organize-data-into-buckets#organize-data-into-buckets). 15 | 16 | Clients create entries in `ps_buckets` when receiving a checkpoint message from the sync service; they are also 17 | responsible for removing buckets that are no longer relevant to the client. 18 | There is also a special `$local` bucket representing pending 19 | uploads. 20 | 21 | We store the following information in `ps_buckets`: 22 | 23 | 1. 
`id`: Internal (client-side only), alias to rowid for foreign references. 24 | 2. `name`: The name of the bucket as received from the sync service. 25 | 3. `last_applied_op`: The last operation id that has been verified and published to views (meaning that it was part of 26 | a checkpoint and that we have validated its checksum). 27 | 4. `target_op`: Only used for `$local`. TODO: Document further. 28 | 5. `add_checksum`: TODO: Document further. 29 | 6. `op_checksum`: TODO: Document further. 30 | 7. `pending_delete`: TODO: Appears to be unused, document further. 31 | 8. `count_at_last`: The number of operations in the bucket at the last verified checkpoint. 32 | 9. `count_since_last`: The number of operations downloaded since the last verified checkpoint. 33 | 34 | ## `ps_crud` 35 | 36 | __TODO__: Document 37 | 38 | ## `ps_kv` 39 | 40 | __TODO__: Document 41 | 42 | ## `ps_oplog` 43 | 44 | __TODO__: Document 45 | 46 | ## `ps_sync_state` 47 | 48 | __TODO__: Document 49 | 50 | ## `ps_tx` 51 | 52 | __TODO__: Document 53 | 54 | ## `ps_untyped` 55 | 56 | __TODO__: Document 57 | 58 | ## `ps_updated_rows` 59 | 60 | __TODO__: Document 61 | -------------------------------------------------------------------------------- /crates/core/src/operations.rs: -------------------------------------------------------------------------------- 1 | use crate::error::PowerSyncError; 2 | use crate::sync::line::DataLine; 3 | use crate::sync::operations::insert_bucket_operations; 4 | use crate::sync::storage_adapter::StorageAdapter; 5 | use alloc::vec::Vec; 6 | use powersync_sqlite_nostd as sqlite; 7 | use powersync_sqlite_nostd::{Connection, ResultCode}; 8 | use serde::Deserialize; 9 | 10 | use crate::ext::SafeManagedStmt; 11 | 12 | // Run inside a transaction 13 | pub fn insert_operation(db: *mut sqlite::sqlite3, data: &str) -> Result<(), PowerSyncError> { 14 | #[derive(Deserialize)] 15 | struct BucketBatch<'a> { 16 | #[serde(borrow)] 17 | buckets: Vec<DataLine<'a>>, 18 | } 19 | 20 | let batch: BucketBatch = 21 | serde_json::from_str(data).map_err(PowerSyncError::as_argument_error)?; 22 | let adapter = StorageAdapter::new(db)?; 23 | 24 | for line in &batch.buckets { 25 | insert_bucket_operations(&adapter, &line)?; 26 | } 27 | 28 | Ok(()) 29 | } 30 | 31 | pub fn clear_remove_ops(_db: *mut sqlite::sqlite3, _data: &str) -> Result<(), ResultCode> { 32 | // No-op 33 | 34 | Ok(()) 35 | } 36 | 37 | pub fn delete_pending_buckets(_db: *mut sqlite::sqlite3, _data: &str) -> Result<(), ResultCode> { 38 | // No-op 39 | 40 | Ok(()) 41 | } 42 | 43 | pub fn delete_bucket(db: *mut sqlite::sqlite3, name: &str) -> Result<(), ResultCode> { 44 | // language=SQLite 45 | let statement = db.prepare_v2("DELETE FROM ps_buckets WHERE name = ?1 RETURNING id")?; 46 | statement.bind_text(1, name, sqlite::Destructor::STATIC)?; 47 | 48 | if statement.step()? 
== ResultCode::ROW { 49 | let bucket_id = statement.column_int64(0); 50 | 51 | // language=SQLite 52 | let updated_statement = db.prepare_v2( 53 | "\ 54 | INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) 55 | SELECT row_type, row_id 56 | FROM ps_oplog 57 | WHERE bucket = ?1", 58 | )?; 59 | updated_statement.bind_int64(1, bucket_id)?; 60 | updated_statement.exec()?; 61 | 62 | // language=SQLite 63 | let delete_statement = db.prepare_v2("DELETE FROM ps_oplog WHERE bucket=?1")?; 64 | delete_statement.bind_int64(1, bucket_id)?; 65 | delete_statement.exec()?; 66 | } 67 | 68 | Ok(()) 69 | } 70 | -------------------------------------------------------------------------------- /dart/test/utils/fix_035_fixtures.dart: -------------------------------------------------------------------------------- 1 | /// Data with some records in actual tables but not in ps_oplog 2 | const dataBroken = ''' 3 | ;INSERT INTO ps_buckets(id, name, last_applied_op, last_op, target_op, add_checksum, op_checksum, pending_delete) VALUES 4 | (1, 'b1', 0, 0, 0, 0, 120, 0), 5 | (2, 'b2', 0, 0, 0, 0, 3, 0) 6 | ;INSERT INTO ps_oplog(bucket, op_id, row_type, row_id, key, data, hash) VALUES 7 | (1, 1, 'todos', 't1', '', '{}', 100), 8 | (1, 2, 'todos', 't2', '', '{}', 20), 9 | (2, 3, 'lists', 'l1', '', '{}', 3) 10 | ;INSERT INTO ps_data__lists(id, data) VALUES 11 | ('l1', '{}'), 12 | ('l3', '{}') 13 | ;INSERT INTO ps_data__todos(id, data) VALUES 14 | ('t1', '{}'), 15 | ('t2', '{}'), 16 | ('t3', '{}') 17 | '''; 18 | 19 | /// Data after applying the migration fix, but before sync_local 20 | const dataMigrated = ''' 21 | ;INSERT INTO ps_buckets(id, name, last_applied_op, last_op, target_op, add_checksum, op_checksum, pending_delete, count_at_last, count_since_last) VALUES 22 | (1, 'b1', 0, 0, 0, 0, 120, 0, 0, 0), 23 | (2, 'b2', 0, 0, 0, 0, 3, 0, 0, 0) 24 | ;INSERT INTO ps_oplog(bucket, op_id, row_type, row_id, key, data, hash) VALUES 25 | (1, 1, 'todos', 't1', '', '{}', 100), 26 | (1, 2, 'todos', 't2', '', '{}', 20), 27 | (2, 3, 'lists', 'l1', '', '{}', 3) 28 | ;INSERT INTO ps_updated_rows(row_type, row_id) VALUES 29 | ('lists', 'l3'), 30 | ('todos', 't3') 31 | ;INSERT INTO ps_data__lists(id, data) VALUES 32 | ('l1', '{}'), 33 | ('l3', '{}') 34 | ;INSERT INTO ps_data__todos(id, data) VALUES 35 | ('t1', '{}'), 36 | ('t2', '{}'), 37 | ('t3', '{}') 38 | '''; 39 | 40 | /// Data after applying the migration fix and sync_local 41 | const dataFixed = ''' 42 | ;INSERT INTO ps_buckets(id, name, last_applied_op, last_op, target_op, add_checksum, op_checksum, pending_delete, count_at_last, count_since_last) VALUES 43 | (1, 'b1', 0, 0, 0, 0, 120, 0, 0, 0), 44 | (2, 'b2', 0, 0, 0, 0, 3, 0, 0, 0) 45 | ;INSERT INTO ps_oplog(bucket, op_id, row_type, row_id, key, data, hash) VALUES 46 | (1, 1, 'todos', 't1', '', '{}', 100), 47 | (1, 2, 'todos', 't2', '', '{}', 20), 48 | (2, 3, 'lists', 'l1', '', '{}', 3) 49 | ;INSERT INTO ps_data__lists(id, data) VALUES 50 | ('l1', '{}') 51 | ;INSERT INTO ps_data__todos(id, data) VALUES 52 | ('t1', '{}'), 53 | ('t2', '{}') 54 | '''; 55 | -------------------------------------------------------------------------------- /crates/core/src/vtab_util.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::ToString; 4 | use core::ffi::{c_char, c_int}; 5 | 6 | use powersync_sqlite_nostd as sqlite; 7 | use powersync_sqlite_nostd::VTab; 8 | use sqlite::ResultCode; 9 | 10 | use crate::error::PowerSyncError; 11 | 12 | // For insert-only virtual 
tables, there are many functions that have to be defined, even if they're 13 | // not intended to be used. We return MISUSE for each. 14 | 15 | pub extern "C" fn vtab_no_filter( 16 | _cursor: *mut sqlite::vtab_cursor, 17 | _idx_num: c_int, 18 | _idx_str: *const c_char, 19 | _argc: c_int, 20 | _argv: *mut *mut sqlite::value, 21 | ) -> c_int { 22 | ResultCode::MISUSE as c_int 23 | } 24 | 25 | pub extern "C" fn vtab_no_next(_cursor: *mut sqlite::vtab_cursor) -> c_int { 26 | ResultCode::MISUSE as c_int 27 | } 28 | 29 | pub extern "C" fn vtab_no_eof(_cursor: *mut sqlite::vtab_cursor) -> c_int { 30 | ResultCode::MISUSE as c_int 31 | } 32 | 33 | pub extern "C" fn vtab_no_column( 34 | _cursor: *mut sqlite::vtab_cursor, 35 | _ctx: *mut sqlite::context, 36 | _col_num: c_int, 37 | ) -> c_int { 38 | ResultCode::MISUSE as c_int 39 | } 40 | 41 | pub extern "C" fn vtab_no_rowid( 42 | _cursor: *mut sqlite::vtab_cursor, 43 | _row_id: *mut sqlite::int64, 44 | ) -> c_int { 45 | ResultCode::MISUSE as c_int 46 | } 47 | 48 | pub extern "C" fn vtab_no_best_index( 49 | _vtab: *mut sqlite::vtab, 50 | _index_info: *mut sqlite::index_info, 51 | ) -> c_int { 52 | return ResultCode::MISUSE as c_int; 53 | } 54 | 55 | pub extern "C" fn vtab_no_open( 56 | _vtab: *mut sqlite::vtab, 57 | _cursor: *mut *mut sqlite::vtab_cursor, 58 | ) -> c_int { 59 | ResultCode::MISUSE as c_int 60 | } 61 | 62 | pub extern "C" fn vtab_no_close(_cursor: *mut sqlite::vtab_cursor) -> c_int { 63 | // If open never allocates a cursor, this should never be called 64 | ResultCode::MISUSE as c_int 65 | } 66 | 67 | pub fn vtab_result<T, E: Into<PowerSyncError>>( 68 | vtab: *mut sqlite::vtab, 69 | result: Result<T, E>, 70 | ) -> c_int { 71 | if let Err(error) = result { 72 | let error = error.into(); 73 | 74 | vtab.set_err(&error.to_string()); 75 | error.sqlite_error_code() as c_int 76 | } else { 77 | ResultCode::OK as c_int 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /dart/test/js_key_encoding_test.dart: -------------------------------------------------------------------------------- 1 | import 'dart:convert'; 2 | 3 | import 'package:file/local.dart'; 4 | import 'package:sqlite3/common.dart'; 5 | import 'package:sqlite3/sqlite3.dart'; 6 | import 'package:sqlite3_test/sqlite3_test.dart'; 7 | import 'package:test/test.dart'; 8 | 9 | import 'utils/native_test_utils.dart'; 10 | 11 | void main() { 12 | // Needs a unique name per test file to avoid concurrency issues 13 | final vfs = TestSqliteFileSystem( 14 | fs: const LocalFileSystem(), name: 'js-key-encoding-test-vfs'); 15 | late CommonDatabase db; 16 | 17 | setUpAll(() { 18 | loadExtension(); 19 | sqlite3.registerVirtualFileSystem(vfs, makeDefault: false); 20 | }); 21 | tearDownAll(() => sqlite3.unregisterVirtualFileSystem(vfs)); 22 | 23 | setUp(() async { 24 | db = openTestDatabase(vfs: vfs) 25 | ..select('select powersync_init();') 26 | ..select('select powersync_replace_schema(?)', [json.encode(_schema)]); 27 | }); 28 | 29 | tearDown(() { 30 | db.dispose(); 31 | }); 32 | 33 | test('can fix JS key encoding', () { 34 | db.execute('insert into powersync_operations (op, data) VALUES (?, ?);', [ 35 | 'save', 36 | json.encode({ 37 | 'buckets': [ 38 | { 39 | 'bucket': 'a', 40 | 'data': [ 41 | { 42 | 'op_id': '1', 43 | 'op': 'PUT', 44 | 'object_type': 'items', 45 | 'object_id': '1', 46 | 'subkey': json.encode('subkey'), 47 | 'checksum': 0, 48 | 'data': json.encode({'col': 'a'}), 49 | } 50 | ], 51 | } 52 | ], 53 | }) 54 | ]); 55 | 56 | db.execute('INSERT INTO powersync_operations(op, 
data) VALUES (?, ?)', 57 | ['sync_local', null]); 58 | var [row] = db.select('select * from ps_oplog'); 59 | expect(row['key'], 'items/1/"subkey"'); 60 | 61 | // Apply migration 62 | db.execute( 63 | 'UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);'); 64 | 65 | [row] = db.select('select * from ps_oplog'); 66 | expect(row['key'], 'items/1/subkey'); 67 | }); 68 | } 69 | 70 | const _schema = { 71 | 'tables': [ 72 | { 73 | 'name': 'items', 74 | 'columns': [ 75 | {'name': 'col', 'type': 'text'} 76 | ], 77 | } 78 | ] 79 | }; 80 | -------------------------------------------------------------------------------- /crates/core/src/kv.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::{String, ToString}; 4 | use core::ffi::c_int; 5 | 6 | use powersync_sqlite_nostd as sqlite; 7 | use powersync_sqlite_nostd::{Connection, Context}; 8 | use sqlite::ResultCode; 9 | 10 | use crate::create_sqlite_optional_text_fn; 11 | use crate::create_sqlite_text_fn; 12 | use crate::error::PowerSyncError; 13 | use crate::sync::BucketPriority; 14 | 15 | fn powersync_client_id_impl( 16 | ctx: *mut sqlite::context, 17 | _args: &[*mut sqlite::value], 18 | ) -> Result<String, PowerSyncError> { 19 | let db = ctx.db_handle(); 20 | 21 | client_id(db) 22 | } 23 | 24 | pub fn client_id(db: *mut sqlite::sqlite3) -> Result<String, PowerSyncError> { 25 | // language=SQLite 26 | let statement = db.prepare_v2("select value from ps_kv where key = 'client_id'")?; 27 | 28 | if statement.step()? == ResultCode::ROW { 29 | let client_id = statement.column_text(0)?; 30 | Ok(client_id.to_string()) 31 | } else { 32 | Err(PowerSyncError::missing_client_id()) 33 | } 34 | } 35 | 36 | create_sqlite_text_fn!( 37 | powersync_client_id, 38 | powersync_client_id_impl, 39 | "powersync_client_id" 40 | ); 41 | 42 | fn powersync_last_synced_at_impl( 43 | ctx: *mut sqlite::context, 44 | _args: &[*mut sqlite::value], 45 | ) -> Result<Option<String>, ResultCode> { 46 | let db = ctx.db_handle(); 47 | 48 | // language=SQLite 49 | let statement = db.prepare_v2("select last_synced_at from ps_sync_state where priority = ?")?; 50 | statement.bind_int(1, BucketPriority::SENTINEL.into())?; 51 | 52 | if statement.step()? == ResultCode::ROW { 53 | let client_id = statement.column_text(0)?; 54 | Ok(Some(client_id.to_string())) 55 | } else { 56 | Ok(None) 57 | } 58 | } 59 | 60 | create_sqlite_optional_text_fn!( 61 | powersync_last_synced_at, 62 | powersync_last_synced_at_impl, 63 | "powersync_last_synced_at" 64 | ); 65 | 66 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 67 | db.create_function_v2( 68 | "powersync_client_id", 69 | 0, 70 | sqlite::UTF8 | sqlite::DETERMINISTIC, 71 | None, 72 | Some(powersync_client_id), 73 | None, 74 | None, 75 | None, 76 | )?; 77 | db.create_function_v2( 78 | "powersync_last_synced_at", 79 | 0, 80 | sqlite::UTF8 | sqlite::DETERMINISTIC, 81 | None, 82 | Some(powersync_last_synced_at), 83 | None, 84 | None, 85 | None, 86 | )?; 87 | 88 | Ok(()) 89 | } 90 | -------------------------------------------------------------------------------- /UUID.md: -------------------------------------------------------------------------------- 1 | ### sqlite3 sqlite3_randomness 2 | 3 | As part of the extension, we provide `uuid()` and `gen_random_uuid()` functions, which generate a UUIDv4. 
We want reasonable guarantees that these uuids are unguessable, so we need to use a [cryptographically secure pseudorandom number generator](https://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator) (CSPRNG). Additionally, we want this to be fast (generating uuids shouldn't be a bottleneck for inserting rows). 4 | 5 | We provide two options: 6 | 7 | 1. The `getrandom` crate, via the `getrandom` feature (enabled by default). 8 | 2. `sqlite3_randomness`, when the `getrandom` feature is not enabled. 9 | 10 | Everywhere it's available, `getrandom` is recommended. One exception is WASI, where `sqlite3_randomness` is simpler. In that case, make sure the VFS xRandomness function provides secure random values. 11 | 12 | ## Details 13 | 14 | SQLite has a `sqlite3_randomness` function that does: 15 | 16 | 1. Seed using the VFS xRandomness function. 17 | 2. Generate a sequence using ChaCha20 (CSPRNG, as long as the seed is sufficiently random). 18 | 19 | The VFS implementations differ: 20 | 21 | 1. For unix (Linux, macOS, Android and iOS), the default VFS uses `/dev/urandom` for the xRandomness seed above. This is generally secure. 22 | 2. For Windows, the default VFS uses some system state such as pid, current time, and current tick count for the entropy. This is okay for generating random numbers, but not quite secure (it may be possible to guess). 23 | 3. For wa-sqlite, it defaults to the unix VFS. Emscripten intercepts the `/dev/urandom` call and provides values using `crypto.getRandomValues()`: https://github.com/emscripten-core/emscripten/blob/2a00e26013b0a02411af09352c6731b89023f382/src/library.js#L2151-L2163 24 | 4. Dart sqlite3 WASM uses `Random.secure()` to provide values. This is relatively slow, but only for the seed, so that's fine. This translates to `crypto.getRandomValues()` in JS. 25 | 26 | ### getrandom crate 27 | 28 | The Rust uuid crate uses the getrandom crate for the "v4" feature by default. 29 | 30 | Full platform support is listed here: https://docs.rs/getrandom/latest/getrandom/ 31 | 32 | Summary: 33 | 34 | - Linux, Android: getrandom system call if available, falling back to `/dev/urandom`. 35 | - Windows: [BCryptGenRandom](https://docs.microsoft.com/en-us/windows/win32/api/bcrypt/nf-bcrypt-bcryptgenrandom) 36 | - macOS: [getentropy](https://www.unix.com/man-page/mojave/2/getentropy/) 37 | - iOS: [CCRandomGenerateBytes](https://opensource.apple.com/source/CommonCrypto/CommonCrypto-60074/include/CommonRandom.h.auto.html) 38 | - WASI (used for Dart sqlite3 WASM build): [random_get](https://wasix.org/docs/api-reference/wasi/random_get) 39 | 40 | The WASI one may be problematic, since that's not provided in all environments. 
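For illustration, here is a minimal sketch (not code from this repository) of option 2: formatting a v4 UUID from bytes obtained via `sqlite3_randomness`. It assumes the linked SQLite's VFS xRandomness provides secure entropy; the function name `uuid_v4_via_sqlite` is hypothetical.

```rust
// Illustrative sketch only — not part of this crate. Shows how a UUIDv4 could
// be formatted from sqlite3_randomness() output when `getrandom` is disabled.
use core::ffi::{c_int, c_void};

unsafe extern "C" {
    // Provided by the SQLite library this code links against.
    fn sqlite3_randomness(n: c_int, p: *mut c_void);
}

fn uuid_v4_via_sqlite() -> String {
    let mut bytes = [0u8; 16];
    // Fill 16 bytes from SQLite's ChaCha20-based PRNG.
    unsafe { sqlite3_randomness(16, bytes.as_mut_ptr() as *mut c_void) };

    // Stamp the RFC 4122 version (4) and variant (10xx) bits.
    bytes[6] = (bytes[6] & 0x0f) | 0x40;
    bytes[8] = (bytes[8] & 0x3f) | 0x80;

    // Format as the usual 8-4-4-4-12 hex groups.
    let mut out = String::with_capacity(36);
    for (i, b) in bytes.iter().enumerate() {
        if matches!(i, 4 | 6 | 8 | 10) {
            out.push('-');
        }
        out.push_str(&format!("{b:02x}"));
    }
    out
}
```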
41 | -------------------------------------------------------------------------------- /crates/core/src/json_util.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::{String, ToString}; 4 | use core::ffi::c_int; 5 | 6 | use crate::constants::SUBTYPE_JSON; 7 | use crate::create_sqlite_text_fn; 8 | use crate::error::PowerSyncError; 9 | use powersync_sqlite_nostd as sqlite; 10 | use powersync_sqlite_nostd::bindings::{SQLITE_RESULT_SUBTYPE, SQLITE_SUBTYPE}; 11 | use powersync_sqlite_nostd::{Connection, Context, Value}; 12 | use sqlite::ResultCode; 13 | 14 | extern "C" fn powersync_strip_subtype( 15 | ctx: *mut sqlite::context, 16 | argc: c_int, 17 | argv: *mut *mut sqlite::value, 18 | ) { 19 | if argc != 1 { 20 | return; 21 | } 22 | 23 | let arg = unsafe { *argv }; 24 | ctx.result_value(arg); 25 | ctx.result_subtype(0); 26 | } 27 | 28 | /// Given any number of JSON TEXT arguments, merge them into a single JSON object. 29 | /// 30 | /// This assumes each argument is a valid JSON object, with no duplicate keys. 31 | /// No JSON parsing or validation is performed - this performs simple string concatenation. 32 | fn powersync_json_merge_impl( 33 | ctx: *mut sqlite::context, 34 | args: &[*mut sqlite::value], 35 | ) -> Result<String, PowerSyncError> { 36 | if args.is_empty() { 37 | return Ok("{}".to_string()); 38 | } 39 | let mut result = String::from("{"); 40 | for arg in args { 41 | let chunk = arg.text(); 42 | if chunk.is_empty() || !chunk.starts_with('{') || !chunk.ends_with('}') { 43 | return Err(PowerSyncError::argument_error("Expected json object")); 44 | } 45 | 46 | // Strip outer braces 47 | let inner = &chunk[1..(chunk.len() - 1)]; 48 | 49 | // If this is not the first chunk, insert a comma 50 | if result.len() > 1 { 51 | result.push(','); 52 | } 53 | 54 | // Append the inner content 55 | result.push_str(inner); 56 | } 57 | 58 | // Close the outer brace 59 | result.push('}'); 60 | ctx.result_subtype(SUBTYPE_JSON); 61 | Ok(result) 62 | } 63 | 64 | create_sqlite_text_fn!( 65 | powersync_json_merge, 66 | powersync_json_merge_impl, 67 | "powersync_json_merge" 68 | ); 69 | 70 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 71 | db.create_function_v2( 72 | "powersync_json_merge", 73 | -1, 74 | sqlite::UTF8 | sqlite::DETERMINISTIC | SQLITE_RESULT_SUBTYPE, 75 | None, 76 | Some(powersync_json_merge), 77 | None, 78 | None, 79 | None, 80 | )?; 81 | 82 | db.create_function_v2( 83 | "powersync_strip_subtype", 84 | 1, 85 | sqlite::UTF8 | sqlite::DETERMINISTIC | SQLITE_SUBTYPE | SQLITE_RESULT_SUBTYPE, 86 | None, 87 | Some(powersync_strip_subtype), 88 | None, 89 | None, 90 | None, 91 | )?; 92 | 93 | Ok(()) 94 | } 95 | -------------------------------------------------------------------------------- /dart/test/utils/tracking_vfs.dart: -------------------------------------------------------------------------------- 1 | import 'dart:typed_data'; 2 | 3 | import 'package:sqlite3/sqlite3.dart'; 4 | 5 | final class TrackingFileSystem extends BaseVirtualFileSystem { 6 | BaseVirtualFileSystem parent; 7 | int tempReads = 0; 8 | int tempWrites = 0; 9 | int dataReads = 0; 10 | int dataWrites = 0; 11 | 12 | TrackingFileSystem({super.name = 'tracking', required this.parent}); 13 | 14 | @override 15 | int xAccess(String path, int flags) { 16 | return parent.xAccess(path, flags); 17 | } 18 | 19 | @override 20 | void xDelete(String path, int syncDir) { 21 | parent.xDelete(path, syncDir); 22 | } 23 | 24 | @override 25 | String 
xFullPathName(String path) { 26 | return parent.xFullPathName(path); 27 | } 28 | 29 | @override 30 | XOpenResult xOpen(Sqlite3Filename path, int flags) { 31 | final result = parent.xOpen(path, flags); 32 | return ( 33 | outFlags: result.outFlags, 34 | file: TrackingFile( 35 | result.file, this, flags & SqlFlag.SQLITE_OPEN_DELETEONCLOSE != 0), 36 | ); 37 | } 38 | 39 | @override 40 | void xSleep(Duration duration) {} 41 | 42 | String stats() { 43 | return "Reads: $dataReads + $tempReads | Writes: $dataWrites + $tempWrites"; 44 | } 45 | 46 | void clearStats() { 47 | tempReads = 0; 48 | tempWrites = 0; 49 | dataReads = 0; 50 | dataWrites = 0; 51 | } 52 | } 53 | 54 | class TrackingFile implements VirtualFileSystemFile { 55 | final TrackingFileSystem vfs; 56 | final VirtualFileSystemFile parentFile; 57 | final bool deleteOnClose; 58 | 59 | TrackingFile(this.parentFile, this.vfs, this.deleteOnClose); 60 | 61 | @override 62 | void xWrite(Uint8List buffer, int fileOffset) { 63 | if (deleteOnClose) { 64 | vfs.tempWrites++; 65 | } else { 66 | vfs.dataWrites++; 67 | } 68 | parentFile.xWrite(buffer, fileOffset); 69 | } 70 | 71 | @override 72 | void xRead(Uint8List buffer, int offset) { 73 | if (deleteOnClose) { 74 | vfs.tempReads++; 75 | } else { 76 | vfs.dataReads++; 77 | } 78 | parentFile.xRead(buffer, offset); 79 | } 80 | 81 | @override 82 | int xCheckReservedLock() { 83 | return parentFile.xCheckReservedLock(); 84 | } 85 | 86 | @override 87 | void xClose() { 88 | return parentFile.xClose(); 89 | } 90 | 91 | @override 92 | int xFileSize() { 93 | return parentFile.xFileSize(); 94 | } 95 | 96 | @override 97 | void xLock(int mode) { 98 | return parentFile.xLock(mode); 99 | } 100 | 101 | @override 102 | void xSync(int flags) { 103 | return parentFile.xSync(flags); 104 | } 105 | 106 | @override 107 | void xTruncate(int size) { 108 | return parentFile.xTruncate(size); 109 | } 110 | 111 | @override 112 | void xUnlock(int mode) { 113 | return parentFile.xUnlock(mode); 114 | } 115 | 116 | @override 117 | int get xDeviceCharacteristics => parentFile.xDeviceCharacteristics; 118 | } 119 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | # Previously we added this to rustflags for all linux builds: 2 | # "-C", "link-arg=-lgcc_eh" 3 | # It was to fix this error when loading the loadable extension: 4 | # undefined symbol: _Unwind_Resume 5 | # Now, we instead build using: 6 | # -Z build-std=panic_abort,core,alloc 7 | # This fixes the same issue. We still keep -lgcc_eh, 8 | # to support manual builds without -Z build-std. 9 | 10 | # Without -Z build-std, with -lgcc_eh: 11 | # 241KB, loading works 12 | # Without -Z build-std, without -lgcc_eh: 13 | # 207KB, undefined symbol: _Unwind_Resume 14 | # With -Z build-std, without -lgcc_eh: 15 | # 173K, loading works 16 | # With -Z build-std, with -lgcc_eh: 17 | # 173K, loading works 18 | # Conclusion: -lgcc_eh has no effect when using -Z build-std. 
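# For reference, an illustrative nightly invocation using build-std (the build
# scripts and CI workflows are the authoritative source for exact commands):
#   cargo +nightly-2025-10-31 build -Z build-std=panic_abort,core,alloc \
#     -p powersync_loadable --release --target x86_64-unknown-linux-gnu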
19 | 20 | [target.'cfg(target_os = "linux")'] 21 | rustflags = [ 22 | "-C", "link-arg=-lgcc_eh", 23 | ] 24 | 25 | [target.x86_64-unknown-linux-gnu] 26 | linker = "x86_64-linux-gnu-gcc" 27 | 28 | [target.i686-unknown-linux-gnu] 29 | linker = "i686-linux-gnu-gcc" 30 | 31 | [target.aarch64-unknown-linux-gnu] 32 | linker = "aarch64-linux-gnu-gcc" 33 | 34 | [target.armv7-unknown-linux-gnueabihf] 35 | linker = "arm-linux-gnueabihf-gcc" 36 | 37 | [target.riscv64gc-unknown-linux-gnu] 38 | linker = "riscv64-linux-gnu-gcc" 39 | 40 | # For iOS and macOS, we need to specify the minimum/target version. 41 | # This must match the versions in the podspec file. 42 | [target.aarch64-apple-ios] 43 | rustflags = [ 44 | "-C", "link-arg=-mios-version-min=11.0", 45 | ] 46 | 47 | [target.aarch64-apple-ios-sim] 48 | rustflags = [ 49 | "-C", "link-arg=-miphonesimulator-version-min=11.0", 50 | ] 51 | 52 | [target.x86_64-apple-ios] 53 | rustflags = [ 54 | "-C", "link-arg=-miphonesimulator-version-min=11.0", 55 | ] 56 | 57 | 58 | [target.x86_64-apple-darwin] 59 | rustflags = [ 60 | "-C", "link-arg=-mmacosx-version-min=10.13", 61 | ] 62 | 63 | [target.aarch64-apple-darwin] 64 | rustflags = [ 65 | "-C", "link-arg=-mmacosx-version-min=10.13", 66 | ] 67 | 68 | 69 | # For Android, it is important to set the soname. 70 | # Otherwise, the linker hardcodes the path in the lib, 71 | # which breaks loading. 72 | [target.aarch64-linux-android] 73 | rustflags = [ 74 | "-C", "link-arg=-Wl,-soname,libpowersync.so", 75 | ] 76 | 77 | [target.armv7-linux-androideabi] 78 | rustflags = [ 79 | "-C", "link-arg=-Wl,-soname,libpowersync.so", 80 | ] 81 | 82 | [target.x86_64-linux-android] 83 | rustflags = [ 84 | "-C", "link-arg=-Wl,-soname,libpowersync.so", 85 | ] 86 | 87 | [target.i686-linux-android] 88 | rustflags = [ 89 | "-C", "link-arg=-Wl,-soname,libpowersync.so", 90 | ] 91 | 92 | [target.aarch64-apple-watchos] 93 | rustflags = [ 94 | "-C", "link-arg=-mwatchos-version-min=9.0", 95 | ] 96 | 97 | [target.aarch64-apple-watchos-sim] 98 | rustflags = [ 99 | "-C", "link-arg=-mwatchsimulator-version-min=9.0", 100 | ] 101 | 102 | [target.x86_64-apple-watchos-sim] 103 | rustflags = [ 104 | "-C", "link-arg=-mwatchos-version-min=9.0", 105 | ] 106 | -------------------------------------------------------------------------------- /crates/core/src/macros.rs: -------------------------------------------------------------------------------- 1 | #[macro_export] 2 | macro_rules! create_sqlite_text_fn { 3 | ($fn_name:ident, $fn_impl_name:ident, $description:literal) => { 4 | extern "C" fn $fn_name( 5 | ctx: *mut sqlite::context, 6 | argc: c_int, 7 | argv: *mut *mut sqlite::value, 8 | ) { 9 | let args = sqlite::args!(argc, argv); 10 | 11 | let result = $fn_impl_name(ctx, args); 12 | 13 | if let Err(err) = result { 14 | PowerSyncError::from(err).apply_to_ctx($description, ctx); 15 | } else if let Ok(r) = result { 16 | ctx.result_text_transient(&r); 17 | } 18 | } 19 | }; 20 | } 21 | 22 | #[macro_export] 23 | macro_rules! 
create_sqlite_optional_text_fn { 24 | ($fn_name:ident, $fn_impl_name:ident, $description:literal) => { 25 | extern "C" fn $fn_name( 26 | ctx: *mut sqlite::context, 27 | argc: c_int, 28 | argv: *mut *mut sqlite::value, 29 | ) { 30 | let args = sqlite::args!(argc, argv); 31 | 32 | let result = $fn_impl_name(ctx, args); 33 | 34 | if let Err(err) = result { 35 | PowerSyncError::from(err).apply_to_ctx($description, ctx); 36 | } else if let Ok(r) = result { 37 | if let Some(s) = r { 38 | ctx.result_text_transient(&s); 39 | } else { 40 | ctx.result_null(); 41 | } 42 | } 43 | } 44 | }; 45 | } 46 | 47 | // Wrap a function in an auto-transaction. 48 | // Gives the equivalent of SQLite's auto-commit behaviour, except that it applies to all statements 49 | // inside the function. Otherwise, each statement inside the function would be a transaction on its 50 | // own if the function itself is not wrapped in a transaction. 51 | #[macro_export] 52 | macro_rules! create_auto_tx_function { 53 | ($fn_name:ident, $fn_impl_name:ident) => { 54 | fn $fn_name( 55 | ctx: *mut sqlite::context, 56 | args: &[*mut sqlite::value], 57 | ) -> Result<String, PowerSyncError> { 58 | let db = ctx.db_handle(); 59 | 60 | // Auto-start a transaction if we're not in a transaction 61 | let started_tx = if db.get_autocommit() { 62 | db.exec_safe("BEGIN")?; 63 | true 64 | } else { 65 | false 66 | }; 67 | 68 | let result = $fn_impl_name(ctx, args); 69 | if result.is_err() { 70 | // Always ROLLBACK, even when we didn't start the transaction. 71 | // Otherwise the user may be able to continue the transaction and end up in an inconsistent state. 72 | // We ignore rollback errors. 73 | if !db.get_autocommit() { 74 | let _ignore = db.exec_safe("ROLLBACK"); 75 | } 76 | } else if started_tx { 77 | // Only COMMIT our own transactions. 78 | db.exec_safe("COMMIT")?; 79 | } 80 | 81 | result 82 | } 83 | }; 84 | } 85 | -------------------------------------------------------------------------------- /.github/actions/android/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build Android library" 2 | description: "Create artifact for Android library" 3 | inputs: 4 | sign-publication: 5 | description: "Whether to sign the built library" 6 | default: '1' 7 | gpg-key: 8 | required: false 9 | description: "The GPG key to use when signing the publication" 10 | gpg-password: 11 | required: false 12 | description: "Password for the GPG key." 
13 | 14 | runs: 15 | using: "composite" 16 | steps: 17 | - uses: actions/setup-java@v5 18 | with: 19 | distribution: "temurin" 20 | java-version: "25" 21 | 22 | - name: Validate Gradle wrapper 23 | uses: gradle/actions/wrapper-validation@v4 24 | 25 | - name: Setup 26 | shell: bash 27 | run: | 28 | rustup toolchain install nightly-2025-10-31-x86_64-unknown-linux-gnu 29 | rustup component add rust-src --toolchain nightly-2025-10-31-x86_64-unknown-linux-gnu 30 | rustup target add \ 31 | aarch64-linux-android \ 32 | armv7-linux-androideabi \ 33 | x86_64-linux-android \ 34 | i686-linux-android 35 | cargo install cargo-ndk 36 | 37 | - name: Build signed library 38 | shell: bash 39 | if: ${{ inputs.sign-publication == '1' }} 40 | run: | 41 | cd android 42 | ./gradlew build zipPublication -PgpgKey=${{ inputs.gpg-key }} -PgpgPassword=${{ inputs.gpg-password }} 43 | ls -lh build/outputs/aar 44 | find build/repository 45 | 46 | - name: Build library without signing 47 | shell: bash 48 | if: ${{ inputs.sign-publication == '0' }} 49 | run: | 50 | cd android 51 | ./gradlew build zipPublication -PsignPublication=0 52 | ls -lh build/outputs/aar 53 | find build/repository 54 | 55 | - name: Upload binary 56 | uses: actions/upload-artifact@v4 57 | with: 58 | name: android-library 59 | retention-days: 14 60 | compression-level: 0 # We're uploading a zip, no need to compress again 61 | path: android/build/distributions/powersync_android.zip 62 | if-no-files-found: error 63 | 64 | - name: Copy static libraries 65 | shell: bash 66 | run: | 67 | cp target/aarch64-linux-android/release/libpowersync.a libpowersync_aarch64.android.a 68 | cp target/aarch64-linux-android/release/libpowersync.so libpowersync_aarch64.android.so 69 | 70 | cp target/armv7-linux-androideabi/release/libpowersync.a libpowersync_armv7.android.a 71 | cp target/armv7-linux-androideabi/release/libpowersync.so libpowersync_armv7.android.so 72 | 73 | cp target/i686-linux-android/release/libpowersync.a libpowersync_x86.android.a 74 | cp target/i686-linux-android/release/libpowersync.so libpowersync_x86.android.so 75 | 76 | cp target/x86_64-linux-android/release/libpowersync.a libpowersync_x64.android.a 77 | cp target/x86_64-linux-android/release/libpowersync.so libpowersync_x64.android.so 78 | 79 | - name: Upload static libraries 80 | uses: actions/upload-artifact@v4 81 | with: 82 | name: android-static 83 | retention-days: 14 84 | path: | 85 | *.a 86 | *.so 87 | -------------------------------------------------------------------------------- /crates/core/src/sync/checkpoint.rs: -------------------------------------------------------------------------------- 1 | use alloc::{rc::Rc, string::String, vec::Vec}; 2 | use num_traits::Zero; 3 | 4 | use crate::sync::line::{BucketChecksum, BucketSubscriptionReason}; 5 | use crate::sync::{BucketPriority, Checksum}; 6 | use powersync_sqlite_nostd::{self as sqlite, Connection, ResultCode}; 7 | 8 | /// A structure cloned from [BucketChecksum]s with an owned bucket name instead of one borrowed from 9 | /// a sync line. 
10 | #[derive(Debug, Clone)] 11 | pub struct OwnedBucketChecksum { 12 | pub bucket: String, 13 | pub checksum: Checksum, 14 | pub priority: BucketPriority, 15 | pub count: Option<i64>, 16 | pub subscriptions: Rc<Vec<BucketSubscriptionReason>>, 17 | } 18 | 19 | impl OwnedBucketChecksum { 20 | pub fn is_in_priority(&self, prio: Option<BucketPriority>) -> bool { 21 | match prio { 22 | None => true, 23 | Some(prio) => self.priority >= prio, 24 | } 25 | } 26 | } 27 | 28 | impl From<&'_ BucketChecksum<'_>> for OwnedBucketChecksum { 29 | fn from(value: &'_ BucketChecksum<'_>) -> Self { 30 | Self { 31 | bucket: value.bucket.clone().into_owned(), 32 | checksum: value.checksum, 33 | priority: value.priority.unwrap_or(BucketPriority::FALLBACK), 34 | count: value.count, 35 | subscriptions: value.subscriptions.clone(), 36 | } 37 | } 38 | } 39 | 40 | pub struct ChecksumMismatch { 41 | pub bucket_name: String, 42 | pub expected_checksum: Checksum, 43 | pub actual_op_checksum: Checksum, 44 | pub actual_add_checksum: Checksum, 45 | } 46 | 47 | pub fn validate_checkpoint<'a>( 48 | buckets: impl Iterator<Item = &'a OwnedBucketChecksum>, 49 | priority: Option<BucketPriority>, 50 | db: *mut sqlite::sqlite3, 51 | ) -> Result<Vec<ChecksumMismatch>, ResultCode> { 52 | // language=SQLite 53 | let statement = db.prepare_v2( 54 | " 55 | SELECT 56 | ps_buckets.add_checksum as add_checksum, 57 | ps_buckets.op_checksum as oplog_checksum 58 | FROM ps_buckets WHERE name = ?;", 59 | )?; 60 | 61 | let mut failures: Vec<ChecksumMismatch> = Vec::new(); 62 | for bucket in buckets { 63 | if bucket.is_in_priority(priority) { 64 | statement.bind_text(1, &bucket.bucket, sqlite::Destructor::STATIC)?; 65 | 66 | let (add_checksum, oplog_checksum) = match statement.step()? { 67 | ResultCode::ROW => { 68 | let add_checksum = Checksum::from_i32(statement.column_int(0)); 69 | let oplog_checksum = Checksum::from_i32(statement.column_int(1)); 70 | (add_checksum, oplog_checksum) 71 | } 72 | _ => (Checksum::zero(), Checksum::zero()), 73 | }; 74 | 75 | let actual = add_checksum + oplog_checksum; 76 | 77 | if actual != bucket.checksum { 78 | failures.push(ChecksumMismatch { 79 | bucket_name: bucket.bucket.clone(), 80 | expected_checksum: bucket.checksum, 81 | actual_add_checksum: add_checksum, 82 | actual_op_checksum: oplog_checksum, 83 | }); 84 | } 85 | 86 | statement.reset()?; 87 | } 88 | } 89 | 90 | Ok(failures) 91 | } 92 | -------------------------------------------------------------------------------- /crates/core/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | extern crate alloc; 3 | 4 | use core::ffi::{c_char, c_int}; 5 | 6 | use alloc::{ffi::CString, format, rc::Rc}; 7 | use powersync_sqlite_nostd as sqlite; 8 | use sqlite::ResultCode; 9 | 10 | use crate::{error::PowerSyncError, state::DatabaseState}; 11 | 12 | mod bson; 13 | mod checkpoint; 14 | mod constants; 15 | mod crud_vtab; 16 | mod diff; 17 | mod error; 18 | mod ext; 19 | mod fix_data; 20 | mod json_util; 21 | mod kv; 22 | mod macros; 23 | mod migrations; 24 | mod operations; 25 | mod operations_vtab; 26 | mod schema; 27 | mod state; 28 | mod sync; 29 | mod sync_local; 30 | mod update_hooks; 31 | mod util; 32 | mod uuid; 33 | mod version; 34 | mod view_admin; 35 | mod views; 36 | mod vtab_util; 37 | 38 | /// The entrypoint for the PowerSync SQLite core extension. 39 | /// 40 | /// When compiling this Rust crate into a dynamic library, it can be used by embedders to load it 41 | /// through [SQLite's opening mechanism](https://sqlite.org/loadext.html#loading_an_extension). 
42 | #[unsafe(no_mangle)] 43 | pub extern "C" fn sqlite3_powersync_init( 44 | db: *mut sqlite::sqlite3, 45 | err_msg: *mut *mut c_char, 46 | api: *mut sqlite::api_routines, 47 | ) -> c_int { 48 | debug_assert!(unsafe { *err_msg }.is_null()); 49 | sqlite::EXTENSION_INIT2(api); 50 | 51 | let result = init_extension(db); 52 | 53 | return if let Err(code) = result { 54 | if let Ok(desc) = CString::new(format!("Could not initialize PowerSync: {}", code)) { 55 | // Note: This is fine since we're using sqlite3_malloc to allocate in Rust 56 | unsafe { *err_msg = desc.into_raw() as *mut c_char }; 57 | } 58 | 59 | code.sqlite_error_code() as c_int 60 | } else { 61 | ResultCode::OK as c_int 62 | }; 63 | } 64 | 65 | fn init_extension(db: *mut sqlite::sqlite3) -> Result<(), PowerSyncError> { 66 | PowerSyncError::check_sqlite3_version()?; 67 | 68 | let state = Rc::new(DatabaseState::new()); 69 | 70 | crate::version::register(db)?; 71 | crate::uuid::register(db)?; 72 | crate::diff::register(db)?; 73 | crate::fix_data::register(db)?; 74 | crate::json_util::register(db)?; 75 | crate::view_admin::register(db, state.clone())?; 76 | crate::checkpoint::register(db)?; 77 | crate::kv::register(db)?; 78 | crate::state::register(db, state.clone())?; 79 | sync::register(db, state.clone())?; 80 | update_hooks::register(db, state.clone())?; 81 | 82 | crate::schema::register(db, state.clone())?; 83 | crate::operations_vtab::register(db, state.clone())?; 84 | crate::crud_vtab::register(db, state)?; 85 | 86 | Ok(()) 87 | } 88 | 89 | unsafe extern "C" { 90 | #[cfg(feature = "static")] 91 | #[allow(non_snake_case)] 92 | fn sqlite3_auto_extension( 93 | xEntryPoint: Option< 94 | extern "C" fn( 95 | *mut sqlite::sqlite3, 96 | *mut *mut c_char, 97 | *mut sqlite::api_routines, 98 | ) -> c_int, 99 | >, 100 | ) -> ::core::ffi::c_int; 101 | } 102 | 103 | /// Calls `sqlite3_auto_extension` with [sqlite3_powersync_init] to automatically load 104 | /// the PowerSync core extension into new connections. 105 | /// 106 | /// For details, see https://sqlite.org/loadext.html#statically_linking_a_run_time_loadable_extension 107 | #[cfg(feature = "static")] 108 | #[unsafe(no_mangle)] 109 | pub extern "C" fn powersync_init_static() -> c_int { 110 | unsafe { 111 | let f = sqlite3_powersync_init; 112 | return sqlite3_auto_extension(Some(f)); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /crates/core/src/sync/bucket_priority.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize, de::Visitor}; 2 | 3 | use crate::error::PowerSyncError; 4 | 5 | #[repr(transparent)] 6 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 7 | pub struct BucketPriority { 8 | pub number: i32, 9 | } 10 | 11 | impl BucketPriority { 12 | pub fn may_publish_with_outstanding_uploads(self) -> bool { 13 | self == BucketPriority::HIGHEST 14 | } 15 | 16 | /// The priority to use when the sync service doesn't attach priorities in checkpoints. 17 | pub const FALLBACK: BucketPriority = BucketPriority { number: 3 }; 18 | pub const HIGHEST: BucketPriority = BucketPriority { number: 0 }; 19 | 20 | /// A low priority used to represent fully-completed sync operations across all priorities. 
21 | pub const SENTINEL: BucketPriority = BucketPriority { number: i32::MAX }; 22 | } 23 | 24 | impl TryFrom<i32> for BucketPriority { 25 | type Error = PowerSyncError; 26 | 27 | fn try_from(value: i32) -> Result<Self, Self::Error> { 28 | if value < BucketPriority::HIGHEST.number || value == Self::SENTINEL.number { 29 | return Err(PowerSyncError::argument_error( 30 | "Invalid bucket priority value", 31 | )); 32 | } 33 | 34 | return Ok(BucketPriority { number: value }); 35 | } 36 | } 37 | 38 | impl Into<i32> for BucketPriority { 39 | fn into(self) -> i32 { 40 | self.number 41 | } 42 | } 43 | 44 | impl PartialOrd for BucketPriority { 45 | fn partial_cmp(&self, other: &BucketPriority) -> Option<core::cmp::Ordering> { 46 | Some(self.cmp(other)) 47 | } 48 | } 49 | 50 | impl Ord for BucketPriority { 51 | fn cmp(&self, other: &Self) -> core::cmp::Ordering { 52 | self.number.cmp(&other.number).reverse() 53 | } 54 | } 55 | 56 | impl<'de> Deserialize<'de> for BucketPriority { 57 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 58 | where 59 | D: serde::Deserializer<'de>, 60 | { 61 | struct PriorityVisitor; 62 | impl<'de> Visitor<'de> for PriorityVisitor { 63 | type Value = BucketPriority; 64 | 65 | fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { 66 | formatter.write_str("a priority as an integer between 0 and 3 (inclusive)") 67 | } 68 | 69 | fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E> 70 | where 71 | E: serde::de::Error, 72 | { 73 | BucketPriority::try_from(v).map_err(|e| E::custom(e)) 74 | } 75 | 76 | fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> 77 | where 78 | E: serde::de::Error, 79 | { 80 | let i: i32 = v.try_into().map_err(|_| E::custom("int too large"))?; 81 | Self::visit_i32(self, i) 82 | } 83 | 84 | fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> 85 | where 86 | E: serde::de::Error, 87 | { 88 | let i: i32 = v.try_into().map_err(|_| E::custom("int too large"))?; 89 | Self::visit_i32(self, i) 90 | } 91 | } 92 | 93 | deserializer.deserialize_i32(PriorityVisitor) 94 | } 95 | } 96 | 97 | impl Serialize for BucketPriority { 98 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 99 | where 100 | S: serde::Serializer, 101 | { 102 | serializer.serialize_i32(self.number) 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /crates/core/src/bson/error.rs: -------------------------------------------------------------------------------- 1 | use core::{fmt::Display, str::Utf8Error}; 2 | 3 | use alloc::{ 4 | boxed::Box, 5 | string::{String, ToString}, 6 | }; 7 | use serde::de::{self}; 8 | 9 | use super::parser::ElementType; 10 | 11 | #[derive(Debug)] 12 | pub struct BsonError { 13 | /// Using a [Box] here keeps the size of this type small, which makes results of this error 14 | /// type smaller (at the cost of making errors more expensive to report, but that's fine because 15 | /// we expect them to be rare). 
16 | err: Box<BsonErrorImpl>, 17 | } 18 | 19 | #[derive(Debug)] 20 | struct BsonErrorImpl { 21 | offset: Option<usize>, 22 | kind: ErrorKind, 23 | } 24 | 25 | #[derive(Debug)] 26 | pub enum ErrorKind { 27 | Custom(String), 28 | UnknownElementType(i8), 29 | UnterminatedCString, 30 | InvalidCString(Utf8Error), 31 | UnexpectedEoF, 32 | InvalidEndOfDocument, 33 | InvalidSize, 34 | InvalidStateExpectedType, 35 | InvalidStateExpectedName, 36 | InvalidStateExpectedValue, 37 | ExpectedEnum { actual: ElementType }, 38 | ExpectedString, 39 | UnexpectedEndOfDocumentForEnumVariant, 40 | } 41 | 42 | impl BsonError { 43 | pub fn new(offset: Option<usize>, kind: ErrorKind) -> Self { 44 | Self { 45 | err: Box::new(BsonErrorImpl { offset, kind }), 46 | } 47 | } 48 | } 49 | 50 | impl core::error::Error for BsonError {} 51 | 52 | impl Display for BsonError { 53 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 54 | self.err.fmt(f) 55 | } 56 | } 57 | 58 | impl Display for BsonErrorImpl { 59 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 60 | if let Some(offset) = self.offset { 61 | write!(f, "bson error, at {offset}: {}", self.kind) 62 | } else { 63 | write!(f, "bson error at unknown offset: {}", self.kind) 64 | } 65 | } 66 | } 67 | 68 | impl Display for ErrorKind { 69 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 70 | match self { 71 | ErrorKind::Custom(msg) => write!(f, "custom {msg}"), 72 | ErrorKind::UnknownElementType(code) => write!(f, "unknown element code: {code}"), 73 | ErrorKind::UnterminatedCString => write!(f, "unterminated cstring"), 74 | ErrorKind::InvalidCString(e) => write!(f, "cstring with non-utf8 content: {e}"), 75 | ErrorKind::UnexpectedEoF => write!(f, "unexpected end of file"), 76 | ErrorKind::InvalidEndOfDocument => write!(f, "unexpected end of document"), 77 | ErrorKind::InvalidSize => write!(f, "invalid document size"), 78 | ErrorKind::InvalidStateExpectedType => write!(f, "internal state error, expected type"), 79 | ErrorKind::InvalidStateExpectedName => write!(f, "internal state error, expected name"), 80 | ErrorKind::InvalidStateExpectedValue => { 81 | write!(f, "internal state error, expected value") 82 | } 83 | ErrorKind::ExpectedEnum { actual } => write!(f, "expected enum, got {}", *actual as u8), 84 | ErrorKind::ExpectedString => write!(f, "expected a string value"), 85 | ErrorKind::UnexpectedEndOfDocumentForEnumVariant => { 86 | write!(f, "unexpected end of document for enum variant") 87 | } 88 | } 89 | } 90 | } 91 | 92 | impl de::Error for BsonError { 93 | fn custom<T>(msg: T) -> Self 94 | where 95 | T: Display, 96 | { 97 | BsonError::new(None, ErrorKind::Custom(msg.to_string())) 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /dart/test/utils/schema.dart: -------------------------------------------------------------------------------- 1 | import 'package:sqlite3/common.dart'; 2 | 3 | /// Utilities for getting the SQLite schema 4 | 5 | /// Get tables, indexes, views and triggers, as one big string 6 | String getSchema(CommonDatabase db) { 7 | final rows = db.select(""" 8 | SELECT type, name, sql FROM sqlite_master ORDER BY 9 | CASE 10 | WHEN type = 'table' AND name LIKE 'ps_data_%' THEN 3 11 | WHEN type = 'table' THEN 1 12 | WHEN type = 'index' THEN 2 13 | WHEN type = 'view' THEN 4 14 | WHEN type = 'trigger' THEN 5 15 | END ASC, name ASC"""); 16 | 17 | List<String> result = []; 18 | for (var row in rows) { 19 | if (row['name'].startsWith('__') || row['name'] == 'sqlite_sequence') { 20 | 
// Internal SQLite tables. 21 | continue; 22 | } 23 | if (row['sql'] != null) { 24 | var sql = (row['sql'] as String).trim(); 25 | // We put a semicolon before each statement instead of after, 26 | // so that comments at the end of the statement are preserved. 27 | result.add(';$sql'); 28 | } 29 | } 30 | return result.join('\n'); 31 | } 32 | 33 | /// Get data from the ps_migration table 34 | String getMigrations(CommonDatabase db) { 35 | List<String> result = []; 36 | var migrationRows = 37 | db.select('SELECT id, down_migrations FROM ps_migration ORDER BY id ASC'); 38 | 39 | for (var row in migrationRows) { 40 | var version = row['id']!; 41 | var downMigrations = row['down_migrations']; 42 | if (downMigrations == null) { 43 | result.add( 44 | ';INSERT INTO ps_migration(id, down_migrations) VALUES($version, null)'); 45 | } else { 46 | result.add( 47 | ';INSERT INTO ps_migration(id, down_migrations) VALUES($version, ${escapeSqlString(downMigrations)})'); 48 | } 49 | } 50 | return result.join('\n'); 51 | } 52 | 53 | /// Get data from specific tables, as INSERT INTO statements. 54 | String getData(CommonDatabase db) { 55 | const queries = [ 56 | {'table': 'ps_buckets', 'query': 'select * from ps_buckets order by name'}, 57 | { 58 | 'table': 'ps_oplog', 59 | 'query': 'select * from ps_oplog order by bucket, op_id' 60 | }, 61 | { 62 | 'table': 'ps_updated_rows', 63 | 'query': 'select * from ps_updated_rows order by row_type, row_id' 64 | }, 65 | { 66 | 'table': 'ps_data__lists', 67 | 'query': 'select * from ps_data__lists order by id' 68 | }, 69 | { 70 | 'table': 'ps_data__todos', 71 | 'query': 'select * from ps_data__todos order by id' 72 | } 73 | ]; 74 | List<String> result = []; 75 | for (var q in queries) { 76 | try { 77 | final rs = db.select(q['query']!); 78 | if (rs.isEmpty) { 79 | continue; 80 | } 81 | 82 | result.add( 83 | ';INSERT INTO ${q['table']}(${rs.columnNames.join(', ')}) VALUES'); 84 | var values = rs.rows 85 | .map((row) => 86 | '(${row.map((column) => escapeSqlLiteral(column)).join(', ')})') 87 | .join(',\n '); 88 | result.add(' $values'); 89 | } catch (e) { 90 | if (e.toString().contains('no such table')) { 91 | // Table doesn't exist - ignore 92 | } else { 93 | rethrow; 94 | } 95 | } 96 | } 97 | return result.join('\n'); 98 | } 99 | 100 | /// Escape an integer, string or null value as a literal for a query. 101 | String escapeSqlLiteral(dynamic value) { 102 | if (value == null) { 103 | return 'null'; 104 | } else if (value is String) { 105 | return escapeSqlString(value); 106 | } else if (value is int) { 107 | return '$value'; 108 | } else { 109 | throw ArgumentError('Unsupported value type: $value'); 110 | } 111 | } 112 | 113 | /// Quote a string for usage in a SQLite query. 114 | /// 115 | /// Not safe for general usage, but should be sufficient for these tests. 
116 | String escapeSqlString(String text) { 117 | return """'${text.replaceAll(RegExp(r"'"), "''")}'"""; 118 | } 119 | -------------------------------------------------------------------------------- /dart/test/utils/native_test_utils.dart: -------------------------------------------------------------------------------- 1 | import 'dart:ffi'; 2 | import 'dart:io'; 3 | 4 | import 'package:fake_async/fake_async.dart'; 5 | import 'package:meta/meta.dart'; 6 | import 'package:sqlite3/common.dart'; 7 | import 'package:sqlite3/open.dart' as sqlite_open; 8 | import 'package:sqlite3/sqlite3.dart'; 9 | import 'package:path/path.dart' as p; 10 | import 'package:test/test.dart'; 11 | 12 | const defaultSqlitePath = 'libsqlite3.so.0'; 13 | 14 | const cargoDebugPath = '../target/debug'; 15 | var didLoadExtension = false; 16 | 17 | void applyOpenOverride() { 18 | if (Platform.environment['CORE_TEST_SQLITE'] case final sqlite?) { 19 | sqlite_open.open 20 | .overrideForAll(() => DynamicLibrary.open(p.absolute(sqlite))); 21 | } 22 | 23 | sqlite_open.open.overrideFor(sqlite_open.OperatingSystem.linux, () { 24 | return DynamicLibrary.open('libsqlite3.so.0'); 25 | }); 26 | sqlite_open.open.overrideFor(sqlite_open.OperatingSystem.macOS, () { 27 | // Prefer using Homebrew's SQLite which allows loading extensions. 28 | const fromHomebrew = '/opt/homebrew/opt/sqlite/lib/libsqlite3.dylib'; 29 | if (File(fromHomebrew).existsSync()) { 30 | return DynamicLibrary.open(fromHomebrew); 31 | } 32 | 33 | return DynamicLibrary.open('libsqlite3.dylib'); 34 | }); 35 | } 36 | 37 | CommonDatabase openTestDatabase( 38 | {VirtualFileSystem? vfs, String fileName = ':memory:'}) { 39 | applyOpenOverride(); 40 | if (!didLoadExtension) { 41 | loadExtension(); 42 | } 43 | 44 | return sqlite3.open(fileName, vfs: vfs?.name); 45 | } 46 | 47 | void loadExtension() { 48 | applyOpenOverride(); 49 | 50 | // Using an absolute path is required for macOS, where Dart can't dlopen 51 | // relative paths due to being a "hardened program". 52 | var lib = 53 | DynamicLibrary.open(p.normalize(p.absolute(resolvePowerSyncLibrary()))); 54 | var extension = SqliteExtension.inLibrary(lib, 'sqlite3_powersync_init'); 55 | sqlite3.ensureExtensionLoaded(extension); 56 | didLoadExtension = true; 57 | } 58 | 59 | String resolvePowerSyncLibrary() { 60 | if (Directory('assets').existsSync()) { 61 | // For the CI tests, we download prebuilt artifacts from an earlier step 62 | // into assets. Use that. 63 | const prefix = 'assets'; 64 | 65 | return p.join( 66 | prefix, 67 | switch (Abi.current()) { 68 | Abi.macosX64 => 'libpowersync_x64.macos.dylib', 69 | Abi.macosArm64 => 'libpowersync_aarch64.macos.dylib', 70 | Abi.windowsX64 => 'powersync_x64.dll', 71 | Abi.windowsArm64 => 'powersync_aarch64.dll', 72 | Abi.linuxX64 => 'libpowersync_x64.linux.so', 73 | Abi.linuxArm => 'libpowersync_armv7.linux.so', 74 | Abi.linuxArm64 => 'libpowersync_aarch64.linux.so', 75 | Abi.linuxRiscv64 => 'libpowersync_riscv64gc.linux.so', 76 | _ => throw ArgumentError( 77 | 'Unsupported processor architecture "${Abi.current()}". ' 78 | 'Please open an issue on GitHub to request it.', 79 | ) 80 | }); 81 | } else { 82 | // Otherwise, use a local build from ../target/debug/. 83 | return _getLibraryForPlatform(); 84 | } 85 | } 86 | 87 | String _getLibraryForPlatform({String? 
path = cargoDebugPath}) { 88 | return switch (Abi.current()) { 89 | Abi.androidArm || 90 | Abi.androidArm64 || 91 | Abi.androidX64 => 92 | '$path/libpowersync.so', 93 | Abi.macosArm64 || Abi.macosX64 => '$path/libpowersync.dylib', 94 | Abi.linuxX64 || Abi.linuxArm64 => '$path/libpowersync.so', 95 | Abi.windowsX64 => '$path/powersync.dll', 96 | Abi.androidIA32 => throw ArgumentError( 97 | 'Unsupported processor architecture. X86 Android emulators are not ' 98 | 'supported. Please use an x86_64 emulator instead. All physical ' 99 | 'Android devices are supported including 32bit ARM.', 100 | ), 101 | _ => throw ArgumentError( 102 | 'Unsupported processor architecture "${Abi.current()}". ' 103 | 'Please open an issue on GitHub to request it.', 104 | ) 105 | }; 106 | } 107 | 108 | @isTest 109 | void syncTest(String description, void Function(FakeAsync controller) body) { 110 | return test(description, () { 111 | // Give each test the same starting time to make goldens easier to compare. 112 | fakeAsync(body, initialTime: DateTime.utc(2025, 3, 1, 10)); 113 | }); 114 | } 115 | -------------------------------------------------------------------------------- /dart/test/goldens/simple_iteration.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "operation": "start", 4 | "data": null, 5 | "output": [ 6 | { 7 | "UpdateSyncStatus": { 8 | "status": { 9 | "connected": false, 10 | "connecting": true, 11 | "priority_status": [], 12 | "downloading": null, 13 | "streams": [] 14 | } 15 | } 16 | }, 17 | { 18 | "EstablishSyncStream": { 19 | "request": { 20 | "buckets": [], 21 | "include_checksum": true, 22 | "raw_data": true, 23 | "binary_data": true, 24 | "client_id": "test-test-test-test", 25 | "parameters": null, 26 | "streams": { 27 | "include_defaults": true, 28 | "subscriptions": [] 29 | } 30 | } 31 | } 32 | } 33 | ] 34 | }, 35 | { 36 | "operation": "line_text", 37 | "data": { 38 | "checkpoint": { 39 | "last_op_id": "1", 40 | "write_checkpoint": null, 41 | "buckets": [ 42 | { 43 | "bucket": "a", 44 | "checksum": 0, 45 | "priority": 3, 46 | "count": 1 47 | } 48 | ] 49 | } 50 | }, 51 | "output": [ 52 | { 53 | "UpdateSyncStatus": { 54 | "status": { 55 | "connected": true, 56 | "connecting": false, 57 | "priority_status": [], 58 | "downloading": { 59 | "buckets": { 60 | "prio_3": { 61 | "priority": 3, 62 | "at_last": 0, 63 | "since_last": 0, 64 | "target_count": 1 65 | } 66 | } 67 | }, 68 | "streams": [] 69 | } 70 | } 71 | } 72 | ] 73 | }, 74 | { 75 | "operation": "line_text", 76 | "data": { 77 | "token_expires_in": 60 78 | }, 79 | "output": [] 80 | }, 81 | { 82 | "operation": "line_text", 83 | "data": { 84 | "data": { 85 | "bucket": "a", 86 | "has_more": false, 87 | "after": null, 88 | "next_after": null, 89 | "data": [ 90 | { 91 | "op_id": "1", 92 | "op": "PUT", 93 | "object_type": "items", 94 | "object_id": "1", 95 | "checksum": 0, 96 | "data": "{\"col\":\"hi\"}" 97 | } 98 | ] 99 | } 100 | }, 101 | "output": [ 102 | { 103 | "UpdateSyncStatus": { 104 | "status": { 105 | "connected": true, 106 | "connecting": false, 107 | "priority_status": [], 108 | "downloading": { 109 | "buckets": { 110 | "prio_3": { 111 | "priority": 3, 112 | "at_last": 0, 113 | "since_last": 1, 114 | "target_count": 1 115 | } 116 | } 117 | }, 118 | "streams": [] 119 | } 120 | } 121 | } 122 | ] 123 | }, 124 | { 125 | "operation": "line_text", 126 | "data": { 127 | "checkpoint_complete": { 128 | "last_op_id": "1" 129 | } 130 | }, 131 | "output": [ 132 | { 133 | "LogLine": { 134 | 
"severity": "DEBUG", 135 | "line": "Validated and applied checkpoint" 136 | } 137 | }, 138 | { 139 | "FlushFileSystem": {} 140 | }, 141 | { 142 | "DidCompleteSync": {} 143 | }, 144 | { 145 | "UpdateSyncStatus": { 146 | "status": { 147 | "connected": true, 148 | "connecting": false, 149 | "priority_status": [ 150 | { 151 | "priority": 2147483647, 152 | "last_synced_at": 1740823200, 153 | "has_synced": true 154 | } 155 | ], 156 | "downloading": null, 157 | "streams": [] 158 | } 159 | } 160 | } 161 | ] 162 | }, 163 | { 164 | "operation": "line_text", 165 | "data": { 166 | "token_expires_in": 10 167 | }, 168 | "output": [ 169 | { 170 | "FetchCredentials": { 171 | "did_expire": false 172 | } 173 | } 174 | ] 175 | } 176 | ] -------------------------------------------------------------------------------- /crates/core/src/diff.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::string::{String, ToString}; 4 | use core::ffi::c_int; 5 | 6 | use powersync_sqlite_nostd as sqlite; 7 | use powersync_sqlite_nostd::{Connection, Context, Value}; 8 | use sqlite::ResultCode; 9 | 10 | use crate::constants::SUBTYPE_JSON; 11 | use crate::create_sqlite_text_fn; 12 | use crate::error::PowerSyncError; 13 | use powersync_sqlite_nostd::bindings::SQLITE_RESULT_SUBTYPE; 14 | use serde_json as json; 15 | 16 | fn powersync_diff_impl( 17 | ctx: *mut sqlite::context, 18 | args: &[*mut sqlite::value], 19 | ) -> Result { 20 | let data_old = args[0].text(); 21 | let data_new = args[1].text(); 22 | 23 | ctx.result_subtype(SUBTYPE_JSON); 24 | diff_objects(data_old, data_new) 25 | } 26 | 27 | pub fn diff_objects(data_old: &str, data_new: &str) -> Result { 28 | let v_new: json::Value = json::from_str(data_new).map_err(PowerSyncError::as_argument_error)?; 29 | let v_old: json::Value = json::from_str(data_old).map_err(PowerSyncError::as_argument_error)?; 30 | 31 | if let (json::Value::Object(mut left), json::Value::Object(mut right)) = (v_new, v_old) { 32 | // Remove all null values 33 | right.retain(|_, v| !v.is_null()); 34 | left.retain(|_, v| !v.is_null()); 35 | 36 | if right.len() == 0 { 37 | // Simple case 38 | return Ok(json::Value::Object(left).to_string()); 39 | } 40 | 41 | // Add missing nulls to left 42 | for key in right.keys() { 43 | if !left.contains_key(key) { 44 | left.insert(key.clone(), json::Value::Null); 45 | } 46 | } 47 | 48 | left.retain(|key, value| { 49 | let r = right.get(key); 50 | if let Some(r) = r { 51 | // Check if value is different 52 | value != r 53 | } else { 54 | // Value not present in right 55 | true 56 | } 57 | }); 58 | 59 | Ok(json::Value::Object(left).to_string()) 60 | } else { 61 | return Err(PowerSyncError::argument_error("expected two JSON objects")); 62 | } 63 | } 64 | 65 | create_sqlite_text_fn!(powersync_diff, powersync_diff_impl, "powersync_diff"); 66 | 67 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> { 68 | db.create_function_v2( 69 | "powersync_diff", 70 | 2, 71 | sqlite::UTF8 | sqlite::DETERMINISTIC | SQLITE_RESULT_SUBTYPE, 72 | None, 73 | Some(powersync_diff), 74 | None, 75 | None, 76 | None, 77 | )?; 78 | 79 | Ok(()) 80 | } 81 | 82 | #[cfg(test)] 83 | mod tests { 84 | use super::*; 85 | 86 | #[test] 87 | fn basic_diff_test() { 88 | assert_eq!(diff_objects("{}", "{}").unwrap(), "{}"); 89 | assert_eq!(diff_objects(r#"{"a": null}"#, "{}").unwrap(), "{}"); 90 | assert_eq!(diff_objects(r#"{}"#, r#"{"a": null}"#).unwrap(), "{}"); 91 | assert_eq!( 92 | diff_objects(r#"{"b": 1}"#, r#"{"a": 
null, "b": 1}"#).unwrap(), 93 | "{}" 94 | ); 95 | assert_eq!( 96 | diff_objects(r#"{"b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), 97 | r#"{"b":2}"# 98 | ); 99 | assert_eq!( 100 | diff_objects(r#"{"a": 0, "b": 1}"#, r#"{"a": null, "b": 2}"#).unwrap(), 101 | r#"{"a":null,"b":2}"# 102 | ); 103 | assert_eq!( 104 | diff_objects(r#"{"a": 1}"#, r#"{"a": null}"#).unwrap(), 105 | r#"{"a":null}"# 106 | ); 107 | assert_eq!( 108 | diff_objects(r#"{"a": 1}"#, r#"{}"#).unwrap(), 109 | r#"{"a":null}"# 110 | ); 111 | assert_eq!( 112 | diff_objects(r#"{"a": 1}"#, r#"{"a": 2}"#).unwrap(), 113 | r#"{"a":2}"# 114 | ); 115 | assert_eq!( 116 | diff_objects(r#"{"a": 1}"#, r#"{"a": "1"}"#).unwrap(), 117 | r#"{"a":"1"}"# 118 | ); 119 | assert_eq!( 120 | diff_objects(r#"{"a": 1}"#, r#"{"a": 1.0}"#).unwrap(), 121 | r#"{"a":1.0}"# 122 | ); 123 | assert_eq!( 124 | diff_objects(r#"{"a": 1.00}"#, r#"{"a": 1.0}"#).unwrap(), 125 | r#"{}"# 126 | ); 127 | assert_eq!( 128 | diff_objects(r#"{}"#, r#"{"a": 1.0}"#).unwrap(), 129 | r#"{"a":1.0}"# 130 | ); 131 | assert_eq!( 132 | diff_objects(r#"{}"#, r#"{"a": [1,2,3]}"#).unwrap(), 133 | r#"{"a":[1,2,3]}"# 134 | ); 135 | assert_eq!( 136 | diff_objects(r#"{"a": 1}"#, r#"{"a": [1,2,3]}"#).unwrap(), 137 | r#"{"a":[1,2,3]}"# 138 | ); 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /docs/sync.md: -------------------------------------------------------------------------------- 1 | ## Sync interface 2 | 3 | The core extension implements the state machine and necessary SQL handling to decode and apply 4 | sync line sent from a PowerSync service instance. 5 | 6 | After registering the PowerSync extension, this client is available through the `powersync_control` 7 | function, which takes two arguments: A command (text), and a payload (text, blob, or null). 8 | The function should always be called in a transaction. 9 | 10 | The following commands are supported: 11 | 12 | 1. `start`: Payload is a JSON-encoded object. This requests the client to start a sync iteration. 13 | The payload can either be `null` or an JSON object with: 14 | - An optional `parameters: Record` entry, specifying parameters to include in the request 15 | to the sync service. 16 | - A `schema: { tables: Table[], raw_tables: RawTable[] }` entry specifying the schema of the database to 17 | use. Regular tables are also inferred from the database itself, but raw tables need to be specified. 18 | If no raw tables are used, the `schema` entry can be omitted. 19 | - `active_streams`: An array of `{name: string, params: Record}` entries representing streams that 20 | have an active subscription object in the application at the time the stream was opened. 21 | 2. `stop`: No payload, requests the current sync iteration (if any) to be shut down. 22 | 3. `line_text`: Payload is a serialized JSON object received from the sync service. 23 | 4. `line_binary`: Payload is a BSON-encoded object received from the sync service. 24 | 5. `refreshed_token`: Notify the sync client that the JWT used to authenticate to the PowerSync service has 25 | changed. 26 | - The client will emit an instruction to stop the current stream, clients should restart by sending another `start` 27 | command. 28 | 6. `completed_upload`: Notify the sync implementation that all local changes have been uploaded. 29 | 7. `update_subscriptions`: Notify the sync implementation that subscriptions which are currently active in the app 30 | have changed. 
Depending on the TTL of caches, this may cause it to request a reconnect. 31 | 8. `connection`: Notify the sync implementation about the connection being opened (second parameter should be `established`) 32 | or the HTTP stream closing (second parameter should be `end`). 33 | This is used to set `connected` to true in the sync status without waiting for the first sync line. 34 | 9. `subscriptions`: Store a new sync stream subscription in the database or remove it. 35 | This command can run outside of a sync iteration and does not affect it. 36 | 10. `update_subscriptions`: Second parameter is a JSON-encoded array of `{name: string, params: Record<string, any>}`. 37 | If a new subscription is created, or when a subscription without a TTL has been removed, the client will ask to 38 | restart the connection. 39 | 40 | `powersync_control` returns a JSON-encoded array of instructions for the client: 41 | 42 | ```typescript 43 | type Instruction = { LogLine: LogLine } 44 | | { UpdateSyncStatus: UpdateSyncStatus } 45 | | { EstablishSyncStream: EstablishSyncStream } 46 | | { FetchCredentials: FetchCredentials } 47 | // Close a connection previously started after EstablishSyncStream 48 | | { CloseSyncStream: { hide_disconnect: boolean } } 49 | // For the Dart web client, flush the (otherwise non-durable) file system. 50 | | { FlushFileSystem: {} } 51 | // Notify clients that a checkpoint was completed. Clients can clear the 52 | // download error state in response to this. 53 | | { DidCompleteSync: {} } 54 | 55 | interface LogLine { 56 | severity: 'DEBUG' | 'INFO' | 'WARNING', 57 | line: String, 58 | } 59 | 60 | // Instructs client SDKs to open a connection to the sync service. 61 | interface EstablishSyncStream { 62 | request: any // The JSON-encoded StreamingSyncRequest to send to the sync service 63 | } 64 | 65 | // Instructs SDKs to update the downloading state of their SyncStatus. 66 | interface UpdateSyncStatus { 67 | connected: boolean, 68 | connecting: boolean, 69 | priority_status: SyncPriorityStatus[], 70 | downloading: null | DownloadProgress, 71 | } 72 | 73 | // Instructs SDKs to refresh credentials from the backend connector. 74 | // They don't necessarily have to close the connection; a CloseSyncStream instruction 75 | // will be sent when the token has already expired. 76 | interface FetchCredentials { 77 | // Set as an option in case fetching and prefetching should be handled differently. 78 | did_expire: boolean 79 | } 80 | 81 | interface SyncPriorityStatus { 82 | priority: int, 83 | last_synced_at: null | int, 84 | has_synced: null | boolean, 85 | } 86 | 87 | interface DownloadProgress { 88 | buckets: Record<string, BucketProgress> 89 | } 90 | 91 | interface BucketProgress { 92 | priority: int, 93 | at_last: int, 94 | since_last: int, 95 | target_count: int 96 | } 97 | ``` 98 | -------------------------------------------------------------------------------- /crates/core/src/sync/subscriptions.rs: -------------------------------------------------------------------------------- 1 | use core::time::Duration; 2 | 3 | use alloc::{boxed::Box, string::String}; 4 | use powersync_sqlite_nostd::{self as sqlite, Connection}; 5 | use serde::Deserialize; 6 | use serde_with::{DurationSeconds, serde_as}; 7 | 8 | use crate::{ 9 | error::{PSResult, PowerSyncError}, 10 | ext::SafeManagedStmt, 11 | sync::BucketPriority, 12 | util::JsonString, 13 | }; 14 | 15 | /// A row in the `ps_stream_subscriptions` table.
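///
/// Editorial sketch of the fields below (not authoritative): `ttl` and `expires_at`
/// are in seconds, with `expires_at` computed as `unixepoch() + ttl` when the stream is
/// (re-)subscribed, and `local_params` stores the JSON parameters the stream was
/// subscribed with.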
16 | pub struct LocallyTrackedSubscription { 17 | pub id: i64, 18 | pub stream_name: String, 19 | pub active: bool, 20 | pub is_default: bool, 21 | pub local_priority: Option<BucketPriority>, 22 | pub local_params: Option<Box<JsonString>>, 23 | pub ttl: Option<i64>, 24 | pub expires_at: Option<i64>, 25 | pub last_synced_at: Option<i64>, 26 | } 27 | 28 | impl LocallyTrackedSubscription { 29 | /// The default TTL of non-default subscriptions if none is set: One day. 30 | pub const DEFAULT_TTL: i64 = 60 * 60 * 24; 31 | 32 | pub fn has_subscribed_manually(&self) -> bool { 33 | self.ttl.is_some() 34 | } 35 | } 36 | 37 | /// A request sent from a PowerSync SDK to alter the subscriptions managed by this client. 38 | #[derive(Deserialize)] 39 | pub enum SubscriptionChangeRequest { 40 | #[serde(rename = "subscribe")] 41 | Subscribe(SubscribeToStream), 42 | 43 | /// Explicitly unsubscribes from a stream. This corresponds to the `unsubscribeAll()` API in the 44 | /// SDKs. 45 | /// 46 | /// Unsubscribing a single stream subscription happens internally in the SDK by reducing its 47 | /// refcount. Once no references remain, it's no longer listed in 48 | /// [StartSyncStream.active_streams] which will cause it to get unsubscribed after its TTL. 49 | #[serde(rename = "unsubscribe")] 50 | Unsubscribe(StreamKey), 51 | } 52 | 53 | /// A key uniquely identifying a stream. 54 | #[derive(Deserialize)] 55 | pub struct StreamKey { 56 | pub name: String, 57 | #[serde(default)] 58 | pub params: Option<Box<JsonString>>, 59 | } 60 | 61 | impl StreamKey { 62 | pub fn serialized_params(&self) -> &str { 63 | match &self.params { 64 | Some(params) => params.get(), 65 | None => "null", 66 | } 67 | } 68 | } 69 | 70 | #[serde_as] 71 | #[derive(Deserialize)] 72 | pub struct SubscribeToStream { 73 | pub stream: StreamKey, 74 | #[serde_as(as = "Option<DurationSeconds<u64>>")] 75 | #[serde(default)] 76 | pub ttl: Option<Duration>, 77 | #[serde(default)] 78 | pub priority: Option<BucketPriority>, 79 | } 80 | 81 | pub fn apply_subscriptions( 82 | db: *mut sqlite::sqlite3, 83 | subscription: SubscriptionChangeRequest, 84 | ) -> Result<(), PowerSyncError> { 85 | match subscription { 86 | SubscriptionChangeRequest::Subscribe(subscription) => { 87 | let stmt = db 88 | .prepare_v2( 89 | " 90 | INSERT INTO ps_stream_subscriptions (stream_name, local_priority, local_params, ttl, expires_at) 91 | VALUES (?, ?2, ?, ?4, unixepoch() + ?4) 92 | ON CONFLICT DO UPDATE SET 93 | local_priority = min(coalesce(?2, local_priority), 94 | local_priority), 95 | ttl = ?4, 96 | expires_at = unixepoch() + ?4 97 | ", 98 | ) 99 | .into_db_result(db)?; 100 | 101 | stmt.bind_text(1, &subscription.stream.name, sqlite::Destructor::STATIC)?; 102 | match &subscription.priority { 103 | Some(priority) => stmt.bind_int(2, priority.number), 104 | None => stmt.bind_null(2), 105 | }?; 106 | stmt.bind_text( 107 | 3, 108 | subscription.stream.serialized_params(), 109 | sqlite::Destructor::STATIC, 110 | )?; 111 | stmt.bind_int64( 112 | 4, 113 | subscription 114 | .ttl 115 | .map(|f| f.as_secs() as i64) 116 | .unwrap_or(LocallyTrackedSubscription::DEFAULT_TTL) as i64, 117 | )?; 118 | stmt.exec()?; 119 | } 120 | SubscriptionChangeRequest::Unsubscribe(subscription) => { 121 | let stmt = db 122 | .prepare_v2("UPDATE ps_stream_subscriptions SET ttl = NULL WHERE stream_name = ?
AND local_params = ?") 123 | .into_db_result(db)?; 124 | stmt.bind_text(1, &subscription.name, sqlite::Destructor::STATIC)?; 125 | stmt.bind_text( 126 | 2, 127 | subscription.serialized_params(), 128 | sqlite::Destructor::STATIC, 129 | )?; 130 | stmt.exec()?; 131 | } 132 | } 133 | 134 | Ok(()) 135 | } 136 | -------------------------------------------------------------------------------- /crates/core/src/operations_vtab.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::boxed::Box; 4 | use alloc::rc::Rc; 5 | use core::ffi::{c_char, c_int, c_void}; 6 | 7 | use powersync_sqlite_nostd as sqlite; 8 | use sqlite::{Connection, ResultCode, Value}; 9 | 10 | use crate::operations::{ 11 | clear_remove_ops, delete_bucket, delete_pending_buckets, insert_operation, 12 | }; 13 | use crate::state::DatabaseState; 14 | use crate::sync_local::sync_local; 15 | use crate::vtab_util::*; 16 | 17 | #[repr(C)] 18 | struct VirtualTable { 19 | base: sqlite::vtab, 20 | db: *mut sqlite::sqlite3, 21 | state: Rc, 22 | 23 | target_applied: bool, 24 | target_validated: bool, 25 | } 26 | 27 | extern "C" fn connect( 28 | db: *mut sqlite::sqlite3, 29 | aux: *mut c_void, 30 | _argc: c_int, 31 | _argv: *const *const c_char, 32 | vtab: *mut *mut sqlite::vtab, 33 | _err: *mut *mut c_char, 34 | ) -> c_int { 35 | if let Err(rc) = 36 | sqlite::declare_vtab(db, "CREATE TABLE powersync_operations(op TEXT, data TEXT);") 37 | { 38 | return rc as c_int; 39 | } 40 | 41 | unsafe { 42 | let tab = Box::into_raw(Box::new(VirtualTable { 43 | base: sqlite::vtab { 44 | nRef: 0, 45 | pModule: core::ptr::null(), 46 | zErrMsg: core::ptr::null_mut(), 47 | }, 48 | db, 49 | state: DatabaseState::clone_from(aux), 50 | target_validated: false, 51 | target_applied: false, 52 | })); 53 | *vtab = tab.cast::(); 54 | let _ = sqlite::vtab_config(db, 0); 55 | } 56 | ResultCode::OK as c_int 57 | } 58 | 59 | extern "C" fn disconnect(vtab: *mut sqlite::vtab) -> c_int { 60 | unsafe { 61 | drop(Box::from_raw(vtab as *mut VirtualTable)); 62 | } 63 | ResultCode::OK as c_int 64 | } 65 | 66 | extern "C" fn update( 67 | vtab: *mut sqlite::vtab, 68 | argc: c_int, 69 | argv: *mut *mut sqlite::value, 70 | p_row_id: *mut sqlite::int64, 71 | ) -> c_int { 72 | let args = sqlite::args!(argc, argv); 73 | 74 | let rowid = args[0]; 75 | 76 | return if args.len() == 1 { 77 | // DELETE 78 | ResultCode::MISUSE as c_int 79 | } else if rowid.value_type() == sqlite::ColumnType::Null { 80 | // INSERT 81 | let op = args[2].text(); 82 | 83 | let tab = unsafe { &mut *vtab.cast::() }; 84 | let db = tab.db; 85 | 86 | if op == "save" { 87 | let result = insert_operation(db, args[3].text()); 88 | vtab_result(vtab, result) 89 | } else if op == "sync_local" { 90 | let result = sync_local(&tab.state, db, &args[3]); 91 | if let Ok(result_row) = result { 92 | unsafe { 93 | *p_row_id = result_row; 94 | } 95 | } 96 | vtab_result(vtab, result) 97 | } else if op == "clear_remove_ops" { 98 | let result = clear_remove_ops(db, args[3].text()); 99 | vtab_result(vtab, result) 100 | } else if op == "delete_pending_buckets" { 101 | let result = delete_pending_buckets(db, args[3].text()); 102 | vtab_result(vtab, result) 103 | } else if op == "delete_bucket" { 104 | let result = delete_bucket(db, args[3].text()); 105 | vtab_result(vtab, result) 106 | } else { 107 | ResultCode::MISUSE as c_int 108 | } 109 | } else { 110 | // UPDATE - not supported 111 | ResultCode::MISUSE as c_int 112 | } as c_int; 113 | } 114 | 115 | // Insert-only 
virtual table. 116 | // The primary functionality here is in update. 117 | // connect and disconnect configure the table and allocate the required resources. 118 | static MODULE: sqlite::module = sqlite::module { 119 | iVersion: 0, 120 | xCreate: None, 121 | xConnect: Some(connect), 122 | xBestIndex: Some(vtab_no_best_index), 123 | xDisconnect: Some(disconnect), 124 | xDestroy: None, 125 | xOpen: Some(vtab_no_open), 126 | xClose: Some(vtab_no_close), 127 | xFilter: Some(vtab_no_filter), 128 | xNext: Some(vtab_no_next), 129 | xEof: Some(vtab_no_eof), 130 | xColumn: Some(vtab_no_column), 131 | xRowid: Some(vtab_no_rowid), 132 | xUpdate: Some(update), 133 | xBegin: None, 134 | xSync: None, 135 | xCommit: None, 136 | xRollback: None, 137 | xFindFunction: None, 138 | xRename: None, 139 | xSavepoint: None, 140 | xRelease: None, 141 | xRollbackTo: None, 142 | xShadowName: None, 143 | xIntegrity: None, 144 | }; 145 | 146 | pub fn register(db: *mut sqlite::sqlite3, state: Rc<DatabaseState>) -> Result<(), ResultCode> { 147 | db.create_module_v2( 148 | "powersync_operations", 149 | &MODULE, 150 | Some(Rc::into_raw(state) as *mut c_void), 151 | Some(DatabaseState::destroy_rc), 152 | )?; 153 | 154 | Ok(()) 155 | } 156 | -------------------------------------------------------------------------------- /dart/tool/download_sqlite3.dart: -------------------------------------------------------------------------------- 1 | import 'dart:ffi'; 2 | import 'dart:io'; 3 | 4 | import 'package:archive/archive_io.dart'; 5 | import 'package:path/path.dart' as p; 6 | import 'package:http/http.dart'; 7 | 8 | typedef SqliteVersion = ({String version, String year}); 9 | 10 | const SqliteVersion latest = (version: '3500200', year: '2025'); 11 | const SqliteVersion minimum = (version: '3440000', year: '2023'); 12 | 13 | Future<void> main(List<String> args) async { 14 | if (args.contains('version')) { 15 | print(latest.version); 16 | exit(0); 17 | } 18 | 19 | await _downloadAndCompile('latest', latest, force: args.contains('--force')); 20 | await _downloadAndCompile('minimum', minimum, 21 | force: args.contains('--force')); 22 | } 23 | 24 | extension on SqliteVersion { 25 | String get autoconfUrl => 26 | 'https://sqlite.org/$year/sqlite-autoconf-$version.tar.gz'; 27 | 28 | String get windowsArm64Url => 29 | 'https://sqlite.org/$year/sqlite-dll-win-arm64-$version.zip'; 30 | 31 | String get windowsX64Url => 32 | 'https://sqlite.org/$year/sqlite-dll-win-x64-$version.zip'; 33 | } 34 | 35 | Future<void> _downloadAndCompile(String name, SqliteVersion version, 36 | {bool force = false}) async { 37 | final dartDirectory = p.dirname(p.dirname(Platform.script.toFilePath())); 38 | final target = p.join(dartDirectory, '.dart_tool', 'sqlite3', name); 39 | final versionFile = File(p.join(target, 'version')); 40 | 41 | final needsDownload = force || 42 | !versionFile.existsSync() || 43 | versionFile.readAsStringSync() != version.version; 44 | 45 | if (!needsDownload) { 46 | print( 47 | 'Not downloading sqlite3 $name as it has already been downloaded.
Use ' 48 | '--force to re-compile it.', 49 | ); 50 | return; 51 | } 52 | 53 | print('Downloading and compiling sqlite3 $name (${version.version})'); 54 | final targetDirectory = Directory(target); 55 | 56 | if (!targetDirectory.existsSync()) { 57 | targetDirectory.createSync(recursive: true); 58 | } 59 | 60 | final temporaryDir = 61 | await Directory.systemTemp.createTemp('powersync-core-compile-sqlite3'); 62 | final temporaryDirPath = temporaryDir.path; 63 | 64 | // Compiling on Windows is ugly because we need users to have Visual Studio 65 | // installed and all those tools activated in the current shell. 66 | // Much easier to just download precompiled builds. 67 | if (Platform.isWindows) { 68 | final windowsUri = Abi.current() == Abi.windowsX64 69 | ? version.windowsX64Url 70 | : version.windowsArm64Url; 71 | final sqlite3Zip = p.join(temporaryDirPath, 'sqlite3.zip'); 72 | final client = Client(); 73 | final response = await client 74 | .send(Request('GET', Uri.parse(windowsUri))..followRedirects = true); 75 | if (response.statusCode != 200) { 76 | print( 77 | 'Could not download $windowsUri, status code ${response.statusCode}'); 78 | exit(1); 79 | } 80 | await response.stream.pipe(File(sqlite3Zip).openWrite()); 81 | 82 | final inputStream = InputFileStream(sqlite3Zip); 83 | final archive = ZipDecoder().decodeStream(inputStream); 84 | 85 | for (final file in archive.files) { 86 | if (file.isFile && file.name == 'sqlite3.dll') { 87 | final outputStream = OutputFileStream(p.join(target, 'sqlite3.dll')); 88 | 89 | file.writeContent(outputStream); 90 | outputStream.close(); 91 | } 92 | } 93 | 94 | await File(p.join(target, 'version')).writeAsString(version.version); 95 | exit(0); 96 | } 97 | 98 | await _run('curl -L ${version.autoconfUrl} --output sqlite.tar.gz', 99 | workingDirectory: temporaryDirPath); 100 | await _run('tar zxvf sqlite.tar.gz', workingDirectory: temporaryDirPath); 101 | 102 | final sqlitePath = 103 | p.join(temporaryDirPath, 'sqlite-autoconf-${version.version}'); 104 | 105 | await _run('./configure', workingDirectory: sqlitePath); 106 | await _run('make -j', workingDirectory: sqlitePath); 107 | 108 | await File(p.join(sqlitePath, 'sqlite3')).copy(p.join(target, 'sqlite3')); 109 | final libsPath = name == 'latest' ? sqlitePath : p.join(sqlitePath, '.libs'); 110 | 111 | if (Platform.isLinux) { 112 | await File(p.join(libsPath, 'libsqlite3.so')) 113 | .copy(p.join(target, 'libsqlite3.so')); 114 | } else if (Platform.isMacOS) { 115 | await File(p.join(libsPath, 'libsqlite3.dylib')) 116 | .copy(p.join(target, 'libsqlite3.dylib')); 117 | } 118 | 119 | await File(p.join(target, 'version')).writeAsString(version.version); 120 | } 121 | 122 | Future<void> _run(String command, {String?
workingDirectory}) async { 123 | print('Running $command'); 124 | 125 | final proc = await Process.start( 126 | 'sh', 127 | ['-c', command], 128 | mode: ProcessStartMode.inheritStdio, 129 | workingDirectory: workingDirectory, 130 | ); 131 | final exitCode = await proc.exitCode; 132 | 133 | if (exitCode != 0) { 134 | exit(exitCode); 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /crates/core/src/schema/inspection.rs: -------------------------------------------------------------------------------- 1 | use alloc::borrow::ToOwned; 2 | use alloc::{format, vec}; 3 | use alloc::{string::String, vec::Vec}; 4 | use powersync_sqlite_nostd::Connection; 5 | use powersync_sqlite_nostd::{self as sqlite, ResultCode}; 6 | 7 | use crate::error::{PSResult, PowerSyncError}; 8 | use crate::util::quote_identifier; 9 | 10 | /// An existing PowerSync-managed view that was found in the schema. 11 | #[derive(PartialEq)] 12 | pub struct ExistingView { 13 | /// The name of the view itself. 14 | pub name: String, 15 | /// SQL contents of the `CREATE VIEW` statement. 16 | pub sql: String, 17 | /// SQL contents of all triggers implementing deletes by forwarding to 18 | /// `ps_data` and `ps_crud`. 19 | pub delete_trigger_sql: String, 20 | /// SQL contents of the trigger implementing inserts on this view. 21 | pub insert_trigger_sql: String, 22 | /// SQL contents of the trigger implementing updates on this view. 23 | pub update_trigger_sql: String, 24 | } 25 | 26 | impl ExistingView { 27 | pub fn list(db: *mut sqlite::sqlite3) -> Result<Vec<Self>, PowerSyncError> { 28 | let mut results = vec![]; 29 | let stmt = db.prepare_v2(" 30 | SELECT 31 | view.name, 32 | view.sql, 33 | ifnull(group_concat(trigger1.sql, ';\n' ORDER BY trigger1.name DESC), ''), 34 | ifnull(trigger2.sql, ''), 35 | ifnull(trigger3.sql, '') 36 | FROM sqlite_master view 37 | LEFT JOIN sqlite_master trigger1 38 | ON trigger1.tbl_name = view.name AND trigger1.type = 'trigger' AND trigger1.name GLOB 'ps_view_delete*' 39 | LEFT JOIN sqlite_master trigger2 40 | ON trigger2.tbl_name = view.name AND trigger2.type = 'trigger' AND trigger2.name GLOB 'ps_view_insert*' 41 | LEFT JOIN sqlite_master trigger3 42 | ON trigger3.tbl_name = view.name AND trigger3.type = 'trigger' AND trigger3.name GLOB 'ps_view_update*' 43 | WHERE view.type = 'view' AND view.sql GLOB '*-- powersync-auto-generated' 44 | GROUP BY view.name; 45 | ").into_db_result(db)?; 46 | 47 | while stmt.step()?
== ResultCode::ROW { 48 | let name = stmt.column_text(0)?.to_owned(); 49 | let sql = stmt.column_text(1)?.to_owned(); 50 | let delete = stmt.column_text(2)?.to_owned(); 51 | let insert = stmt.column_text(3)?.to_owned(); 52 | let update = stmt.column_text(4)?.to_owned(); 53 | 54 | results.push(ExistingView { 55 | name, 56 | sql, 57 | delete_trigger_sql: delete, 58 | insert_trigger_sql: insert, 59 | update_trigger_sql: update, 60 | }); 61 | } 62 | 63 | Ok(results) 64 | } 65 | 66 | pub fn drop_by_name(db: *mut sqlite::sqlite3, name: &str) -> Result<(), PowerSyncError> { 67 | let q = format!("DROP VIEW IF EXISTS {:}", quote_identifier(name)); 68 | db.exec_safe(&q)?; 69 | Ok(()) 70 | } 71 | 72 | pub fn create(&self, db: *mut sqlite::sqlite3) -> Result<(), PowerSyncError> { 73 | Self::drop_by_name(db, &self.name)?; 74 | db.exec_safe(&self.sql).into_db_result(db)?; 75 | db.exec_safe(&self.delete_trigger_sql).into_db_result(db)?; 76 | db.exec_safe(&self.insert_trigger_sql).into_db_result(db)?; 77 | db.exec_safe(&self.update_trigger_sql).into_db_result(db)?; 78 | 79 | Ok(()) 80 | } 81 | } 82 | 83 | pub struct ExistingTable { 84 | pub name: String, 85 | pub internal_name: String, 86 | pub local_only: bool, 87 | } 88 | 89 | impl ExistingTable { 90 | pub fn list(db: *mut sqlite::sqlite3) -> Result<Vec<Self>, PowerSyncError> { 91 | let mut results = vec![]; 92 | let stmt = db 93 | .prepare_v2( 94 | " 95 | SELECT name FROM sqlite_master WHERE type = 'table' AND name GLOB 'ps_data_*'; 96 | ", 97 | ) 98 | .into_db_result(db)?; 99 | 100 | while stmt.step()? == ResultCode::ROW { 101 | let internal_name = stmt.column_text(0)?; 102 | let Some((name, local_only)) = Self::external_name(internal_name) else { 103 | continue; 104 | }; 105 | 106 | results.push(ExistingTable { 107 | internal_name: internal_name.to_owned(), 108 | name: name.to_owned(), 109 | local_only: local_only, 110 | }); 111 | } 112 | 113 | Ok(results) 114 | } 115 | 116 | /// Extracts the public name from a `ps_data__` or a `ps_data_local__` table. 117 | /// 118 | /// Also returns whether the name is from a local table. 119 | pub fn external_name(name: &str) -> Option<(&str, bool)> { 120 | const LOCAL_PREFIX: &str = "ps_data_local__"; 121 | const NORMAL_PREFIX: &str = "ps_data__"; 122 | 123 | if name.starts_with(LOCAL_PREFIX) { 124 | Some((&name[LOCAL_PREFIX.len()..], true)) 125 | } else if name.starts_with(NORMAL_PREFIX) { 126 | Some((&name[NORMAL_PREFIX.len()..], false)) 127 | } else { 128 | None 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /crates/core/src/state.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | cell::{Cell, Ref, RefCell}, 3 | ffi::{c_int, c_void}, 4 | }; 5 | 6 | use alloc::{ 7 | collections::btree_set::BTreeSet, 8 | rc::Rc, 9 | string::{String, ToString}, 10 | }; 11 | use powersync_sqlite_nostd::{self as sqlite, Context}; 12 | use sqlite::{Connection, ResultCode}; 13 | 14 | use crate::schema::Schema; 15 | 16 | /// State that is shared for a SQLite database connection after the core extension has been 17 | /// registered on it. 18 | /// 19 | /// `init_extension` allocates an instance of this in an `Rc` that is shared as user-data for 20 | /// functions/vtabs that need access to it.
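///
/// A simplified sketch of how the state is shared during extension registration
/// (the `register` functions exist in this crate; the exact call order shown here
/// is an assumption):
///
/// ```ignore
/// let state = Rc::new(DatabaseState::new());
/// crate::state::register(db, state.clone())?;        // powersync_in_sync_operation()
/// crate::update_hooks::register(db, state.clone())?; // powersync_update_hooks()
/// crate::operations_vtab::register(db, state)?;      // powersync_operations vtab
/// ```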
21 | #[derive(Default)] 22 | pub struct DatabaseState { 23 | pub is_in_sync_local: Cell<bool>, 24 | schema: RefCell<Option<Schema>>, 25 | pending_updates: RefCell<BTreeSet<String>>, 26 | commited_updates: RefCell<BTreeSet<String>>, 27 | } 28 | 29 | impl DatabaseState { 30 | pub fn new() -> Self { 31 | Self::default() 32 | } 33 | 34 | pub fn view_schema(&'_ self) -> Option<Ref<'_, Schema>> { 35 | let schema_ref = self.schema.borrow(); 36 | if schema_ref.is_none() { 37 | None 38 | } else { 39 | Some(Ref::map(schema_ref, |f| f.as_ref().unwrap())) 40 | } 41 | } 42 | 43 | /// Marks the given [Schema] as being the one currently installed to the database. 44 | pub fn set_schema(&self, schema: Schema) { 45 | self.schema.replace(Some(schema)); 46 | } 47 | 48 | pub fn sync_local_guard<'a>(&'a self) -> impl Drop + use<'a> { 49 | if self.is_in_sync_local.replace(true) { 50 | panic!("Should not be syncing already"); 51 | } 52 | 53 | struct ClearOnDrop<'a>(&'a DatabaseState); 54 | 55 | impl Drop for ClearOnDrop<'_> { 56 | fn drop(&mut self) { 57 | self.0.is_in_sync_local.set(false); 58 | } 59 | } 60 | 61 | ClearOnDrop(self) 62 | } 63 | 64 | pub fn track_update(&self, tbl: &str) { 65 | let mut set = self.pending_updates.borrow_mut(); 66 | // TODO: Use set.get_or_insert_with(tbl, str::to_string) after btree_set_entry is stable, 67 | // https://github.com/rust-lang/rust/issues/133549 68 | if !set.contains(tbl) { 69 | // Check whether the set contains the entry first to avoid an unconditional allocation. 70 | set.insert(tbl.to_string()); 71 | } 72 | } 73 | 74 | pub fn track_rollback(&self) { 75 | self.pending_updates.borrow_mut().clear(); 76 | } 77 | 78 | pub fn track_commit(&self) { 79 | let mut commited = self.commited_updates.borrow_mut(); 80 | let mut pending = self.pending_updates.borrow_mut(); 81 | let pending = core::mem::replace(&mut *pending, Default::default()); 82 | 83 | for pending in pending.into_iter() { 84 | commited.insert(pending); 85 | } 86 | } 87 | 88 | pub fn take_updates(&self) -> BTreeSet<String> { 89 | let mut committed = self.commited_updates.borrow_mut(); 90 | core::mem::replace(&mut *committed, Default::default()) 91 | } 92 | 93 | /// ## Safety 94 | /// 95 | /// This is only safe to call when an `Rc<DatabaseState>` has been installed as the `user_data` 96 | /// pointer when registering the function. 97 | pub unsafe fn from_context(context: &impl Context) -> &Self { 98 | let user_data = context.user_data().cast::<DatabaseState>(); 99 | unsafe { 100 | // Safety: user_data() points to valid DatabaseState reference alive as long as the 101 | // context. 102 | &*user_data 103 | } 104 | } 105 | 106 | /// ## Safety 107 | /// 108 | /// This is only safe to call if `context` is the user-data pointer of a function or virtual 109 | /// table created with an `Rc<DatabaseState>`. 110 | pub unsafe fn clone_from(context: *mut c_void) -> Rc<Self> { 112 | let context = context as *mut DatabaseState; 113 | 114 | unsafe { 115 | // Safety: It's a valid pointer that has at least one reference (owned by SQLite while 116 | // the function is registered). 117 | Rc::increment_strong_count(context); 118 | // Safety: Moves the clone we've just created into Rust.
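// (The from_raw below is balanced by the increment_strong_count above, so the
// reference owned by SQLite remains valid.)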
119 | Rc::from_raw(context) 120 | } 121 | } 122 | 123 | pub unsafe extern "C" fn destroy_rc(ptr: *mut c_void) { 124 | drop(unsafe { Rc::from_raw(ptr.cast::<DatabaseState>()) }); 125 | } 126 | } 127 | 128 | pub fn register(db: *mut sqlite::sqlite3, state: Rc<DatabaseState>) -> Result<(), ResultCode> { 129 | unsafe extern "C" fn func( 130 | ctx: *mut sqlite::context, 131 | _argc: c_int, 132 | _argv: *mut *mut sqlite::value, 133 | ) { 134 | let data = unsafe { DatabaseState::from_context(&ctx) }; 135 | 136 | ctx.result_int(if data.is_in_sync_local.get() { 1 } else { 0 }); 137 | } 138 | 139 | db.create_function_v2( 140 | "powersync_in_sync_operation", 141 | 0, 142 | 0, 143 | Some(Rc::into_raw(state) as *mut c_void), 144 | Some(func), 145 | None, 146 | None, 147 | Some(DatabaseState::destroy_rc), 148 | )?; 149 | Ok(()) 150 | } 151 | -------------------------------------------------------------------------------- /crates/core/src/util.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use core::fmt::{Display, Write}; 4 | 5 | use alloc::format; 6 | use alloc::string::{String, ToString}; 7 | use core::{cmp::Ordering, hash::Hash}; 8 | 9 | use alloc::boxed::Box; 10 | use powersync_sqlite_nostd::{ColumnType, ManagedStmt}; 11 | use serde::Serialize; 12 | use serde_json::value::RawValue; 13 | 14 | use crate::error::PowerSyncError; 15 | #[cfg(not(feature = "getrandom"))] 16 | use crate::sqlite; 17 | 18 | use uuid::Uuid; 19 | 20 | #[cfg(not(feature = "getrandom"))] 21 | use uuid::Builder; 22 | 23 | pub fn quote_string(s: &str) -> String { 24 | return QuotedString(s).to_string(); 25 | } 26 | 27 | pub fn quote_json_path(s: &str) -> String { 28 | quote_string(&format!("$.{:}", s)) 29 | } 30 | 31 | pub fn quote_identifier(name: &str) -> String { 32 | format!("\"{:}\"", name.replace("\"", "\"\"")) 33 | } 34 | 35 | pub fn quote_internal_name(name: &str, local_only: bool) -> String { 36 | if local_only { 37 | quote_identifier_prefixed("ps_data_local__", name) 38 | } else { 39 | quote_identifier_prefixed("ps_data__", name) 40 | } 41 | } 42 | 43 | /// A string that [Display]s as a SQLite string literal. 44 | pub struct QuotedString<'a>(pub &'a str); 45 | 46 | impl<'a> Display for QuotedString<'a> { 47 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 48 | const SINGLE_QUOTE: char = '\''; 49 | const ESCAPE_SEQUENCE: &'static str = "''"; 50 | 51 | f.write_char(SINGLE_QUOTE)?; 52 | 53 | for (i, group) in self.0.split(SINGLE_QUOTE).enumerate() { 54 | if i != 0 { 55 | f.write_str(ESCAPE_SEQUENCE)?; 56 | } 57 | 58 | f.write_str(group)?; 59 | } 60 | 61 | f.write_char(SINGLE_QUOTE) 62 | } 63 | } 64 | 65 | pub fn quote_identifier_prefixed(prefix: &str, name: &str) -> String { 66 | return format!("\"{:}{:}\"", prefix, name.replace("\"", "\"\"")); 67 | } 68 | 69 | /// Calls [read] to read a column if it's not null, otherwise returns [None]. 70 | #[inline] 71 | pub fn column_nullable<T, R: FnOnce() -> Result<T, PowerSyncError>>( 72 | stmt: &ManagedStmt, 73 | index: i32, 74 | read: R, 75 | ) -> Result<Option<T>, PowerSyncError> { 76 | if stmt.column_type(index)? == ColumnType::Null { 77 | Ok(None) 78 | } else { 79 | Ok(Some(read()?)) 80 | } 81 | } 82 | 83 | /// An opaque wrapper around a JSON-serialized value. 84 | /// 85 | /// This wraps [RawValue] from `serde_json`, adding implementations for comparisons and hashes.
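///
/// Editorial note: since [RawValue] is a dynamically sized type, owned values are
/// passed around as `Box<JsonString>` (see `from_string`). Comparisons and hashes
/// operate on the raw JSON text, so `{"a":1}` and `{ "a": 1 }` compare as different.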
86 | #[derive(Debug)] 87 | #[repr(transparent)] 88 | pub struct JsonString(pub RawValue); 89 | 90 | impl JsonString { 91 | pub fn from_string(string: String) -> Result<Box<Self>, PowerSyncError> { 92 | let underlying = 93 | RawValue::from_string(string).map_err(PowerSyncError::as_argument_error)?; 94 | unsafe { 95 | // Safety: repr(transparent) 96 | core::mem::transmute(underlying) 97 | } 98 | } 99 | } 100 | 101 | impl Hash for JsonString { 102 | fn hash<H: core::hash::Hasher>(&self, state: &mut H) { 103 | self.0.get().hash(state); 104 | } 105 | } 106 | 107 | impl PartialEq for JsonString { 108 | fn eq(&self, other: &Self) -> bool { 109 | self.0.get() == other.0.get() 110 | } 111 | } 112 | 113 | impl Eq for JsonString {} 114 | 115 | impl PartialOrd for JsonString { 116 | fn partial_cmp(&self, other: &Self) -> Option<Ordering> { 117 | Some(self.cmp(other)) 118 | } 119 | } 120 | 121 | impl Ord for JsonString { 122 | fn cmp(&self, other: &Self) -> Ordering { 123 | self.0.get().cmp(other.0.get()) 124 | } 125 | } 126 | 127 | impl Serialize for JsonString { 128 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 129 | where 130 | S: serde::Serializer, 131 | { 132 | self.0.serialize(serializer) 133 | } 134 | } 135 | 136 | impl Clone for Box<JsonString> { 137 | fn clone(&self) -> Self { 138 | let raw_value_box: &Box<RawValue> = unsafe { 139 | // SAFETY: repr(transparent) 140 | core::mem::transmute(self) 141 | }; 142 | 143 | unsafe { core::mem::transmute(raw_value_box.clone()) } 144 | } 145 | } 146 | 147 | impl Display for JsonString { 148 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 149 | self.0.fmt(f) 150 | } 151 | } 152 | 153 | // Use getrandom crate to generate UUID. 154 | // This is not available in all WASM builds - use the default in those cases. 155 | #[cfg(feature = "getrandom")] 156 | pub fn gen_uuid() -> Uuid { 157 | let id = Uuid::new_v4(); 158 | id 159 | } 160 | 161 | // Default - use sqlite3_randomness to generate UUID 162 | // This uses ChaCha20 PRNG, with /dev/urandom as a seed on unix. 163 | // On Windows, it uses custom logic for the seed, which may not be secure. 164 | // Rather avoid this version for most builds.
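// Either way, the result is a random version-4 UUID; only the entropy source
// differs between the two implementations.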
165 | #[cfg(not(feature = "getrandom"))] 166 | pub fn gen_uuid() -> Uuid { 167 | let mut random_bytes: [u8; 16] = [0; 16]; 168 | sqlite::randomness(&mut random_bytes); 169 | let id = Builder::from_random_bytes(random_bytes).into_uuid(); 170 | id 171 | } 172 | 173 | pub const MAX_OP_ID: &str = "9223372036854775807"; 174 | 175 | #[cfg(test)] 176 | mod tests { 177 | use super::*; 178 | 179 | #[test] 180 | fn quote_identifier_test() { 181 | assert_eq!(quote_identifier("test"), "\"test\""); 182 | assert_eq!(quote_identifier("\"quote\""), "\"\"\"quote\"\"\""); 183 | assert_eq!( 184 | quote_identifier("other characters."), 185 | "\"other characters.\"" 186 | ); 187 | } 188 | 189 | #[test] 190 | fn quote_string_test() { 191 | assert_eq!(quote_string("test"), "'test'"); 192 | assert_eq!(quote_string("\"quote\""), "'\"quote\"'"); 193 | assert_eq!(quote_string("'quote'"), "'''quote'''"); 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /crates/core/src/update_hooks.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | cell::Cell, 3 | ffi::{CStr, c_char, c_int, c_void}, 4 | ptr::null_mut, 5 | }; 6 | 7 | use alloc::{boxed::Box, rc::Rc}; 8 | use powersync_sqlite_nostd::{ 9 | self as sqlite, Connection, Context, ResultCode, Value, bindings::SQLITE_RESULT_SUBTYPE, 10 | }; 11 | 12 | use crate::{constants::SUBTYPE_JSON, error::PowerSyncError, state::DatabaseState}; 13 | 14 | /// The `powersync_update_hooks` method works like this: 15 | /// 16 | /// 1. `powersync_update_hooks('install')` installs update hooks on the database, failing if 17 | /// another hook already exists. 18 | /// 2. `powersync_update_hooks('get')` returns a JSON array of table names that have been changed 19 | /// and committed since the last `powersync_update_hooks` call. 20 | /// 21 | /// The update hooks don't have to be uninstalled manually; that happens when the connection is 22 | /// closed and the function is unregistered.
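///
/// Hypothetical SQL usage sketch (the table name is invented for illustration):
///
/// ```sql
/// SELECT powersync_update_hooks('install');
/// -- ... transactions run and commit ...
/// SELECT powersync_update_hooks('get'); -- e.g. returns '["todos"]'
/// ```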
23 | pub fn register(db: *mut sqlite::sqlite3, state: Rc<DatabaseState>) -> Result<(), ResultCode> { 24 | let state = Box::new(HookState { 25 | has_registered_hooks: Cell::new(false), 26 | db, 27 | state, 28 | }); 29 | 30 | db.create_function_v2( 31 | "powersync_update_hooks", 32 | 1, 33 | sqlite::UTF8 | sqlite::DETERMINISTIC | SQLITE_RESULT_SUBTYPE, 34 | Some(Box::into_raw(state) as *mut c_void), 35 | Some(powersync_update_hooks), 36 | None, 37 | None, 38 | Some(destroy_function), 39 | )?; 40 | Ok(()) 41 | } 42 | 43 | struct HookState { 44 | has_registered_hooks: Cell<bool>, 45 | db: *mut sqlite::sqlite3, 46 | state: Rc<DatabaseState>, 47 | } 48 | 49 | extern "C" fn destroy_function(ctx: *mut c_void) { 50 | let state = unsafe { Box::from_raw(ctx as *mut HookState) }; 51 | 52 | if state.has_registered_hooks.get() { 53 | check_previous( 54 | "update", 55 | &state.state, 56 | state.db.update_hook(None, null_mut()), 57 | ); 58 | check_previous( 59 | "commit", 60 | &state.state, 61 | state.db.commit_hook(None, null_mut()), 62 | ); 63 | check_previous( 64 | "rollback", 65 | &state.state, 66 | state.db.rollback_hook(None, null_mut()), 67 | ); 68 | } 69 | } 70 | 71 | extern "C" fn powersync_update_hooks( 72 | ctx: *mut sqlite::context, 73 | argc: c_int, 74 | argv: *mut *mut sqlite::value, 75 | ) { 76 | let args = sqlite::args!(argc, argv); 77 | let op = args[0].text(); 78 | let db = ctx.db_handle(); 79 | let user_data = ctx.user_data() as *const HookState; 80 | 81 | match op { 82 | "install" => { 83 | let state = unsafe { user_data.as_ref().unwrap_unchecked() }; 84 | let db_state = &state.state; 85 | 86 | check_previous( 87 | "update", 88 | db_state, 89 | db.update_hook( 90 | Some(update_hook_impl), 91 | Rc::into_raw(db_state.clone()) as *mut c_void, 92 | ), 93 | ); 94 | check_previous( 95 | "commit", 96 | db_state, 97 | db.commit_hook( 98 | Some(commit_hook_impl), 99 | Rc::into_raw(db_state.clone()) as *mut c_void, 100 | ), 101 | ); 102 | check_previous( 103 | "rollback", 104 | db_state, 105 | db.rollback_hook( 106 | Some(rollback_hook_impl), 107 | Rc::into_raw(db_state.clone()) as *mut c_void, 108 | ), 109 | ); 110 | state.has_registered_hooks.set(true); 111 | } 112 | "get" => { 113 | let state = unsafe { user_data.as_ref().unwrap_unchecked() }; 114 | let formatted = serde_json::to_string(&state.state.take_updates()) 115 | .map_err(PowerSyncError::internal); 116 | match formatted { 117 | Ok(result) => { 118 | ctx.result_text_transient(&result); 119 | ctx.result_subtype(SUBTYPE_JSON); 120 | } 121 | Err(e) => e.apply_to_ctx("powersync_update_hooks", ctx), 122 | } 123 | } 124 | _ => { 125 | ctx.result_error("Unknown operation"); 126 | ctx.result_error_code(ResultCode::MISUSE); 127 | } 128 | }; 129 | } 130 | 131 | unsafe extern "C" fn update_hook_impl( 132 | ctx: *mut c_void, 133 | _kind: c_int, 134 | _db: *const c_char, 135 | table: *const c_char, 136 | _rowid: i64, 137 | ) { 138 | let state = unsafe { (ctx as *const DatabaseState).as_ref().unwrap_unchecked() }; 139 | let table = unsafe { CStr::from_ptr(table) }; 140 | let Ok(table) = table.to_str() else { 141 | return; 142 | }; 143 | 144 | state.track_update(table); 145 | } 146 | 147 | unsafe extern "C" fn commit_hook_impl(ctx: *mut c_void) -> c_int { 148 | let state = unsafe { (ctx as *const DatabaseState).as_ref().unwrap_unchecked() }; 149 | state.track_commit(); 150 | return 0; // Allow commit to continue normally 151 | } 152 | 153 | unsafe extern "C" fn rollback_hook_impl(ctx: *mut c_void) { 154 | let state = unsafe { (ctx as *const DatabaseState).as_ref().unwrap_unchecked() };
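// Discard the table names collected during the transaction that was rolled back.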
155 | state.track_rollback(); 156 | } 157 | 158 | fn check_previous(desc: &'static str, expected: &Rc<DatabaseState>, previous: *const c_void) { 159 | let expected = Rc::as_ptr(expected); 160 | 161 | assert!( 162 | previous.is_null() || previous == expected.cast(), 163 | "Previous call to {desc} hook outside of PowerSync: Expected {expected:p}, installed was {previous:p}", 164 | ); 165 | if !previous.is_null() { 166 | // The hook callbacks own an Rc that needs to be dropped now. 167 | unsafe { 168 | Rc::decrement_strong_count(previous); 169 | } 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /crates/core/src/sync/checksum.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | fmt::Display, 3 | num::Wrapping, 4 | ops::{Add, AddAssign, Sub, SubAssign}, 5 | }; 6 | 7 | use num_traits::Zero; 8 | use num_traits::float::FloatCore; 9 | use serde::{Deserialize, Serialize, de::Visitor}; 10 | 11 | /// A checksum as received from the sync service. 12 | /// 13 | /// Conceptually, we use unsigned 32 bit integers to represent checksums, and adding checksums 14 | /// should be a wrapping add. 15 | #[repr(transparent)] 16 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize)] 17 | pub struct Checksum(Wrapping<u32>); 18 | 19 | impl Checksum { 20 | pub const fn value(self) -> u32 { 21 | self.0.0 22 | } 23 | 24 | pub const fn from_value(value: u32) -> Self { 25 | Self(Wrapping(value)) 26 | } 27 | 28 | pub const fn from_i32(value: i32) -> Self { 29 | Self::from_value(value as u32) 30 | } 31 | 32 | pub const fn bitcast_i32(self) -> i32 { 33 | self.value() as i32 34 | } 35 | } 36 | 37 | impl Zero for Checksum { 38 | fn zero() -> Self { 39 | const { Self::from_value(0) } 40 | } 41 | 42 | fn is_zero(&self) -> bool { 43 | self.value() == 0 44 | } 45 | } 46 | 47 | impl Add for Checksum { 48 | type Output = Self; 49 | 50 | #[inline] 51 | fn add(self, rhs: Self) -> Self::Output { 52 | Self(self.0 + rhs.0) 53 | } 54 | } 55 | 56 | impl AddAssign for Checksum { 57 | #[inline] 58 | fn add_assign(&mut self, rhs: Self) { 59 | self.0 += rhs.0 60 | } 61 | } 62 | 63 | impl Sub for Checksum { 64 | type Output = Self; 65 | 66 | fn sub(self, rhs: Self) -> Self::Output { 67 | Self(self.0 - rhs.0) 68 | } 69 | } 70 | 71 | impl SubAssign for Checksum { 72 | fn sub_assign(&mut self, rhs: Self) { 73 | self.0 -= rhs.0; 74 | } 75 | } 76 | 77 | impl From<u32> for Checksum { 78 | fn from(value: u32) -> Self { 79 | Self::from_value(value) 80 | } 81 | } 82 | 83 | impl Display for Checksum { 84 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 85 | write!(f, "{:#010x}", self.value()) 86 | } 87 | } 88 | 89 | impl<'de> Deserialize<'de> for Checksum { 90 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 91 | where 92 | D: serde::Deserializer<'de>, 93 | { 94 | struct MyVisitor; 95 | 96 | impl<'de> Visitor<'de> for MyVisitor { 97 | type Value = Checksum; 98 | 99 | fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { 100 | write!(formatter, "a number to interpret as a checksum") 101 | } 102 | 103 | fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E> 104 | where 105 | E: serde::de::Error, 106 | { 107 | Ok(v.into()) 108 | } 109 | 110 | fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> 111 | where 112 | E: serde::de::Error, 113 | { 114 | let as_u32: u32 = v.try_into().map_err(|_| { 115 | E::invalid_value(serde::de::Unexpected::Unsigned(v), &"a 32-bit int") 116 | })?; 117 | Ok(as_u32.into()) 118 | } 119 | 120 | fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E> 121 |
where 122 | E: serde::de::Error, 123 | { 124 | Ok(Checksum::from_i32(v)) 125 | } 126 | 127 | fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> 128 | where 129 | E: serde::de::Error, 130 | { 131 | // This is supposed to be a u32, but it could also be an i32 that we need to 132 | // normalize. 133 | let min: i64 = u32::MIN.into(); 134 | let max: i64 = u32::MAX.into(); 135 | 136 | if v >= min && v <= max { 137 | return Ok(Checksum::from(v as u32)); 138 | } 139 | 140 | let as_i32: i32 = v.try_into().map_err(|_| { 141 | E::invalid_value(serde::de::Unexpected::Signed(v), &"a 32-bit int") 142 | })?; 143 | Ok(Checksum::from_i32(as_i32)) 144 | } 145 | 146 | fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E> 147 | where 148 | E: serde::de::Error, 149 | { 150 | if !v.is_finite() || f64::trunc(v) != v { 151 | return Err(E::invalid_value( 152 | serde::de::Unexpected::Float(v), 153 | &"a whole number", 154 | )); 155 | } 156 | 157 | self.visit_i64(v as i64) 158 | } 159 | } 160 | 161 | deserializer.deserialize_u32(MyVisitor) 162 | } 163 | } 164 | 165 | #[cfg(test)] 166 | mod test { 167 | use num_traits::Zero; 168 | 169 | use super::Checksum; 170 | 171 | #[test] 172 | pub fn test_binary_representation() { 173 | assert_eq!(Checksum::from_i32(-1).value(), u32::MAX); 174 | assert_eq!(Checksum::from(u32::MAX).value(), u32::MAX); 175 | assert_eq!(Checksum::from(u32::MAX).bitcast_i32(), -1); 176 | } 177 | 178 | fn deserialize(from: &str) -> Checksum { 179 | serde_json::from_str(from).expect("should deserialize") 180 | } 181 | 182 | #[test] 183 | pub fn test_deserialize() { 184 | assert_eq!(deserialize("0").value(), 0); 185 | assert_eq!(deserialize("-1").value(), u32::MAX); 186 | assert_eq!(deserialize("-1.0").value(), u32::MAX); 187 | 188 | assert_eq!(deserialize("3573495687").value(), 3573495687); 189 | assert_eq!(deserialize("3573495687.0").value(), 3573495687); 190 | assert_eq!(deserialize("-721471609.0").value(), 3573495687); 191 | } 192 | 193 | #[test] 194 | pub fn test_arithmetic() { 195 | assert_eq!(Checksum::from(3) + Checksum::from(7), Checksum::from(10)); 196 | 197 | // Checksums should always wrap around 198 | assert_eq!( 199 | Checksum::from(0xFFFFFFFF) + Checksum::from(1), 200 | Checksum::zero() 201 | ); 202 | assert_eq!( 203 | Checksum::zero() - Checksum::from(1), 204 | Checksum::from(0xFFFFFFFF) 205 | ); 206 | 207 | let mut cs = Checksum::from(0x8FFFFFFF); 208 | cs += Checksum::from(0x80000000); 209 | assert_eq!(cs, Checksum::from(0x0FFFFFFF)); 210 | 211 | cs -= Checksum::from(0x80000001); 212 | assert_eq!(cs, Checksum::from(0x8FFFFFFE)); 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /android/build.gradle.kts: -------------------------------------------------------------------------------- 1 | import java.util.Base64 2 | import java.util.Properties 3 | import kotlin.io.path.Path 4 | import kotlin.io.path.absolutePathString 5 | import kotlin.io.path.exists 6 | import kotlin.io.path.listDirectoryEntries 7 | import kotlin.io.path.name 8 | 9 | plugins { 10 | id("maven-publish") 11 | id("signing") 12 | } 13 | 14 | group = "com.powersync" 15 | version = "0.4.10" 16 | description = "PowerSync Core SQLite Extension" 17 | 18 | val localRepo = uri("build/repository/") 19 | 20 | repositories { 21 | mavenCentral() 22 | google() 23 | } 24 | 25 | fun ndkPath(): String { 26 | val file = project.rootProject.file("local.properties") 27 | var androidHome = System.getenv("ANDROID_HOME") 28 | 29 | if (file.exists()) { 30 | val properties = Properties() 31 |
properties.load(project.rootProject.file("local.properties").inputStream()) 32 | 33 | properties["sdk.dir"]?.let { 34 | androidHome = it as String 35 | } 36 | } 37 | 38 | check(androidHome != null) { "Could not find android SDK dir" } 39 | 40 | val ndks = Path(androidHome).resolve("ndk") 41 | check(ndks.exists()) { "Expected NDK installations at $ndks" } 42 | 43 | for (entry in ndks.listDirectoryEntries()) { 44 | val name = entry.name 45 | val majorVersion = name.split('.').first().toInt() 46 | 47 | // We want to use NDK 28 or newer to build with 16KB support by default. 48 | if (majorVersion >= 28) { 49 | return entry.absolutePathString() 50 | } 51 | } 52 | 53 | error("Expected an NDK 28 or later installation in $ndks") 54 | } 55 | 56 | fun Exec.rustCompilation(project: String, output: String? = null) { 57 | group = "build" 58 | environment("ANDROID_NDK_HOME", ndkPath()) 59 | 60 | workingDir("..") 61 | val args = buildList { 62 | this += listOf( 63 | "cargo", 64 | "ndk", 65 | "-t", 66 | "armeabi-v7a", 67 | "-t", 68 | "arm64-v8a", 69 | "-t", 70 | "x86", 71 | "-t", 72 | "x86_64", 73 | ) 74 | 75 | output?.let { 76 | this += "-o" 77 | this += it 78 | } 79 | 80 | this += listOf( 81 | "build", 82 | "--release", 83 | "-Zbuild-std", 84 | "-p", 85 | project, 86 | "--features", 87 | "nightly" 88 | ) 89 | } 90 | 91 | commandLine(args) 92 | } 93 | 94 | val buildRust = tasks.register<Exec>("buildRust") { 95 | rustCompilation("powersync_loadable", "./android/build/intermediates/jniLibs") 96 | } 97 | 98 | val buildRustStatic = tasks.register<Exec>("buildRustStatic") { 99 | rustCompilation("powersync_static") 100 | } 101 | 102 | val prefabAar = tasks.register<Zip>("prefabAar") { 103 | dependsOn(buildRust) 104 | 105 | from("build/intermediates/jniLibs") { 106 | include("**/*") 107 | into("jni") 108 | } 109 | 110 | from("src/") { 111 | include("**/*") 112 | } 113 | 114 | val architectures = listOf( 115 | "armeabi-v7a", 116 | "arm64-v8a", 117 | "x86", 118 | "x86_64" 119 | ) 120 | 121 | architectures.forEach { architecture -> 122 | from("build/intermediates/jniLibs/$architecture/") { 123 | include("libpowersync.so") 124 | into("prefab/modules/powersync/libs/android.$architecture/") 125 | } 126 | } 127 | 128 | archiveFileName.set("build/outputs/aar/powersync-sqlite-core.aar") 129 | destinationDirectory.set(file("./")) 130 | } 131 | 132 | val sourcesJar = tasks.register<Jar>("sourcesJar") { 133 | // We don't have any actual java sources to bundle 134 | archiveClassifier.set("sources") 135 | } 136 | 137 | publishing { 138 | publications { 139 | register<MavenPublication>("maven") { 140 | groupId = project.group.toString() 141 | artifactId = project.name 142 | version = project.version.toString() 143 | 144 | afterEvaluate { 145 | artifact(prefabAar) { 146 | extension = "aar" 147 | } 148 | 149 | artifact(sourcesJar) { 150 | classifier = "sources" 151 | } 152 | } 153 | 154 | pom { 155 | name.set(project.name) 156 | description.set(project.description) 157 | url.set("https://github.com/powersync-ja/powersync-sqlite-core") 158 | 159 | developers { 160 | developer { 161 | id.set("journeyapps") 162 | name.set("Journey Mobile, Inc.") 163 | email.set("info@journeyapps.com") 164 | } 165 | } 166 | 167 | licenses { 168 | license { 169 | name.set("Apache License, Version 2.0") 170 | url.set("http://www.apache.org/licenses/LICENSE-2.0.txt") 171 | } 172 | } 173 | 174 | scm { 175 | connection.set("scm:git:github.com/powersync-ja/powersync-sqlite-core.git") 176 | developerConnection.set("scm:git:ssh://github.com/powersync-ja/powersync-sqlite-core.git") 177 |
url.set("https://github.com/powersync-ja/powersync-sqlite-core") 178 | } 179 | } 180 | } 181 | } 182 | 183 | repositories { 184 | maven { 185 | name = "here" 186 | url = localRepo 187 | } 188 | } 189 | } 190 | 191 | signing { 192 | val sign = providers.gradleProperty("signPublication").getOrElse("1") 193 | 194 | if (sign != "0") { 195 | val key = providers.gradleProperty("gpgKey") 196 | val password = providers.gradleProperty("gpgPassword") 197 | 198 | if (key.isPresent()) { 199 | val signingKey = String(Base64.getDecoder().decode(key.get())).trim() 200 | useInMemoryPgpKeys(signingKey, password.get()) 201 | } else { 202 | useGpgCmd() 203 | } 204 | 205 | sign(publishing.publications) 206 | } 207 | } 208 | 209 | tasks.withType() { 210 | dependsOn(prefabAar) 211 | } 212 | 213 | val zipPublication by tasks.registering(Zip::class) { 214 | dependsOn(tasks.named("publishAllPublicationsToHereRepository")) 215 | 216 | archiveFileName.set("powersync_android.zip") 217 | from(localRepo) 218 | } 219 | 220 | tasks.named("build") { 221 | dependsOn(prefabAar, buildRustStatic) 222 | } 223 | -------------------------------------------------------------------------------- /crates/core/src/view_admin.rs: -------------------------------------------------------------------------------- 1 | extern crate alloc; 2 | 3 | use alloc::format; 4 | use alloc::rc::Rc; 5 | use alloc::string::{String, ToString}; 6 | use alloc::vec::Vec; 7 | use core::ffi::{c_int, c_void}; 8 | 9 | use powersync_sqlite_nostd as sqlite; 10 | use powersync_sqlite_nostd::{Connection, Context}; 11 | use sqlite::{ResultCode, Value}; 12 | 13 | use crate::error::PowerSyncError; 14 | use crate::migrations::{LATEST_VERSION, powersync_migrate}; 15 | use crate::schema::inspection::ExistingView; 16 | use crate::state::DatabaseState; 17 | use crate::util::quote_identifier; 18 | use crate::{create_auto_tx_function, create_sqlite_text_fn}; 19 | 20 | // Used in old down migrations, do not remove. 
21 | extern "C" fn powersync_drop_view(
22 |     ctx: *mut sqlite::context,
23 |     argc: c_int,
24 |     argv: *mut *mut sqlite::value,
25 | ) {
26 |     let args = sqlite::args!(argc, argv);
27 |     let name = args[0].text();
28 | 
29 |     if let Err(e) = ExistingView::drop_by_name(ctx.db_handle(), name) {
30 |         e.apply_to_ctx("powersync_drop_view", ctx);
31 |     }
32 | }
33 | 
34 | fn powersync_init_impl(
35 |     ctx: *mut sqlite::context,
36 |     _args: &[*mut sqlite::value],
37 | ) -> Result<String, PowerSyncError> {
38 |     powersync_migrate(ctx, LATEST_VERSION)?;
39 | 
40 |     Ok(String::from(""))
41 | }
42 | 
43 | create_auto_tx_function!(powersync_init_tx, powersync_init_impl);
44 | create_sqlite_text_fn!(powersync_init, powersync_init_tx, "powersync_init");
45 | 
46 | fn powersync_test_migration_impl(
47 |     ctx: *mut sqlite::context,
48 |     args: &[*mut sqlite::value],
49 | ) -> Result<String, PowerSyncError> {
50 |     let target_version = args[0].int();
51 |     powersync_migrate(ctx, target_version)?;
52 | 
53 |     Ok(String::from(""))
54 | }
55 | 
56 | create_auto_tx_function!(powersync_test_migration_tx, powersync_test_migration_impl);
57 | create_sqlite_text_fn!(
58 |     powersync_test_migration,
59 |     powersync_test_migration_tx,
60 |     "powersync_test_migration"
61 | );
62 | 
63 | fn powersync_clear_impl(
64 |     ctx: *mut sqlite::context,
65 |     args: &[*mut sqlite::value],
66 | ) -> Result<String, PowerSyncError> {
67 |     let local_db = ctx.db_handle();
68 |     let state = unsafe { DatabaseState::from_context(&ctx) };
69 | 
70 |     let flags = PowerSyncClearFlags(args[0].int());
71 | 
72 |     // With a soft clear, we want to delete public data while keeping internal data around. When
73 |     // connect() is called with compatible JWTs yielding a large overlap of buckets, this can
74 |     // speed up the next sync.
75 |     if !flags.soft_clear() {
76 |         local_db.exec_safe("DELETE FROM ps_oplog; DELETE FROM ps_buckets")?;
77 |     } else {
78 |         local_db.exec_safe("UPDATE ps_buckets SET last_applied_op = 0")?;
79 |         local_db.exec_safe("DELETE FROM ps_buckets WHERE name = '$local'")?;
80 |     }
81 | 
82 |     // language=SQLite
83 |     local_db.exec_safe(
84 |         "\
85 | DELETE FROM ps_crud;
86 | DELETE FROM ps_untyped;
87 | DELETE FROM ps_updated_rows;
88 | DELETE FROM ps_kv WHERE key != 'client_id';
89 | DELETE FROM ps_sync_state;
90 | DELETE FROM ps_stream_subscriptions;
91 | ",
92 |     )?;
93 | 
94 |     let table_glob = if flags.clear_local() {
95 |         "ps_data_*"
96 |     } else {
97 |         "ps_data__*"
98 |     };
99 | 
100 |     let tables_stmt = local_db
101 |         .prepare_v2("SELECT name FROM sqlite_master WHERE type='table' AND name GLOB ?1")?;
102 |     tables_stmt.bind_text(1, table_glob, sqlite::Destructor::STATIC)?;
103 | 
104 |     let mut tables: Vec<String> = alloc::vec![];
105 | 
106 |     while tables_stmt.step()? == ResultCode::ROW {
107 |         let name = tables_stmt.column_text(0)?;
108 |         tables.push(name.to_string());
109 |     }
110 | 
111 |     for name in tables {
112 |         let quoted = quote_identifier(&name);
113 |         // The first delete statement deletes a single row, to trigger an update notification for the table.
114 |         // The second delete statement uses the truncate optimization to delete the remainder of the data.
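        // For a hypothetical table named ps_data__lists, the generated SQL is roughly:
        //   DELETE FROM "ps_data__lists" WHERE rowid IN (SELECT rowid FROM "ps_data__lists" LIMIT 1);
        //   DELETE FROM "ps_data__lists";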
115 |         let delete_sql = format!(
116 |             "\
117 | DELETE FROM {table} WHERE rowid IN (SELECT rowid FROM {table} LIMIT 1);
118 | DELETE FROM {table};",
119 |             table = quoted
120 |         );
121 |         local_db.exec_safe(&delete_sql)?;
122 |     }
123 | 
124 |     if let Some(schema) = state.view_schema() {
125 |         for raw_table in &schema.raw_tables {
126 |             if let Some(stmt) = &raw_table.clear {
127 |                 local_db.exec_safe(&stmt).map_err(|e| {
128 |                     PowerSyncError::from_sqlite(
129 |                         local_db,
130 |                         e,
131 |                         format!("Clearing raw table {}", raw_table.name),
132 |                     )
133 |                 })?;
134 |             }
135 |         }
136 |     }
137 | 
138 |     Ok(String::from(""))
139 | }
140 | 
141 | #[derive(Clone, Copy)]
142 | struct PowerSyncClearFlags(i32);
143 | 
144 | impl PowerSyncClearFlags {
145 |     const MASK_CLEAR_LOCAL: i32 = 0x01;
146 |     const MASK_SOFT_CLEAR: i32 = 0x02;
147 | 
148 |     fn clear_local(self) -> bool {
149 |         self.0 & Self::MASK_CLEAR_LOCAL != 0
150 |     }
151 | 
152 |     fn soft_clear(self) -> bool {
153 |         self.0 & Self::MASK_SOFT_CLEAR != 0
154 |     }
155 | }
156 | 
157 | create_auto_tx_function!(powersync_clear_tx, powersync_clear_impl);
158 | create_sqlite_text_fn!(powersync_clear, powersync_clear_tx, "powersync_clear");
159 | 
160 | pub fn register(db: *mut sqlite::sqlite3, state: Rc<DatabaseState>) -> Result<(), ResultCode> {
161 |     // This entire module is just making it easier to edit sqlite_master using queries.
162 | 
163 |     // Internal function, used exclusively in existing migrations.
164 |     db.create_function_v2(
165 |         "powersync_drop_view",
166 |         1,
167 |         sqlite::UTF8,
168 |         None,
169 |         Some(powersync_drop_view),
170 |         None,
171 |         None,
172 |         None,
173 |     )?;
174 | 
175 |     // Initialize the extension internal tables, and start a migration.
176 |     db.create_function_v2(
177 |         "powersync_init",
178 |         0,
179 |         sqlite::UTF8,
180 |         None,
181 |         Some(powersync_init),
182 |         None,
183 |         None,
184 |         None,
185 |     )?;
186 | 
187 |     db.create_function_v2(
188 |         "powersync_test_migration",
189 |         1,
190 |         sqlite::UTF8,
191 |         None,
192 |         Some(powersync_test_migration),
193 |         None,
194 |         None,
195 |         None,
196 |     )?;
197 | 
198 |     db.create_function_v2(
199 |         "powersync_clear",
200 |         1,
201 |         sqlite::UTF8,
202 |         Some(Rc::into_raw(state) as *mut c_void),
203 |         Some(powersync_clear),
204 |         None,
205 |         None,
206 |         Some(DatabaseState::destroy_rc),
207 |     )?;
208 | 
209 |     Ok(())
210 | }
211 | 
--------------------------------------------------------------------------------
/crates/core/src/fix_data.rs:
--------------------------------------------------------------------------------
1 | use core::ffi::c_int;
2 | 
3 | use alloc::format;
4 | use alloc::string::String;
5 | 
6 | use crate::create_sqlite_optional_text_fn;
7 | use crate::error::{PSResult, PowerSyncError};
8 | use crate::schema::inspection::ExistingTable;
9 | use powersync_sqlite_nostd::{self as sqlite, ColumnType, Value};
10 | use powersync_sqlite_nostd::{Connection, Context, ResultCode};
11 | 
12 | use crate::ext::SafeManagedStmt;
13 | use crate::util::quote_identifier;
14 | 
15 | // Apply a data migration to fix any existing data affected by the issue
16 | // fixed in v0.3.5.
17 | //
18 | // The issue was that the `ps_updated_rows` table was not being populated
19 | // with remove operations in some cases. This caused rows to be removed
20 | // from ps_oplog, but not from the ps_data__* tables, resulting in dangling rows.
21 | //
22 | // The fix here is to find these dangling rows, and add them to ps_updated_rows.
23 | // The next time the sync_local operation is run, these rows will be removed.
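// In other words (illustrative): a row left behind in a ps_data__todos table
// without a matching ps_oplog entry is queued into ps_updated_rows here, and
// the next sync_local run deletes it.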
24 | pub fn apply_v035_fix(db: *mut sqlite::sqlite3) -> Result<i64, PowerSyncError> {
25 |     // language=SQLite
26 |     let statement = db
27 |         .prepare_v2("SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data__*'")
28 |         .into_db_result(db)?;
29 | 
30 |     while statement.step()? == ResultCode::ROW {
31 |         let full_name = statement.column_text(0)?;
32 |         let Some((short_name, _)) = ExistingTable::external_name(full_name) else {
33 |             continue;
34 |         };
35 | 
36 |         let quoted = quote_identifier(full_name);
37 | 
38 |         // language=SQLite
39 |         let statement = db.prepare_v2(&format!(
40 |             "
41 | INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id)
42 | SELECT ?1, id FROM {}
43 |   WHERE NOT EXISTS (
44 |       SELECT 1 FROM ps_oplog
45 |       WHERE row_type = ?1 AND row_id = {}.id
46 |   );",
47 |             quoted, quoted
48 |         ))?;
49 |         statement.bind_text(1, short_name, sqlite::Destructor::STATIC)?;
50 | 
51 |         statement.exec()?;
52 |     }
53 | 
54 |     Ok(1)
55 | }
56 | 
57 | /// Older versions of the JavaScript SDK for PowerSync used to encode the subkey in oplog data
58 | /// entries as JSON.
59 | ///
60 | /// It wasn't supposed to do that, since the keys are regular strings already. To make databases
61 | /// created with those SDKs compatible with other SDKs or the sync client implemented in the core
62 | /// extensions, a migration is necessary. Since this migration is only relevant for the JS SDK, it
63 | /// is mostly implemented there. However, the helper function to remove the key encoding is
64 | /// implemented here because user-defined functions are expensive in JavaScript.
65 | fn remove_duplicate_key_encoding(key: &str) -> Option<String> {
66 |     // Acceptable format: <type>/<id>/<subkey>
67 |     // Unacceptable format: <type>/<id>/"<subkey>"
68 |     // This is a bit of a tricky conversion because both type and id can contain slashes and quotes.
69 |     // However, the subkey is either a UUID value or a `<table>/UUID` value - so we know it can't
70 |     // end in a quote unless the improper encoding was used.
71 |     if !key.ends_with('"') {
72 |         return None;
73 |     }
74 | 
75 |     // Since the subkey is JSON-encoded, find the start quote by going backwards.
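    // Example (from the tests below): for `foo/bar/"escaped\"key"`, the scan skips
    // the trailing quote, steps over the escaped `\"` pair, and stops at the quote
    // right after `bar/`, which starts the JSON-encoded subkey.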
76 |     let mut chars = key.char_indices();
77 |     chars.next_back()?; // Skip the quote ending the string
78 | 
79 |     enum FoundStartingQuote {
80 |         HasQuote { index: usize },
81 |         HasBackslashThenQuote { quote_index: usize },
82 |     }
83 |     let mut state: Option<FoundStartingQuote> = None;
84 |     let found_starting_quote = loop {
85 |         if let Some((i, char)) = chars.next_back() {
86 |             state = match state {
87 |                 Some(FoundStartingQuote::HasQuote { index }) => {
88 |                     if char == '\\' {
89 |                         // We've seen a \" pattern, not the start of the string
90 |                         Some(FoundStartingQuote::HasBackslashThenQuote { quote_index: index })
91 |                     } else {
92 |                         break Some(index);
93 |                     }
94 |                 }
95 |                 Some(FoundStartingQuote::HasBackslashThenQuote { quote_index }) => {
96 |                     if char == '\\' {
97 |                         // \\" pattern, the quote is unescaped
98 |                         break Some(quote_index);
99 |                     } else {
100 |                         None
101 |                     }
102 |                 }
103 |                 None => {
104 |                     if char == '"' {
105 |                         Some(FoundStartingQuote::HasQuote { index: i })
106 |                     } else {
107 |                         None
108 |                     }
109 |                 }
110 |             }
111 |         } else {
112 |             break None;
113 |         }
114 |     }?;
115 | 
116 |     let before_json = &key[..found_starting_quote];
117 |     let mut result: String = serde_json::from_str(&key[found_starting_quote..]).ok()?;
118 | 
119 |     result.insert_str(0, before_json);
120 |     Some(result)
121 | }
122 | 
123 | fn powersync_remove_duplicate_key_encoding_impl(
124 |     _ctx: *mut sqlite::context,
125 |     args: &[*mut sqlite::value],
126 | ) -> Result<Option<String>, PowerSyncError> {
127 |     let arg = args.get(0).ok_or(ResultCode::MISUSE)?;
128 | 
129 |     if arg.value_type() != ColumnType::Text {
130 |         return Err(ResultCode::MISMATCH.into());
131 |     }
132 | 
133 |     return Ok(remove_duplicate_key_encoding(arg.text()));
134 | }
135 | 
136 | create_sqlite_optional_text_fn!(
137 |     powersync_remove_duplicate_key_encoding,
138 |     powersync_remove_duplicate_key_encoding_impl,
139 |     "powersync_remove_duplicate_key_encoding"
140 | );
141 | 
142 | pub fn register(db: *mut sqlite::sqlite3) -> Result<(), ResultCode> {
143 |     db.create_function_v2(
144 |         "powersync_remove_duplicate_key_encoding",
145 |         1,
146 |         sqlite::UTF8 | sqlite::DETERMINISTIC,
147 |         None,
148 |         Some(powersync_remove_duplicate_key_encoding),
149 |         None,
150 |         None,
151 |         None,
152 |     )?;
153 |     Ok(())
154 | }
155 | 
156 | #[cfg(test)]
157 | mod test {
158 | 
159 |     use super::remove_duplicate_key_encoding;
160 | 
161 |     fn assert_unaffected(source: &str) {
162 |         assert!(matches!(remove_duplicate_key_encoding(source), None));
163 |     }
164 | 
165 |     #[test]
166 |     fn does_not_change_unaffected_keys() {
167 |         assert_unaffected("object_type/object_id/subkey");
168 |         assert_unaffected("object_type/object_id/null");
169 | 
170 |         // Object type and ID could technically contain quotes and forward slashes
171 |         assert_unaffected(r#""object"/"type"/subkey"#);
172 |         assert_unaffected("object\"/type/object\"/id/subkey");
173 | 
174 |         // Invalid key, but we shouldn't crash
175 |         assert_unaffected("\"key\"");
176 |     }
177 | 
178 |     #[test]
179 |     fn removes_quotes() {
180 |         assert_eq!(
181 |             remove_duplicate_key_encoding("foo/bar/\"baz\"").unwrap(),
182 |             "foo/bar/baz",
183 |         );
184 | 
185 |         assert_eq!(
186 |             remove_duplicate_key_encoding(r#"foo/bar/"nested/subkey""#).unwrap(),
187 |             "foo/bar/nested/subkey"
188 |         );
189 | 
190 |         assert_eq!(
191 |             remove_duplicate_key_encoding(r#"foo/bar/"escaped\"key""#).unwrap(),
192 |             "foo/bar/escaped\"key"
193 |         );
194 |         assert_eq!(
195 |             remove_duplicate_key_encoding(r#"foo/bar/"escaped\\key""#).unwrap(),
196 |             "foo/bar/escaped\\key"
197 |         );
198 |         assert_eq!(
199 |
remove_duplicate_key_encoding(r#"foo/bar/"/\\"subkey""#).unwrap(), 200 | "foo/bar/\"/\\\\subkey" 201 | ); 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | pull_request: 4 | name: "tests" 5 | 6 | jobs: 7 | libs_linux: 8 | name: Building Linux libraries 9 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Build Linux libraries 14 | uses: ./.github/actions/linux 15 | 16 | libs_macos: 17 | name: Building macOS libraries 18 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 19 | runs-on: macos-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Build macOS 23 | uses: ./.github/actions/macos 24 | 25 | libs_windows: 26 | name: Building Windows libraries 27 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 28 | runs-on: windows-latest 29 | steps: 30 | - uses: actions/checkout@v4 31 | 32 | - name: Build Windows 33 | uses: ./.github/actions/windows 34 | 35 | libs_android: 36 | name: Building Android libraries 37 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 38 | runs-on: ubuntu-latest 39 | steps: 40 | - uses: actions/checkout@v4 41 | - name: Build Android 42 | uses: ./.github/actions/android 43 | with: 44 | sign-publication: '0' 45 | 46 | libs_wasm: 47 | name: Basic WASM build 48 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 49 | runs-on: ubuntu-latest 50 | steps: 51 | - uses: actions/checkout@v4 52 | 53 | - name: Build wasm 54 | uses: ./.github/actions/wasm 55 | 56 | libs_xcframework: 57 | name: Build XCFramework 58 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 59 | runs-on: macos-latest 60 | steps: 61 | - uses: actions/checkout@v4 62 | - name: Build XCFramework 63 | uses: ./.github/actions/xcframework 64 | 65 | rust_unit_tests: 66 | name: Rust unit tests on ${{ matrix.os }} 67 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 68 | runs-on: ${{ matrix.os }} 69 | strategy: 70 | fail-fast: false 71 | matrix: 72 | os: [ubuntu-24.04, macos-latest] 73 | steps: 74 | - uses: actions/checkout@v4 75 | 76 | - name: Ubuntu setup 77 | if: matrix.os == 'ubuntu-24.04' 78 | run: | 79 | sudo apt install libreadline-dev 80 | 81 | - name: Install Rust Nightly 82 | uses: dtolnay/rust-toolchain@stable 83 | with: 84 | toolchain: nightly-2025-10-31 85 | components: rust-src,rustfmt,clippy 86 | 87 | - name: Check formatting 88 | run: cargo fmt --all --check 89 | 90 | - name: Build 91 | run: | 92 | cargo build -p powersync_loadable --release 93 | cargo build -p powersync_core --release --features static 94 | cargo build -p powersync_sqlite --release 95 | cargo build -p sqlite3 --release 96 | 97 | - name: Test powersync 98 | run: | 99 | cargo test -p powersync_core 100 | 101 | - 
name: Check shell 102 | run: | 103 | ./target/release/powersync_sqlite ":memory:" "select powersync_rs_version()" 104 | 105 | - name: Check loadable extension 106 | run: | 107 | ./target/release/sqlite3 ":memory:" ".load ./target/release/libpowersync" "select powersync_rs_version()" 108 | 109 | build: 110 | name: Testing on ${{ matrix.os }} 111 | if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) 112 | runs-on: ${{ matrix.os }} 113 | needs: [libs_linux, libs_macos, libs_windows] 114 | strategy: 115 | fail-fast: false 116 | matrix: 117 | os: [ubuntu-24.04, ubuntu-arm64, macos-latest, windows-latest, windows-11-arm] 118 | 119 | steps: 120 | - uses: actions/checkout@v4 121 | 122 | - uses: dart-lang/setup-dart@v1 123 | 124 | - uses: actions/cache@v4 125 | id: sqlite_build 126 | with: 127 | path: dart/.dart_tool/sqlite3/ 128 | key: ${{ matrix.os }}-${{ hashFiles('dart/tool/') }} 129 | 130 | - name: Setup Dart tests 131 | working-directory: dart 132 | run: | 133 | dart pub get 134 | dart run tool/download_sqlite3.dart 135 | dart analyze 136 | 137 | - name: Download libs 138 | uses: actions/download-artifact@v5 139 | with: 140 | name: linux-library 141 | path: dart/assets 142 | - name: Download libs 143 | uses: actions/download-artifact@v5 144 | with: 145 | name: macos-library 146 | path: dart/assets 147 | - name: Download libs 148 | uses: actions/download-artifact@v5 149 | with: 150 | name: windows-library 151 | path: dart/assets 152 | 153 | - name: View downloaded artifacts 154 | if: runner.os == 'Linux' 155 | working-directory: dart 156 | run: | 157 | ls -al assets/ 158 | 159 | - name: Dart tests on Linux 160 | if: runner.os == 'Linux' 161 | working-directory: dart 162 | run: | 163 | CORE_TEST_SQLITE=.dart_tool/sqlite3/latest/libsqlite3.so dart test 164 | CORE_TEST_SQLITE=.dart_tool/sqlite3/minimum/libsqlite3.so dart test 165 | 166 | - name: Dart tests on macOS 167 | if: runner.os == 'macOS' 168 | working-directory: dart 169 | # We skip sync_local_performance_test on macOS because the runners are just so slow... 
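        # The skip_slow preset excludes tests tagged "slow" (see dart/dart_test.yaml).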
170 |         run: |
171 |           CORE_TEST_SQLITE=.dart_tool/sqlite3/latest/libsqlite3.dylib dart test -P skip_slow
172 |           CORE_TEST_SQLITE=.dart_tool/sqlite3/minimum/libsqlite3.dylib dart test -P skip_slow
173 | 
174 |   build_stable_rust:
175 |     runs-on: ubuntu-latest
176 |     steps:
177 |       - uses: actions/checkout@v4
178 | 
179 |       - uses: dart-lang/setup-dart@v1
180 |       - name: Install Rust Stable
181 |         uses: dtolnay/rust-toolchain@stable
182 | 
183 |       - uses: actions/cache@v4
184 |         id: sqlite_build
185 |         with:
186 |           path: dart/.dart_tool/sqlite3/
187 |           key: ${{ runner.os }}-${{ hashFiles('dart/tool/') }}
188 | 
189 |       - name: Setup Dart tests
190 |         working-directory: dart
191 |         run: |
192 |           dart pub get
193 |           dart run tool/download_sqlite3.dart
194 |           dart analyze
195 | 
196 |       - name: Compile with stable Rust
197 |         run: |
198 |           cargo +stable build -p powersync_loadable
199 | 
200 |       - name: Dart tests with stable Rust
201 |         working-directory: dart
202 |         run: |
203 |           CORE_TEST_SQLITE=.dart_tool/sqlite3/latest/libsqlite3.so dart test
204 |           CORE_TEST_SQLITE=.dart_tool/sqlite3/minimum/libsqlite3.so dart test
205 | 
206 |   valgrind:
207 |     name: Testing with Valgrind on ${{ matrix.os }}
208 |     runs-on: ${{ matrix.os }}
209 |     if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository)
210 |     strategy:
211 |       matrix:
212 |         include:
213 |           - os: ubuntu-latest
214 |     steps:
215 |       - uses: actions/checkout@v4
216 |       - name: Install Rust Nightly
217 |         uses: dtolnay/rust-toolchain@stable
218 |         with:
219 |           toolchain: nightly-2025-10-31
220 |           components: rust-src
221 | 
222 |       - name: Install valgrind
223 |         run: sudo apt update && sudo apt install -y valgrind
224 | 
225 |       - name: Install Cargo Valgrind
226 |         run: |
227 |           cargo install cargo-valgrind
228 | 
229 |       - name: Test Core
230 |         run: |
231 |           cargo valgrind test -p powersync_core
232 | 
--------------------------------------------------------------------------------
/crates/core/src/schema/table_info.rs:
--------------------------------------------------------------------------------
1 | use alloc::{format, string::String, vec, vec::Vec};
2 | use serde::{Deserialize, de::Visitor};
3 | 
4 | #[derive(Deserialize)]
5 | pub struct Table {
6 |     pub name: String,
7 |     #[serde(rename = "view_name")]
8 |     pub view_name_override: Option<String>,
9 |     pub columns: Vec<Column>,
10 |     #[serde(default)]
11 |     pub indexes: Vec<Index>,
12 |     #[serde(
13 |         default,
14 |         rename = "include_old",
15 |         deserialize_with = "deserialize_include_old"
16 |     )]
17 |     pub diff_include_old: Option<DiffIncludeOld>,
18 |     #[serde(flatten)]
19 |     pub flags: TableInfoFlags,
20 | }
21 | 
22 | #[derive(Deserialize)]
23 | pub struct RawTable {
24 |     pub name: String,
25 |     pub put: PendingStatement,
26 |     pub delete: PendingStatement,
27 |     #[serde(default)]
28 |     pub clear: Option<String>,
29 | }
30 | 
31 | impl Table {
32 |     pub fn view_name(&self) -> &str {
33 |         self.view_name_override
34 |             .as_deref()
35 |             .unwrap_or(self.name.as_str())
36 |     }
37 | 
38 |     pub fn local_only(&self) -> bool {
39 |         self.flags.local_only()
40 |     }
41 | 
42 |     pub fn internal_name(&self) -> String {
43 |         if self.local_only() {
44 |             format!("ps_data_local__{:}", self.name)
45 |         } else {
46 |             format!("ps_data__{:}", self.name)
47 |         }
48 |     }
49 | 
50 |     pub fn filtered_columns<'a>(
51 |         &'a self,
52 |         names: impl Iterator<Item = &'a str>,
53 |     ) -> impl Iterator<Item = &'a Column> {
54 |         // First, sort all columns by name for faster lookups by name.
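        // (Sorting once costs O(n log n); every lookup below is then an O(log n)
        // binary search instead of an O(n) scan over the column list.)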
55 |         let mut sorted_by_name: Vec<&Column> = self.columns.iter().collect();
56 |         sorted_by_name.sort_by_key(|c| &*c.name);
57 | 
58 |         names.filter_map(move |name| {
59 |             let index = sorted_by_name
60 |                 .binary_search_by_key(&name, |c| c.name.as_str())
61 |                 .ok()?;
62 | 
63 |             Some(sorted_by_name[index])
64 |         })
65 |     }
66 | }
67 | 
68 | #[derive(Deserialize)]
69 | pub struct Column {
70 |     pub name: String,
71 |     #[serde(rename = "type")]
72 |     pub type_name: String,
73 | }
74 | 
75 | #[derive(Deserialize)]
76 | pub struct Index {
77 |     pub name: String,
78 |     pub columns: Vec<IndexedColumn>,
79 | }
80 | 
81 | #[derive(Deserialize)]
82 | pub struct IndexedColumn {
83 |     pub name: String,
84 |     pub ascending: bool,
85 |     #[serde(rename = "type")]
86 |     pub type_name: String,
87 | }
88 | 
89 | pub enum DiffIncludeOld {
90 |     OnlyForColumns { columns: Vec<String> },
91 |     ForAllColumns,
92 | }
93 | 
94 | fn deserialize_include_old<'de, D: serde::Deserializer<'de>>(
95 |     deserializer: D,
96 | ) -> Result<Option<DiffIncludeOld>, D::Error> {
97 |     struct IncludeOldVisitor;
98 | 
99 |     impl<'de> Visitor<'de> for IncludeOldVisitor {
100 |         type Value = Option<DiffIncludeOld>;
101 | 
102 |         fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
103 |             write!(formatter, "an array of columns, or true")
104 |         }
105 | 
106 |         fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
107 |         where
108 |             D: serde::Deserializer<'de>,
109 |         {
110 |             deserializer.deserialize_any(self)
111 |         }
112 | 
113 |         fn visit_none<E>(self) -> Result<Self::Value, E>
114 |         where
115 |             E: serde::de::Error,
116 |         {
117 |             return Ok(None);
118 |         }
119 | 
120 |         fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
121 |         where
122 |             E: serde::de::Error,
123 |         {
124 |             Ok(if v {
125 |                 Some(DiffIncludeOld::ForAllColumns)
126 |             } else {
127 |                 None
128 |             })
129 |         }
130 | 
131 |         fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
132 |         where
133 |             A: serde::de::SeqAccess<'de>,
134 |         {
135 |             let mut elements: Vec<String> = vec![];
136 |             while let Some(next) = seq.next_element::<String>()? {
137 |                 elements.push(next);
138 |             }
139 | 
140 |             Ok(Some(DiffIncludeOld::OnlyForColumns { columns: elements }))
141 |         }
142 |     }
143 | 
144 |     deserializer.deserialize_option(IncludeOldVisitor)
145 | }
146 | 
147 | #[derive(Clone, Copy)]
148 | #[repr(transparent)]
149 | pub struct TableInfoFlags(pub u32);
150 | 
151 | impl TableInfoFlags {
152 |     pub const LOCAL_ONLY: u32 = 1;
153 |     pub const INSERT_ONLY: u32 = 2;
154 |     pub const INCLUDE_METADATA: u32 = 4;
155 |     pub const INCLUDE_OLD_ONLY_WHEN_CHANGED: u32 = 8;
156 |     pub const IGNORE_EMPTY_UPDATE: u32 = 16;
157 | 
158 |     pub const fn local_only(self) -> bool {
159 |         self.0 & Self::LOCAL_ONLY != 0
160 |     }
161 | 
162 |     pub const fn insert_only(self) -> bool {
163 |         self.0 & Self::INSERT_ONLY != 0
164 |     }
165 | 
166 |     pub const fn include_metadata(self) -> bool {
167 |         self.0 & Self::INCLUDE_METADATA != 0
168 |     }
169 | 
170 |     pub const fn include_old_only_when_changed(self) -> bool {
171 |         self.0 & Self::INCLUDE_OLD_ONLY_WHEN_CHANGED != 0
172 |     }
173 | 
174 |     pub const fn ignore_empty_update(self) -> bool {
175 |         self.0 & Self::IGNORE_EMPTY_UPDATE != 0
176 |     }
177 | 
178 |     const fn with_flag(self, flag: u32) -> Self {
179 |         Self(self.0 | flag)
180 |     }
181 | 
182 |     const fn without_flag(self, flag: u32) -> Self {
183 |         Self(self.0 & !flag)
184 |     }
185 | 
186 |     const fn set_flag(self, flag: u32, enable: bool) -> Self {
187 |         if enable {
188 |             self.with_flag(flag)
189 |         } else {
190 |             self.without_flag(flag)
191 |         }
192 |     }
193 | }
194 | 
195 | impl Default for TableInfoFlags {
196 |     fn default() -> Self {
197 |         Self(0)
198 |     }
199 | }
200 | 
201 | impl<'de> Deserialize<'de> for TableInfoFlags {
202 |     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
203 |     where
204 |         D: serde::Deserializer<'de>,
205 |     {
206 |         struct FlagsVisitor;
207 | 
208 |         impl<'de> Visitor<'de> for FlagsVisitor {
209 |             type Value = TableInfoFlags;
210 | 
211 |             fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
212 |                 write!(formatter, "an object with table flags")
213 |             }
214 | 
215 |             fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
216 |             where
217 |                 A: serde::de::MapAccess<'de>,
218 |             {
219 |                 let mut flags = TableInfoFlags::default();
220 | 
221 |                 while let Some((key, value)) = map.next_entry::<&'de str, bool>()? {
222 |                     flags = flags.set_flag(
223 |                         match key {
224 |                             "local_only" => TableInfoFlags::LOCAL_ONLY,
225 |                             "insert_only" => TableInfoFlags::INSERT_ONLY,
226 |                             "include_metadata" => TableInfoFlags::INCLUDE_METADATA,
227 |                             "include_old_only_when_changed" => {
228 |                                 TableInfoFlags::INCLUDE_OLD_ONLY_WHEN_CHANGED
229 |                             }
230 |                             "ignore_empty_update" => TableInfoFlags::IGNORE_EMPTY_UPDATE,
231 |                             _ => continue,
232 |                         },
233 |                         value,
234 |                     );
235 |                 }
236 | 
237 |                 Ok(flags)
238 |             }
239 |         }
240 | 
241 |         deserializer.deserialize_struct(
242 |             "TableInfoFlags",
243 |             &[
244 |                 "local_only",
245 |                 "insert_only",
246 |                 "include_metadata",
247 |                 "include_old_only_when_changed",
248 |                 "ignore_empty_update",
249 |             ],
250 |             FlagsVisitor,
251 |         )
252 |     }
253 | }
254 | 
255 | #[derive(Deserialize)]
256 | pub struct PendingStatement {
257 |     pub sql: String,
258 |     /// This vec should contain an entry for each parameter in [sql].
259 |     pub params: Vec<PendingStatementValue>,
260 | }
261 | 
262 | #[derive(Deserialize)]
263 | pub enum PendingStatementValue {
264 |     Id,
265 |     Column(String),
266 |     // TODO: Stuff like a raw object of put data?
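    // Illustrative mapping (hypothetical raw table): for the put statement
    //   INSERT OR REPLACE INTO todos (id, content) VALUES (?1, ?2)
    // params would be [Id, Column("content")], binding the synced row id to ?1
    // and the "content" value from the synced data to ?2.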
267 | }
268 | 
--------------------------------------------------------------------------------
/crates/core/src/sync/operations.rs:
--------------------------------------------------------------------------------
1 | use alloc::format;
2 | use alloc::string::String;
3 | use num_traits::Zero;
4 | use powersync_sqlite_nostd::Connection;
5 | use powersync_sqlite_nostd::{self as sqlite, ResultCode};
6 | 
7 | use crate::{
8 |     error::{PSResult, PowerSyncError},
9 |     ext::SafeManagedStmt,
10 | };
11 | 
12 | use super::Checksum;
13 | use super::line::OplogData;
14 | use super::{
15 |     line::{DataLine, OpType},
16 |     storage_adapter::{BucketInfo, StorageAdapter},
17 | };
18 | 
19 | pub fn insert_bucket_operations(
20 |     adapter: &StorageAdapter,
21 |     data: &DataLine,
22 | ) -> Result<(), PowerSyncError> {
23 |     let db = adapter.db;
24 |     let BucketInfo {
25 |         id: bucket_id,
26 |         last_applied_op,
27 |     } = adapter.lookup_bucket(&*data.bucket)?;
28 | 
29 |     // This is an optimization for initial sync - we can avoid persisting individual REMOVE
30 |     // operations when last_applied_op = 0.
31 |     // We do still need to do the "supersede_statement" step for this case, since a REMOVE
32 |     // operation can supersede another PUT operation we're syncing at the same time.
33 |     let mut is_empty = last_applied_op == 0;
34 | 
35 |     // Statement to supersede (replace) operations with the same key.
36 |     // language=SQLite
37 |     let supersede_statement = db.prepare_v2(
38 |         "\
39 | DELETE FROM ps_oplog
40 |     WHERE unlikely(ps_oplog.bucket = ?1)
41 |     AND ps_oplog.key = ?2
42 | RETURNING op_id, hash",
43 |     )?;
44 |     supersede_statement.bind_int64(1, bucket_id)?;
45 | 
46 |     // language=SQLite
47 |     let insert_statement = db.prepare_v2("\
48 | INSERT INTO ps_oplog(bucket, op_id, key, row_type, row_id, data, hash) VALUES (?, ?, ?, ?, ?, ?, ?)")?;
49 |     insert_statement.bind_int64(1, bucket_id)?;
50 | 
51 |     let updated_row_statement = db.prepare_v2(
52 |         "\
53 | INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) VALUES(?1, ?2)",
54 |     )?;
55 | 
56 |     let mut last_op: Option<i64> = None;
57 |     let mut add_checksum = Checksum::zero();
58 |     let mut op_checksum = Checksum::zero();
59 |     let mut added_ops: i32 = 0;
60 | 
61 |     for line in &data.data {
62 |         let op_id = line.op_id;
63 |         let op = line.op;
64 |         let object_type = line.object_type.as_ref();
65 |         let object_id = line.object_id.as_ref();
66 |         let checksum = line.checksum;
67 |         let op_data = line.data.as_ref();
68 | 
69 |         last_op = Some(op_id);
70 |         added_ops += 1;
71 | 
72 |         if op == OpType::PUT || op == OpType::REMOVE {
73 |             let key: String;
74 |             if let (Some(object_type), Some(object_id)) = (object_type, object_id) {
75 |                 let subkey = line.subkey.as_ref().map(|i| &**i).unwrap_or("null");
76 |                 key = format!("{}/{}/{}", &object_type, &object_id, subkey);
77 |             } else {
78 |                 key = String::from("");
79 |             }
80 | 
81 |             supersede_statement.bind_text(2, &key, sqlite::Destructor::STATIC)?;
82 | 
83 |             let mut superseded = false;
84 | 
85 |             while supersede_statement.step()? == ResultCode::ROW {
86 |                 // Superseded (deleted) a previous operation, add the checksum
87 |                 let supersede_checksum = Checksum::from_i32(supersede_statement.column_int(1));
88 |                 add_checksum += supersede_checksum;
89 |                 op_checksum -= supersede_checksum;
90 | 
91 |                 // Superseded an operation, only skip if the bucket was empty
92 |                 // Previously this checked "superseded_op <= last_applied_op".
93 | // However, that would not account for a case where a previous 94 | // PUT operation superseded the original PUT operation in this 95 | // same batch, in which case superseded_op is not accurate for this. 96 | if !is_empty { 97 | superseded = true; 98 | } 99 | } 100 | supersede_statement.reset()?; 101 | 102 | if op == OpType::REMOVE { 103 | let should_skip_remove = !superseded; 104 | 105 | add_checksum += checksum; 106 | 107 | if !should_skip_remove { 108 | if let (Some(object_type), Some(object_id)) = (object_type, object_id) { 109 | updated_row_statement.bind_text( 110 | 1, 111 | object_type, 112 | sqlite::Destructor::STATIC, 113 | )?; 114 | updated_row_statement.bind_text( 115 | 2, 116 | object_id, 117 | sqlite::Destructor::STATIC, 118 | )?; 119 | updated_row_statement.exec()?; 120 | } 121 | } 122 | 123 | continue; 124 | } 125 | 126 | insert_statement.bind_int64(2, op_id)?; 127 | if key != "" { 128 | insert_statement.bind_text(3, &key, sqlite::Destructor::STATIC)?; 129 | } else { 130 | insert_statement.bind_null(3)?; 131 | } 132 | 133 | if let (Some(object_type), Some(object_id)) = (object_type, object_id) { 134 | insert_statement.bind_text(4, object_type, sqlite::Destructor::STATIC)?; 135 | insert_statement.bind_text(5, object_id, sqlite::Destructor::STATIC)?; 136 | } else { 137 | insert_statement.bind_null(4)?; 138 | insert_statement.bind_null(5)?; 139 | } 140 | if let Some(data) = op_data { 141 | let OplogData::Json { data } = data; 142 | 143 | insert_statement.bind_text(6, data, sqlite::Destructor::STATIC)?; 144 | } else { 145 | insert_statement.bind_null(6)?; 146 | } 147 | 148 | insert_statement.bind_int(7, checksum.bitcast_i32())?; 149 | insert_statement.exec()?; 150 | 151 | op_checksum += checksum; 152 | } else if op == OpType::MOVE { 153 | add_checksum += checksum; 154 | } else if op == OpType::CLEAR { 155 | // Any remaining PUT operations should get an implicit REMOVE 156 | // language=SQLite 157 | let clear_statement1 = db 158 | .prepare_v2( 159 | "INSERT OR IGNORE INTO ps_updated_rows(row_type, row_id) 160 | SELECT row_type, row_id 161 | FROM ps_oplog 162 | WHERE bucket = ?1", 163 | ) 164 | .into_db_result(db)?; 165 | clear_statement1.bind_int64(1, bucket_id)?; 166 | clear_statement1.exec()?; 167 | 168 | let clear_statement2 = db 169 | .prepare_v2("DELETE FROM ps_oplog WHERE bucket = ?1") 170 | .into_db_result(db)?; 171 | clear_statement2.bind_int64(1, bucket_id)?; 172 | clear_statement2.exec()?; 173 | 174 | // And we need to re-apply all of those. 175 | // We also replace the checksum with the checksum of the CLEAR op. 
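                // (The persisted add_checksum becomes the CLEAR op's checksum, while
                // op_checksum restarts at zero to match the now-empty ps_oplog rows
                // for this bucket.)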
176 |                 // language=SQLite
177 |                 let reset_bucket_statement = db.prepare_v2(
178 |                     "UPDATE ps_buckets SET last_applied_op = 0, add_checksum = ?1, op_checksum = 0 WHERE id = ?2",
179 |                 )?;
180 |                 reset_bucket_statement.bind_int64(2, bucket_id)?;
181 |                 reset_bucket_statement.bind_int(1, checksum.bitcast_i32())?;
182 |                 reset_bucket_statement.exec()?;
183 | 
184 |                 add_checksum = Checksum::zero();
185 |                 is_empty = true;
186 |                 op_checksum = Checksum::zero();
187 |             }
188 |         }
189 | 
190 |     if let Some(last_op) = &last_op {
191 |         // language=SQLite
192 |         let statement = db.prepare_v2(
193 |             "UPDATE ps_buckets
194 |                 SET last_op = ?2,
195 |                     add_checksum = (add_checksum + ?3) & 0xffffffff,
196 |                     op_checksum = (op_checksum + ?4) & 0xffffffff,
197 |                     count_since_last = count_since_last + ?5
198 |             WHERE id = ?1",
199 |         )?;
200 |         statement.bind_int64(1, bucket_id)?;
201 |         statement.bind_int64(2, *last_op)?;
202 |         statement.bind_int(3, add_checksum.bitcast_i32())?;
203 |         statement.bind_int(4, op_checksum.bitcast_i32())?;
204 |         statement.bind_int(5, added_ops)?;
205 | 
206 |         statement.exec()?;
207 |     }
208 | 
209 |     Ok(())
210 | }
211 | 
--------------------------------------------------------------------------------