├── .github └── workflows │ ├── Dockerfile.release-docker │ ├── ci.yml │ ├── deploy-docs.yml │ ├── release-binaries.yml │ ├── release-docker.yml │ └── tmate.yml ├── .gitignore ├── AGENTS.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── changes ├── 0.0.9.md ├── 0.1.0.md ├── 0.2.0-docs.png ├── 0.2.0-logging.png ├── 0.2.0.md ├── 0.4.0.md └── 0.4.2.md ├── docs ├── .gitignore ├── .vscode │ ├── extensions.json │ └── launch.json ├── README.md ├── astro.config.mjs ├── package-lock.json ├── package.json ├── public │ └── favicon.svg ├── src │ ├── assets │ │ ├── 01-bash.png │ │ ├── 01-nu.png │ │ ├── 02-bash.png │ │ └── 02-nu.png │ ├── content.config.ts │ ├── content │ │ └── docs │ │ │ ├── getting-started │ │ │ ├── concepts.mdx │ │ │ ├── first-stream.mdx │ │ │ └── installation.mdx │ │ │ ├── index.mdx │ │ │ ├── reference │ │ │ ├── architecture.mdx │ │ │ ├── cli.mdx │ │ │ ├── commands.mdx │ │ │ ├── contexts.mdx │ │ │ ├── generators.mdx │ │ │ ├── handlers.mdx │ │ │ ├── import-export.mdx │ │ │ └── store-api.mdx │ │ │ └── tutorials │ │ │ └── threaded-conversations.mdx │ ├── css │ │ └── custom.css │ └── utils │ │ ├── custom-code-output-plugin.js │ │ └── links.jsx └── tsconfig.json ├── examples ├── discord-bot │ ├── README.md │ ├── handler-bookmarklet.nu │ ├── handler-heartbeat.nu │ ├── handler-roller.nu │ └── handler-slash-dice.nu └── x-macos-pasteboard │ ├── README.md │ └── solid-ui │ ├── .gitignore │ ├── README.md │ ├── deno.json │ ├── deno.lock │ ├── handler-pb.map.nu │ ├── index.html │ ├── src │ ├── App.tsx │ ├── Card.tsx │ ├── index.css │ ├── main.tsx │ └── store │ │ ├── cas.ts │ │ ├── index.ts │ │ └── stream.ts │ └── vite.config.ts ├── notes ├── how-to-release.md ├── notes.md ├── overview.png └── screenshot.png ├── scripts └── check.sh ├── src ├── api.rs ├── client │ ├── commands.rs │ ├── connect.rs │ ├── mod.rs │ ├── request.rs │ └── types.rs ├── commands │ ├── mod.rs │ ├── serve.rs │ └── tests.rs ├── error.rs ├── generators │ ├── generator.rs │ ├── mod.rs │ ├── serve.rs │ └── tests.rs ├── handlers │ ├── handler.rs │ ├── mod.rs │ ├── serve.rs │ └── tests.rs ├── lib.rs ├── listener.rs ├── main.rs ├── nu │ ├── commands │ │ ├── append_command.rs │ │ ├── append_command_buffered.rs │ │ ├── cas_command.rs │ │ ├── cat_command.rs │ │ ├── get_command.rs │ │ ├── head_command.rs │ │ ├── mod.rs │ │ └── remove_command.rs │ ├── config.rs │ ├── engine.rs │ ├── mod.rs │ ├── test_commands.rs │ ├── test_engine.rs │ └── util.rs ├── store │ ├── mod.rs │ ├── tests.rs │ └── ttl.rs └── trace.rs ├── tests └── integration.rs └── xs.nu /.github/workflows/Dockerfile.release-docker: -------------------------------------------------------------------------------- 1 | FROM ubuntu:latest 2 | 3 | RUN apt-get update && apt-get install -y libssl-dev curl git jq jo unzip 4 | 5 | # nu 6 | ARG NU_VERSION=0.104.0 7 | RUN curl --fail -L https://github.com/nushell/nushell/releases/download/${NU_VERSION}/nu-${NU_VERSION}-x86_64-unknown-linux-gnu.tar.gz | \ 8 | tar -xz -C /usr/bin --strip-components=1 nu-${NU_VERSION}-x86_64-unknown-linux-gnu/nu && \ 9 | mkdir -p /root/.config/nushell && nu -c "config nu --default" > /root/.config/nushell/config.nu && nu -c "config env --default" > /root/.config/nushell/env.nu 10 | 11 | # websocat 12 | ARG WEBSOCAT_VERSION=1.13.0 13 | RUN curl --fail -L -o /usr/bin/websocat https://github.com/vi/websocat/releases/download/v${WEBSOCAT_VERSION}/websocat_max.x86_64-unknown-linux-musl && \ 14 | chmod +x /usr/bin/websocat 15 | 16 | # caddy 17 | ARG CADDY_VERSION=2.8.4 18 | RUN curl --fail -L -o 
/tmp/caddy.tar.gz https://github.com/caddyserver/caddy/releases/download/v${CADDY_VERSION}/caddy_${CADDY_VERSION}_linux_amd64.tar.gz && \ 19 | tar -xz -C /usr/bin -f /tmp/caddy.tar.gz caddy && \ 20 | rm /tmp/caddy.tar.gz && \ 21 | chmod +x /usr/bin/caddy 22 | 23 | # aws 24 | RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ 25 | unzip awscliv2.zip && \ 26 | ./aws/install && \ 27 | rm -rf aws awscliv2.zip 28 | 29 | # deno 30 | ARG DENO_VERSION=2.1.1 31 | RUN curl --fail -L -o /tmp/deno.zip https://github.com/denoland/deno/releases/download/v${DENO_VERSION}/deno-x86_64-unknown-linux-gnu.zip && \ 32 | unzip /tmp/deno.zip -d /usr/bin && \ 33 | chmod +x /usr/bin/deno && \ 34 | rm /tmp/deno.zip 35 | 36 | WORKDIR /app 37 | 38 | COPY target/release/xs /usr/local/bin/xs 39 | COPY target/release/scru128 /usr/bin/scru128 40 | COPY xs.nu /app/xs.nu 41 | 42 | # Run XS_START script content if set, otherwise sleep indefinitely 43 | ENTRYPOINT /bin/sh -c 'if [ -n "$XS_START" ]; then echo "$XS_START" > /tmp/xs_start.sh && chmod +x /tmp/xs_start.sh && exec /tmp/xs_start.sh; else exec sleep infinity; fi' 44 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | build: 14 | strategy: 15 | matrix: 16 | os: [ubuntu-latest, macos-latest] 17 | runs-on: ${{ matrix.os }} 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Build 21 | run: cargo build --verbose 22 | - name: Run tests 23 | run: cargo test --verbose 24 | - name: Run clippy 25 | run: cargo clippy --verbose 26 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Documentation 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | 8 | jobs: 9 | build-and-deploy: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout repository 14 | uses: actions/checkout@v3 15 | 16 | - name: Set up Node.js 17 | uses: actions/setup-node@v3 18 | with: 19 | node-version: 18 20 | 21 | - name: Install dependencies 22 | run: npm ci 23 | working-directory: ./docs 24 | 25 | - name: Build documentation 26 | run: npm run build 27 | working-directory: ./docs 28 | 29 | - name: Deploy to GitHub Pages 30 | uses: peaceiris/actions-gh-pages@v3 31 | with: 32 | github_token: ${{ secrets.GITHUB_TOKEN }} 33 | publish_dir: ./docs/dist 34 | -------------------------------------------------------------------------------- /.github/workflows/release-binaries.yml: -------------------------------------------------------------------------------- 1 | name: Release Binaries 2 | 3 | on: 4 | push: 5 | tags: [ "v*" ] # e.g. 
v1.4.0 6 | workflow_dispatch: 7 | 8 | ############################################################################### 9 | # 1 ── Create (or update) the GitHub release ────────────────────────────────── 10 | ############################################################################### 11 | jobs: 12 | create_release: 13 | name: Create Release 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: write 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Publish release 21 | uses: softprops/action-gh-release@v2 22 | env: 23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 24 | with: 25 | tag_name: ${{ github.ref_name }} 26 | name: Release ${{ github.ref_name }} 27 | prerelease: ${{ contains(github.ref_name, '-dev.') }} 28 | draft: false 29 | 30 | ############################################################################### 31 | # 2 ── MUSL build using Alpine container (most reliable) ────────────────────── 32 | ############################################################################### 33 | build_musl: 34 | name: Build MUSL (x86_64) 35 | needs: create_release 36 | runs-on: ubuntu-latest 37 | container: 38 | image: rust:alpine 39 | 40 | steps: 41 | - uses: actions/checkout@v4 42 | 43 | - name: Install dependencies 44 | run: | 45 | apk add --no-cache musl-dev openssl-dev openssl-libs-static pkgconfig git build-base 46 | 47 | - name: Cache Cargo artifacts 48 | uses: actions/cache@v4 49 | with: 50 | path: | 51 | ~/.cargo/registry 52 | ~/.cargo/git 53 | target 54 | key: musl-${{ hashFiles('**/Cargo.lock') }} 55 | 56 | - name: Build MUSL binary 57 | env: 58 | OPENSSL_STATIC: true 59 | OPENSSL_LIB_DIR: /usr/lib 60 | OPENSSL_INCLUDE_DIR: /usr/include 61 | run: | 62 | rustup target add x86_64-unknown-linux-musl 63 | cargo build --release --target x86_64-unknown-linux-musl 64 | 65 | - name: Upload MUSL binary 66 | uses: svenstaro/upload-release-action@v2 67 | with: 68 | repo_token: ${{ secrets.GITHUB_TOKEN }} 69 | tag: ${{ github.ref_name }} 70 | file: target/x86_64-unknown-linux-musl/release/xs 71 | asset_name: xs-${{ github.ref_name }}-x86_64-linux-musl 72 | overwrite: true 73 | 74 | ############################################################################### 75 | # 3 ── Regular GNU Linux build using cross ──────────────────────────────────── 76 | ############################################################################### 77 | build_linux_gnu: 78 | name: Build Linux GNU 79 | needs: create_release 80 | runs-on: ubuntu-latest 81 | 82 | strategy: 83 | fail-fast: false 84 | matrix: 85 | target: 86 | - x86_64-unknown-linux-gnu 87 | - aarch64-unknown-linux-gnu 88 | 89 | steps: 90 | - uses: actions/checkout@v4 91 | 92 | - name: Cache Cargo artifacts 93 | uses: actions/cache@v4 94 | with: 95 | path: | 96 | ~/.cargo/registry 97 | ~/.cargo/git 98 | target 99 | key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }} 100 | 101 | - name: Set up Rust 102 | uses: dtolnay/rust-toolchain@stable 103 | 104 | - name: Install cross 105 | uses: taiki-e/install-action@v2 106 | with: 107 | tool: cross 108 | 109 | - name: Configure cross 110 | run: | 111 | if [[ "${{ matrix.target }}" == "x86_64-unknown-linux-gnu" ]]; then 112 | cat > Cross.toml <<'EOF' 113 | [target.x86_64-unknown-linux-gnu] 114 | pre-build = [ 115 | "apt-get update && apt-get install -y pkg-config libssl-dev" 116 | ] 117 | EOF 118 | 119 | elif [[ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]]; then 120 | cat > Cross.toml <<'EOF' 121 | [target.aarch64-unknown-linux-gnu] 122 | pre-build = [ 123 | 
"dpkg --add-architecture arm64", 124 | "apt-get update && apt-get install -y pkg-config gcc-aarch64-linux-gnu", 125 | "apt-get install -y libssl-dev:arm64" 126 | ] 127 | 128 | [target.aarch64-unknown-linux-gnu.env] 129 | passthrough = [ 130 | "PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig", 131 | "PKG_CONFIG_ALLOW_CROSS=1" 132 | ] 133 | EOF 134 | fi 135 | 136 | - name: Build (${{ matrix.target }}) 137 | run: | 138 | for i in 1 2 3; do 139 | echo "Build attempt $i for ${{ matrix.target }}" 140 | if cross build --release --target ${{ matrix.target }}; then 141 | echo "Build succeeded on attempt $i" 142 | break 143 | else 144 | echo "Build failed on attempt $i" 145 | if [ $i -eq 3 ]; then 146 | echo "All build attempts failed" 147 | exit 1 148 | fi 149 | sleep 30 150 | fi 151 | done 152 | 153 | - name: Upload binary 154 | uses: svenstaro/upload-release-action@v2 155 | with: 156 | repo_token: ${{ secrets.GITHUB_TOKEN }} 157 | tag: ${{ github.ref_name }} 158 | file: target/${{ matrix.target }}/release/xs 159 | asset_name: xs-${{ github.ref_name }}-${{ matrix.target == 'x86_64-unknown-linux-gnu' && 'x86_64-linux-gnu' || 'aarch64-linux-gnu' }} 160 | overwrite: true 161 | 162 | ############################################################################### 163 | # 4 ── Native macOS (Apple Silicon) build ───────────────────────────────────── 164 | ############################################################################### 165 | build_macos: 166 | name: Build macOS (aarch64) 167 | needs: create_release 168 | runs-on: macos-latest 169 | 170 | steps: 171 | - uses: actions/checkout@v4 172 | 173 | - name: Cache Cargo artifacts 174 | uses: actions/cache@v4 175 | with: 176 | path: | 177 | ~/.cargo/registry 178 | ~/.cargo/git 179 | target 180 | key: macos-aarch64-${{ hashFiles('**/Cargo.lock') }} 181 | 182 | - name: Set up Rust 183 | uses: dtolnay/rust-toolchain@stable 184 | with: 185 | targets: aarch64-apple-darwin 186 | 187 | - name: Build (macOS) 188 | run: cargo build --release --target aarch64-apple-darwin 189 | 190 | - name: Upload binary 191 | uses: svenstaro/upload-release-action@v2 192 | with: 193 | repo_token: ${{ secrets.GITHUB_TOKEN }} 194 | tag: ${{ github.ref_name }} 195 | file: target/aarch64-apple-darwin/release/xs 196 | asset_name: xs-${{ github.ref_name }}-aarch64-apple-darwin 197 | overwrite: true -------------------------------------------------------------------------------- /.github/workflows/release-docker.yml: -------------------------------------------------------------------------------- 1 | name: Release Docker 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - v* # Match version tags like v1.0.0 9 | workflow_dispatch: 10 | 11 | env: 12 | CARGO_TERM_COLOR: always 13 | IMAGE_NAME: ghcr.io/${{ github.repository_owner }}/xs 14 | 15 | jobs: 16 | build: 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | # Step 1: Checkout the repository 21 | - uses: actions/checkout@v4 22 | 23 | # Step 2: Set up Rust 24 | - name: Set up Rust 25 | uses: actions-rs/toolchain@v1 26 | with: 27 | profile: minimal 28 | toolchain: stable 29 | 30 | # Step 3: Build the binary in release mode 31 | - name: Build in release mode 32 | run: cargo build --release --verbose 33 | 34 | # Step 4: Install scru128-cli from GitHub 35 | - name: Install scru128-cli from GitHub 36 | run: | 37 | cargo install --git https://github.com/cablehead/scru128-cli --branch main 38 | mv ~/.cargo/bin/scru128 target/release/scru128 39 | 40 | # Step 5: Determine Docker image tag based on ref type 41 | - name: 
Determine image tag 42 | id: image_tag 43 | run: | 44 | echo "GITHUB_REF: ${{ github.ref }}" 45 | echo "GITHUB_REF_NAME: ${{ github.ref_name }}" 46 | if [[ "${{ github.ref }}" == refs/tags/* ]]; then 47 | echo "This is a tag push." 48 | echo "IMAGE_TAG=${{ github.ref_name }}" >> $GITHUB_ENV 49 | else 50 | echo "This is a branch push." 51 | echo "IMAGE_TAG=latest" >> $GITHUB_ENV 52 | fi 53 | 54 | # Step 6: Build Docker image 55 | - name: Build Docker image 56 | run: | 57 | docker build -t $IMAGE_NAME:$IMAGE_TAG -f .github/workflows/Dockerfile.release-docker . 58 | 59 | # Step 7: Log in to GitHub Container Registry 60 | - name: Log in to GitHub Container Registry 61 | run: echo "${{ secrets.GHCR_PAT }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin 62 | 63 | # Step 8: Push Docker image to GHCR 64 | - name: Push Docker image 65 | run: docker push $IMAGE_NAME:$IMAGE_TAG 66 | -------------------------------------------------------------------------------- /.github/workflows/tmate.yml: -------------------------------------------------------------------------------- 1 | name: tmate session 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | build: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: install dependencies (ubuntu only) 11 | run: | 12 | sudo apt-get update 13 | sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf 14 | 15 | - uses: actions/checkout@v4 16 | 17 | - name: Setup tmate session 18 | uses: mxschmitt/action-tmate@v3 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | store 3 | .aider* 4 | .env 5 | solid-ui 6 | alt 7 | admin 8 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | ## Testing 2 | 3 | Before pushing or creating a PR: 4 | - For Rust changes run `./scripts/check.sh` 5 | - For docs changes run `cd ./docs && npm run build` 6 | 7 | All changes must pass these checks. 8 | 9 | ## Commits 10 | 11 | Use **conventional commit messages** for all commits and PR titles (e.g. 12 | `feat(nu): add new parser`, `fix(engine): resolve job deadlock`). 13 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cross-stream" 3 | edition = "2021" 4 | version = "0.4.3-dev" 5 | description = "An event stream store for personal, local-first use, specializing in event sourcing." 
6 | license = "MIT" 7 | repository = "https://github.com/cablehead/xs" 8 | homepage = "https://github.com/cablehead/xs" 9 | documentation = "https://cablehead.github.io/xs/" 10 | readme = "README.md" 11 | keywords = ["nu", "cli", "queue", "embedded", "eventstore"] 12 | categories = ["command-line-utilities", "embedded", "database"] 13 | 14 | [lib] 15 | name = "xs" 16 | 17 | [[bin]] 18 | name = "xs" 19 | path = "src/main.rs" 20 | 21 | [dependencies] 22 | bytes = "1.6.0" 23 | cacache = { version = "13", default-features = false, features = ["tokio-runtime", "mmap"] } 24 | clap = { version = "4", features = ["derive"] } 25 | fjall = "2.4.4" 26 | futures = "0.3.31" 27 | scru128 = { version = "3", features = ["serde"] } 28 | serde = { version = "1", features = ["derive"] } 29 | serde_json = "1" 30 | serde_urlencoded = "0.7.1" 31 | ssri = "9.2.0" 32 | crossbeam-channel = "0.5.13" 33 | tracing = "0.1.40" 34 | console = "0.15.8" 35 | chrono = "0.4.38" 36 | bon = "2.3" 37 | tempfile = "3.10.1" 38 | 39 | nu-cli = "0.104.0" 40 | nu-command = "0.104.0" 41 | nu-protocol = "0.104.0" 42 | nu-cmd-lang = "0.104.0" 43 | nu-engine = "0.104.0" 44 | nu-parser = "0.104.0" 45 | 46 | http = "1.1.0" 47 | http-body-util = "0.1" 48 | http-serde = "2.1.1" 49 | hyper = { version = "1", features = ["full"] } 50 | hyper-util = { version = "0.1", features = ["full"] } 51 | tokio = { version = "1", features = ["full"] } 52 | tokio-stream = "0.1.15" 53 | tokio-util = { version = "0.7.11", features = ["compat"] } 54 | url = "2.5.0" 55 | rustls = "0.23.16" 56 | tokio-rustls = "0.26.0" 57 | webpki-roots = "0.26.6" 58 | dirs = "5" 59 | base64 = "0.22.1" 60 | tracing-subscriber = "0.3.19" 61 | 62 | [target.'cfg(unix)'.dependencies] 63 | nix = { version = "0.29", default-features = false, features = ["poll"] } 64 | 65 | [dev-dependencies] 66 | assert_cmd = "2.0.14" 67 | duct = "0.13.7" 68 | static_assertions = "1.1.0" 69 | tracing-subscriber = "0.3.18" 70 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Andy Gayton 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # xs (cross-stream) [![CI](https://github.com/cablehead/xs/actions/workflows/ci.yml/badge.svg)](https://github.com/cablehead/xs/actions/workflows/ci.yml) [![Discord](https://img.shields.io/discord/1182364431435436042?logo=discord)](https://discord.com/invite/YNbScHBHrh) 2 | 3 | Pixel art heroes cross proton streams, saving gritty, shadowy Toronto street beneath glowing CN Tower backdrop. 4 | 5 | --- 6 | 7 | > `xs` is a local-first event stream store for personal projects. 8 | Think of it like [`sqlite`](https://sqlite.org/cli.html) but specializing in the 9 | [event sourcing](https://martinfowler.com/eaaDev/EventSourcing.html) use case. 10 | 11 | See the [documentation](https://cablehead.github.io/xs/) for detailed 12 | installation instructions, tutorials and examples. 13 | 14 | ## Quick start 15 | 16 | ```sh 17 | # install 18 | cargo install cross-stream --locked 19 | # or: 20 | brew install cablehead/tap/cross-stream 21 | brew services start cablehead/tap/cross-stream # starts a store in ~/.local/share/cross.stream/store 22 | 23 | # optional Nushell helpers 24 | xs nu --install 25 | # then in Nushell 26 | use xs.nu * 27 | 28 | # start a server 29 | xs serve ./store 30 | 31 | # in another window 32 | echo "hello" | xs append ./store notes 33 | xs cat ./store 34 | 35 | # the xs.nu helpers fall back to ~/.local/share/cross.stream/store 36 | # to use a different location temporarily: 37 | with-env {XS_ADDR: "./store"} { .cat } 38 | ``` 39 | 40 | ## Features 41 | 42 | - Local-first append-only store 43 | - Content-addressable storage for large payloads 44 | - Real-time subscriptions to new events 45 | - Generators and handlers for background processing 46 | 47 | ## Connect 48 | 49 | Join our [Discord](https://discord.com/invite/YNbScHBHrh) to ask questions or share ideas. 50 | 51 | ## Built with 🙏💚 52 | 53 | - [fjall](https://github.com/fjall-rs/fjall): for indexing and metadata 54 | - [cacache](https://github.com/zkat/cacache-rs): for content (CAS) 55 | - [hyper](https://hyper.rs/guides/1/server/echo/): provides an HTTP/1.1 API over 56 | a local Unix domain socket for subscriptions, etc. 
57 | - [Nushell](https://www.nushell.sh): for scripting and 58 | [interop](https://utopia.rosano.ca/interoperable-visions/) 59 | -------------------------------------------------------------------------------- /changes/0.0.9.md: -------------------------------------------------------------------------------- 1 | ## Highlights 2 | 3 | Builtin Nushell commands: 4 | 5 | - .cat: added, not follow features though 6 | - .head: added 7 | - .append: binary support 8 | - embedded Nushell version is now: 0.99.1 9 | 10 | CLI / xs.nu: 11 | 12 | - `xs pipe`: added 13 | - `xs get`: added 14 | - `xs head`: added 15 | 16 | Performance / Fixes: 17 | 18 | - @marvin-j97 added a secondary index for topic to better support `.head ` operations ✨ 19 | - replaying frames for new subscribers: Better, Faster, Stronger 20 | 21 | ## Raw commit messages: 22 | 23 | - chore: bump Nushell dependency to 0.99.1 24 | - fix(nu/commands/append): add support for ByteStream inputs 25 | - feat(nu): add .cat command to read event stream with limit and last-id support 26 | - feat(cli): expose `xs pipe` cli command 27 | - feat(nu): add .head command to get most recent frame for topic 28 | - feat: topic secondary index (#19) 29 | - fix: replaying history to new subscribers (#18) 30 | - feat(cli): add `xs get` command to get frame by id 31 | - feat(cli): add `xs head` command to get head frame for topic 32 | - feat(http): detect http client disconnect & emit disconnect events for streaming responses 33 | - fix: prevent stateless handlers from processing their own frames 34 | -------------------------------------------------------------------------------- /changes/0.1.0.md: -------------------------------------------------------------------------------- 1 | ## Highlights 2 | 3 | ### A Major Rework of Handlers 4 | 5 | https://github.com/cablehead/xs/wiki/Handlers 6 | 7 | - Prior to this release, handlers had virtually no error handling. Now, when an error occurs, handlers will unregister themselves and record the error in the unregister frame's metadata. 8 | - The handlers closure shape is now `{|frame, state| ... }`, meaning the frame is passed as an argument instead of being supplied as `$in`. 9 | - Treatment of return values has been normalized: there is no longer any difference between handler return values for stateful and stateless handlers. By default, non-empty return values are appended as `.out` (previously it was just ``). 10 | - You can configure the return value suffix and TTL using the return_options in the metadata of the handler registration frame. 11 | - `.append` calls from a handler are now batched and processed together *after* the handler successfully completes. They aren't yet emitted in a fjall atomic batch but will be in the future. handler_id, frame_id, and state_id (if there is state) are now automatically included in the metadata of these appended frames. 12 | - Stateful handlers now communicate state updates by appending a `.state` frame. 13 | - A handy trick is to set the return value suffix to `.state`: then your return value will become the next state value for the subsequent handler call. 
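
To make the new shape concrete, here is a minimal sketch of a handler that uses the `.state` trick described above. The registration topic (`counter.register`), the `--meta` flag, and the exact `return_options` field names are assumptions for illustration, not taken verbatim from this release's docs:

```nushell
# hypothetical: a counter whose non-empty return value is appended as
# counter.state (because the suffix is ".state"), becoming the state passed
# to the next invocation of the handler
r#'{|frame, state|
  if $frame.topic == "notes" {
    let count = if $state == null { 0 } else { $state.count }
    {count: ($count + 1)}
  }
}'# | .append counter.register --meta {return_options: {suffix: ".state"}}
```
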
14 | 15 | ### Miscellaneous 16 | 17 | - You can now import from one store into another: https://github.com/cablehead/xs/wiki/Export-and-Import 18 | - The CLI client can now access stores behind HTTPS and BasicAuth 19 | - Two new TTL options for frames: 20 | - `time:`: the frame will exist for the specified number of milliseconds 21 | - `head:`: keeps the most recent `count` frames for a given topic; among other uses, this is useful for using `xs` as a KV store 22 | 23 | ## Breaking Changes 24 | 25 | - See handlers notes above 26 | - fjall dependency has been upgraded from 1.5 to 2.4 27 | - $env.XSPWD has been renamed to $env.XS_ADDR 28 | - `xs pipe` has been renamed to `xs process` -- .pipe -> .process 29 | 30 | 31 | ## Raw commit messages: 32 | 33 | - 4d983f3 docs: add --locked to the cargo install instructions 🙏 @fdncred 34 | - 2d840a8 rename $env.XSPWD to $env.XS_ADDR 35 | - e02edab feat: add a version endpoint; bump version in toml to 0.1.0 36 | - 96c482b feat: add conveniences to dump and load a store 37 | - a190163 feat: add frame import endpoint for backup/restore operations 38 | - 6b5b51f feat: add xs cas-post: which inserts content into the CAS without needing to insert a corresponding frame 39 | - ca9b5bf s/postfix/suffix/g -- 🙏 @marvin-j97 40 | - 1fa2678 feat: lots and lots of work on handlers (#28) 41 | - 0c9e689 tuning: use less (lsm) threads (#27) 42 | - ecf1e2e fix: update the example handlers to take frame as an argument 43 | - 5ab960b fix(handlers): ignore unregistered handlers on startup 44 | - 5f07a3a feat(handlers): validate closure shape and emit .unregister on registration error 45 | - f4f01f3 refactor: rename HandlerMeta to Meta, reuse Handler for /process endpoint 46 | - c5a0390 refactor: rename pipe to process 47 | - ee6193c refactor(handlers): pass frame/state as args instead of pipeline, encapsulate eval 48 | - ea648e9 refactor(handlers): split handlers.rs into module with handler and serve 49 | - 5907db4 fix: only include expose in xs.start meta when expose option is set 50 | - abe717f fix: pull TTL into its own module: bring nu/commands/append_command.rs up to date with TTL changes (#26) 51 | - cf30c8d feat(store): add Head TTL to retain latest N frames per topic (#24) 52 | - cbf70a8 fix: topic index encoding (#25) 53 | - 02938f6 feat: support unregister for handlers 54 | - d1106ae fix: avoid sending any events generated as a response to a handler, back to that handler 55 | - 71b3c83 fix: 's/eval_block/eval_block_with_early_return/g' 56 | - f7d367b feat: flesh out support for TTL::Time(Duration) 57 | - 5d1f2d0 feat: bump fjall dependency from 1.5 to 2.4 🎉 58 | - 9af2792 fix: .pipe now uses eval_block_with_early_return 59 | - 82edbf3 fix: iterate on error handling / messages when using the cli to call the API 60 | - 674181d image: add deno2 61 | - aa02716 example(discord-bot): add README note for slash command handler 62 | - ed4a5ee example(discord-bot): add an example /dice slash command handler 63 | - 48776da example(discord-bot): bump API version to v10 64 | - 4e9b2c7 Update README.md 65 | - dd00b68 Update README.md 66 | - ff3a931 example(discord-bot): read access token from the stream 67 | - ceace87 chore: iterate on the packaged container image 68 | - 6d2d558 chore: iterate on the packaged container image 69 | - 6b74142 feat: client support for stores behind https and basicauth (#22) 70 | - 76e37d1 release: allow --expose to be enabled in the container image 71 | - 84921cf release: allow --expose to be enabled in the container image 72 | - f170252 
fix(example/solid-ui): actually push up store/cas.ts 73 | -------------------------------------------------------------------------------- /changes/0.2.0-docs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/changes/0.2.0-docs.png -------------------------------------------------------------------------------- /changes/0.2.0-logging.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/changes/0.2.0-logging.png -------------------------------------------------------------------------------- /changes/0.2.0.md: -------------------------------------------------------------------------------- 1 | ## Highlights 2 | 3 | - `.get` is now available as an internal `nu` command (for handlers) 4 | - We have a new docs site, powered by [astro starlight](https://starlight.astro.build) ✨ 5 | 6 | ![new docs site](./0.2.0-docs.png) 7 | 8 | ### Handlers 9 | 10 | - `env` is now preserved from your handler configuration script and between handler calls. This makes it convenient to set variables, e.g. access tokens, from your configuration script and to preserve state between handler calls. 11 | - Your configuration script can now specify [modules](https://cablehead.github.io/xs/reference/handlers/#configuration-record-fields) to load for your handler. 12 | 13 | ### Miscellaneous 14 | 15 | - Nushell to v0.101.0 🚀 16 | - Fjall to 2.4.4 🚀 17 | - Improved tracing / logging 18 | 19 | ![logging](./0.2.0-logging.png) 20 | 21 | ## Breaking Changes 22 | 23 | - There is no longer the concept of "stateful" handers, which take a second argument for state. Since `env` is preserved between handler calls, you can use that to store state. 24 | - All configuration for handlers has been moved from the frame metadata to a configuration script. 25 | - The handler configuration script now returns a table of configuration options instead of the closure. 
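
For illustration, a handler configuration script in this scheme might end with a record like the one below. The `process` field name is an assumption here (inferred from later release notes), so check the handlers reference for the exact contract:

```nushell
# hypothetical 0.2.0-style configuration record: the script returns a record of
# options (instead of a bare closure), with the processing closure as one field
{
  process: {|frame|
    if $frame.topic == "notes" {
      # resolve the frame's content from the CAS and transform it
      .cas $frame.hash | str upcase
    }
  }
}
```
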
26 | 27 | ## Raw commit messages: 28 | 29 | - feat: add .get as in internal nu command 30 | - docs: add reference/generators 31 | - docs: nu-fmt 32 | - docs: port reference/handlers over from the wiki 33 | - docs: typo 34 | - docs: getting-started/first-stream 35 | - docs: tweaks to installation and architecture 36 | - docs: add an architecture overview 37 | - chore: update next release to 0.2.0: we're skipping the path 0.1.1 took 38 | - chore: update to Nushell v0.101.0 39 | - chore: bump fjall dependency to 2.4.4 40 | - docs: getting started / installation 41 | - feat: move handler config out of frame meta to a configuration script (#37) 42 | - fix: restore the .rm to .remove alias 43 | - feat(nu): replace async/await with sync operations in command implementations 44 | - refactor: move AppendCommand buffer to nu/commands 45 | - feat: add sync versions of the Store.cas_* methods 46 | - feat: add a sync version of Store.read: that only retrieves historic frames 47 | - feat: move cleaning up expired frames to a background thread 48 | - feat: drop async from store.append -- it's not needed 49 | - refactor: store into its own module, including ttl 50 | - refactor: move topic_index_key from Store impl to standalone function 51 | - refactor: Store::new doesn't require async 52 | - refactor: move compaction from ReadOptions to inline tasks.rs, match handler pattern 53 | - refactor: consolidate thread pool and span handling 54 | - example(discord): update to make use of modules and with_env 55 | - feat: add with_env variables option to handler registration 56 | - feat: rename use_modules to modules 57 | - feat: add the ability to load modules to handlers 58 | - feat(deps): switch from async-std to tokio runtime for cacache 59 | - chore: .gitignore 60 | - fix: backoff for logging long outstanding spans 61 | - feat: fleshing out tracing (#35) 62 | - fix: updated handlers now start, on restart (#34) 63 | - example(discord): reduce heartbeat ttl to 5mins 64 | -------------------------------------------------------------------------------- /changes/0.4.0.md: -------------------------------------------------------------------------------- 1 | ## Highlights 2 | 3 | - Reworked [Generators](https://cablehead.github.io/xs/reference/generators/): 4 | improved lifecycle (hot reload, terminate, restart) 5 | - Introduced [Commands](https://cablehead.github.io/xs/reference/commands/): 6 | commands are stateless, on-demand operations with `.define` 7 | scripts; trigger with `.call`; output collected in 8 | `.response` 9 | - Added [Contexts](https://cablehead.github.io/xs/reference/contexts/): isolate 10 | data and definitions; operations default to the current context; use `-c` flag 11 | for cross-context access; manage contexts with `.ctx` commands. 12 | 13 | ### Miscellaneous 14 | 15 | - Upgraded Nushell Engine to v0.104.0. 16 | - Standardized scripting for handlers, commands, and generators using `run` 17 | closures run within Nushell Jobs. 18 | - `xs-meta` header now uses Base64 encoding for reliable Unicode support. 19 | - CAS now supports binary data. 20 | - `xs head` now has a --follow flag to watch for updates 21 | - Added helper commands to install the `xs.nu` Nushell module 22 | (`xs nu --install/--clean`). 23 | - Build improvements (MUSL binary, Docker tags). 24 | 25 | ## Breaking Changes 26 | 27 | - Removed the separate HTTP API listener (`xs serve --http`). 
28 | - Data and definitions are now partitioned by contexts; operations default to 29 | the current context, requiring updates for scripts assuming a single global 30 | namespace. 31 | - Generators are now configured with `.spawn` scripts returning a configuration 32 | record with a `run` closure 33 | - Handler scripts now require a `run` closure (instead of `process`) within the 34 | configuration record. 35 | - The `xs-meta` header value must now be Base64-encoded JSON for Unicode 36 | support. 37 | - The internal topic index ordering has changed. 38 | 39 | ## Raw commit messages: 40 | 41 | - fix(commands): use into_value for responses (#93) 42 | - fix(generator): skip restart after terminate (#92) 43 | - fix(docs): Use raw strings for config records in examples and docs 44 | - docs(reference): update ephemeral TTL and generator output (#91) 45 | - feat(generator): stream byte chunks as events (#90) 46 | - feat(generator): kill job by spawn id (#89) 47 | - feat(commands): emit single response frame (#88) 48 | - fix(docs): Update command syntax in getting started example 49 | - docs: clarify store defaults (#86) 50 | - docs(getting-started): explain xs.nu setup (#85) 51 | - docs(generator): document spawn error stop (#84) 52 | - feat(generator): handle hot reload and docs (#83) 53 | - fix(generator): termination for duplex generators (#82) 54 | - docs(cli): use store directory in examples (#81) 55 | - docs(cli): expand command reference (#80) 56 | - fix(nu): dedup clean targets and improve abort (#79) 57 | - feat(nu): confirm install and clean once (#78) 58 | - feat(nu): add embedded xs.nu management (#77) 59 | - feat(generator): emit stop on spawn failure (#75) 60 | - feat(generators): restart pipeline on natural stop 61 | - docs(commands): clarify context and built-in helpers (#72) 62 | - docs(reference): clarify handler registration events (#73) 63 | - fix(generator): prioritize terminate events (#70) 64 | - docs(examples): update generator config format (#69) 65 | - docs(generators): Improve documentation for generator lifecycle and stopping 66 | (#68) 67 | - feat(generators): Extract generator loop and add termination (#67) 68 | - fix(docs): Correct Link component import path 69 | - docs(agent): Update testing instructions 70 | - docs(tutorials): expand threaded conversations (#65) 71 | - docs(reference): describe generators and handlers (#63) 72 | - docs(readme): add quick start and feature highlights (#62) 73 | - feat(generators): parse config with shared options (#61) 74 | - docs: Add testing and commit guidelines 75 | - feat(xs-addr): improve config file handling 76 | - feat(nu): Generalize script parsing and engine execution (#60) 77 | - feat(engine): Execute closures in jobs, enabling job control (#59) 78 | - fix: isolate handler definitions by context (#58) 79 | - fix: isolate command definitions by context 80 | - fix: Isolate generators by context_id 81 | - feat: upgrade to nushell 0.104.0 82 | - fix: update .ctx switch to return the id of the new context 83 | - feat: add --detail flag to .ctx command 84 | - feat: .ctx now show's the name of the current context, if one is set 85 | - fix: remove unreachable code path in generator spawn event handling 86 | - feat(config): xs-addr will now default to ~/.config/cross.stream/XS_ADDR if 87 | present 88 | - feat: upgrade to nushell 0.103.0 (#57) 89 | - fix: example/x-macos-pasteboard (#56) 90 | - fix: add extra validation for corrupt SSRI hashses 91 | - chore: add a github workflow to build a musl binary 92 | - feat: add modules 
support to commands, refactor config parsing 93 | - fix: [breaking] correct lexicographical ordering between topics for the topics 94 | index (#55) 95 | - feat: proactively detect when stdout goes away on Unix / fixes for streaming 96 | command output (#53) 97 | - fix(store): scope context frame iteration correctly when using last_id 98 | - feat(contexts): add new context features and metadata support (#51) 99 | - fix: base64 encode meta data in order to support unicode 100 | - feat: improve CAS commands to handle both UTF-8 and binary data 101 | - fix: and-then when --else isn't supplied 102 | - feat: update .import to be able to load pre-context stores into 0.3-dev 103 | - feat: Enhance AppendCommand with context and meta improvements 104 | - feat: add the concept of contexts (#46) 105 | - chore: remove builtin HTTP server (#47) 106 | - fix: return single json record from nu head command without --follow 107 | - feat: add a --follow option for the head command (#45) 108 | - examples: add a todo app (#43) 109 | - fix: surface parse errors when registering handlers 110 | - docs: bring over the reference for the http server 111 | - feat: add "commands": a mechanism for one-shot, streaming, parallelized 112 | operations (#42) 113 | - docs: add a reference for the HTTP API 114 | - feat/docs: document import / export, rename .load / .dump to .import / .export 115 | - docs: fix wss://echo.websocket.org example 116 | - feat: direct CAS reads for local stores 117 | - docs: add concepts, event store / generators / handlers 118 | - feat: render full shell error message when there's an error registering a 119 | handler 120 | - tests(handlers): assert custom commands defined in a handler config script can 121 | modify the environment 122 | - fix: engine.add_module now preserves export-env 123 | - docs: skeleton for a new tutorial 124 | -------------------------------------------------------------------------------- /changes/0.4.2.md: -------------------------------------------------------------------------------- 1 | fix(nu-module): update xs module import to use xs.nu 2 | fix(pipe-handling): Improve async pipe close detection cross-platform 3 | fix(context): fix .ctx list to use active context ID 4 | chore(docker): bump nushell version to 0.104. 
5 | docs: add brew service start step (#94) 6 | docs: README 7 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | # build output 2 | dist/ 3 | # generated types 4 | .astro/ 5 | 6 | # dependencies 7 | node_modules/ 8 | 9 | # logs 10 | npm-debug.log* 11 | yarn-debug.log* 12 | yarn-error.log* 13 | pnpm-debug.log* 14 | 15 | 16 | # environment variables 17 | .env 18 | .env.production 19 | 20 | # macOS-specific files 21 | .DS_Store 22 | public/beoe -------------------------------------------------------------------------------- /docs/.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["astro-build.astro-vscode"], 3 | "unwantedRecommendations": [] 4 | } 5 | -------------------------------------------------------------------------------- /docs/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "command": "./node_modules/.bin/astro dev", 6 | "name": "Development server", 7 | "request": "launch", 8 | "type": "node-terminal" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Starlight Starter Kit: Basics 2 | 3 | [![Built with Starlight](https://astro.badg.es/v2/built-with-starlight/tiny.svg)](https://starlight.astro.build) 4 | 5 | ``` 6 | npm create astro@latest -- --template starlight 7 | ``` 8 | 9 | [![Open in StackBlitz](https://developer.stackblitz.com/img/open_in_stackblitz.svg)](https://stackblitz.com/github/withastro/starlight/tree/main/examples/basics) 10 | [![Open with CodeSandbox](https://assets.codesandbox.io/github/button-edit-lime.svg)](https://codesandbox.io/p/sandbox/github/withastro/starlight/tree/main/examples/basics) 11 | [![Deploy to Netlify](https://www.netlify.com/img/deploy/button.svg)](https://app.netlify.com/start/deploy?repository=https://github.com/withastro/starlight&create_from_path=examples/basics) 12 | [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fwithastro%2Fstarlight%2Ftree%2Fmain%2Fexamples%2Fbasics&project-name=my-starlight-docs&repository-name=my-starlight-docs) 13 | 14 | > 🧑‍🚀 **Seasoned astronaut?** Delete this file. Have fun! 15 | 16 | ## 🚀 Project Structure 17 | 18 | Inside of your Astro + Starlight project, you'll see the following folders and files: 19 | 20 | ``` 21 | . 22 | ├── public/ 23 | ├── src/ 24 | │ ├── assets/ 25 | │ ├── content/ 26 | │ │ ├── docs/ 27 | │ └── content.config.ts 28 | ├── astro.config.mjs 29 | ├── package.json 30 | └── tsconfig.json 31 | ``` 32 | 33 | Starlight looks for `.md` or `.mdx` files in the `src/content/docs/` directory. Each file is exposed as a route based on its file name. 34 | 35 | Images can be added to `src/assets/` and embedded in Markdown with a relative link. 36 | 37 | Static assets, like favicons, can be placed in the `public/` directory. 
38 | 39 | ## 🧞 Commands 40 | 41 | All commands are run from the root of the project, from a terminal: 42 | 43 | | Command | Action | 44 | | :------------------------ | :----------------------------------------------- | 45 | | `npm install` | Installs dependencies | 46 | | `npm run dev` | Starts local dev server at `localhost:4321` | 47 | | `npm run build` | Build your production site to `./dist/` | 48 | | `npm run preview` | Preview your build locally, before deploying | 49 | | `npm run astro ...` | Run CLI commands like `astro add`, `astro check` | 50 | | `npm run astro -- --help` | Get help using the Astro CLI | 51 | 52 | ## 👀 Want to learn more? 53 | 54 | Check out [Starlight’s docs](https://starlight.astro.build/), read [the Astro documentation](https://docs.astro.build), or jump into the [Astro Discord server](https://astro.build/chat). 55 | -------------------------------------------------------------------------------- /docs/astro.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | import { defineConfig } from "astro/config"; 3 | 4 | import starlight from "@astrojs/starlight"; 5 | 6 | import { pluginCodeOutput } from "./src/utils/custom-code-output-plugin"; 7 | import { rehypeMermaid } from "@beoe/rehype-mermaid"; 8 | import solid from "@astrojs/solid-js"; 9 | 10 | // https://astro.build/config 11 | export default defineConfig({ 12 | base: "/xs/", 13 | 14 | integrations: [ 15 | solid(), 16 | 17 | starlight({ 18 | title: "xs", 19 | 20 | customCss: [ 21 | "./src/css/custom.css", 22 | ], 23 | 24 | social: { 25 | github: "https://github.com/cablehead/xs", 26 | discord: "https://discord.com/invite/YNbScHBHrh", 27 | }, 28 | 29 | sidebar: [ 30 | { 31 | label: "Getting Started", 32 | autogenerate: { directory: "getting-started" }, 33 | }, 34 | 35 | { 36 | label: "Tutorials", 37 | autogenerate: { directory: "tutorials" }, 38 | }, 39 | 40 | { 41 | label: "Reference", 42 | autogenerate: { directory: "reference" }, 43 | }, 44 | ], 45 | 46 | expressiveCode: { 47 | styleOverrides: { borderRadius: "0.25rem" }, 48 | themes: ["dracula", "rose-pine-dawn"], 49 | plugins: [pluginCodeOutput()], 50 | }, 51 | 52 | lastUpdated: true, 53 | credits: true, 54 | }), 55 | ], 56 | 57 | markdown: { 58 | rehypePlugins: [ 59 | [ 60 | rehypeMermaid, 61 | { 62 | strategy: "file", // alternatively use "data-url" 63 | fsPath: "public/beoe", // add this to gitignore 64 | webPath: "/xs/beoe", 65 | darkScheme: "class", 66 | }, 67 | ], 68 | ], 69 | }, 70 | }); 71 | -------------------------------------------------------------------------------- /docs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "docs", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "scripts": { 6 | "dev": "astro dev", 7 | "start": "astro dev", 8 | "build": "astro build", 9 | "preview": "astro preview", 10 | "astro": "astro", 11 | "postinstall": "playwright install chromium" 12 | }, 13 | "dependencies": { 14 | "@astrojs/solid-js": "^5.0.1", 15 | "@astrojs/starlight": "^0.30.3", 16 | "@beoe/rehype-mermaid": "^0.3.0", 17 | "@fujocoded/expressive-code-output": "^0.0.1", 18 | "astro": "^5.0.2", 19 | "playwright": "^1.49.1", 20 | "sharp": "^0.32.5" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /docs/public/favicon.svg: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /docs/src/assets/01-bash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/docs/src/assets/01-bash.png -------------------------------------------------------------------------------- /docs/src/assets/01-nu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/docs/src/assets/01-nu.png -------------------------------------------------------------------------------- /docs/src/assets/02-bash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/docs/src/assets/02-bash.png -------------------------------------------------------------------------------- /docs/src/assets/02-nu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/docs/src/assets/02-nu.png -------------------------------------------------------------------------------- /docs/src/content.config.ts: -------------------------------------------------------------------------------- 1 | import { defineCollection } from 'astro:content'; 2 | import { docsLoader } from '@astrojs/starlight/loaders'; 3 | import { docsSchema } from '@astrojs/starlight/schema'; 4 | 5 | export const collections = { 6 | docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }), 7 | }; 8 | -------------------------------------------------------------------------------- /docs/src/content/docs/getting-started/concepts.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Core Concepts 3 | description: Understanding the fundamental concepts behind cross.stream's architecture and operation 4 | sidebar: 5 | order: 3 6 | --- 7 | 8 | cross.stream is built around four fundamental concepts that work together to 9 | create a powerful and flexible event streaming system: the event store, 10 | generators, handlers, and commands. Let's explore how these components interact 11 | and complement each other. 12 | 13 | ## Event Store: The Foundation 14 | 15 | At its core, cross.stream is an event store - a specialized database that 16 | maintains an append-only log of events. Think of it as a ledger where each entry 17 | (called a frame) represents something that happened, complete with metadata 18 | about when and how it occurred. 19 | 20 | The event store provides a few key capabilities: 21 | 22 | - **Append-only writes**: New events can only be added to the end of the stream, 23 | preserving the historical record 24 | - **Content separation**: Event metadata is stored separately from content, 25 | optimizing for both quick scanning and efficient content storage 26 | - **Real-time subscriptions**: Clients can follow the stream live, receiving new 27 | events as they occur 28 | 29 | This design makes the event store particularly good at maintaining an accurate 30 | history of what happened and when, while still being efficient to query and 31 | process. 32 | 33 | ## Processing Components 34 | 35 | ### Generators: The Producers 36 | 37 | Generators are like automated watchers that produce new events into the stream. 
38 | They run as background processes, monitoring for specific conditions or changes 39 | and emitting corresponding events when they occur. 40 | 41 | For example, a generator might: 42 | 43 | - Watch a log file and emit new lines as events 44 | - Monitor a websocket connection and turn incoming messages into events 45 | - Periodically check a system's status and emit health events 46 | 47 | ### Handlers: The Reactors 48 | 49 | Handlers provide a way to react to and process events in the stream. They are 50 | like event-driven functions that wake up when new events arrive, process them 51 | according to rules you define, and optionally produce new events in response. 52 | 53 | A handler might: 54 | 55 | - Transform events into new formats 56 | - Trigger external actions in response to events 57 | - Aggregate or analyze event data 58 | - Create chains of event processing 59 | 60 | ### Commands: The On-demand Processors 61 | 62 | Commands are reusable operations that can be called on-demand with input data. 63 | Unlike generators which run continuously, or handlers which maintain state, 64 | commands are stateless and execute independently each time they are called. 65 | 66 | A command might: 67 | 68 | - Make an HTTP request and stream back SSE responses 69 | - Transform input data in a complex way 70 | - Interact with external services 71 | 72 | ## Component Comparison 73 | 74 | | Aspect | Generators | Handlers | Commands | 75 | | ---------------- | ------------------------------------ | ----------------------------- | ---------------------------- | 76 | | Purpose | Produce events from external sources | Process existing events | Perform on-demand operations | 77 | | Execution | Continuous background process | Event-driven | Called on-demand | 78 | | State | Stateless | Maintains state between calls | Stateless | 79 | | Output | Immediate streaming | Buffered until completion | Immediate streaming | 80 | | Error Handling | Auto-restarts | Unregisters | Per-invocation | 81 | | Typical Use Case | Watch external sources | Transform/react to events | Reusable operations | 82 | 83 | ## Incremental Adoption 84 | 85 | One of the strengths of cross.stream's design is that you can start simple and 86 | gradually add complexity as needed: 87 | 88 | 1. **Start with the Event Store** 89 | - Begin by just using the store to record and query events 90 | - Get comfortable with the basic append/read operations 91 | - Use it like a specialized database 92 | 93 | 2. **Add Generators** 94 | - When you need to automatically capture events from external sources 95 | - Start with simple file watchers or API monitors 96 | - Let generators feed your event stream 97 | 98 | 3. **Introduce Handlers** 99 | - As you need to process or react to events 100 | - Start with simple transformations 101 | - Build up to more complex event processing chains 102 | 103 | 4. **Define Commands** 104 | - When you need reusable, on-demand operations 105 | - Encapsulate common operations 106 | - Use for streaming interactions with external services 107 | 108 | ## Working Together 109 | 110 | These components create a flexible architecture where: 111 | 112 | 1. **Generators** feed events into the system from external sources 113 | 2. **Handlers** process those events, maintaining state if needed 114 | 3. **Commands** provide reusable operations that can be called on-demand 115 | 4. 
The **event store** ensures everything is reliably recorded and retrievable 116 | 117 | For example, you might have: 118 | 119 | - A generator watching system metrics 120 | - A handler that processes those metrics and detects anomalies 121 | - A command that can be called to fetch additional data when an anomaly is 122 | detected 123 | - The event store maintaining the complete history of metrics and analysis 124 | -------------------------------------------------------------------------------- /docs/src/content/docs/getting-started/first-stream.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Your First Stream 3 | description: Create and interact with your first event stream 4 | sidebar: 5 | order: 2 6 | --- 7 | 8 | import { Aside, Tabs, TabItem } from '@astrojs/starlight/components'; 9 | 10 | import { Link } from '../../../utils/links'; 11 | 12 | Let's create your first event stream. 13 | 14 | 19 | 20 | ## Serve 21 | 22 | Unlike `sqlite`, which operates directly on the file system, `xs` requires a 23 | running process to manage access to the local store. This enables features like 24 | subscribing to real-time updates from the event stream. 25 | 26 | Start an `xs` store in a dedicated window: 27 | 28 | ```bash withOutput 29 | > xs serve ./store 30 | 13:35:16.868 TRACE event src/main.rs:185 Starting server with path: "./store" xs:185 31 | 13:35:16.957 INFO read options=ReadOptions { follow: On, tail: false, last_id: None, limit: None } xs::store:174 32 | 13:35:16.957 INFO read options=ReadOptions { follow: On, tail: true, last_id: None, limit: None } xs::store:174 33 | 13:35:16.963 INFO 5ms insert_frame frame=Frame { id: "03d4por2p16i05i81fjy0fx8u", topic: "xs.start", hash: None, meta: None, ttl: None } xs::store:410 34 | 13:35:16.963 0fx8u xs.start 35 | 13:35:16.968 INFO read options=ReadOptions { follow: On, tail: false, last_id: None, limit: None } xs::store:174 36 | ``` 37 | 38 | For a long-running setup you might run `xs serve ~/.local/share/cross.stream/store` 39 | under a process supervisor. This is also the fallback location used by 40 | `xs.nu` when `$env.XS_ADDR` isn't set. Here we keep the demo scoped to `./store`. 41 | 42 | To point tools at another store, set `XS_ADDR`. This can be done temporarily with `with-env`: 43 | 44 | ```bash 45 | with-env {XS_ADDR: "./store"} { .cat } 46 | ``` 47 | 48 | ## Client 49 | 50 | ### `append` command 51 | 52 | OK! 
Let's append our first event: 53 | 54 | 55 | 56 | 57 | 58 | ```nushell withOutput 59 | > "a quick note" | .append notes 60 | ───────┬───────────────────────────────────────────────────── 61 | topic │ notes 62 | id │ 03d4q1qhbiv09ovtuhokw5yxv 63 | hash │ sha256-wIcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= 64 | meta │ 65 | ttl │ forever 66 | ───────┴───────────────────────────────────────────────────── 67 | ``` 68 | 69 | 70 | 71 | 72 | 73 | ```bash withOutput 74 | > echo "a quick note" | xs append ./store notes 75 | {"topic":"notes","id":"03d4qic9vqkve1krajjtlbavd","hash":"sha256-24yYvzQ4Zd3Go/WevV9ol+KzkdTgQvlyNN2NVSGMjFE=","meta":null,"ttl":"forever"} 76 | ``` 77 | 78 | 79 | 80 | 81 | 82 | ### `cat` and `cas` commands 83 | 84 | and then `cat` the stream: 85 | 86 | 87 | 88 | 89 | 90 | ```nushell withOutput 91 | > .cat 92 | ─#─┬──topic───┬────────────id─────────────┬────────────────────────hash─────────────────────────┬─meta─┬───ttl─── 93 | 0 │ xs.start │ 03d4q1o70y6ek0ig8hwy9q00n │ │ │ 94 | 1 │ notes │ 03d4q1qhbiv09ovtuhokw5yxv │ sha256-wIcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= │ │ forever 95 | ───┴──────────┴───────────────────────────┴─────────────────────────────────────────────────────┴──────┴───────── 96 | ``` 97 | 98 | These are the raw `frames` on the stream. The actually content is stored 99 | separately in the `Content-Addressable Storage` (CAS). You can read more about 100 | that [here](../../reference/architecture/). 101 | 102 | We have the full expressiveness of Nushell available to us—for example, we can 103 | get the content hash of the last frame on the stream using: 104 | 105 | ```nushell withOutput 106 | > .cat | last | $in.hash 107 | sha256-wIcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= 108 | ``` 109 | 110 | and then use the `.cas` command to retrieve the content: 111 | 112 | ```nushell withOutput 113 | > .cat | last | .cas $in.hash 114 | a quick note 115 | ``` 116 | 117 | We can also retrieve the content for a frame by piping it in its entirety 118 | directly to `.cas`: 119 | 120 | ```nushell withOutput 121 | > .cat | last | .cas 122 | a quick note 123 | ``` 124 | 125 | 126 | 127 | 128 | 129 | ```bash withOutput 130 | > xs cat ./store 131 | {"topic":"xs.start","id":"03d4qiab9g5vagrlrvxa2vjw0","hash":null,"meta":null,"ttl":null} 132 | {"topic":"notes","id":"03d4qic9vqkve1krajjtlbavd","hash":"sha256-24yYvzQ4Zd3Go/WevV9ol+KzkdTgQvlyNN2NVSGMjFE=","meta":null,"ttl":"forever"} 133 | ``` 134 | 135 | These are the raw `frames` on the stream. The actually content is stored 136 | separately in the `Content-Addressable Storage` (CAS). You can read more about 137 | that [here](../../reference/architecture/). 
138 | 139 | We can retrieve the content for a frame using its content hash: 140 | 141 | ```bash withOutput 142 | > xs cat ./store | tail -n1 | jq -r .hash 143 | sha256-24yYvzQ4Zd3Go/WevV9ol+KzkdTgQvlyNN2NVSGMjFE= 144 | ``` 145 | 146 | ```bash withOutput 147 | > xs cas ./store sha256-24yYvzQ4Zd3Go/WevV9ol+KzkdTgQvlyNN2NVSGMjFE= 148 | a quick note 149 | ``` 150 | 151 | 152 | 153 | 154 | 155 | ### `head` command 156 | 157 | Let's submit another note: 158 | 159 | 160 | 161 | 162 | 163 | ```nushell withOutput 164 | > "submit TPS report" | .append notes 165 | > .cat 166 | ─#─┬──topic───┬────────────id─────────────┬────────────────────────hash─────────────────────────┬─meta─┬───ttl─── 167 | 0 │ xs.start │ 03d4q1o70y6ek0ig8hwy9q00n │ │ │ 168 | 1 │ notes │ 03d4q1qhbiv09ovtuhokw5yxv │ sha256-wIcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= │ │ forever 169 | 2 │ notes │ 03d4qbrxizqgav09m7hicksb0 │ sha256-KDyb7pypM+8aLiq5obfpCqbMmb6LvvPnCu2+y9eWd0c= │ │ forever 170 | ───┴──────────┴───────────────────────────┴─────────────────────────────────────────────────────┴──────┴───────── 171 | ``` 172 | 173 | We can get the most recent note on the stream using the `.head` command: 174 | 175 | ```nushell withOutput 176 | > .head notes 177 | ───────┬───────────────────────────────────────────────────── 178 | topic │ notes 179 | id │ 03d4qbrxizqgav09m7hicksb0 180 | hash │ sha256-KDyb7pypM+8aLiq5obfpCqbMmb6LvvPnCu2+y9eWd0c= 181 | meta │ 182 | ttl │ forever 183 | ───────┴───────────────────────────────────────────────────── 184 | ``` 185 | 186 | ```nushell withOutput 187 | > .head notes | .cas 188 | submit TPS report 189 | ``` 190 | 191 | 192 | 193 | 194 | 195 | ```bash withOutput 196 | > echo "submit TPS report" | xs append ./store notes 197 | > xs cat ./store 198 | {"topic":"xs.start","id":"03d4qiab9g5vagrlrvxa2vjw0","hash":null,"meta":null,"ttl":null} 199 | {"topic":"notes","id":"03d4qic9vqkve1krajjtlbavd","hash":"sha256-24yYvzQ4Zd3Go/WevV9ol+KzkdTgQvlyNN2NVSGMjFE=","meta":null,"ttl":"forever"} 200 | {"topic":"notes","id":"03d4qjwnhwudlfyg1ygemmt7b","hash":"sha256-pwB4w9N1v99Uu/96KsKKaIgDMm18QDMOZJsEwCxBtsA=","meta":null,"ttl":"forever"} 201 | ``` 202 | 203 | We can get the most recent note on the stream using the `head` command: 204 | 205 | ```bash withOutput 206 | > xs head ./store notes 207 | {"topic":"notes","id":"03d4qjwnhwudlfyg1ygemmt7b","hash":"sha256-pwB4w9N1v99Uu/96KsKKaIgDMm18QDMOZJsEwCxBtsA=","meta":null,"ttl":"forever"} 208 | ``` 209 | 210 | ```bash withOutput 211 | > xs head ./store notes | jq -r .hash | xargs xs cas ./store 212 | submit TPS report 213 | ``` 214 | 215 | 216 | 217 | 218 | 219 | ### riffing 220 | 221 | Finally, let's pull a list of all our notes. 222 | 223 | 224 | 225 | 226 | 227 | We can filter directly by topic using `--topic` and then use the `each` command to pull out the content of each note: 228 | 229 | ```nushell withOutput 230 | > .cat --topic notes | each {.cas} 231 | ───┬─────────────────── 232 | 0 │ a quick note 233 | 1 │ submit TPS report 234 | ───┴─────────────────── 235 | ``` 236 | 237 | Fun! 🎉 238 | 239 | 240 | 241 | 242 | 243 | ```bash withOutput 244 | > xs cat ./store --topic notes | jq -r .hash | xargs -I{} xs cas ./store {} 245 | a quick note 246 | submit TPS report 247 | ``` 248 | 249 | Fun! 
🎉 250 | 251 | 258 | 259 | 260 | 261 | 262 | -------------------------------------------------------------------------------- /docs/src/content/docs/getting-started/installation.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Installation 3 | description: Install cross.stream using cargo or homebrew 4 | sidebar: 5 | order: 1 6 | --- 7 | 8 | import { Aside, Tabs, TabItem } from '@astrojs/starlight/components'; 9 | 10 | import { Link } from '../../../utils/links'; 11 | 12 | 13 | 14 | 15 | ```sh 16 | cargo install cross-stream --locked 17 | ``` 18 | 19 | 20 | 21 | ```sh 22 | brew install cablehead/tap/cross-stream 23 | brew services start cablehead/tap/cross-stream # starts a store in ~/.local/share/cross.stream/store 24 | ``` 25 | 26 | 27 | 28 | 29 | ## Verifying Installation 30 | 31 | ```sh 32 | xs --version 33 | ``` 34 | 35 | ## Import `xs.nu` 36 | 37 | 43 | 44 | If you've never used before, you're in for a treat. Visit the 45 | Nushell site to install it, then install the `xs.nu` helper module: 46 | 47 | ```sh 48 | xs nu --install 49 | ``` 50 | 51 | This copies `xs.nu` into your Nushell scripts directory and creates an 52 | autoload stub so the commands are available in every session. You can also run 53 | `xs nu` without options to print the module contents and install it manually. 54 | 55 | After installation, import the module: 56 | 57 | ```nushell 58 | use xs.nu * 59 | ``` 60 | 61 | The commands default to working with a store at `~/.local/share/cross.stream/store`. 62 | You can point commands at another location by setting `$env.XS_ADDR`. 63 | For quick, ad-hoc changes use `with-env`: 64 | 65 | ```nushell 66 | with-env {XS_ADDR: "./store"} { .cat } 67 | ``` 68 | 69 | The `xs` command-line tool still requires the store path to be specified 70 | explicitly (for example `xs serve ./store`). 71 | 72 | ## Troubleshooting 73 | 74 | - Need help? Join our [Discord](https://discord.com/invite/YNbScHBHrh) 75 | -------------------------------------------------------------------------------- /docs/src/content/docs/index.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: "cross-stream" 3 | description: xs is an event stream store for personal, local-first use. Think of it like sqlite, but specializing in the event sourcing use case. 4 | template: splash 5 | hero: 6 | tagline: "`xs` is an event stream store for personal, local-first use" 7 | actions: 8 | - text: Get started 9 | link: getting-started/installation 10 | icon: right-arrow 11 | variant: secondary 12 | - text: View on GitHub 13 | icon: external 14 | variant: minimal 15 | link: https://github.com/cablehead/xs 16 | - text: Join the Discord! 17 | icon: rocket 18 | variant: minimal 19 | link: https://discord.com/invite/YNbScHBHrh 20 | --- 21 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/architecture.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Architecture 3 | description: "A technical overview of cross.stream's decoupled architecture, explaining how it separates event metadata and content storage to optimize stream processing and content retrieval" 4 | sidebar: 5 | order: 5 6 | --- 7 | 8 | import { Link } from '../../../utils/links'; 9 | 10 | cross.stream is built on a decoupled architecture that separates event metadata from content storage, optimizing for both stream processing and content retrieval. 
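One way to see this decoupling from the client side: append the same content under two topics and you get two distinct frames that both reference a single stored copy of the content. (A quick sketch using the `xs.nu` helpers; the topic names are arbitrary.)

```nushell
"same payload" | .append topic-a
"same payload" | .append topic-b

# Two frames, one stored blob: the content hashes match
(.head topic-a).hash == (.head topic-b).hash  # => true
```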
11 | 12 | ## Core Components 13 | 14 | ```mermaid 15 | flowchart TD 16 | subgraph Clients[Clients] 17 | P[Publisher] 18 | S[Subscriber] 19 | end 20 | 21 | subgraph Core[Core Components] 22 | Server[Server Process] 23 | subgraph Storage[Storage Layer] 24 | Fjall[fjall index] 25 | CAS 26 | end 27 | end 28 | 29 | P -->|append| Server 30 | S -->|subscribe| Server 31 | Server -->|write frame| Fjall 32 | Server -->|store content| CAS 33 | Server -->|broadcast| S 34 | 35 | class Core core 36 | class Storage,Fjall,CAS store 37 | class Clients,P,S client 38 | ``` 39 | 40 | ### Event Stream 41 | 42 | The event stream uses (a log-structured merge-tree store) as an append-only index for frame metadata. Each frame contains: 43 | 44 | - Topic 45 | - Unique ID (using SCRU128) 46 | - Content hash (if content exists) 47 | - Custom metadata 48 | - TTL (Time-To-Live) settings 49 | 50 | ### Content Storage (CAS) 51 | 52 | Content is stored separately in a Content-Addressable Storage (CAS) system implemented using cacache. Key features: 53 | 54 | - Content is immutable 55 | - Content is referenced by its cryptographic hash 56 | - Multiple frames can reference the same content 57 | - Content is stored once, regardless of how many frames reference it 58 | 59 | ### Server Process 60 | 61 | A server process sits between clients and the store, enabling: 62 | 63 | - Concurrent access from multiple clients 64 | - Real-time subscriptions to new events 65 | - Background maintenance tasks 66 | 67 | ## Data Flow 68 | 69 | ### Writing Events 70 | 71 | When appending an event: 72 | - If content is provided, it's written to CAS first, generating a hash 73 | - A frame is created with metadata and the content hash 74 | - The frame is appended to the index 75 | - The frame is broadcast to any active subscribers 76 | - A topic index is updated for quick head retrieval 77 | 78 | ### Reading Events 79 | 80 | When reading the stream (`cat`): 81 | - Only frame metadata is retrieved from 82 | - Content remains in CAS until specifically requested 83 | - The stream can be read efficiently without pulling content 84 | 85 | Content retrieval (`cas`): 86 | - Content is fetched from CAS using the hash from a frame 87 | - This is a separate operation from stream reading 88 | - Content is retrieved only when needed 89 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/cli.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: cli 3 | sidebar: 4 | order: 1 5 | --- 6 | 7 | The `xs` CLI provides a collection of subcommands for interacting with a cross.stream store. 8 | 9 | ## Usage 10 | 11 | ```sh 12 | xs [OPTIONS] 13 | ``` 14 | 15 | ### Commands 16 | 17 | - `serve` – Provides an API to interact with a local store 18 | - `cat` – `cat` the event stream 19 | - `append` – Append an event to the stream 20 | - `cas` – Retrieve content from Content-Addressable Storage 21 | - `cas-post` – Store content in Content-Addressable Storage 22 | - `remove` – Remove an item from the stream 23 | - `head` – Get the head frame for a topic 24 | - `get` – Get a frame by ID 25 | - `import` – Import a frame directly into the store 26 | - `version` – Get the version of the server 27 | - `nu` – Manage the embedded xs.nu module 28 | 29 | ### `serve` 30 | 31 | Start the supervisor process. 
32 | 33 | ```sh 34 | xs serve [--expose ] 35 | ``` 36 | 37 | | Option | Description | 38 | | ------ | ----------- | 39 | | `` | Path to the store | 40 | | `--expose ` | Expose the API on an additional address ([HOST]:PORT or ``) | 41 | 42 | Example: 43 | 44 | ```sh 45 | xs serve ./store --expose 127.0.0.1:8080 46 | ``` 47 | 48 | ### `cat` 49 | 50 | Stream frames from the store. 51 | 52 | ```sh 53 | xs cat [options] 54 | ``` 55 | 56 | | Option | Description | 57 | | ------ | ----------- | 58 | | `` | Address to connect to `[HOST]:PORT` or `` | 59 | | `--follow`, `-f` | Follow the stream for new events | 60 | | `--pulse `, `-p ` | Send synthetic `xs.pulse` events at interval | 61 | | `--tail`, `-t` | Begin reading from the end of the stream | 62 | | `--last-id `, `-l ` | Start after the given frame ID | 63 | | `--limit ` | Maximum number of events to return | 64 | | `--sse` | Use Server-Sent Events format | 65 | | `--context `, `-c ` | Context ID (defaults to system context) | 66 | | `--all`, `-a` | Retrieve frames across all contexts | 67 | | `--topic `, `-T ` | Filter frames by topic | 68 | 69 | Example: 70 | 71 | ```sh 72 | xs cat ./store --follow 73 | ``` 74 | 75 | ### `append` 76 | 77 | Append an event to a topic. 78 | 79 | ```sh 80 | xs append [options] 81 | ``` 82 | 83 | | Option | Description | 84 | | ------ | ----------- | 85 | | `` | Address to connect to | 86 | | `` | Topic to append to | 87 | | `--meta ` | JSON metadata to include | 88 | | `--ttl ` | Time-to-live: `forever`, `ephemeral`, `time:`, `head:` | 89 | | `--context `, `-c ` | Context ID (defaults to system context) | 90 | 91 | Example: 92 | 93 | ```sh 94 | echo "hello" | xs append ./store chat --meta '{"user":"bob"}' 95 | ``` 96 | 97 | ### `cas` 98 | 99 | Retrieve content from CAS. 100 | 101 | ```sh 102 | xs cas 103 | ``` 104 | 105 | | Option | Description | 106 | | ------ | ----------- | 107 | | `` | Address to connect to | 108 | | `` | Hash of the content to retrieve | 109 | 110 | ### `cas-post` 111 | 112 | Store content in CAS. 113 | 114 | ```sh 115 | xs cas-post 116 | ``` 117 | 118 | | Option | Description | 119 | | ------ | ----------- | 120 | | `` | Address to connect to | 121 | 122 | Example: 123 | 124 | ```sh 125 | echo "content" | xs cas-post ./store 126 | ``` 127 | 128 | ### `remove` 129 | 130 | Remove a frame from the store. 131 | 132 | ```sh 133 | xs remove 134 | ``` 135 | 136 | | Option | Description | 137 | | ------ | ----------- | 138 | | `` | Address to connect to | 139 | | `` | ID of the item to remove | 140 | 141 | ### `head` 142 | 143 | Get the most recent frame for a topic. 144 | 145 | ```sh 146 | xs head [--follow] 147 | ``` 148 | 149 | | Option | Description | 150 | | ------ | ----------- | 151 | | `` | Address to connect to | 152 | | `` | Topic to inspect | 153 | | `--follow`, `-f` | Follow for updates | 154 | | `--context `, `-c ` | Context ID (defaults to system context) | 155 | 156 | ### `get` 157 | 158 | Retrieve a frame by ID. 159 | 160 | ```sh 161 | xs get 162 | ``` 163 | 164 | | Option | Description | 165 | | ------ | ----------- | 166 | | `` | Address to connect to | 167 | | `` | ID of the frame to get | 168 | 169 | ### `import` 170 | 171 | Import a frame dump from standard input. 
172 | 173 | ```sh 174 | xs import 175 | ``` 176 | 177 | | Option | Description | 178 | | ------ | ----------- | 179 | | `` | Address to connect to | 180 | 181 | Example: 182 | 183 | ```sh 184 | cat dump.jsonl | xs import ./store 185 | ``` 186 | 187 | ### `version` 188 | 189 | Get version information from the server. 190 | 191 | ```sh 192 | xs version 193 | ``` 194 | 195 | | Option | Description | 196 | | ------ | ----------- | 197 | | `` | Address to connect to | 198 | 199 | ### `nu` 200 | 201 | Manage the embedded `xs.nu` module. 202 | 203 | ```sh 204 | xs nu [--install] [--clean] 205 | ``` 206 | 207 | | Option | Description | 208 | | ------ | ----------- | 209 | | `--install` | Install `xs.nu` into your Nushell config | 210 | | `--clean` | Remove previously installed files | 211 | 212 | Without options the command prints the module contents so it can be redirected or piped. 213 | 214 | Example: 215 | 216 | ```sh 217 | xs nu --install 218 | ``` 219 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/commands.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Commands 3 | description: Stateless, parallelizable operations that can be called on-demand with streaming results 4 | sidebar: 5 | order: 4 6 | --- 7 | 8 | import { Link } from '../../../utils/links'; 9 | 10 | cross.stream commands use expressions to define reusable 11 | operations that can be called on-demand with arguments. Unlike handlers which 12 | maintain state between invocations, or generators which run continuously, 13 | commands are stateless and execute independently each time they are called. 14 | 15 | ## Defining Commands 16 | 17 | To create a command, append a definition string with the topic 18 | `.define`: 19 | 20 | ```nushell 21 | r#'{ 22 | # Required: Command closure 23 | run: {|frame| 24 | # frame.topic - always .call 25 | # frame.hash - contains input content if present 26 | # frame.meta.args - contains call arguments 27 | let input = if ($frame.hash != null) { .cas $frame.hash } else { null } 28 | let n = $frame.meta.args.n 29 | 1..($n) | each {$"($in): ($input)"} 30 | } 31 | 32 | # Optional: Module definitions 33 | modules: { 34 | "my-util": "export def format [x] { $\"formatted: ($x)\" }" 35 | } 36 | 37 | # Optional: Control output frame behavior 38 | return_options: { 39 | suffix: ".output" # Output topic suffix (default: ".response") 40 | ttl: "head:1" # Keep only most recent frame 41 | } 42 | }'# | .append repeat.define 43 | ``` 44 | 45 | The `return_options` field controls the suffix and TTL for the `.response` frame 46 | produced by the command. TTL only applies to this `.response` frame—`.error` 47 | events never expire. 48 | 49 | The command definition requires: 50 | 51 | - `run`: A closure that receives the call frame and can return a pipeline of 52 | results 53 | 54 | All values produced by the closure's output pipeline are collected into a single 55 | `.response` event automatically. 
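As a rough sketch of what that looks like on the stream: once the `repeat` command above has been called (see the next section), the three pipeline values arrive as one frame rather than three. This particular definition overrides the suffix to `.output` and keeps only the latest result via `ttl: "head:1"`, so the collected result can be read back with:

```nushell
# The collected output of the most recent repeat.call
.head repeat.output | .cas
```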
56 | 57 | ## Calling Commands 58 | 59 | Commands are called by appending to `.call` with input content and 60 | arguments: 61 | 62 | ```nushell 63 | # Call the repeat command with input and args 64 | "foo" | .append repeat.call --meta {args: {n: 3}} 65 | ``` 66 | 67 | ## Lifecycle Events 68 | 69 | Commands emit events to track their execution: 70 | 71 | | Event | Description | 72 | | -------------------- | ---------------------------------------- | 73 | | `.response` | Collected result of the command pipeline | 74 | | `.error` | Error occurred during command execution | 75 | 76 | All events include: 77 | 78 | - `command_id`: ID of the command definition 79 | - `frame_id`: ID of this specific invocation 80 | 81 | ## Error Handling 82 | 83 | If a command encounters an error during execution, it will: 84 | 85 | 1. Emit a `.error` frame with: 86 | - The error message 87 | - Reference to both command_id and frame_id 88 | 2. Stop processing the current invocation 89 | 90 | Unlike generators, commands do not automatically restart on error - each 91 | invocation is independent. 92 | 93 | ## Modules 94 | 95 | Commands can use custom Nushell modules: 96 | 97 | ```nushell 98 | r#'{ 99 | run: {|frame| 100 | my-math double ($frame.meta.args.number) 101 | } 102 | 103 | modules: { 104 | "my-math": "export def double [x] { $x * 2 }" 105 | } 106 | }'# | .append calculator.define 107 | ``` 108 | 109 | This allows you to modularize your commands and reuse code across different commands. 110 | 111 | ## Contexts 112 | 113 | Command definitions and calls are scoped by context. Defining the same command 114 | name in two different contexts creates two independent commands. Calls are 115 | processed only when a matching definition exists in the same context; 116 | otherwise the call is ignored. 117 | 118 | ## Built-in Store Commands 119 | 120 | When the `run` closure executes it can use several helper commands provided by 121 | cross.stream: 122 | 123 | - `.append` – append a new frame. Metadata you provide is merged with 124 | `command_id` and `frame_id`. 125 | - `.cat` – read frames from the command’s context. 126 | - `.head` – fetch the most recent frame for a topic in this context. 127 | - `.cas` – read content from CAS by hash. 128 | - `.get` – retrieve a frame by ID. 129 | - `.remove` – delete a frame from the stream. 130 | 131 | `cat` and `head` default to the command’s context, while `append` accepts an 132 | explicit context flag if you need to write elsewhere. 
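As a hedged sketch of how these helpers can be combined inside a definition (the `summarize` name and `notes` topic are illustrative, not part of cross.stream):

```nushell
r#'{
  run: {|frame|
    # Read the most recent note in this context and report on it
    let latest = .head notes | .cas
    $"latest note: ($latest)"
  }
}'# | .append summarize.define

# Invoke it; the collected result lands on summarize.response
.append summarize.call
```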
133 | 134 | ## Key Differences 135 | 136 | | Feature | Commands | Handlers | Generators | 137 | | -------------- | ----------------------- | ---------------------- | --------------- | 138 | | State | Stateless | Stateful between calls | Stateless | 139 | | Execution | On-demand | Event-driven | Continuous | 140 | | Results | Streamed immediately | Batched on completion | Streamed | 141 | | Parallelism | Multiple parallel calls | Sequential processing | Single instance | 142 | | Error Handling | Per-invocation | Unregisters handler | Auto-restarts | 143 | | Modules | Supported | Supported | Not supported | 144 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/contexts.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Working with Contexts 3 | description: Understanding and using contexts to partition your event streams 4 | sidebar: 5 | order: 1 6 | --- 7 | 8 | import { Aside, Tabs, TabItem } from '@astrojs/starlight/components'; 9 | 10 | Cross.stream uses contexts to partition event streams. Every frame belongs to a 11 | context, with the system context being the default where operations happen if no 12 | specific context is specified. 13 | 14 | ## System Context 15 | 16 | When you first start using cross.stream, you're working in the system context. 17 | The system context contains the initial `xs.start` frame and any other frames 18 | you create without specifying a different context. 19 | 20 | ```nushell withOutput 21 | > .cat 22 | ─#─┬──topic───┬────────────id─────────────┬─hash─┬─meta─┬───ttl─── 23 | 0 │ xs.start │ 03d4q1o70y6ek0ig8hwy9q00n │ │ │ 24 | ───┴──────────┴───────────────────────────┴──────┴──────┴───────── 25 | ``` 26 | 27 | ## Creating Contexts 28 | 29 | Create a new named context using the `.ctx new` command: 30 | 31 | ```nushell withOutput 32 | > .ctx new my-project 33 | 03d4qbrxizqgav09m7hicksb0 34 | ``` 35 | 36 | This creates a context named "my-project" and automatically switches to it. 37 | 38 | ## Listing Contexts 39 | 40 | View all available contexts: 41 | 42 | ```nushell withOutput 43 | > .ctx list 44 | ─#─┬───────────────id───────────────┬───name─────┬─active── 45 | 0 │ 0000000000000000000000000 │ system │ true 46 | 1 │ 03d4qbrxizqgav09m7hicksb0 │ my-project │ false 47 | ───┴────────────────────────────────┴────────────┴───────── 48 | ``` 49 | 50 | You can also use the `.ctx ls` alias for the same output. 51 | 52 | ## Switching Contexts 53 | 54 | Switch between contexts using either their names or IDs: 55 | 56 | ```nushell withOutput 57 | > .ctx switch my-project 58 | 03d4qbrxizqgav09m7hicksb0 59 | 60 | > .ctx switch 03d4qbrxizqgav09m7hicksb0 61 | 03d4qbrxizqgav09m7hicksb0 62 | ``` 63 | 64 | You can also switch interactively by running `.ctx switch` without arguments. 65 | 66 | ## Renaming Contexts 67 | 68 | You can rename contexts using the context ID: 69 | 70 | ```nushell withOutput 71 | > .ctx rename 03d4qbrxizqgav09m7hicksb0 feature-work 72 | ``` 73 | 74 | This updates the name associated with the specified context ID. 
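A quick way to confirm the rename took effect (a sketch; the context keeps its ID, only its name changes):

```nushell
.ctx list | where name == "feature-work"
```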
75 | 76 | ## Using Contexts 77 | 78 | Once you've switched to a context, all operations happen in that context: 79 | 80 | ```nushell withOutput 81 | > "project note" | .append notes 82 | > .cat 83 | ─#─┬──topic───┬────────────id─────────────┬────────────────────────hash─────────────────────────┬─meta─┬───ttl─── 84 | 0 │ notes │ 03d4qbrxizqgav09m7hicksb0 │ sha256-KDyb7pypM+8aLiq5obfpCqbMmb6LvvPnCu2+y9eWd0c= │ │ forever 85 | ───┴──────────┴───────────────────────────┴─────────────────────────────────────────────────────┴──────┴───────── 86 | ``` 87 | 88 | You can explicitly specify a context with any command using the `-c` parameter: 89 | 90 | ```nushell withOutput 91 | > "new feature idea" | .append notes -c feature-branch 92 | > .cat -c feature-branch 93 | ─#─┬──topic───┬────────────id─────────────┬────────────────────────hash─────────────────────────┬─meta─┬───ttl─── 94 | 0 │ notes │ 03f8q6rxnzqgav09n7hicksb9 │ sha256-LMcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= │ │ forever 95 | ───┴──────────┴───────────────────────────┴─────────────────────────────────────────────────────┴──────┴───────── 96 | ``` 97 | 98 | The `head` command is also context-aware: 99 | 100 | ```nushell withOutput 101 | > .head notes -c feature-branch | .cas 102 | new feature idea 103 | ``` 104 | 105 | ## Viewing Current Context 106 | 107 | See the ID of your current context: 108 | 109 | ```nushell withOutput 110 | > .ctx 111 | 03d4qbrxizqgav09m7hicksb0 112 | ``` 113 | 114 | ## Viewing All Contexts 115 | 116 | View frames across all contexts with the `--all` flag: 117 | 118 | ```nushell withOutput 119 | > .cat --all 120 | ─#─┬──topic───┬────────────id─────────────┬────────────────────────hash─────────────────────────┬─meta─┬───ttl─── 121 | 0 │ xs.start │ 03d4q1o70y6ek0ig8hwy9q00n │ │ │ 122 | 1 │ notes │ 03d4qbrxizqgav09m7hicksb0 │ sha256-KDyb7pypM+8aLiq5obfpCqbMmb6LvvPnCu2+y9eWd0c= │ │ forever 123 | 2 │ notes │ 03f8q6rxnzqgav09n7hicksb9 │ sha256-LMcRiyKpOjA1Z8O+wZvoiMXYgGEzPQOhlA8AOptOhBY= │ │ forever 124 | ───┴──────────┴───────────────────────────┴─────────────────────────────────────────────────────┴──────┴───────── 125 | ``` 126 | 127 | 130 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/generators.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Generators 3 | description: "How to use Nushell closures to spawn external processes and stream their output into cross.stream, with optional duplex communication and lifecycle events" 4 | sidebar: 5 | order: 2 6 | --- 7 | 8 | import { Link } from '../../../utils/links'; 9 | 10 | cross.stream generators use closures to create streams of data 11 | that are emitted as frames into the store. 12 | 13 | ## Basic Usage 14 | 15 | To create a generator, append a Nushell script that evaluates to a configuration 16 | record with a `run` closure using the topic `.spawn`: 17 | 18 | ```nushell 19 | r#'{ 20 | run: {|| ^tail -F http.log | lines } 21 | }'# | .append log.spawn 22 | ``` 23 | 24 | The generator will: 25 | 26 | - Execute the provided Nushell expression 27 | - Output from the pipeline is streamed as `log.recv` frames. Text pipelines emit 28 | one frame per line, while `ByteStream` pipelines send binary chunks. 29 | - Automatically restarts if it exits until a terminate frame is seen 30 | 31 | All frames produced by the generator use the same context as the `.spawn` frame 32 | so multiple contexts can run generators with the same topic independently. 
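To watch what the `log` generator defined above emits, follow its output topic (a small sketch using the `xs.nu` helpers):

```nushell
# Each line tailed from http.log arrives as a log.recv frame
.cat -f --topic log.recv | each { .cas }
```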
33 | 34 | ## Lifecycle Events 35 | 36 | Generators emit lifecycle events to track their state: 37 | 38 | | Event | Description | 39 | | --------------------- | --------------------------------------- | 40 | | `.start` | Generator has started processing | 41 | | `.recv` | Output value from the generator | 42 | | `.stop` | Generator pipeline has stopped. The \`meta.reason\` field is a string enum with values `finished`, `error`, `terminate` and `update`. When `finished` or `error`, the pipeline will be restarted automatically; `terminate` means it was stopped manually and the generator loop for this topic/context will shut down. `update` indicates the generator reloaded due to a new `.spawn` frame. | 43 | | `.parse.error` | Script failed to parse | 44 | | `.shutdown` | Generator loop has fully exited; ServeLoop evicts it | 45 | 46 | All events include `source_id` which is the ID of the generator instance. When a `.stop` frame has `meta.reason` set to `update`, it also includes `update_id` referencing the spawn that triggered the reload. ServeLoop evicts a generator when it receives a `.shutdown` frame. 47 | 48 | ## Configuration Options 49 | 50 | | Option | Type | Default | Description | 51 | | -------- | ------- | ------- | ------------------------------------------------------------------- | 52 | | `duplex` | boolean | false | Enable sending input to the generator's pipeline via `.send` | 53 | | `return_options` | record | — | Customize output frames (see Return Options) | 54 | 55 | The `return_options` field controls the suffix and TTL for the `.recv` frames produced by the generator. 56 | 57 | ## Bi-directional Communication 58 | 59 | When `duplex` is enabled, you can send data into the generator's input pipeline 60 | via `.send` frames: 61 | 62 | ```nushell 63 | # Create a websocket connection 64 | r#'{ 65 | run: {|| websocat wss://echo.websocket.org | lines }, 66 | duplex: true 67 | }'# | .append echo.spawn 68 | 69 | # Send input to the websocket: note the "\n", wss://echo.websocket.org won't 70 | # reply until it sees a complete line 71 | "hello\n" | .append echo.send 72 | ``` 73 | 74 | When running this generator: 75 | 76 | - Lines received from the websocket server are emitted as `.recv` frames 77 | - Content from `.send` frames is sent to the websocket server 78 | 79 | ## Error Handling 80 | 81 | If a generator encounters an error during spawning a `.parse.error` frame 82 | is emitted with: 83 | 84 | - `source_id`: ID of the failed spawn attempt 85 | - `reason`: Error message describing what went wrong 86 | 87 | The generator does not start and no stop frame is produced. 88 | 89 | ## Stopping Generators 90 | 91 | To stop a running generator, append a frame with the topic `.terminate`. 92 | The generator will stop and emit a `.stop` frame with `meta.reason` set to 93 | `terminate`. 94 | 95 | Appending a new `.spawn` frame while a generator of the same topic and 96 | context is running reloads it with the new script. If the reload fails to parse, 97 | you'll see a `.parse.error` frame, and the previous generator continues 98 | running. 
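For example, continuing with the `log` generator from earlier (a sketch; `other.log` is just a stand-in path):

```nushell
# Reload the running generator in place (same topic, same context)
r#'{
  run: {|| ^tail -F other.log | lines }
}'# | .append log.spawn

# Stop it for good; a log.stop frame with meta.reason "terminate" follows
.append log.terminate
```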
99 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/handlers.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Handlers 3 | description: "Detailed reference for writing Nushell handlers that react to frames, manage state, and emit new events" 4 | sidebar: 5 | order: 3 6 | --- 7 | 8 | import { Aside, Tabs, TabItem } from '@astrojs/starlight/components'; 9 | 10 | import { Link } from '../../../utils/links'; 11 | 12 | cross.stream handlers use 13 | [closures](https://www.nushell.sh/lang-guide/chapters/types/basic_types/closure.html) 14 | to process and act on incoming frames as they are appended to the store. 15 | 16 | ```nushell 17 | { 18 | run: {|frame| 19 | if $frame.topic == "ping" { 20 | "pong" # Will be appended to handler.out 21 | } 22 | } 23 | } 24 | ``` 25 | 26 | The handler closure receives each new frame and can: 27 | 28 | - Process the frame's content 29 | - Return a value (which gets automatically appended to `.out`) 30 | - Explicitly append new frames using the `.append` command 31 | - Filter which frames to process using conditionals 32 | 33 | ## Registering 34 | 35 | To register a handler, append a registration script with the topic 36 | `.register`. The script must return a record that configures the 37 | handler's behavior: 38 | 39 | ```nushell 40 | r###'{ 41 | # Required: Handler closure 42 | run: {|frame| 43 | if $frame.topic == "ping" { 44 | "pong" # Will be appended to handler.out 45 | } 46 | } 47 | 48 | # Optional: Where to resume processing from 49 | # "tail" (default), "head", or scru128 ID 50 | resume_from: "tail" 51 | 52 | # Optional: Module definitions 53 | modules: { 54 | "my-math": "def double [x] { $x * 2 }" 55 | } 56 | 57 | # Optional: Heartbeat interval in ms 58 | pulse: 1000 59 | 60 | # Optional: Control output frame behavior 61 | return_options: { 62 | suffix: ".response" # Output topic suffix 63 | ttl: "head:1" # Keep only most recent frame 64 | } 65 | }'### | .append echo.register 66 | ``` 67 | 68 | The `run` closure must accept **exactly one positional argument** which is the 69 | incoming frame. 70 | 71 | The registration script is stored in CAS and evaluated to obtain the handler's 72 | configuration. 
73 | 74 | Upon a successful start the handler appends a `.registered` frame 75 | with metadata: 76 | 77 | - `handler_id` – the ID of the handler instance 78 | - `tail` – whether processing started from the end of the topic 79 | - `last_id` – the frame ID that processing resumed after (if any) 80 | 81 | ### Configuration Record Fields 82 | 83 | | Field | Description | 84 | | ---------------- | -------------------------------------------------------------------------- | 85 | | `run` | Required handler closure that processes each frame | 86 | | `resume_from` | "tail" (default), "head", or scru128 ID to control where processing starts | 87 | | `pulse` | Interval in milliseconds to send synthetic xs.pulse events | 88 | | `return_options` | Controls output frames: see Return Options | 89 | | `modules` | Map of module name to the string content of the module | 90 | 91 | #### Return Options 92 | 93 | The `return_options` field controls how return values are handled: 94 | 95 | - `suffix`: String appended to handler's name for output topic (default: ".out") 96 | - `ttl`: Time-to-live for output frames 97 | - `"forever"`: Never expire 98 | - `"ephemeral"`: Not stored; only active subscribers receive it 99 | - `"time:"`: Expire after duration 100 | - `"head:"`: Keep only N most recent frames 101 | 102 | #### Modules 103 | 104 | The `modules` option allows handlers to use custom Nushell modules: 105 | 106 | ```nushell 107 | r###'{ 108 | run: {|frame| 109 | my-math double 8 # Use module command 110 | } 111 | modules: { 112 | "my-math": "export def double [x] { $x * 2 }" 113 | } 114 | }'### | .append processor.register 115 | ``` 116 | 117 | ## State and Environment 118 | 119 | Handlers can maintain state using environment variables which persist between 120 | calls: 121 | 122 | ```nushell 123 | r#'{ 124 | run: {|frame| 125 | # Initialize or increment counter 126 | let env.count = ($env | get -i count | default 0) + 1 127 | $"Processed ($env.count) frames" 128 | } 129 | }'# | .append counter.register 130 | ``` 131 | 132 | ## Output 133 | 134 | Handlers can produce output in two ways: 135 | 136 | 1. **Return Values**: Any non-null return value is automatically appended to the 137 | handler's output topic (`.out` by default unless modified by 138 | return_options.suffix) 139 | 140 | ```nushell 141 | {|frame| 142 | if $frame.topic == "ping" { 143 | "pong" # Automatically appended to handler.out 144 | } 145 | } 146 | ``` 147 | 148 | 2. 
**Explicit Appends**: Use the `.append` command to create frames on any topic 149 | 150 | ```nushell 151 | {|frame| 152 | if $frame.topic == "ping" { 153 | "pong" | .append response.topic --meta { "type": "response" } 154 | "logged" | .append audit.topic 155 | } 156 | } 157 | ``` 158 | 159 | All output frames automatically include: 160 | 161 | - `handler_id`: ID of the handler that created the frame 162 | - `frame_id`: ID of the frame that triggered the handler 163 | - Frames with `meta.handler_id` equal to the handler's ID are ignored to avoid 164 | reacting to the handler's own output 165 | 166 | ## Lifecycle 167 | 168 | ```mermaid 169 | stateDiagram-v2 170 | [*] --> Registering: .register event 171 | Registering --> Unregistered: nushell parse error 172 | Registering --> Registered : parse OK 173 | Unregistered --> [*] 174 | 175 | state Registered { 176 | direction LR 177 | [*] --> events.recv() 178 | events.recv() --> should_run: event received 179 | 180 | should_run --> events.recv(): skip 181 | should_run --> process_event: yep 182 | should_run --> [*]: .unregister event 183 | 184 | process_event --> [*]: error encountered 185 | process_event --> events.recv(): OK 186 | } 187 | 188 | Registered --> Unregistered 189 | ``` 190 | 191 | ### Unregistering 192 | 193 | A handler can be unregistered by: 194 | 195 | - Appending `.unregister` 196 | - Registering a new handler with the same name 197 | - Runtime errors in the handler closure 198 | 199 | When unregistered, the handler appends a confirmation frame 200 | `.unregistered`. If unregistered due to an error, the frame 201 | includes an `error` field in its metadata. 202 | 203 | ### Error Handling 204 | 205 | If a handler encounters an error during execution: 206 | 207 | 1. The handler is automatically unregistered 208 | 2. A frame is appended to `.unregistered` with: 209 | - The error message in metadata 210 | - Reference to the triggering frame 211 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/import-export.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Import & Export 3 | description: "How to export and import data between cross.stream stores" 4 | sidebar: 5 | order: 6 6 | --- 7 | 8 | The supervisor exposes two endpoints to facilitate data transfer between stores: 9 | 10 | - POST `/import`: Takes JSON frame data and imports it as-is, preserving frame `id` and content `hash` 11 | - POST `/cas`: Stores posted content in CAS and returns its hash 12 | 13 | ## Commands 14 | 15 | `xs.nu` provides two commands to utilize these endpoints: 16 | 17 | ```nushell 18 | # Export store at $env.XS_ADDR to path 19 | .export 20 | 21 | # Import dump at path to $env.XS_ADDR 22 | .import 23 | ``` 24 | 25 | The exported data includes: 26 | - Frame metadata in `frames.jsonl` 27 | - Content files in `cas/` directory 28 | 29 | ## Version Compatibility 30 | 31 | Version 0.1.0 was the first version supporting imports, though the 0.1.0 client can export data from 0.0.9 stores. 
32 | 33 | ## Example 34 | 35 | ```sh 36 | # Export from remote store 37 | with-env {XS_ADDR: "https://user:token@remote-store.example.com"} { 38 | .export backup 39 | } 40 | 41 | # Import to local store 42 | with-env {XS_ADDR: "./store"} { 43 | .import backup 44 | } 45 | ``` 46 | -------------------------------------------------------------------------------- /docs/src/content/docs/reference/store-api.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Store API 3 | description: "Complete reference for the cross.stream store HTTP API, that's exposed by the supervisor process" 4 | sidebar: 5 | order: 7 6 | --- 7 | 8 | The supervisor exposes a HTTP API for interacting with the store. By default, it 9 | listens on a Unix domain socket at `./store/sock`. 10 | 11 | ## Endpoints 12 | 13 | ### `GET /` 14 | 15 | Cat the stream 16 | 17 | ```sh 18 | # Cat all frames 19 | curl --unix-socket ./store/sock http://localhost/ 20 | 21 | # Long poll for new frames 22 | curl --unix-socket ./store/sock -H "Accept: text/event-stream" \ 23 | "http://localhost/?follow=true" 24 | ``` 25 | 26 | Query Parameters: 27 | 28 | - `follow` - Long poll for new frames 29 | - `tail` - Begin reading from end of stream 30 | - `last_id` - Start reading from specific frame ID 31 | - `limit` - Maximum number of frames to return 32 | 33 | Response: newline-delimited JSON frames or SSE stream, based on Accept header. 34 | Use `"Accept: text/event-stream"` for SSE. 35 | 36 | ### `POST /{topic}` 37 | 38 | Append frame to topic 39 | 40 | ```sh 41 | curl --unix-socket ./store/sock \ 42 | -H "xs-meta: $(echo -n '{\"key\":\"value\"}' | base64)" \ 43 | -X POST --data "content" \ 44 | "http://localhost/topic?ttl=forever" 45 | ``` 46 | 47 | Query Parameters: 48 | 49 | - `ttl` - Time-to-live for frame: 50 | - `forever` - Never expire 51 | - `ephemeral` - Not stored; only active subscribers receive it 52 | - `time:` - Expire after duration 53 | - `head:` - Keep only N most recent frames 54 | 55 | Headers: 56 | 57 | - `xs-meta` - Optional Base64-encoded JSON metadata. Must be encoded using standard Base64 to support Unicode characters. 
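For example, a sketch of attaching Unicode metadata (the `note` field is just illustrative):

```sh
meta=$(printf '{"note":"café"}' | base64)

curl --unix-socket ./store/sock \
  -H "xs-meta: $meta" \
  -X POST --data "content" \
  "http://localhost/topic"
```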
58 | 59 | Response: Frame JSON 60 | 61 | ### `GET /{id}` 62 | 63 | Get frame by id 64 | 65 | ```sh 66 | curl --unix-socket ./store/sock http://localhost/03BCPN2DNQ529QRQKBQCZ4JV4 67 | ``` 68 | 69 | Response: Frame JSON or 404 if not found 70 | 71 | ### `DELETE /{id}` 72 | 73 | Remove frame 74 | 75 | ```sh 76 | curl --unix-socket ./store/sock -X DELETE \ 77 | http://localhost/03BCPN2DNQ529QRQKBQCZ4JV4 78 | ``` 79 | 80 | Response: 204 on success 81 | 82 | ### `GET /head/{topic}` 83 | 84 | Get most recent frame for topic 85 | 86 | ```sh 87 | curl --unix-socket ./store/sock http://localhost/head/topic 88 | ``` 89 | 90 | Response: Most recent frame for topic or 404 if not found 91 | 92 | ### `POST /cas` 93 | 94 | Store content in CAS 95 | 96 | ```sh 97 | curl --unix-socket ./store/sock \ 98 | -X POST --data "content" http://localhost/cas 99 | ``` 100 | 101 | Response: Content hash 102 | 103 | ### `GET /cas/{hash}` 104 | 105 | Get content from CAS 106 | 107 | ```sh 108 | curl --unix-socket ./store/sock http://localhost/cas/sha256-hash 109 | ``` 110 | 111 | Response: Raw content or 404 if not found 112 | 113 | ### `POST /import` 114 | 115 | Import frame as-is 116 | 117 | ```sh 118 | curl --unix-socket ./store/sock \ 119 | -H "Content-Type: application/json" \ 120 | -X POST --data '{"topic":"test","id":"03BCPN2DNQ529QRQKBQCZ4JV4"}' \ 121 | http://localhost/import 122 | ``` 123 | 124 | Response: Imported frame JSON 125 | 126 | ### `GET /version` 127 | 128 | Get version info 129 | 130 | ```sh 131 | curl --unix-socket ./store/sock http://localhost/version 132 | ``` 133 | 134 | Response: Version information JSON 135 | 136 | ## Status Codes 137 | 138 | - 200 - Success 139 | - 204 - Success (no content) 140 | - 400 - Bad request 141 | - 404 - Not found 142 | - 500 - Internal server error 143 | -------------------------------------------------------------------------------- /docs/src/content/docs/tutorials/threaded-conversations.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: Threaded Conversations 3 | description: A tutorial for using cross-stream to maintain threaded conversations with an LLM. 4 | sidebar: 5 | order: 1 6 | --- 7 | 8 | import { Aside } from '@astrojs/starlight/components'; 9 | import { Link } from '../../../utils/links'; 10 | 11 | A *thread* is a chain of conversation turns where each turn knows which message it follows. 12 | We'll use cross.stream to store those turns so we can rebuild the conversation history at any point. 13 | 14 | ## Serve 15 | 16 | Start a local store in a separate terminal: 17 | 18 | ```bash withOutput 19 | xs serve ./chat-store 20 | ``` 21 | 22 | ## Record a conversation 23 | 24 | In another terminal running , import `xs.nu` for some handy commands: 25 | 26 | ```nushell 27 | use xs.nu * 28 | ``` 29 | 30 | Now append a user message to the `chat.turn` topic: 31 | 32 | ```nushell 33 | "What's the capital of France?" | .append chat.turn --meta {role: user} 34 | ``` 35 | 36 | Grab the ID of that turn so later messages can reference it: 37 | 38 | ```nushell 39 | let first = (.head chat.turn).id 40 | ``` 41 | 42 | Respond with an assistant reply that continues the first turn: 43 | 44 | ```nushell 45 | 46 | "The capital is Paris." | .append chat.turn --meta {role: assistant continues: $first} 47 | let second = (.head chat.turn).id 48 | 49 | "Tell me a famous landmark there." 
| .append chat.turn --meta {role: user continues: $second} 50 | let third = (.head chat.turn).id 51 | 52 | "The Eiffel Tower is one of the most famous." | .append chat.turn --meta {role: assistant continues: $third} 53 | ``` 54 | 55 | ## Following a thread 56 | 57 | The `continues` field forms a linked list. We can walk that list to rebuild the conversation. 58 | Here is a small helper that follows the chain from the most recent turn back to the start: 59 | 60 | ```nushell 61 | def thread [id] { 62 | if $id == null { return [] } 63 | let f = .get $id 64 | thread ($f.meta?.continues?) | append $f 65 | } 66 | 67 | thread (.head chat.turn).id | each {.cas} 68 | ``` 69 | 70 | This prints: 71 | 72 | ```text 73 | What's the capital of France? 74 | The capital is Paris. 75 | Tell me a famous landmark there. 76 | The Eiffel Tower is one of the most famous. 77 | ``` 78 | 79 | ## Branching the conversation 80 | 81 | Because each turn records which message it continues from, you can fork a new branch at any point. 82 | 83 | Continuing from `$third` creates a detail thread about the landmark: 84 | 85 | ```nushell 86 | "What is its height?" | .append chat.turn --meta {role: user continues: $third} 87 | "The Eiffel Tower is about 300 meters tall." | .append chat.turn --meta {role: assistant continues: (.head chat.turn).id} 88 | ``` 89 | 90 | You can also fork from an earlier turn. Continuing again from `$second` starts a different thread: 91 | 92 | ```nushell 93 | "What about Germany?" | .append chat.turn --meta {role: user continues: $second} 94 | let g1 = (.head chat.turn).id 95 | "Germany's capital is Berlin." | .append chat.turn --meta {role: assistant continues: $g1} 96 | "How big is Berlin?" | .append chat.turn --meta {role: user continues: (.head chat.turn).id} 97 | ``` 98 | 99 | You will now see multiple branches starting from the earlier messages. 
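To list the branch points explicitly, filter `chat.turn` frames by their `continues` pointer (a sketch; `$second` is the ID captured earlier):

```nushell
# Every turn that continues directly from the second message
.cat --topic chat.turn | where {|f| ($f.meta?.continues?) == $second}
```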
100 | 101 | ```mermaid 102 | flowchart TD 103 | A[Message ID 1: Start] -->|continues| B[Message ID 2] 104 | B -->|continues| C[Message ID 3] 105 | C -->|continues| D[Message ID 4] 106 | 107 | C -->|forks| E[Message ID 5: Detail Thread 1] 108 | E -->|continues| F[Message ID 6: Detail Thread 1] 109 | 110 | B -->|forks| G[Message ID 7: Detail Thread 2] 111 | G -->|continues| H[Message ID 8: Detail Thread 2] 112 | H -->|continues| I[Message ID 9: Detail Thread 2] 113 | ``` 114 | 115 | 119 | 120 | -------------------------------------------------------------------------------- /docs/src/css/custom.css: -------------------------------------------------------------------------------- 1 | html[data-theme="light"] .beoe-dark { 2 | display: none; 3 | } 4 | 5 | html[data-theme="dark"] .beoe-light { 6 | display: none; 7 | } -------------------------------------------------------------------------------- /docs/src/utils/custom-code-output-plugin.js: -------------------------------------------------------------------------------- 1 | // index.ts 2 | // based on: https://github.com/FujoWebDev/fujocoded-plugins/blob/main/expressive-code-output/index.ts 3 | 4 | import { definePlugin, AttachedPluginData } from "@expressive-code/core"; 5 | import { h } from "@expressive-code/core/hast"; 6 | var outputData = new AttachedPluginData(() => ({ output: [] })); 7 | function pluginCodeOutput() { 8 | return definePlugin({ 9 | name: "Code output", 10 | baseStyles: ` 11 | .expressive-code .frame pre.output { 12 | display: block; 13 | border: var(--ec-brdWd) solid var(--ec-brdCol); 14 | border-top: var(--ec-brdWd) dashed var(--ec-brdCol); 15 | padding: var(--ec-codePadBlk) 0; 16 | padding-inline-start: var(--ec-codePadInl); 17 | } 18 | `, 19 | hooks: { 20 | preprocessCode: (context) => { 21 | if (!context.codeBlock.meta.includes("withOutput")) return; 22 | const blockData = outputData.getOrCreateFor(context.codeBlock); 23 | const outputStart = context.codeBlock.getLines().findIndex((line) => !line.text.startsWith("> ")); 24 | context.codeBlock.getLines(0, outputStart == -1 ? 
void 0 : outputStart).forEach((line) => { 25 | line.editText(0, 2, ""); 26 | }); 27 | if (outputStart === -1) return; 28 | context.codeBlock.getLines(outputStart).forEach((line) => { 29 | blockData.output.push(line.text); 30 | }); 31 | for (let i = context.codeBlock.getLines().length; i > outputStart; i--) { 32 | context.codeBlock.deleteLine(i - 1); 33 | } 34 | }, 35 | postprocessRenderedBlock: async (context) => { 36 | if (!context.codeBlock.meta.includes("withOutput")) return; 37 | const blockData = outputData.getOrCreateFor(context.codeBlock); 38 | if (!blockData.output.length) return; 39 | const lastPre = context.renderData.blockAst.children.findLastIndex( 40 | (child) => child.type === "element" && child.tagName === "pre" 41 | ); 42 | if (lastPre === -1) return; 43 | const currentChildren = context.renderData.blockAst.children; 44 | const newChildren = [ 45 | ...currentChildren.slice(0, lastPre + 1), 46 | h( 47 | "pre.output", 48 | blockData.output.map((line) => h("div", line)) 49 | ), 50 | ...currentChildren.slice(lastPre + 1) 51 | ]; 52 | context.renderData.blockAst.children = newChildren; 53 | } 54 | } 55 | }); 56 | } 57 | export { 58 | pluginCodeOutput 59 | }; 60 | -------------------------------------------------------------------------------- /docs/src/utils/links.jsx: -------------------------------------------------------------------------------- 1 | // Usage: 2 | 3 | const links = [ 4 | ["fjall", "fjall", "https://github.com/fjall-rs/fjall"], 5 | ["nu", "Nushell", "https://www.nushell.sh"], 6 | ]; 7 | 8 | const linkMap = new Map(links.map(([short, desc, link]) => [ 9 | short, 10 | { desc, link }, 11 | ])); 12 | 13 | export const Link = ({ to }) => { 14 | const link = linkMap.get(to); 15 | if (!link) return null; 16 | 17 | return ( 18 | 24 | {link.desc} 25 | 26 | ); 27 | }; 28 | -------------------------------------------------------------------------------- /docs/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "astro/tsconfigs/strict", 3 | "include": [".astro/types.d.ts", "**/*"], 4 | "exclude": ["dist"] 5 | } 6 | -------------------------------------------------------------------------------- /examples/discord-bot/README.md: -------------------------------------------------------------------------------- 1 | ## starter for a discord bot 2 | 3 | ``` 4 | ┌────────────────────────────────────────┐ 5 | │ Discord Gateway │ 6 | └────────────▲────────────┳──────────────┘ 7 | ┃ ┃ 8 | s r 9 | e e op: 10 Hello 10 | op: 02 Identify n c op: 00 Ready 11 | op: 01 Heartbeat d v op: 11 Heartbeat ACK 12 | ┃ ┃ 13 | ┌────────┻────────────▼────────┐ 14 | ━━ stdin ━▶ $ websocat wss://gatewa... 
┣━ stdout ━▶ 15 | ▲└──────────────────────────────┘ │ 16 | │ │ 17 | discord.ws.send discord.ws.recv 18 | │ │ 19 | ┌┴──────────────────────────────────▼─┐ 20 | │ $ xs serve ./store │ 21 | └─────────────────────────────────────┘ 22 | ``` 23 | 24 | Required to run: 25 | 26 | - https://github.com/vi/websocat 27 | - [scru128-cli](https://github.com/cablehead/scru128-cli)- needed for `scru128-since` 28 | 29 | ``` 30 | % xs serve ./store 31 | ``` 32 | 33 | In another session: 34 | 35 | ```nushell 36 | use xs.nu * 37 | 38 | r#'{ 39 | run: {|| websocat "wss://gateway.discord.gg/?v=10&encoding=json" --ping-interval 5 --ping-timeout 10 -E -t | lines }, 40 | duplex: true 41 | }'# | .append discord.ws.spawn 42 | 43 | # append the access token to use to the stream 44 | "" | .append discord.ws.token 45 | 46 | # add the heartbeat handler to authenticate and maintain an active connection 47 | open examples/discord-bot/handler-heartbeat.nu | .append "discord.heartbeat.register" 48 | 49 | # add the discord.nu module for working with discord's REST API 50 | # https://github.com/cablehead/discord.nu 51 | http get https://raw.githubusercontent.com/cablehead/discord.nu/main/discord.nu | .append discord.nu 52 | 53 | # we can now register additional handlers to add functionality to the bot 54 | # for example, to enable a `./roll d` command 55 | open examples/discord-bot/handler-roller.nu | .append "discord.roller.register" 56 | ``` 57 | 58 | #### Slash commands 59 | 60 | We should be able to make this nicer? 61 | 62 | ```nushell 63 | # create the command 64 | # see discord.nu 65 | discord app command create 1227338584814649364 dice "make a dice roll" --options [ 66 | (discord app command option int n "number of dice to roll" --required) 67 | (discord app command option int d "die type / number of sides" --required) 68 | (discord app command option int modifier "modifier") 69 | ] 70 | 71 | # enable the command handler 72 | open examples/discord-bot/handler-slash-dice.nu | .append "discord.slash-dice.register" 73 | ``` 74 | 75 | ### run through 76 | 77 | This is a presentation I gave at the [Creative Code Toronto](https://www.meetup.com/creative-code-toronto/) [Sep '24 meetup](https://www.meetup.com/creative-code-toronto/events/303276625/?eventOrigin=group_events_list) :: [slides](https://cablehead.github.io/creative-codie/) :: [video](https://www.youtube.com/watch?v=Y2rsm5ohDrg&list=PL_YfqG2SCOAK52A4VQ7r7m9laijKSbmUB&index=2) 78 | 79 | 80 | 81 | ### deploy on [SidePro](https://sidepro.cloud) 82 | 83 | https://github.com/user-attachments/assets/3970a907-899b-4b6c-b7c2-79cab0024d8d 84 | -------------------------------------------------------------------------------- /examples/discord-bot/handler-bookmarklet.nu: -------------------------------------------------------------------------------- 1 | {|frame| 2 | if $frame.topic != "discord.ws.recv" { return } 3 | 4 | let message = ($frame | .cas $in.hash | from json) 5 | if $message.op != 0 { return } 6 | 7 | match $message.t { 8 | "READY" | "GUILD_MEMBER_ADD" => return 9 | 10 | "MESSAGE_CREATE" | "MESSAGE_DELETE" | "MESSAGE_UPDATE" => { 11 | .append $"message.($message.d.id)" --meta {id: $frame.id} 12 | return 13 | } 14 | 15 | "MESSAGE_REACTION_ADD" => { 16 | if $message.d.emoji.name != "🔖" { return } 17 | 18 | let bookmarks = ( 19 | .head "bookmarks" | if ($in | is-not-empty) { 20 | $in | .cas | from json 21 | } else { {} }) 22 | 23 | $bookmarks | upsert $message.d.message_id true | 24 | to json -r | .append "bookmarks" --meta {id: $frame.id} 25 | return 26 | } 27 | 28 
| "MESSAGE_REACTION_REMOVE" => { 29 | if $message.d.emoji.name != "🔖" { return } 30 | 31 | let bookmarks = ( 32 | .head "bookmarks" | if ($in | is-not-empty) { 33 | $in | .cas | from json 34 | } else { {} }) 35 | 36 | $bookmarks | reject -i $message.d.message_id | 37 | to json -r | .append "bookmarks" --meta {id: $frame.id} 38 | return 39 | } 40 | } 41 | 42 | return $message 43 | } 44 | -------------------------------------------------------------------------------- /examples/discord-bot/handler-heartbeat.nu: -------------------------------------------------------------------------------- 1 | # op.nu 2 | # we need a mechanism to be able to reuse snippets of code 3 | const opcode = { 4 | dispatch: 0, 5 | heartbeat: 1, 6 | identify: 2, 7 | presence_update: 3, 8 | voice_update: 4, 9 | resume: 6, 10 | reconnect: 7, 11 | invalid_session: 9, 12 | hello: 10, 13 | heartbeat_ack: 11, 14 | } 15 | 16 | def "op heartbeat" [seq?: int] { 17 | { 18 | "op": $opcode.heartbeat, 19 | "d": $seq, 20 | } 21 | } 22 | 23 | def "op identify" [token: string, intents: int] { 24 | { 25 | "op": $opcode.identify, 26 | "d": { 27 | token: $token, 28 | intents: $intents, 29 | properties: { 30 | os: (sys host | get name), 31 | browser: "discord.nu", 32 | device: "xs", 33 | }, 34 | }, 35 | } 36 | } 37 | 38 | def "op resume" [token: string, session_id: string, seq: int] { 39 | { 40 | "op": $opcode.resume, 41 | "d": { 42 | token: $token, 43 | session_id: $session_id, 44 | seq: $seq, 45 | }, 46 | } 47 | } 48 | ### end op.nu 49 | 50 | def "scru128-since" [$id1, $id2] { 51 | let t1 = ($id1 | scru128 parse | into int) 52 | let t2 = ($id2 | scru128 parse | into int) 53 | return ($t1 - $t2) 54 | } 55 | 56 | def .send [] { 57 | to json -r | $"($in)\n" | .append "discord.ws.send" --ttl head:5 58 | } 59 | 60 | $env.state = { 61 | s: null, 62 | heartbeat_interval: 0, 63 | last_sent: null, 64 | last_ack: null, 65 | authing: null, 66 | session_id: null, 67 | resume_gateway_url: null 68 | } 69 | 70 | $env.BOT_TOKEN = .head discord.ws.token | .cas $in.hash 71 | 72 | { 73 | resume_from: (.head discord.ws.start | if ($in | is-not-empty) { get id }) 74 | pulse: 1000 75 | 76 | run: {|frame| 77 | # https://discord.com/developers/docs/topics/gateway#list-of-intents 78 | # GUILDS, GUILD_MEMBERS, GUILD_MESSAGES, GUILD_MESSAGE_REACTIONS, MESSAGE_CONTENT 79 | let IDENTIFY_INTENTS = 34307 80 | 81 | if $frame.topic == "xs.pulse" { 82 | # we're not online 83 | if $env.state.heartbeat_interval == 0 { 84 | return 85 | } 86 | 87 | # online, but not authed, attempt to auth 88 | if (($env.state.heartbeat_interval != 0) and ($env.state.authing | is-empty)) { 89 | op identify $env.BOT_TOKEN $IDENTIFY_INTENTS | .send 90 | $env.state.authing = "identify" 91 | return 92 | } 93 | 94 | let since = (scru128-since $frame.id $env.state.last_sent) 95 | let interval = (($env.state.heartbeat_interval / 1000) * 0.9) 96 | if ($since > $interval) { 97 | op heartbeat | .send 98 | $env.state.last_ack = null 99 | $env.state.last_sent = $frame.id 100 | return 101 | } 102 | return 103 | } 104 | 105 | if $frame.topic != "discord.ws.recv" { 106 | return 107 | } 108 | 109 | let message = $frame | .cas $in.hash | from json 110 | 111 | match $message { 112 | # hello 113 | {op: 10} => { 114 | $env.state.heartbeat_interval = $message.d.heartbeat_interval 115 | $env.state.last_ack = $frame.id 116 | $env.state.last_sent = $frame.id 117 | $env.state.authing = null 118 | } 119 | 120 | # heartbeat_ack 121 | {op: 11} => { 122 | $env.state.last_ack = $frame.id 123 | .rm $frame.id 124 | } 125 
| 126 | # resume 127 | {op: 6} => { 128 | $env.state.authing = "resume" 129 | } 130 | 131 | # invalid_session 132 | {op: 9} => { 133 | # The inner d key is a boolean that indicates whether the session may be resumable. 134 | # if we get an invalid session while trying to resume, also clear 135 | # out the session 136 | if not $message.d or $env.state.authing == "resume" { 137 | $env.state.resume_gateway_url = null 138 | $env.state.session_id = null 139 | } 140 | $env.state.authing = null 141 | } 142 | 143 | # dispatch:: READY 144 | {op: 0, t: "READY"} => { 145 | $env.state.session_id = $message.d.session_id 146 | $env.state.resume_gateway_url = $message.d.resume_gateway_url 147 | $env.state.authing = "authed" 148 | } 149 | 150 | # dispatch:: RESUMED 151 | {op: 0, t: "RESUMED"} => { 152 | $env.state.authing = "authed" 153 | } 154 | 155 | # dispatch:: GUILD_CREATE 156 | {op: 0, t: "GUILD_CREATE"} => { 157 | # ignore 158 | } 159 | } 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /examples/discord-bot/handler-roller.nu: -------------------------------------------------------------------------------- 1 | def & [action: closure] { 2 | if ($in | is-not-empty) { 3 | $in | do $action 4 | } 5 | } 6 | 7 | def map-values [closure: closure] { 8 | transpose | each {update column1 {do $closure}} | transpose --header-row -d 9 | } 10 | 11 | def parse-roller [] { 12 | parse --regex '\./roll (?P\d+)d(?P\d+)(?:\+(?P\d+))?' | & { 13 | update modifier {if $in == "" {"0"} else {$in}} | map-values {into int} 14 | } 15 | } 16 | 17 | def run-roll [] { 18 | let roll = $in 19 | 20 | let dice = (random dice --dice $roll.dice --sides $roll.sides) 21 | 22 | mut content = ($dice | each {$"($in) <:nondescript_die:1227997035945267232>"} | str join " + ") 23 | 24 | if $roll.modifier != 0 { 25 | $content += $" + ($roll.modifier)" 26 | } 27 | 28 | $content += $" == ($roll.modifier + ($dice | math sum))" 29 | $content 30 | } 31 | 32 | $env.BOT_TOKEN = .head discord.ws.token | .cas $in.hash 33 | 34 | { 35 | modules: {discord: (.head discord.nu | .cas $in.hash)} 36 | 37 | run: {|frame| 38 | if $frame.topic != "discord.ws.recv" { return } 39 | 40 | # TODO: .cas should also be able to take a record, to match xs2.nu's usage 41 | let message = $frame | .cas $in.hash | from json 42 | 43 | if $message.op != 0 { return } 44 | if $message.t != "MESSAGE_CREATE" { return } 45 | 46 | $message.d.content | parse-roller | & { 47 | { 48 | content: ($in | run-roll) 49 | message_reference: { message_id: $message.d.id } 50 | } | discord channel message create $message.d.channel_id 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /examples/discord-bot/handler-slash-dice.nu: -------------------------------------------------------------------------------- 1 | ## Really need a way to store modules on the stream, which can be imported by handlers 2 | ## https://github.com/cablehead/discord.nu/blob/main/discord/mod.nu 3 | ## --> 4 | 5 | # Create Interaction Response 6 | # https://discord.com/developers/docs/interactions/receiving-and-responding#create-interaction-response 7 | const API_BASE = "https://discord.com/api/v10" 8 | export def "interaction response" [ 9 | interaction_id: string 10 | interaction_token: string 11 | content: string 12 | --type: int = 4 13 | ] { 14 | let url = $"($API_BASE)/interactions/($interaction_id)/($interaction_token)/callback" 15 | http post --content-type application/json $url { 16 | type: $type 17 | data: { 
18 | content: $content 19 | } 20 | } 21 | } 22 | 23 | def run-dice [options: record] { 24 | let dice = (random dice --dice $options.n --sides $options.d) 25 | mut content = ($dice | each { $"($in) <:nondescript_die:1227997035945267232>" } | str join " + ") 26 | 27 | 28 | if $options.modifier != 0 { 29 | $content += $" + ($options.modifier)" 30 | } 31 | 32 | $content += $" == ($options.modifier + ($dice | math sum))" 33 | $content 34 | } 35 | 36 | {|frame| 37 | if $frame.topic != "discord.ws.recv" { return } 38 | 39 | let message = $frame | .cas $in.hash | from json 40 | if $message.op != 0 { return } 41 | if $message.t != "INTERACTION_CREATE" { return } 42 | 43 | let command = $message.d.data 44 | if $command.name != "dice" { return } 45 | 46 | let options = ( 47 | $command.options | 48 | each {|x| {$x.name: $x.value}} | 49 | reduce {|it, acc| $it | merge $acc} | 50 | default 0 modifier 51 | ) 52 | let content = run-dice $options 53 | 54 | $message.d | interaction response $in.id $in.token $content 55 | } 56 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/README.md: -------------------------------------------------------------------------------- 1 | [`x-macos-pasteboard`](https://github.com/cablehead/x-macos-pasteboard) is a 2 | micro-cli that watches your macOS pasteboard and emits the raw contents to 3 | stdout as jsonl. 4 | 5 | To install: 6 | 7 | ```sh 8 | brew install cablehead/tap/x-macos-pasteboard 9 | ``` 10 | 11 | You can use it as a [generator](https://cablehead.github.io/xs/reference/generators/) for `xs` to append the 12 | contents of your pasteboard to an event stream. 13 | 14 | ```nushell 15 | r#'{ run: {|| x-macos-pasteboard | lines } }'# | .append pb.spawn 16 | ``` 17 | 18 | You can then subscribe to new pasteboard events with: 19 | 20 | ```nushell 21 | .cat -f | where topic == "pb.recv" | each { .cas | from json } 22 | ``` 23 | 24 | Note this is the _raw_ pasteboard data. For the most common case of copying text, you can get the text with: 25 | 26 | ```nushell 27 | .cat | where topic == "pb.recv" | each {|x| 28 | $x | .cas | from json | get types."public.utf8-plain-text"? | if ($in | is-not-empty) { 29 | decode base64 }} 30 | ``` 31 | 32 | Coming soon(tm): notes on working with the variety of data different macOS apps put on the pasteboard. 33 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | .vite 14 | *.local 15 | 16 | # Editor directories and files 17 | .vscode/* 18 | !.vscode/extensions.json 19 | .idea 20 | .DS_Store 21 | *.suo 22 | *.ntvs* 23 | *.njsproj 24 | *.sln 25 | *.sw? 26 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/README.md: -------------------------------------------------------------------------------- 1 | # a Web UI starter to experiment with viewing clipboard history 2 | 3 | This is a [`SolidJS`](https://www.solidjs.com) UI for `xs` + 4 | `x-macos-pasteboard`. 
5 | 6 | image 7 | 8 | Requirements: 9 | 10 | - [Deno 2](https://deno.com) 11 | - [x-macos-pasteboard](https://github.com/cablehead/x-macos-pasteboard) 12 | - [xs](https://github.com/cablehead/xs) 13 | 14 | ## To run 15 | 16 | Start `xs`: 17 | 18 | ``` 19 | xs serve ./store --expose :3021 20 | ``` 21 | 22 | Bootstrap the store: 23 | 24 | ```nushell 25 | # register x-macos-pasteboard as a generator 26 | r#'{ run: {|| x-macos-pasteboard | lines } }'# | .append pb.spawn 27 | 28 | # register a handler to map raw clipboard data to content 29 | cat handler-pb.map.nu | .append pb.map.register 30 | ``` 31 | 32 | Start UI: 33 | 34 | ``` 35 | deno task dev 36 | open http://localhost:5173 37 | ``` 38 | 39 | ## a base to explore the clipboard for Linux 40 | 41 | A motivation for this example is to give people a base for exploring the 42 | [clipboard on Linux](https://github.com/cablehead/stacks/issues/50). 43 | 44 | Here's how you'd do that. Create a cli similar to `x-macos-pasteboard` that 45 | writes new clipboard entries as jsonl to stdout. The format doesn't matter. Try 46 | to dump as much data as the system will give you. 47 | 48 | Replace the bootstrap step with: 49 | 50 | ```bash 51 | echo '{ run: {|| <your-cli> | lines } }' | xs append ./store pb.spawn 52 | ``` 53 | 54 | That's it! As you copy stuff to the clipboard, you'll see your raw data in the 55 | UI. 56 | 57 | You can then start experimenting with mapping the raw data to the `content` 58 | topic. Pick an id of a raw frame and: 59 | 60 | ```bash 61 | xs get ./store <id> | map | xs append ./store content --meta '{"updates":"<id>"}' 62 | ``` 63 | 64 | If the target content is an image, include `"content_type":"image"` in the 65 | `--meta` object. 66 | 67 | If this is interesting to you, swing by this 68 | [GitHub issue](https://github.com/cablehead/stacks/issues/50).
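If you'd rather drive the Linux bootstrap from Nushell (like the macOS steps above) instead of the raw `xs` CLI, the same flow looks roughly like the sketch below. Treat it as a sketch only: `my-clipboard-watcher` is a stand-in for whatever CLI you build, and the `text` field assumes a jsonl shape like `{"text": "..."}` that your watcher may not actually use.

```nushell
# sketch only: `my-clipboard-watcher` and the `text` field are placeholders;
# adjust both to whatever your Linux watcher actually emits

# register your watcher as a generator (the Nushell equivalent of the
# `xs append ./store pb.spawn` step above)
r#'{ run: {|| my-clipboard-watcher | lines } }'# | .append pb.spawn

# register a handler that maps raw pb.recv frames onto the `content` topic,
# mirroring handler-pb.map.nu
r#'{
  run: {|frame|
    if $frame.topic != "pb.recv" { return }
    .cas $frame.hash | from json | get -i text | if ($in | is-not-empty) {
      $in | .append content --meta {updates: $frame.id}
    }
  }
}'# | .append pb.map.register
```

Either way, the UI only cares that raw frames land on `pb.recv` and mapped frames land on `content`, with `updates` in their meta pointing back at the raw frame's id.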
69 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "tasks": { 3 | "dev": "deno run -A --node-modules-dir npm:vite", 4 | "build": "deno run -A --node-modules-dir npm:vite build", 5 | "preview": "deno run -A --node-modules-dir npm:vite preview", 6 | "serve": "deno run --allow-net --allow-read jsr:@std/http@1/file-server dist/" 7 | }, 8 | "compilerOptions": { 9 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 10 | "jsx": "react-jsx", 11 | "jsxImportSource": "solid-js" 12 | }, 13 | "imports": { 14 | "@deno/vite-plugin": "npm:@deno/vite-plugin@^1.0.0", 15 | "solid-js": "npm:solid-js@^1.9.2", 16 | "solid-styled-components": "npm:solid-styled-components@^0.28.5", 17 | "vite": "npm:vite@^5.4.9", 18 | "vite-plugin-solid": "npm:vite-plugin-solid@^2.10.2" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/handler-pb.map.nu: -------------------------------------------------------------------------------- 1 | { 2 | run: {|frame| 3 | if $frame.topic != "pb.recv" { return } 4 | 5 | let data = .cas $frame.hash | from json | get types 6 | 7 | $data | get -i "public.png" | if ($in | is-not-empty) { 8 | $in | decode base64 | .append content --meta { 9 | updates: $frame.id 10 | content_type: "image" 11 | } 12 | return 13 | } 14 | 15 | $data | get -i "public.utf8-plain-text" | if ($in | is-not-empty) { 16 | $in | decode base64 | decode | .append content --meta {updates: $frame.id} 17 | return 18 | } 19 | 20 | $frame 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/App.tsx: -------------------------------------------------------------------------------- 1 | import { Component, For } from "solid-js"; 2 | import { useFrameStream } from "./store/stream"; 3 | import { useStore } from "./store"; 4 | import { createCAS } from "./store/cas"; 5 | import Card from "./Card"; 6 | 7 | const App: Component = () => { 8 | const frameSignal = useFrameStream(); 9 | 10 | const fetchContent = async (hash: string) => { 11 | const response = await fetch(`/api/cas/${hash}`); 12 | if (!response.ok) { 13 | throw new Error(`Failed to fetch content for hash ${hash}`); 14 | } 15 | return await response.text(); 16 | }; 17 | 18 | const { index } = useStore({ dataSignal: frameSignal }); 19 | const CAS = createCAS(fetchContent); 20 | 21 | return ( 22 |
23 | a solid clipboard 24 | 25 | {(frames) => } 26 | 27 |
28 | ); 29 | }; 30 | 31 | export default App; 32 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/Card.tsx: -------------------------------------------------------------------------------- 1 | import { Component, createMemo, createSignal, For, Show } from "solid-js"; 2 | import { styled } from "solid-styled-components"; 3 | import { Frame } from "./store/stream"; 4 | import { CASStore } from "./store/cas"; 5 | 6 | const CardWrapper = styled("div")` 7 | display: flex; 8 | flex-direction: column; 9 | margin-bottom: 1em; 10 | overflow: hidden; 11 | border-radius: 0.25em; 12 | `; 13 | 14 | const Content = styled("div")` 15 | flex: 1; 16 | overflow-x: auto; 17 | overflow-y: hidden; 18 | padding: 0.25em 0.5em; 19 | `; 20 | 21 | const Meta = styled("div")` 22 | font-size: 0.80em; 23 | color: var(--color-sub-fg); 24 | background-color: var(--color-sub-bg); 25 | padding: 0.5em 1em; 26 | display: flex; 27 | align-items: center; 28 | justify-content: space-between; 29 | `; 30 | 31 | type CardProps = { 32 | frames: Frame[]; 33 | CAS: CASStore; 34 | }; 35 | 36 | const Card: Component = (props) => { 37 | const { frames, CAS } = props; 38 | const [currentIndex, setCurrentIndex] = createSignal(0); 39 | const frame = () => frames[currentIndex()]; 40 | const contentSignal = () => CAS.get(frame().hash); 41 | 42 | const renderContent = () => { 43 | const content = contentSignal()(); 44 | if (!content) return null; 45 | 46 | if (frame().topic === "pb.recv") { 47 | try { 48 | const jsonContent = JSON.parse(content); 49 | return
{JSON.stringify(jsonContent, null, 2)}
; 50 | } catch (error) { 51 | console.error("Failed to parse JSON content:", error); 52 | return

{content}

; 53 | } 54 | } else if (frame().meta?.content_type === "image") { 55 | return Frame content; 56 | } else { 57 | return
{content}
; 58 | } 59 | }; 60 | 61 | // Create a reactive derived signal for `source` 62 | const source = createMemo(() => { 63 | const sourceFrame = frames.find((f) => f.topic === "pb.recv"); 64 | if (!sourceFrame) return null; 65 | 66 | const sourceContent = CAS.get(sourceFrame.hash)(); 67 | if (!sourceContent) return null; 68 | 69 | try { 70 | const parsedContent = JSON.parse(sourceContent); 71 | return parsedContent.source; 72 | } catch (error) { 73 | console.error("Failed to parse JSON content for source:", error); 74 | return null; 75 | } 76 | }); 77 | 78 | return ( 79 | 80 | 81 | {frame().id} 82 | 95 | 96 | {source()} 97 | 98 | 99 | {renderContent()} 100 | 101 | ); 102 | }; 103 | 104 | export default Card; 105 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/index.css: -------------------------------------------------------------------------------- 1 | :root { 2 | font-family: ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono", 3 | "Roboto Mono", "Oxygen Mono", "Ubuntu Monospace", "Source Code Pro", 4 | "Fira Mono", "Droid Sans Mono", "Courier New", monospace; 5 | 6 | font-size: 1.2em; 7 | line-height: 1.5; 8 | 9 | /* Colors */ 10 | --color-fg: #4e5668; 11 | --color-bg: #fff; 12 | 13 | --color-sub-fg: #7d889f; 14 | --color-sub-bg: #eceff4; 15 | 16 | --color-accent: #94bfce; 17 | } 18 | 19 | body { 20 | margin: 0 auto; 21 | padding: 1em; 22 | max-width: 620px; 23 | display: flex; 24 | 25 | color: var(--color-fg); 26 | background-color: var(--color-bg); 27 | } 28 | 29 | main { 30 | width: 100%; 31 | } 32 | 33 | pre { 34 | margin: 0; 35 | padding: 0; 36 | } 37 | 38 | iframe, 39 | img, 40 | input, 41 | select, 42 | textarea { 43 | height: auto; 44 | max-width: 100%; 45 | } 46 | 47 | img { 48 | border-radius: 0.25em; 49 | display: block; 50 | } 51 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/main.tsx: -------------------------------------------------------------------------------- 1 | /* @refresh reload */ 2 | import "./index.css"; 3 | import { render } from "solid-js/web"; 4 | import App from "./App.tsx"; 5 | 6 | render(() => , document.getElementsByTagName("main")[0] as HTMLElement); 7 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/store/cas.ts: -------------------------------------------------------------------------------- 1 | import { createSignal } from "solid-js"; 2 | 3 | export type CASStore = { 4 | get: (hash: string) => () => string | null; 5 | }; 6 | 7 | export function createCAS(fetchContent: (hash: string) => Promise): CASStore { 8 | const cache = new Map string | null>(); 9 | 10 | return { 11 | get(hash: string) { 12 | if (!cache.has(hash)) { 13 | const [content, setContent] = createSignal(null); 14 | 15 | // Cache the signal 16 | cache.set(hash, content); 17 | 18 | // Fetch the content and update the signal in the background 19 | fetchContent(hash) 20 | .then((data) => setContent(data)) 21 | .catch((error) => { 22 | console.error("Failed to fetch content for hash:", error); 23 | }); 24 | } 25 | 26 | // Return the signal for the content 27 | return cache.get(hash)!; 28 | }, 29 | }; 30 | } 31 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/store/index.ts: -------------------------------------------------------------------------------- 1 | import { createEffect, 
createMemo } from "solid-js"; 2 | import { createStore } from "solid-js/store"; 3 | import { Frame } from "./stream"; 4 | 5 | export type StreamStore = { [key: string]: Frame[] }; 6 | 7 | type StreamProps = { 8 | dataSignal: () => Frame | null; 9 | }; 10 | 11 | export function useStore({ dataSignal }: StreamProps) { 12 | const [frames, setFrames] = createStore({}); 13 | 14 | createEffect(() => { 15 | const frame = dataSignal(); 16 | if (!frame) return; 17 | 18 | if (frame.topic !== "pb.recv" && frame.topic !== "content") return; 19 | 20 | const frameId = frame.meta?.updates ?? frame.id; 21 | setFrames(frameId, (existingFrames = []) => [frame, ...existingFrames]); 22 | }); 23 | 24 | const index = createMemo(() => { 25 | return Object.keys(frames) 26 | .sort((a, b) => b.localeCompare(a)) 27 | .map((id) => frames[id]); 28 | }); 29 | 30 | return { 31 | index, 32 | }; 33 | } 34 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/src/store/stream.ts: -------------------------------------------------------------------------------- 1 | import { createSignal, onCleanup, onMount } from "solid-js"; 2 | 3 | export type Frame = { 4 | id: string; 5 | topic: string; 6 | hash: string; 7 | meta?: Record; 8 | }; 9 | 10 | export function useFrameStream() { 11 | const [frame, setFrame] = createSignal(null); 12 | 13 | onMount(() => { 14 | const controller = new AbortController(); 15 | const signal = controller.signal; 16 | 17 | const fetchData = async () => { 18 | const response = await fetch("/api?follow", { signal }); 19 | const textStream = response.body! 20 | .pipeThrough(new TextDecoderStream()) 21 | .pipeThrough(splitStream("\n")); 22 | 23 | const reader = textStream.getReader(); 24 | 25 | while (true) { 26 | const { value, done } = await reader.read(); 27 | if (done) break; 28 | if (value.trim()) { 29 | const json = JSON.parse(value); 30 | setFrame(json); // Update the signal with each new frame 31 | } 32 | } 33 | 34 | reader.releaseLock(); 35 | }; 36 | 37 | fetchData(); 38 | 39 | onCleanup(() => { 40 | controller.abort(); 41 | }); 42 | }); 43 | 44 | return frame; 45 | } 46 | 47 | // Utility function to split a stream by a delimiter 48 | function splitStream(delimiter: string) { 49 | let buffer = ""; 50 | return new TransformStream({ 51 | transform(chunk, controller) { 52 | buffer += chunk; 53 | const parts = buffer.split(delimiter); 54 | buffer = parts.pop()!; 55 | parts.forEach((part) => controller.enqueue(part)); 56 | }, 57 | flush(controller) { 58 | if (buffer) { 59 | controller.enqueue(buffer); 60 | } 61 | }, 62 | }); 63 | } 64 | -------------------------------------------------------------------------------- /examples/x-macos-pasteboard/solid-ui/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vite"; 2 | import deno from "@deno/vite-plugin"; 3 | import solid from "vite-plugin-solid"; 4 | 5 | // https://vite.dev/config/ 6 | export default defineConfig({ 7 | plugins: [deno(), solid()], 8 | server: { 9 | proxy: { 10 | "/api": { 11 | target: "http://localhost:3021", 12 | changeOrigin: true, 13 | rewrite: (path) => path.replace(/^\/api/, ""), 14 | }, 15 | }, 16 | }, 17 | }); 18 | -------------------------------------------------------------------------------- /notes/how-to-release.md: -------------------------------------------------------------------------------- 1 | ```nushell 2 | 3 | # update version in Cargo.toml 4 | cargo b # to update Cargo.lock 5 
| 6 | let PREVIOUS_RELEASE = git tag | lines | where {$in | str starts-with "v"} | sort | last 7 | let RELEASE = open Cargo.toml | get package.version 8 | 9 | # grab the raw commit messages between the previous release and now 10 | # create the release notes 11 | git log --format=%s $"($PREVIOUS_RELEASE)..HEAD" | vipe | save -f $"changes/($RELEASE).md" 12 | git add changes 13 | 14 | git commit -a -m $"chore: release ($RELEASE)" 15 | git push 16 | 17 | cargo publish 18 | cargo install cross-stream --locked 19 | 20 | rm ~/bin/xs 21 | brew uninstall cross-stream 22 | which xs # should be /Users/andy/.cargo/bin/xs 23 | # test the new version 24 | 25 | let pkgdir = $"cross-stream-($RELEASE)" 26 | let tarball = $"cross-stream-($RELEASE)-macos.tar.gz" 27 | 28 | mkdir $pkgdir 29 | cp /Users/andy/.cargo/bin/xs $pkgdir 30 | tar -czvf $tarball -C $pkgdir xs 31 | 32 | # git tag $"v($RELEASE)" 33 | # git push --tags 34 | # ^^ not needed, as the next line will create the tags --> 35 | gh release create $"v($RELEASE)" -F $"changes/($RELEASE).md" $tarball 36 | 37 | shasum -a 256 $tarball 38 | 39 | # update: git@github.com:cablehead/homebrew-tap.git 40 | 41 | brew install cablehead/tap/cross-stream 42 | which xs # should be /opt/homebrew/bin/xs 43 | # test the new version 44 | ``` 45 | -------------------------------------------------------------------------------- /notes/notes.md: -------------------------------------------------------------------------------- 1 | # xs 2 | 3 | ## Overview / Sketch 4 | 5 | An event stream store for personal, local-first use. Kinda like the 6 | [`sqlite3` cli](https://sqlite.org/cli.html), but specializing in the 7 | [event sourcing](https://martinfowler.com/eaaDev/EventSourcing.html) use case. 8 | 9 | ![screenshot](./screenshot.png) 10 | 11 | > "You don't so much run it, as poke _at_ it." 12 | 13 | Built with: 14 | 15 | - [fjall](https://github.com/fjall-rs/fjall): for indexing and metadata 16 | - [cacache](https://github.com/zkat/cacache-rs): for content (CAS) 17 | - [hyper](https://hyper.rs/guides/1/server/echo/): provides an HTTP/1.1 API over 18 | a local Unix domain socket for subscriptions, etc. 19 | - [nushell](https://www.nushell.sh): for scripting and 20 | [interop](https://utopia.rosano.ca/interoperable-visions/) 21 | 22 | ## Built-in Topics 23 | 24 | - `xs.start`: emitted when the server mounts the stream to expose an API 25 | - `xs.stop`: emitted when the server stops :: TODO 26 | 27 | - `xs.pulse`: (synthetic) a heartbeat event you can configure to be emitted every 28 | N seconds when in follow mode 29 | 30 | - `xs.threshold`: (synthetic) marks the boundary between 31 | replaying events and events that are newly arriving in real-time via a live 32 | subscription 33 | 34 | - `.spawn` :: spawn a generator 35 | - meta:: topic: string, duplex: bool 36 | - `.terminate` 37 | 38 | - `.register` :: register an event handler 39 | - meta:: run-from: start, tail, id? 40 | - `.unregister` 41 | 42 | ## Local socket HTTP API 43 | 44 | WIP, thoughts: 45 | 46 | - `/:topic` should probably be `/stream/:topic` 47 | 48 | ## API Endpoints 49 | 50 | ### GET 51 | 52 | - `/` - Pull the event stream 53 | - `/:id` - Pull a specific event by ID (where ID is a valid Scru128Id) 54 | - `/cas/:hash` - Pull the content addressed by `hash` (where hash is a valid ssri::Integrity) 55 | 56 | ### POST 57 | 58 | - `/:topic` - Append a new event to the stream for `topic`. The body of the POST 59 | will be stored in the CAS. 
You can also pass arbitrary JSON metadata using 60 | the `xs-meta` HTTP header. 61 | - `/pipe/:id` - Execute a script on a specific event. The ID should be a valid Scru128Id, 62 | and the body should contain the script to be executed. 63 | 64 | ## Features 65 | 66 | - event stream: 67 | - [x] append 68 | - [x] cat: last-id, follow, tail, threshold / heartbeat synthetic events 69 | - [x] get 70 | - [ ] last 71 | - [ ] first 72 | - [ ] next? 73 | - [ ] previous? 74 | - [x] cas, get 75 | - [x] ephemeral events / content 76 | - [ ] content can be chunked, to accommodate slow streams, e.g. server-sent events 77 | - [ ] secondary indexes for topics: the head of a topic can be used as a materialized view 78 | - process management: you can register snippets of Nushell on the event stream. 79 | the server facilitates watching for updates + managing processes 80 | - [x] generators 81 | - [x] handlers 82 | - [x] builtin http server: 83 | - [x] You can optionally serve HTTP requests from your store. Requests are 84 | written to the event stream as `http.request` and then the connection 85 | watches the event stream for a `http.response`. 86 | - [x] You can register event handlers that subscribe to `http.request` events 87 | and emit `http.response` events. 88 | - Ability for a single xs process to serve many stores 89 | - so you generally run just one locally, using the system's local process 90 | manager, and then add and remove stores to serve via the event stream 91 | 92 | ## Path Traveled 93 | 94 | - [xs-3](https://github.com/cablehead/xs-3): 95 | [sled](https://github.com/spacejam/sled) index with 96 | [cacache](https://github.com/zkat/cacache-rs) CAS, no concurrency 97 | - [xs-0](https://github.com/cablehead/xs-0): original experiment. 98 | - [LMDB](http://www.lmdb.tech/doc/) combined index / content store (pre 99 | realizing the event primary content should be stored in a CAS) 100 | - Multi-process concurrent, but polling for subscribe 101 | -------------------------------------------------------------------------------- /notes/overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/notes/overview.png -------------------------------------------------------------------------------- /notes/screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cablehead/xs/ea20566eddbbe76f9c4a6f1776b308b6db47d798/notes/screenshot.png -------------------------------------------------------------------------------- /scripts/check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | cargo fmt --check 6 | cargo clippy -- -D warnings 7 | cargo t 8 | -------------------------------------------------------------------------------- /src/client/commands.rs: -------------------------------------------------------------------------------- 1 | use futures::StreamExt; 2 | 3 | use base64::Engine; 4 | use ssri::Integrity; 5 | use url::form_urlencoded; 6 | 7 | use http_body_util::{combinators::BoxBody, BodyExt, Empty, StreamBody}; 8 | use hyper::body::Bytes; 9 | use hyper::Method; 10 | use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; 11 | use tokio::sync::mpsc::Receiver; 12 | use tokio_util::io::ReaderStream; 13 | 14 | use super::request; 15 | use crate::store::{ReadOptions, TTL}; 16 | 17 | pub async fn cat( 18 | addr: &str, 19 | options: ReadOptions,
20 | sse: bool, 21 | ) -> Result, Box> { 22 | // Convert any usize limit to u64 23 | let query = if options == ReadOptions::default() { 24 | None 25 | } else { 26 | Some(options.to_query_string()) 27 | }; 28 | 29 | let headers = if sse { 30 | Some(vec![( 31 | "Accept".to_string(), 32 | "text/event-stream".to_string(), 33 | )]) 34 | } else { 35 | None 36 | }; 37 | 38 | let res = request::request(addr, Method::GET, "", query.as_deref(), empty(), headers).await?; 39 | 40 | let (_parts, mut body) = res.into_parts(); 41 | let (tx, rx) = tokio::sync::mpsc::channel(100); 42 | 43 | tokio::spawn(async move { 44 | while let Some(frame_result) = body.frame().await { 45 | match frame_result { 46 | Ok(frame) => { 47 | if let Ok(bytes) = frame.into_data() { 48 | if tx.send(bytes).await.is_err() { 49 | break; 50 | } 51 | } 52 | } 53 | Err(e) => { 54 | eprintln!("Error reading body: {}", e); 55 | break; 56 | } 57 | } 58 | } 59 | }); 60 | 61 | Ok(rx) 62 | } 63 | 64 | pub async fn append( 65 | addr: &str, 66 | topic: &str, 67 | data: R, 68 | meta: Option<&serde_json::Value>, 69 | ttl: Option, 70 | context: Option<&str>, 71 | ) -> Result> 72 | where 73 | R: AsyncRead + Unpin + Send + 'static, 74 | { 75 | let mut params = Vec::new(); 76 | if let Some(t) = ttl { 77 | let ttl_query = t.to_query(); 78 | if let Some((k, v)) = ttl_query.split_once('=') { 79 | params.push((k.to_string(), v.to_string())); 80 | } 81 | } 82 | if let Some(c) = context { 83 | params.push(("context".to_string(), c.to_string())); 84 | } 85 | 86 | let query = if !params.is_empty() { 87 | Some( 88 | form_urlencoded::Serializer::new(String::new()) 89 | .extend_pairs(params) 90 | .finish(), 91 | ) 92 | } else { 93 | None 94 | }; 95 | 96 | let reader_stream = ReaderStream::new(data); 97 | let mapped_stream = reader_stream.map(|result| { 98 | result 99 | .map(hyper::body::Frame::data) 100 | .map_err(|e| Box::new(e) as Box) 101 | }); 102 | let body = StreamBody::new(mapped_stream); 103 | 104 | let headers = meta.map(|meta_value| { 105 | let json_string = serde_json::to_string(meta_value).unwrap(); 106 | let encoded = base64::prelude::BASE64_STANDARD.encode(json_string); 107 | vec![("xs-meta".to_string(), encoded)] 108 | }); 109 | 110 | let res = request::request(addr, Method::POST, topic, query.as_deref(), body, headers).await?; 111 | let body = res.collect().await?.to_bytes(); 112 | Ok(body) 113 | } 114 | 115 | pub async fn cas_get( 116 | addr: &str, 117 | integrity: Integrity, 118 | writer: &mut W, 119 | ) -> Result<(), Box> 120 | where 121 | W: AsyncWrite + Unpin, 122 | { 123 | let parts = super::types::RequestParts::parse(addr, &format!("cas/{}", integrity), None)?; 124 | 125 | match parts.connection { 126 | super::types::ConnectionKind::Unix(path) => { 127 | // Direct CAS access for local path 128 | let store_path = path.parent().unwrap_or(&path).to_path_buf(); 129 | let cas_path = store_path.join("cacache"); 130 | let mut reader = cacache::Reader::open_hash(&cas_path, integrity).await?; 131 | tokio::io::copy(&mut reader, writer).await?; 132 | writer.flush().await?; 133 | Ok(()) 134 | } 135 | _ => { 136 | // Remote HTTP access 137 | let res = request::request( 138 | addr, 139 | Method::GET, 140 | &format!("cas/{}", integrity), 141 | None, 142 | empty(), 143 | None, 144 | ) 145 | .await?; 146 | let mut body = res.into_body(); 147 | 148 | while let Some(frame) = body.frame().await { 149 | let frame = frame?; 150 | if let Ok(chunk) = frame.into_data() { 151 | writer.write_all(&chunk).await?; 152 | } 153 | } 154 | 155 | writer.flush().await?; 
156 | Ok(()) 157 | } 158 | } 159 | } 160 | 161 | pub async fn cas_post( 162 | addr: &str, 163 | data: R, 164 | ) -> Result> 165 | where 166 | R: AsyncRead + Unpin + Send + 'static, 167 | { 168 | let reader_stream = ReaderStream::new(data); 169 | let mapped_stream = reader_stream.map(|result| { 170 | result 171 | .map(hyper::body::Frame::data) 172 | .map_err(|e| Box::new(e) as Box) 173 | }); 174 | let body = StreamBody::new(mapped_stream); 175 | 176 | let res = request::request(addr, Method::POST, "cas", None, body, None).await?; 177 | let body = res.collect().await?.to_bytes(); 178 | Ok(body) 179 | } 180 | 181 | pub async fn get(addr: &str, id: &str) -> Result> { 182 | let res = request::request(addr, Method::GET, id, None, empty(), None).await?; 183 | let body = res.collect().await?.to_bytes(); 184 | Ok(body) 185 | } 186 | 187 | pub async fn remove(addr: &str, id: &str) -> Result<(), Box> { 188 | let _ = request::request(addr, Method::DELETE, id, None, empty(), None).await?; 189 | Ok(()) 190 | } 191 | 192 | pub async fn head( 193 | addr: &str, 194 | topic: &str, 195 | follow: bool, 196 | context: Option<&str>, 197 | ) -> Result<(), Box> { 198 | let mut params = Vec::new(); 199 | if follow { 200 | params.push(("follow", "true".to_string())); 201 | } 202 | if let Some(c) = context { 203 | params.push(("context", c.to_string())); 204 | } 205 | 206 | let query = if !params.is_empty() { 207 | Some( 208 | form_urlencoded::Serializer::new(String::new()) 209 | .extend_pairs(params) 210 | .finish(), 211 | ) 212 | } else { 213 | None 214 | }; 215 | 216 | let res = request::request( 217 | addr, 218 | Method::GET, 219 | &format!("head/{}", topic), 220 | query.as_deref(), 221 | empty(), 222 | None, 223 | ) 224 | .await?; 225 | 226 | let mut body = res.into_body(); 227 | let mut stdout = tokio::io::stdout(); 228 | 229 | while let Some(frame) = body.frame().await { 230 | let frame = frame?; 231 | if let Ok(chunk) = frame.into_data() { 232 | stdout.write_all(&chunk).await?; 233 | } 234 | } 235 | stdout.flush().await?; 236 | Ok(()) 237 | } 238 | 239 | pub async fn import( 240 | addr: &str, 241 | data: R, 242 | ) -> Result> 243 | where 244 | R: AsyncRead + Unpin + Send + 'static, 245 | { 246 | let reader_stream = ReaderStream::new(data); 247 | let mapped_stream = reader_stream.map(|result| { 248 | result 249 | .map(hyper::body::Frame::data) 250 | .map_err(|e| Box::new(e) as Box) 251 | }); 252 | let body = StreamBody::new(mapped_stream); 253 | 254 | let res = request::request(addr, Method::POST, "import", None, body, None).await?; 255 | let body = res.collect().await?.to_bytes(); 256 | Ok(body) 257 | } 258 | 259 | pub async fn version(addr: &str) -> Result> { 260 | match request::request(addr, Method::GET, "version", None, empty(), None).await { 261 | Ok(res) => { 262 | let body = res.collect().await?.to_bytes(); 263 | Ok(body) 264 | } 265 | Err(e) => { 266 | // this was the version before the /version endpoint was added 267 | if e.to_string().contains("404 Not Found") { 268 | Ok(Bytes::from(r#"{"version":"0.0.9"}"#)) 269 | } else { 270 | Err(e) 271 | } 272 | } 273 | } 274 | } 275 | 276 | fn empty() -> BoxBody> { 277 | Empty::::new() 278 | .map_err(|never| match never {}) 279 | .boxed() 280 | } 281 | -------------------------------------------------------------------------------- /src/client/connect.rs: -------------------------------------------------------------------------------- 1 | use crate::listener::AsyncReadWriteBox; 2 | use rustls::pki_types::ServerName; 3 | use rustls::ClientConfig; 4 | use 
rustls::RootCertStore; 5 | use std::sync::Arc; 6 | use tokio::net::{TcpStream, UnixStream}; 7 | use tokio_rustls::TlsConnector; 8 | 9 | use super::types::{BoxError, ConnectionKind, RequestParts}; 10 | 11 | async fn create_tls_connector() -> Result { 12 | let mut root_store = RootCertStore::empty(); 13 | root_store.extend(webpki_roots::TLS_SERVER_ROOTS.iter().cloned()); 14 | let config = ClientConfig::builder() 15 | .with_root_certificates(root_store) 16 | .with_no_client_auth(); 17 | Ok(TlsConnector::from(Arc::new(config))) 18 | } 19 | 20 | pub async fn connect(parts: &RequestParts) -> Result { 21 | match &parts.connection { 22 | ConnectionKind::Unix(path) => { 23 | let stream = UnixStream::connect(path).await?; 24 | Ok(Box::new(stream)) 25 | } 26 | ConnectionKind::Tcp { host, port } => { 27 | let stream = TcpStream::connect((host.as_str(), *port)).await?; 28 | Ok(Box::new(stream)) 29 | } 30 | ConnectionKind::Tls { host, port } => { 31 | let tcp_stream = TcpStream::connect((host.as_str(), *port)).await?; 32 | let connector = create_tls_connector().await?; 33 | let server_name = ServerName::try_from(host.clone())?; // Clone the host string 34 | let tls_stream = connector.connect(server_name, tcp_stream).await?; 35 | Ok(Box::new(tls_stream)) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/client/mod.rs: -------------------------------------------------------------------------------- 1 | mod commands; 2 | mod connect; 3 | mod request; 4 | mod types; 5 | 6 | pub use self::commands::{append, cas_get, cas_post, cat, get, head, import, remove, version}; 7 | -------------------------------------------------------------------------------- /src/client/request.rs: -------------------------------------------------------------------------------- 1 | use http_body_util::BodyExt; 2 | use hyper::{Method, Request}; 3 | use hyper_util::rt::TokioIo; 4 | 5 | use super::connect::connect; 6 | use super::types::{BoxError, RequestParts}; 7 | 8 | pub async fn request( 9 | addr: &str, 10 | method: Method, 11 | path: &str, 12 | query: Option<&str>, 13 | body: B, 14 | headers: Option>, 15 | ) -> Result, BoxError> 16 | where 17 | B: hyper::body::Body + Send + 'static, 18 | B::Error: Into + Send, 19 | { 20 | let parts = RequestParts::parse(addr, path, query)?; 21 | let stream = connect(&parts).await?; 22 | let io = TokioIo::new(stream); 23 | let (mut sender, conn) = hyper::client::conn::http1::handshake(io).await?; 24 | 25 | tokio::spawn(async move { 26 | if let Err(e) = conn.await { 27 | eprintln!("Connection error: {}", e); 28 | } 29 | }); 30 | 31 | let mut builder = Request::builder() 32 | .method(method) 33 | .uri(parts.uri) 34 | .header(hyper::header::USER_AGENT, "xs/0.1") 35 | .header(hyper::header::ACCEPT, "*/*"); 36 | 37 | if let Some(host) = parts.host { 38 | builder = builder.header(hyper::header::HOST, host); 39 | } 40 | if let Some(auth) = parts.authorization { 41 | builder = builder.header(hyper::header::AUTHORIZATION, auth); 42 | } 43 | 44 | if let Some(extra_headers) = headers { 45 | for (name, value) in extra_headers { 46 | builder = builder.header(name, value); 47 | } 48 | } 49 | 50 | let req = builder.body(body)?; 51 | let res = sender.send_request(req).await?; 52 | 53 | // Handle non-OK responses 54 | if res.status() != hyper::StatusCode::OK && res.status() != hyper::StatusCode::NO_CONTENT { 55 | let status = res.status(); 56 | let body = res.collect().await?.to_bytes(); 57 | return Err(format!("{}:: {}", status, 
String::from_utf8_lossy(&body)).into()); 58 | } 59 | 60 | Ok(res) 61 | } 62 | -------------------------------------------------------------------------------- /src/client/types.rs: -------------------------------------------------------------------------------- 1 | use base64::prelude::*; 2 | 3 | pub type BoxError = Box; 4 | 5 | #[derive(Debug, PartialEq)] 6 | pub enum ConnectionKind { 7 | Unix(std::path::PathBuf), 8 | Tcp { host: String, port: u16 }, 9 | Tls { host: String, port: u16 }, 10 | } 11 | 12 | #[derive(Debug, PartialEq)] 13 | pub struct RequestParts { 14 | pub uri: String, 15 | pub host: Option, 16 | pub authorization: Option, 17 | pub connection: ConnectionKind, 18 | } 19 | 20 | impl RequestParts { 21 | pub fn parse( 22 | addr: &str, 23 | path: &str, 24 | query: Option<&str>, 25 | ) -> Result> { 26 | // Unix socket case 27 | if addr.starts_with('/') || addr.starts_with('.') { 28 | let socket_path = if std::path::Path::new(addr).is_dir() { 29 | std::path::Path::new(addr).join("sock") 30 | } else { 31 | std::path::Path::new(addr).to_path_buf() 32 | }; 33 | 34 | return Ok(RequestParts { 35 | uri: if let Some(q) = query { 36 | format!("http://localhost/{}?{}", path, q) 37 | } else { 38 | format!("http://localhost/{}", path) 39 | }, 40 | host: None, 41 | authorization: None, 42 | connection: ConnectionKind::Unix(socket_path), 43 | }); 44 | } 45 | 46 | // Normalize URL 47 | let addr = if addr.starts_with(':') { 48 | format!("http://127.0.0.1{}", addr) 49 | } else if !addr.contains("://") { 50 | format!("http://{}", addr) 51 | } else { 52 | addr.to_string() 53 | }; 54 | 55 | let url = url::Url::parse(&addr)?; 56 | let scheme = url.scheme(); 57 | let host = url.host_str().ok_or("Missing host")?.to_string(); 58 | let port = url 59 | .port() 60 | .unwrap_or(if scheme == "https" { 443 } else { 80 }); 61 | let port_str = if (scheme == "http" && port == 80) || (scheme == "https" && port == 443) { 62 | "".to_string() 63 | } else { 64 | format!(":{}", port) 65 | }; 66 | 67 | // Build clean request URI (no auth) 68 | let uri = if let Some(q) = query { 69 | format!("{}://{}{}/{}?{}", scheme, host, port_str, path, q) 70 | } else { 71 | format!("{}://{}{}/{}", scheme, host, port_str, path) 72 | }; 73 | 74 | // Set auth if present 75 | let authorization = if let Some(password) = url.password() { 76 | let credentials = format!("{}:{}", url.username(), password); 77 | Some(format!( 78 | "Basic {}", 79 | base64::prelude::BASE64_STANDARD.encode(credentials) 80 | )) 81 | } else if !url.username().is_empty() { 82 | let credentials = format!("{}:", url.username()); 83 | Some(format!( 84 | "Basic {}", 85 | base64::prelude::BASE64_STANDARD.encode(credentials) 86 | )) 87 | } else { 88 | None 89 | }; 90 | 91 | Ok(RequestParts { 92 | uri, 93 | host: Some(format!("{}{}", host, port_str)), 94 | authorization, 95 | connection: if scheme == "https" { 96 | ConnectionKind::Tls { host, port } 97 | } else { 98 | ConnectionKind::Tcp { host, port } 99 | }, 100 | }) 101 | } 102 | } 103 | 104 | #[cfg(test)] 105 | mod tests { 106 | use super::*; 107 | 108 | #[test] 109 | fn test_unix_socket() { 110 | let parts = RequestParts::parse("./store", "foo", None).unwrap(); 111 | assert_eq!(parts.uri, "http://localhost/foo"); 112 | assert_eq!(parts.host, None); 113 | assert_eq!(parts.authorization, None); 114 | } 115 | 116 | #[test] 117 | fn test_port_only() { 118 | let parts = RequestParts::parse(":8080", "bar", Some("q=1")).unwrap(); 119 | assert_eq!(parts.uri, "http://127.0.0.1:8080/bar?q=1"); 120 | assert_eq!(parts.host, 
Some("127.0.0.1:8080".to_string())); 121 | assert_eq!(parts.authorization, None); 122 | } 123 | 124 | #[test] 125 | fn test_https_url_with_auth() { 126 | let parts = RequestParts::parse("https://user:pass@example.com:400", "", None).unwrap(); 127 | assert_eq!(parts.uri, "https://example.com:400/"); 128 | assert_eq!(parts.host, Some("example.com:400".to_string())); 129 | assert_eq!(parts.authorization, Some("Basic dXNlcjpwYXNz".to_string())); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | mod serve; 2 | 3 | #[cfg(test)] 4 | mod tests; 5 | 6 | pub use serve::serve; 7 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | pub type Error = Box; 2 | -------------------------------------------------------------------------------- /src/generators/mod.rs: -------------------------------------------------------------------------------- 1 | mod generator; 2 | mod serve; 3 | 4 | pub use generator::{ 5 | spawn as spawn_generator_loop, GeneratorEventKind, GeneratorLoop, GeneratorScriptOptions, 6 | StopReason, Task, 7 | }; 8 | 9 | #[cfg(test)] 10 | mod tests; 11 | 12 | pub use serve::serve; 13 | -------------------------------------------------------------------------------- /src/generators/serve.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use scru128::Scru128Id; 4 | use serde_json::json; 5 | use tokio::task::JoinHandle; 6 | 7 | use crate::generators::generator; 8 | use crate::nu; 9 | use crate::store::{FollowOption, Frame, ReadOptions, Store}; 10 | 11 | async fn try_start_task( 12 | topic: &str, 13 | frame: &Frame, 14 | active: &mut HashMap<(String, Scru128Id), JoinHandle<()>>, 15 | engine: &nu::Engine, 16 | store: &Store, 17 | ) { 18 | if let Err(e) = 19 | handle_spawn_event(topic, frame.clone(), active, engine.clone(), store.clone()).await 20 | { 21 | let meta = json!({ 22 | "source_id": frame.id.to_string(), 23 | "reason": e.to_string() 24 | }); 25 | 26 | if let Err(e) = store.append( 27 | Frame::builder(format!("{}.parse.error", topic), frame.context_id) 28 | .meta(meta) 29 | .build(), 30 | ) { 31 | tracing::error!("Error appending error frame: {}", e); 32 | } 33 | } 34 | } 35 | 36 | async fn handle_spawn_event( 37 | topic: &str, 38 | frame: Frame, 39 | active: &mut HashMap<(String, Scru128Id), JoinHandle<()>>, 40 | engine: nu::Engine, 41 | store: Store, 42 | ) -> Result<(), Box> { 43 | let key = (topic.to_string(), frame.context_id); 44 | if let Some(handle) = active.get(&key) { 45 | if handle.is_finished() { 46 | active.remove(&key); 47 | } else { 48 | // A generator for this topic/context is already running. Ignore the 49 | // new spawn frame; the running generator will handle it as a hot 50 | // reload. 
51 | return Ok(()); 52 | } 53 | } 54 | 55 | let handle = generator::spawn(store.clone(), engine.clone(), frame); 56 | active.insert(key, handle); 57 | Ok(()) 58 | } 59 | 60 | pub async fn serve( 61 | store: Store, 62 | engine: nu::Engine, 63 | ) -> Result<(), Box> { 64 | let options = ReadOptions::builder().follow(FollowOption::On).build(); 65 | let mut recver = store.read(options).await; 66 | 67 | let mut active: HashMap<(String, Scru128Id), JoinHandle<()>> = HashMap::new(); 68 | let mut compacted: HashMap<(String, Scru128Id), Frame> = HashMap::new(); 69 | 70 | while let Some(frame) = recver.recv().await { 71 | if frame.topic == "xs.threshold" { 72 | break; 73 | } 74 | if frame.topic.ends_with(".spawn") || frame.topic.ends_with(".parse.error") { 75 | if let Some(prefix) = frame 76 | .topic 77 | .strip_suffix(".parse.error") 78 | .or_else(|| frame.topic.strip_suffix(".spawn")) 79 | { 80 | compacted.insert((prefix.to_string(), frame.context_id), frame); 81 | } 82 | } else if let Some(prefix) = frame.topic.strip_suffix(".terminate") { 83 | compacted.remove(&(prefix.to_string(), frame.context_id)); 84 | } 85 | } 86 | 87 | for ((topic, _), frame) in &compacted { 88 | if frame.topic.ends_with(".spawn") { 89 | try_start_task(topic, frame, &mut active, &engine, &store).await; 90 | } 91 | } 92 | 93 | while let Some(frame) = recver.recv().await { 94 | if let Some(prefix) = frame.topic.strip_suffix(".spawn") { 95 | try_start_task(prefix, &frame, &mut active, &engine, &store).await; 96 | continue; 97 | } 98 | 99 | if let Some(_prefix) = frame.topic.strip_suffix(".parse.error") { 100 | // parse.error frames are informational; ignore them 101 | continue; 102 | } 103 | 104 | if let Some(prefix) = frame.topic.strip_suffix(".shutdown") { 105 | active.remove(&(prefix.to_string(), frame.context_id)); 106 | continue; 107 | } 108 | } 109 | 110 | Ok(()) 111 | } 112 | -------------------------------------------------------------------------------- /src/handlers/mod.rs: -------------------------------------------------------------------------------- 1 | mod handler; 2 | mod serve; 3 | #[cfg(test)] 4 | mod tests; 5 | 6 | pub use handler::Handler; 7 | pub use serve::serve; 8 | -------------------------------------------------------------------------------- /src/handlers/serve.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use crate::handlers::Handler; 4 | use crate::nu; 5 | use crate::nu::commands; 6 | use crate::store::{FollowOption, Frame, ReadOptions, Store}; 7 | 8 | async fn start_handler( 9 | frame: &Frame, 10 | store: &Store, 11 | engine: &nu::Engine, 12 | topic: &str, 13 | ) -> Result<(), Box> { 14 | match Handler::from_frame(frame, store, engine.clone()).await { 15 | Ok(handler) => { 16 | handler.spawn(store.clone()).await?; 17 | Ok(()) 18 | } 19 | Err(err) => { 20 | let _ = store.append( 21 | Frame::builder(format!("{}.unregistered", topic), frame.context_id) 22 | .meta(serde_json::json!({ 23 | "handler_id": frame.id.to_string(), 24 | "error": err.to_string(), 25 | })) 26 | .build(), 27 | ); 28 | Ok(()) 29 | } 30 | } 31 | } 32 | 33 | #[derive(Debug)] 34 | struct TopicState { 35 | register_frame: Frame, 36 | handler_id: String, 37 | } 38 | 39 | pub async fn serve( 40 | store: Store, 41 | mut engine: nu::Engine, 42 | ) -> Result<(), Box> { 43 | engine.add_commands(vec![ 44 | Box::new(commands::cas_command::CasCommand::new(store.clone())), 45 | Box::new(commands::get_command::GetCommand::new(store.clone())), 46 | 
Box::new(commands::remove_command::RemoveCommand::new(store.clone())), 47 | ])?; 48 | engine.add_alias(".rm", ".remove")?; 49 | 50 | let options = ReadOptions::builder().follow(FollowOption::On).build(); 51 | 52 | let mut recver = store.read(options).await; 53 | let mut topic_states: HashMap<(String, scru128::Scru128Id), TopicState> = HashMap::new(); 54 | 55 | // Process historical frames until threshold 56 | while let Some(frame) = recver.recv().await { 57 | if frame.topic == "xs.threshold" { 58 | break; 59 | } 60 | 61 | // Extract base topic and suffix 62 | if let Some((topic, suffix)) = frame.topic.rsplit_once('.') { 63 | match suffix { 64 | "register" => { 65 | // Store new registration 66 | topic_states.insert( 67 | (topic.to_string(), frame.context_id), 68 | TopicState { 69 | register_frame: frame.clone(), 70 | handler_id: frame.id.to_string(), 71 | }, 72 | ); 73 | } 74 | "unregister" | "unregistered" => { 75 | // Only remove if handler_id matches 76 | if let Some(meta) = &frame.meta { 77 | if let Some(handler_id) = meta.get("handler_id").and_then(|v| v.as_str()) { 78 | let key = (topic.to_string(), frame.context_id); 79 | if let Some(state) = topic_states.get(&key) { 80 | if state.handler_id == handler_id { 81 | topic_states.remove(&key); 82 | } 83 | } 84 | } 85 | } 86 | } 87 | _ => {} 88 | } 89 | } 90 | } 91 | 92 | // Process all retained registrations ordered by frame ID 93 | let mut ordered_states: Vec<_> = topic_states.values().collect(); 94 | ordered_states.sort_by_key(|state| state.register_frame.id); 95 | 96 | for state in ordered_states { 97 | if let Some(topic) = state.register_frame.topic.strip_suffix(".register") { 98 | start_handler(&state.register_frame, &store, &engine, topic).await?; 99 | } 100 | } 101 | 102 | // Continue processing new frames 103 | while let Some(frame) = recver.recv().await { 104 | if let Some(topic) = frame.topic.strip_suffix(".register") { 105 | start_handler(&frame, &store, &engine, topic).await?; 106 | } 107 | } 108 | 109 | Ok(()) 110 | } 111 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod api; 2 | pub mod client; 3 | pub mod commands; 4 | pub mod error; 5 | pub mod generators; 6 | pub mod handlers; 7 | pub mod listener; 8 | pub mod nu; 9 | pub mod store; 10 | pub mod trace; 11 | -------------------------------------------------------------------------------- /src/listener.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | use tokio::io::{AsyncRead, AsyncWrite}; 4 | use tokio::net::{TcpListener, UnixListener}; 5 | #[cfg(test)] 6 | use tokio::net::{TcpStream, UnixStream}; 7 | 8 | pub trait AsyncReadWrite: AsyncRead + AsyncWrite {} 9 | 10 | impl AsyncReadWrite for T {} 11 | 12 | pub type AsyncReadWriteBox = Box; 13 | 14 | pub enum Listener { 15 | Tcp(TcpListener), 16 | Unix(UnixListener), 17 | } 18 | 19 | impl Listener { 20 | pub async fn accept( 21 | &mut self, 22 | ) -> io::Result<(AsyncReadWriteBox, Option)> { 23 | match self { 24 | Listener::Tcp(listener) => { 25 | let (stream, addr) = listener.accept().await?; 26 | Ok((Box::new(stream), Some(addr))) 27 | } 28 | Listener::Unix(listener) => { 29 | let (stream, _) = listener.accept().await?; 30 | Ok((Box::new(stream), None)) 31 | } 32 | } 33 | } 34 | 35 | pub async fn bind(addr: &str) -> io::Result { 36 | if addr.starts_with('/') || addr.starts_with('.') { 37 | // attempt to remove the socket 
unconditionally 38 | let _ = std::fs::remove_file(addr); 39 | let listener = UnixListener::bind(addr)?; 40 | Ok(Listener::Unix(listener)) 41 | } else { 42 | let mut addr = addr.to_owned(); 43 | if addr.starts_with(':') { 44 | addr = format!("127.0.0.1{}", addr); 45 | }; 46 | let listener = TcpListener::bind(addr).await?; 47 | Ok(Listener::Tcp(listener)) 48 | } 49 | } 50 | 51 | #[cfg(test)] 52 | pub async fn connect(&self) -> io::Result { 53 | match self { 54 | Listener::Tcp(listener) => { 55 | let stream = TcpStream::connect(listener.local_addr()?).await?; 56 | Ok(Box::new(stream)) 57 | } 58 | Listener::Unix(listener) => { 59 | let stream = 60 | UnixStream::connect(listener.local_addr()?.as_pathname().unwrap()).await?; 61 | Ok(Box::new(stream)) 62 | } 63 | } 64 | } 65 | } 66 | 67 | impl std::fmt::Display for Listener { 68 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 69 | match self { 70 | Listener::Tcp(listener) => { 71 | let addr = listener.local_addr().unwrap(); 72 | write!(f, "{}:{}", addr.ip(), addr.port()) 73 | } 74 | Listener::Unix(listener) => { 75 | let addr = listener.local_addr().unwrap(); 76 | let path = addr.as_pathname().unwrap(); 77 | write!(f, "{}", path.display()) 78 | } 79 | } 80 | } 81 | } 82 | 83 | #[cfg(test)] 84 | mod tests { 85 | use super::*; 86 | 87 | use tokio::io::AsyncReadExt; 88 | use tokio::io::AsyncWriteExt; 89 | 90 | async fn exercise_listener(addr: &str) { 91 | let mut listener = Listener::bind(addr).await.unwrap(); 92 | let mut client = listener.connect().await.unwrap(); 93 | 94 | let (mut serve, _) = listener.accept().await.unwrap(); 95 | let want = b"Hello from server!"; 96 | serve.write_all(want).await.unwrap(); 97 | drop(serve); 98 | 99 | let mut got = Vec::new(); 100 | client.read_to_end(&mut got).await.unwrap(); 101 | assert_eq!(want.to_vec(), got); 102 | } 103 | 104 | #[tokio::test] 105 | async fn test_bind_tcp() { 106 | exercise_listener(":0").await; 107 | } 108 | 109 | #[tokio::test] 110 | async fn test_bind_unix() { 111 | let temp_dir = tempfile::tempdir().unwrap(); 112 | let path = temp_dir.path().join("test.sock"); 113 | let path = path.to_str().unwrap(); 114 | exercise_listener(path).await; 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/nu/commands/append_command.rs: -------------------------------------------------------------------------------- 1 | use nu_engine::CallExt; 2 | use nu_protocol::engine::{Call, Command, EngineState, Stack}; 3 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type, Value}; 4 | 5 | use serde_json::Value as JsonValue; 6 | 7 | use crate::nu::util; 8 | use crate::store::{Frame, Store, TTL}; 9 | 10 | #[derive(Clone)] 11 | pub struct AppendCommand { 12 | store: Store, 13 | context_id: scru128::Scru128Id, 14 | base_meta: JsonValue, 15 | } 16 | 17 | impl AppendCommand { 18 | pub fn new(store: Store, context_id: scru128::Scru128Id, base_meta: JsonValue) -> Self { 19 | Self { 20 | store, 21 | context_id, 22 | base_meta, 23 | } 24 | } 25 | } 26 | 27 | impl Command for AppendCommand { 28 | fn name(&self) -> &str { 29 | ".append" 30 | } 31 | 32 | fn signature(&self) -> Signature { 33 | Signature::build(".append") 34 | .input_output_types(vec![(Type::Any, Type::Any)]) 35 | .required("topic", SyntaxShape::String, "this clip's topic") 36 | .named( 37 | "meta", 38 | SyntaxShape::Record(vec![]), 39 | "arbitrary metadata", 40 | None, 41 | ) 42 | .named( 43 | "ttl", 44 | SyntaxShape::String, 45 | r#"TTL specification: 
'forever', 'ephemeral', 'time:', or 'head:'"#, 46 | None, 47 | ) 48 | .named( 49 | "context", 50 | SyntaxShape::String, 51 | "context ID (defaults to system context)", 52 | None, 53 | ) 54 | .category(Category::Experimental) 55 | } 56 | 57 | fn description(&self) -> &str { 58 | "Writes its input to the CAS and then appends a frame with a hash of this content to the given topic on the stream." 59 | } 60 | 61 | fn run( 62 | &self, 63 | engine_state: &EngineState, 64 | stack: &mut Stack, 65 | call: &Call, 66 | input: PipelineData, 67 | ) -> Result { 68 | let span = call.head; 69 | 70 | let store = self.store.clone(); 71 | 72 | let topic: String = call.req(engine_state, stack, 0)?; 73 | 74 | // Get user-supplied metadata and convert to JSON 75 | let user_meta: Option = call.get_flag(engine_state, stack, "meta")?; 76 | let mut final_meta = self.base_meta.clone(); // Start with base metadata 77 | 78 | // Merge user metadata if provided 79 | if let Some(user_value) = user_meta { 80 | let user_json = util::value_to_json(&user_value); 81 | if let JsonValue::Object(mut base_obj) = final_meta { 82 | if let JsonValue::Object(user_obj) = user_json { 83 | base_obj.extend(user_obj); // Merge user metadata into base 84 | final_meta = JsonValue::Object(base_obj); 85 | } else { 86 | return Err(ShellError::TypeMismatch { 87 | err_message: "Meta must be a record".to_string(), 88 | span: call.span(), 89 | }); 90 | } 91 | } 92 | } 93 | 94 | let ttl: Option = call.get_flag(engine_state, stack, "ttl")?; 95 | let ttl = match ttl { 96 | Some(ttl_str) => Some(TTL::from_query(Some(&format!("ttl={}", ttl_str))).map_err( 97 | |e| ShellError::TypeMismatch { 98 | err_message: format!("Invalid TTL value: {}. {}", ttl_str, e), 99 | span: call.span(), 100 | }, 101 | )?), 102 | None => None, 103 | }; 104 | 105 | let hash = util::write_pipeline_to_cas(input, &store, span).map_err(|boxed| *boxed)?; 106 | let context_str: Option = call.get_flag(engine_state, stack, "context")?; 107 | let context_id = context_str 108 | .map(|ctx| ctx.parse::()) 109 | .transpose() 110 | .map_err(|e| ShellError::GenericError { 111 | error: "Invalid context ID".into(), 112 | msg: e.to_string(), 113 | span: Some(call.head), 114 | help: None, 115 | inner: vec![], 116 | })? 
117 | .unwrap_or(self.context_id); 118 | 119 | let frame = store.append( 120 | Frame::builder(topic, context_id) 121 | .maybe_hash(hash) 122 | .meta(final_meta) 123 | .maybe_ttl(ttl) 124 | .build(), 125 | )?; 126 | 127 | Ok(PipelineData::Value( 128 | util::frame_to_value(&frame, span), 129 | None, 130 | )) 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/nu/commands/append_command_buffered.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use nu_engine::CallExt; 4 | use nu_protocol::engine::{Call, Command, EngineState, Stack}; 5 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type, Value}; 6 | 7 | use crate::nu::util::value_to_json; 8 | use crate::store::{Frame, Store, TTL}; 9 | 10 | #[derive(Clone)] 11 | pub struct AppendCommand { 12 | output: Arc>>, 13 | store: Store, 14 | } 15 | 16 | impl AppendCommand { 17 | pub fn new(store: Store, output: Arc>>) -> Self { 18 | Self { output, store } 19 | } 20 | } 21 | 22 | impl Command for AppendCommand { 23 | fn name(&self) -> &str { 24 | ".append" 25 | } 26 | 27 | fn signature(&self) -> Signature { 28 | Signature::build(".append") 29 | .input_output_types(vec![(Type::Any, Type::Any)]) 30 | .required("topic", SyntaxShape::String, "this clip's topic") 31 | .named( 32 | "meta", 33 | SyntaxShape::Record(vec![]), 34 | "arbitrary metadata", 35 | None, 36 | ) 37 | .named( 38 | "ttl", 39 | SyntaxShape::String, 40 | r#"TTL specification: 'forever', 'ephemeral', 'time:', or 'head:'"#, 41 | None, 42 | ) 43 | .named( 44 | "context", 45 | SyntaxShape::String, 46 | "context ID (defaults to system context)", 47 | None, 48 | ) 49 | .category(Category::Experimental) 50 | } 51 | 52 | fn description(&self) -> &str { 53 | "Writes its input to the CAS and buffers a frame for later batch processing. The frame will include the content hash, any provided metadata and TTL settings. Meant for use with handlers that need to batch multiple appends." 54 | } 55 | 56 | fn run( 57 | &self, 58 | engine_state: &EngineState, 59 | stack: &mut Stack, 60 | call: &Call, 61 | input: PipelineData, 62 | ) -> Result { 63 | let span = call.head; 64 | 65 | let topic: String = call.req(engine_state, stack, 0)?; 66 | let meta: Option = call.get_flag(engine_state, stack, "meta")?; 67 | let ttl_str: Option = call.get_flag(engine_state, stack, "ttl")?; 68 | 69 | let ttl = ttl_str 70 | .map(|s| TTL::from_query(Some(&format!("ttl={}", s)))) 71 | .transpose() 72 | .map_err(|e| ShellError::GenericError { 73 | error: "Invalid TTL format".into(), 74 | msg: e.to_string(), 75 | span: Some(span), 76 | help: Some("TTL must be one of: 'forever', 'ephemeral', 'time:', or 'head:'".into()), 77 | inner: vec![], 78 | })?; 79 | 80 | let input_value = input.into_value(span)?; 81 | 82 | let hash = crate::nu::util::write_pipeline_to_cas( 83 | PipelineData::Value(input_value.clone(), None), 84 | &self.store, 85 | span, 86 | ) 87 | .map_err(|boxed| *boxed)?; 88 | 89 | let context_str: Option = call.get_flag(engine_state, stack, "context")?; 90 | let context_id = if let Some(ctx) = context_str { 91 | ctx.parse::() 92 | .map_err(|e| ShellError::GenericError { 93 | error: "Invalid context ID".into(), 94 | msg: e.to_string(), 95 | span: Some(call.head), 96 | help: None, 97 | inner: vec![], 98 | })? 
99 |         } else {
100 |             crate::store::ZERO_CONTEXT
101 |         };
102 | 
103 |         let frame = Frame::builder(topic, context_id)
104 |             .maybe_meta(meta.map(|v| value_to_json(&v)))
105 |             .maybe_hash(hash)
106 |             .maybe_ttl(ttl)
107 |             .build();
108 | 
109 |         self.output.lock().unwrap().push(frame);
110 | 
111 |         Ok(PipelineData::Empty)
112 |     }
113 | }
114 | -------------------------------------------------------------------------------- /src/nu/commands/cas_command.rs: --------------------------------------------------------------------------------
1 | use std::io::Read;
2 | 
3 | use nu_engine::CallExt;
4 | use nu_protocol::engine::{Call, Command, EngineState, Stack};
5 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type, Value};
6 | 
7 | use crate::store::Store;
8 | 
9 | #[derive(Clone)]
10 | pub struct CasCommand {
11 |     store: Store,
12 | }
13 | 
14 | impl CasCommand {
15 |     pub fn new(store: Store) -> Self {
16 |         Self { store }
17 |     }
18 | }
19 | 
20 | impl Command for CasCommand {
21 |     fn name(&self) -> &str {
22 |         ".cas"
23 |     }
24 | 
25 |     fn signature(&self) -> Signature {
26 |         Signature::build(".cas")
27 |             .input_output_types(vec![(Type::Nothing, Type::String)])
28 |             .required(
29 |                 "hash",
30 |                 SyntaxShape::String,
31 |                 "hash of the content to retrieve",
32 |             )
33 |             .category(Category::Experimental)
34 |     }
35 | 
36 |     fn description(&self) -> &str {
37 |         "Retrieve content from the CAS for the given hash"
38 |     }
39 | 
40 |     fn run(
41 |         &self,
42 |         engine_state: &EngineState,
43 |         stack: &mut Stack,
44 |         call: &Call,
45 |         _input: PipelineData,
46 |     ) -> Result<PipelineData, ShellError> {
47 |         let span = call.head;
48 |         let hash: String = call.req(engine_state, stack, 0)?;
49 |         let hash: ssri::Integrity = hash.parse().map_err(|e| ShellError::GenericError {
50 |             error: "I/O Error".into(),
51 |             msg: format!("Malformed ssri::Integrity: {}", e),
52 |             span: Some(span),
53 |             help: None,
54 |             inner: vec![],
55 |         })?;
56 | 
57 |         let mut reader =
58 |             self.store
59 |                 .cas_reader_sync(hash)
60 |                 .map_err(|e| ShellError::GenericError {
61 |                     error: "I/O Error".into(),
62 |                     msg: e.to_string(),
63 |                     span: Some(span),
64 |                     help: None,
65 |                     inner: vec![],
66 |                 })?;
67 | 
68 |         let mut contents = Vec::new();
69 |         reader
70 |             .read_to_end(&mut contents)
71 |             .map_err(|e| ShellError::GenericError {
72 |                 error: "I/O Error".into(),
73 |                 msg: e.to_string(),
74 |                 span: Some(span),
75 |                 help: None,
76 |                 inner: vec![],
77 |             })?;
78 | 
79 |         // Try to convert to string if valid UTF-8, otherwise return as binary
80 |         let value = match String::from_utf8(contents.clone()) {
81 |             Ok(string) => Value::String {
82 |                 val: string,
83 |                 internal_span: span,
84 |             },
85 |             Err(_) => Value::Binary {
86 |                 val: contents,
87 |                 internal_span: span,
88 |             },
89 |         };
90 | 
91 |         Ok(PipelineData::Value(value, None))
92 |     }
93 | }
94 | -------------------------------------------------------------------------------- /src/nu/commands/cat_command.rs: --------------------------------------------------------------------------------
1 | use nu_engine::CallExt;
2 | use nu_protocol::engine::{Call, Command, EngineState, Stack};
3 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type};
4 | 
5 | use crate::store::Store;
6 | 
7 | #[derive(Clone)]
8 | pub struct CatCommand {
9 |     store: Store,
10 |     context_id: scru128::Scru128Id,
11 | }
12 | 
13 | impl CatCommand {
14 |     pub fn new(store: Store, context_id: scru128::Scru128Id) -> Self {
15 |         Self { store, context_id }
16 |     }
17 | }
18 | 
19 | impl Command for CatCommand {
20 |     fn name(&self) -> &str {
21 |         ".cat"
22 |     }
23 | 
24 |     fn signature(&self) -> Signature {
25 |         Signature::build(".cat")
26 |             .input_output_types(vec![(Type::Nothing, Type::Any)])
27 |             .named(
28 |                 "limit",
29 |                 SyntaxShape::Int,
30 |                 "limit the number of frames to retrieve",
31 |                 None,
32 |             )
33 |             .named(
34 |                 "last-id",
35 |                 SyntaxShape::String,
36 |                 "start from a specific frame ID",
37 |                 None,
38 |             )
39 |             .category(Category::Experimental)
40 |     }
41 | 
42 |     fn description(&self) -> &str {
43 |         "Reads the event stream and returns frames"
44 |     }
45 | 
46 |     fn run(
47 |         &self,
48 |         engine_state: &EngineState,
49 |         stack: &mut Stack,
50 |         call: &Call,
51 |         _input: PipelineData,
52 |     ) -> Result<PipelineData, ShellError> {
53 |         let limit: Option<usize> = call.get_flag(engine_state, stack, "limit")?;
54 | 
55 |         let last_id: Option<String> = call.get_flag(engine_state, stack, "last-id")?;
56 |         let last_id: Option<scru128::Scru128Id> = last_id
57 |             .as_deref()
58 |             .map(|s| s.parse().expect("Failed to parse Scru128Id"));
59 | 
60 |         let frames = self
61 |             .store
62 |             .read_sync(last_id.as_ref(), limit, Some(self.context_id))
63 |             .collect::<Vec<_>>();
64 | 
65 |         use nu_protocol::Value;
66 | 
67 |         let output = Value::list(
68 |             frames
69 |                 .into_iter()
70 |                 .map(|frame| crate::nu::util::frame_to_value(&frame, call.head))
71 |                 .collect(),
72 |             call.head,
73 |         );
74 | 
75 |         Ok(PipelineData::Value(output, None))
76 |     }
77 | }
78 | -------------------------------------------------------------------------------- /src/nu/commands/get_command.rs: --------------------------------------------------------------------------------
1 | use nu_engine::CallExt;
2 | use nu_protocol::engine::{Call, Command, EngineState, Stack};
3 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type};
4 | 
5 | use crate::nu::util;
6 | use crate::store::Store;
7 | 
8 | #[derive(Clone)]
9 | pub struct GetCommand {
10 |     store: Store,
11 | }
12 | 
13 | impl GetCommand {
14 |     pub fn new(store: Store) -> Self {
15 |         Self { store }
16 |     }
17 | }
18 | 
19 | impl Command for GetCommand {
20 |     fn name(&self) -> &str {
21 |         ".get"
22 |     }
23 | 
24 |     fn signature(&self) -> Signature {
25 |         Signature::build(".get")
26 |             .input_output_types(vec![(Type::Nothing, Type::Any)])
27 |             .required("id", SyntaxShape::String, "The ID of the frame to retrieve")
28 |             .category(Category::Experimental)
29 |     }
30 | 
31 |     fn description(&self) -> &str {
32 |         "Retrieves a frame by its ID from the store"
33 |     }
34 | 
35 |     fn run(
36 |         &self,
37 |         engine_state: &EngineState,
38 |         stack: &mut Stack,
39 |         call: &Call,
40 |         _input: PipelineData,
41 |     ) -> Result<PipelineData, ShellError> {
42 |         let id_str: String = call.req(engine_state, stack, 0)?;
43 |         let id = id_str.parse().map_err(|e| ShellError::TypeMismatch {
44 |             err_message: format!("Invalid ID format: {}", e),
45 |             span: call.span(),
46 |         })?;
47 | 
48 |         let store = self.store.clone();
49 | 
50 |         if let Some(frame) = store.get(&id) {
51 |             Ok(PipelineData::Value(
52 |                 util::frame_to_value(&frame, call.head),
53 |                 None,
54 |             ))
55 |         } else {
56 |             Err(ShellError::GenericError {
57 |                 error: "Frame not found".into(),
58 |                 msg: format!("No frame found with ID: {}", id_str),
59 |                 span: Some(call.head),
60 |                 help: None,
61 |                 inner: vec![],
62 |             })
63 |         }
64 |     }
65 | }
66 | -------------------------------------------------------------------------------- /src/nu/commands/head_command.rs: --------------------------------------------------------------------------------
1 | use nu_engine::CallExt;
2 | use nu_protocol::engine::{Call, Command, EngineState, Stack};
3 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type};
4 | 
5 | use crate::nu::util;
6 | use crate::store::Store;
7 | 
8 | #[derive(Clone)]
9 | pub struct HeadCommand {
10 |     store: Store,
11 |     context_id: scru128::Scru128Id,
12 | }
13 | 
14 | impl HeadCommand {
15 |     pub fn new(store: Store, context_id: scru128::Scru128Id) -> Self {
16 |         Self { store, context_id }
17 |     }
18 | }
19 | 
20 | impl Command for HeadCommand {
21 |     fn name(&self) -> &str {
22 |         ".head"
23 |     }
24 | 
25 |     fn signature(&self) -> Signature {
26 |         Signature::build(".head")
27 |             .input_output_types(vec![(Type::Nothing, Type::Any)])
28 |             .required("topic", SyntaxShape::String, "topic to get head frame from")
29 |             .named(
30 |                 "context",
31 |                 SyntaxShape::String,
32 |                 "context ID (defaults to system context)",
33 |                 None,
34 |             )
35 |             .category(Category::Experimental)
36 |     }
37 | 
38 |     fn description(&self) -> &str {
39 |         "get the most recent frame for a topic"
40 |     }
41 | 
42 |     fn run(
43 |         &self,
44 |         engine_state: &EngineState,
45 |         stack: &mut Stack,
46 |         call: &Call,
47 |         _input: PipelineData,
48 |     ) -> Result<PipelineData, ShellError> {
49 |         let topic: String = call.req(engine_state, stack, 0)?;
50 |         let context_str: Option<String> = call.get_flag(engine_state, stack, "context")?;
51 |         let context_id = if let Some(ctx) = context_str {
52 |             ctx.parse::<scru128::Scru128Id>()
53 |                 .map_err(|e| ShellError::GenericError {
54 |                     error: "Invalid context ID".into(),
55 |                     msg: e.to_string(),
56 |                     span: Some(call.head),
57 |                     help: None,
58 |                     inner: vec![],
59 |                 })?
60 |         } else {
61 |             self.context_id
62 |         };
63 |         let span = call.head;
64 | 
65 |         if let Some(frame) = self.store.head(&topic, context_id) {
66 |             Ok(PipelineData::Value(
67 |                 util::frame_to_value(&frame, span),
68 |                 None,
69 |             ))
70 |         } else {
71 |             Ok(PipelineData::Empty)
72 |         }
73 |     }
74 | }
75 | -------------------------------------------------------------------------------- /src/nu/commands/mod.rs: --------------------------------------------------------------------------------
1 | pub mod append_command;
2 | pub mod append_command_buffered;
3 | pub mod cas_command;
4 | pub mod cat_command;
5 | pub mod get_command;
6 | pub mod head_command;
7 | pub mod remove_command;
8 | -------------------------------------------------------------------------------- /src/nu/commands/remove_command.rs: --------------------------------------------------------------------------------
1 | use std::str::FromStr;
2 | 
3 | use nu_engine::CallExt;
4 | use nu_protocol::engine::{Call, Command, EngineState, Stack};
5 | use nu_protocol::{Category, PipelineData, ShellError, Signature, SyntaxShape, Type};
6 | 
7 | use scru128::Scru128Id;
8 | 
9 | use crate::store::Store;
10 | 
11 | #[derive(Clone)]
12 | pub struct RemoveCommand {
13 |     store: Store,
14 | }
15 | 
16 | impl RemoveCommand {
17 |     pub fn new(store: Store) -> Self {
18 |         Self { store }
19 |     }
20 | }
21 | 
22 | impl Command for RemoveCommand {
23 |     fn name(&self) -> &str {
24 |         ".remove"
25 |     }
26 | 
27 |     fn signature(&self) -> Signature {
28 |         Signature::build(".remove")
29 |             .input_output_types(vec![(Type::Nothing, Type::Nothing)])
30 |             .required("id", SyntaxShape::String, "The ID of the frame to remove")
31 |             .category(Category::Experimental)
32 |     }
33 | 
34 |     fn description(&self) -> &str {
35 |         "Removes a frame from the store by its ID"
36 |     }
37 | 
38 |     fn run(
39 |         &self,
40 |         engine_state: &EngineState,
41 |         stack: &mut Stack,
42 |         call: &Call,
43 |         _input: PipelineData,
44 |     ) -> Result<PipelineData, ShellError> {
45 |         let id_str: String = call.req(engine_state, stack, 0)?;
46 |         let id = Scru128Id::from_str(&id_str).map_err(|e|
ShellError::TypeMismatch {
47 |             err_message: format!("Invalid ID format: {}", e),
48 |             span: call.span(),
49 |         })?;
50 | 
51 |         let store = self.store.clone();
52 | 
53 |         match store.remove(&id) {
54 |             Ok(()) => Ok(PipelineData::Empty),
55 |             Err(e) => Err(ShellError::GenericError {
56 |                 error: "Failed to remove frame".into(),
57 |                 msg: e.to_string(),
58 |                 span: Some(call.head),
59 |                 help: None,
60 |                 inner: vec![],
61 |             }),
62 |         }
63 |     }
64 | }
65 | -------------------------------------------------------------------------------- /src/nu/mod.rs: --------------------------------------------------------------------------------
1 | mod config;
2 | mod engine;
3 | 
4 | pub mod commands;
5 | pub mod util;
6 | pub use config::{parse_config, parse_config_legacy, CommonOptions, NuScriptConfig, ReturnOptions};
7 | pub use engine::Engine;
8 | pub use util::{frame_to_pipeline, frame_to_value, value_to_json};
9 | 
10 | #[cfg(test)]
11 | mod test_commands;
12 | #[cfg(test)]
13 | mod test_engine;
14 | -------------------------------------------------------------------------------- /src/nu/test_engine.rs: --------------------------------------------------------------------------------
1 | use nu_protocol::{PipelineData, Span, Value};
2 | use tempfile::TempDir;
3 | 
4 | use crate::nu::Engine;
5 | use crate::store::Store;
6 | 
7 | fn setup_test_env() -> (Store, Engine) {
8 |     let temp_dir = TempDir::new().unwrap();
9 |     let store = Store::new(temp_dir.into_path());
10 |     let engine = Engine::new().unwrap();
11 |     (store, engine)
12 | }
13 | 
14 | // Helper to evaluate expressions and get Value results
15 | fn eval_to_value(engine: &Engine, expr: &str) -> Value {
16 |     engine
17 |         .eval(PipelineData::empty(), expr.to_string())
18 |         .unwrap()
19 |         .into_value(Span::test_data())
20 |         .unwrap()
21 | }
22 | 
23 | #[test]
24 | fn test_add_module() {
25 |     let (_store, mut engine) = setup_test_env();
26 | 
27 |     // Add a module that exports two functions
28 |     engine
29 |         .add_module(
30 |             "testmod",
31 |             r#"
32 |             # Double the input
33 |             export def double [x] { $x * 2 }
34 | 
35 |             # Add then double
36 |             export def add_then_double [x, y] {
37 |                 ($x + $y) * 2
38 |             }
39 |             "#,
40 |         )
41 |         .unwrap();
42 | 
43 |     // Test the double function
44 |     let result = eval_to_value(&engine, "testmod double 5");
45 |     assert_eq!(result.as_int().unwrap(), 10);
46 | 
47 |     // Test the add_then_double function
48 |     let result = eval_to_value(&engine, "testmod add_then_double 3 4");
49 |     assert_eq!(result.as_int().unwrap(), 14);
50 | }
51 | 
52 | #[test]
53 | fn test_add_module_syntax_error() {
54 |     let (_store, mut engine) = setup_test_env();
55 | 
56 |     // Try to add a module with invalid syntax
57 |     let result = engine.add_module(
58 |         "bad_mod",
59 |         r#"
60 |         export def bad_fn [] {
61 |             let x =
62 |         }
63 |         "#,
64 |     );
65 | 
66 |     assert!(result.is_err());
67 | }
68 | 
69 | #[test]
70 | fn test_add_multiple_modules() {
71 |     let (_store, mut engine) = setup_test_env();
72 | 
73 |     // Add first module
74 |     engine
75 |         .add_module(
76 |             "my-math",
77 |             r#"
78 |             export def add [x, y] { $x + $y }
79 |             "#,
80 |         )
81 |         .unwrap();
82 | 
83 |     // Add second module
84 |     engine
85 |         .add_module(
86 |             "my-strings",
87 |             r#"
88 |             export def join [x, y] { $x + $y }
89 |             "#,
90 |         )
91 |         .unwrap();
92 | 
93 |     // Test both modules work
94 |     let num_result = eval_to_value(&engine, "my-math add 5 3");
95 |     assert_eq!(num_result.as_int().unwrap(), 8);
96 | 
97 |     let str_result = eval_to_value(&engine, "my-strings join 'hello ' 'world'");
98 |     assert_eq!(str_result.as_str().unwrap(), "hello world");
99 | }
100 | 
101 | #[test]
102 | fn test_add_module_env_var_persistence() {
103 |     let (_store, mut engine) = setup_test_env();
104 | 
105 |     // Add a module that sets an environment variable
106 |     engine
107 |         .add_module("testmod", r#"export-env { $env.MY_VAR = 'hello' }"#)
108 |         .unwrap();
109 | 
110 |     // Verify the environment variable persists
111 |     let result = eval_to_value(&engine, "$env.MY_VAR");
112 |     assert_eq!(result.as_str().unwrap(), "hello");
113 | }
114 | 
115 | #[test]
116 | fn test_engine_env_vars() {
117 |     let (_store, engine) = setup_test_env();
118 | 
119 |     let engine = engine
120 |         .with_env_vars([("TEST_VAR".to_string(), "test_value".to_string())])
121 |         .unwrap();
122 | 
123 |     // Test accessing the environment variable
124 |     let result = eval_to_value(&engine, "$env.TEST_VAR");
125 |     assert_eq!(result.as_str().unwrap(), "test_value");
126 | }
127 | 
128 | use nu_engine::eval_block_with_early_return;
129 | use nu_parser::parse;
130 | use nu_protocol::debugger::WithoutDebug;
131 | use nu_protocol::engine::Stack;
132 | use nu_protocol::engine::StateWorkingSet;
133 | 
134 | #[test]
135 | fn test_env_var_persistence() {
136 |     // this test is just to build understanding of how Nushell works with respect to preserving
137 |     // environment variables across evaluations
138 |     let (_store, engine) = setup_test_env();
139 |     let mut engine = engine;
140 | 
141 |     // First evaluation - set env var
142 |     let mut stack = Stack::new();
143 |     let mut working_set = StateWorkingSet::new(&engine.state);
144 |     let block = parse(&mut working_set, None, b"$env.TEST_VAR = '123'", false);
145 |     let _ = eval_block_with_early_return::<WithoutDebug>(
146 |         &engine.state,
147 |         &mut stack,
148 |         &block,
149 |         PipelineData::empty(),
150 |     );
151 |     engine.state.merge_env(&mut stack).unwrap();
152 | 
153 |     // Second evaluation - verify env var persists
154 |     let result = eval_to_value(&engine, "$env.TEST_VAR");
155 |     assert_eq!(result.as_str().unwrap(), "123");
156 | }
157 | -------------------------------------------------------------------------------- /src/nu/util.rs: --------------------------------------------------------------------------------
1 | use std::io::Read;
2 | use std::io::Write;
3 | 
4 | use nu_protocol::{PipelineData, Record, ShellError, Span, Value};
5 | 
6 | use crate::store::Frame;
7 | use crate::store::Store;
8 | 
9 | pub fn json_to_value(json: &serde_json::Value, span: Span) -> Value {
10 |     match json {
11 |         serde_json::Value::Null => Value::nothing(span),
12 |         serde_json::Value::Bool(b) => Value::bool(*b, span),
13 |         serde_json::Value::Number(n) => {
14 |             if let Some(i) = n.as_i64() {
15 |                 Value::int(i, span)
16 |             } else if let Some(f) = n.as_f64() {
17 |                 Value::float(f, span)
18 |             } else {
19 |                 Value::string(n.to_string(), span)
20 |             }
21 |         }
22 |         serde_json::Value::String(s) => Value::string(s, span),
23 |         serde_json::Value::Array(arr) => {
24 |             let values: Vec<Value> = arr.iter().map(|v| json_to_value(v, span)).collect();
25 |             Value::list(values, span)
26 |         }
27 |         serde_json::Value::Object(obj) => {
28 |             let mut record = Record::new();
29 |             for (k, v) in obj {
30 |                 record.push(k, json_to_value(v, span));
31 |             }
32 |             Value::record(record, span)
33 |         }
34 |     }
35 | }
36 | 
37 | pub fn frame_to_value(frame: &Frame, span: Span) -> Value {
38 |     let mut record = Record::new();
39 | 
40 |     record.push("id", Value::string(frame.id.to_string(), span));
41 |     record.push("topic", Value::string(frame.topic.clone(), span));
42 |     record.push("context_id", Value::string(frame.context_id, span));
43 | 
44 |     if let Some(hash) = &frame.hash {
45 |         record.push("hash", Value::string(hash.to_string(), span));
46 |     }
47 | 
48 |     if let Some(meta) = &frame.meta {
49 |         record.push("meta", json_to_value(meta, span));
50 |     }
51 | 
52 |     Value::record(record, span)
53 | }
54 | 
55 | pub fn frame_to_pipeline(frame: &Frame) -> PipelineData {
56 |     PipelineData::Value(frame_to_value(frame, Span::unknown()), None)
57 | }
58 | 
59 | pub fn value_to_json(value: &Value) -> serde_json::Value {
60 |     match value {
61 |         Value::Nothing { .. } => serde_json::Value::Null,
62 |         Value::Bool { val, .. } => serde_json::Value::Bool(*val),
63 |         Value::Int { val, .. } => serde_json::Value::Number((*val).into()),
64 |         Value::Float { val, .. } => serde_json::Number::from_f64(*val)
65 |             .map(serde_json::Value::Number)
66 |             .unwrap_or(serde_json::Value::Null),
67 |         Value::String { val, .. } => serde_json::Value::String(val.clone()),
68 |         Value::List { vals, .. } => {
69 |             serde_json::Value::Array(vals.iter().map(value_to_json).collect())
70 |         }
71 |         Value::Record { val, .. } => {
72 |             let mut map = serde_json::Map::new();
73 |             for (k, v) in val.iter() {
74 |                 map.insert(k.clone(), value_to_json(v));
75 |             }
76 |             serde_json::Value::Object(map)
77 |         }
78 |         _ => serde_json::Value::Null,
79 |     }
80 | }
81 | 
82 | pub fn write_pipeline_to_cas(
83 |     input: PipelineData,
84 |     store: &Store,
85 |     span: Span,
86 | ) -> Result<Option<ssri::Integrity>, Box<ShellError>> {
87 |     let mut writer = store.cas_writer_sync().map_err(|e| {
88 |         Box::new(ShellError::GenericError {
89 |             error: "I/O Error".into(),
90 |             msg: e.to_string(),
91 |             span: Some(span),
92 |             help: None,
93 |             inner: vec![],
94 |         })
95 |     })?;
96 | 
97 |     match input {
98 |         PipelineData::Value(value, _) => match value {
99 |             Value::Nothing { .. } => Ok(None),
100 |             Value::String { val, .. } => {
101 |                 writer.write_all(val.as_bytes()).map_err(|e| {
102 |                     Box::new(ShellError::GenericError {
103 |                         error: "I/O Error".into(),
104 |                         msg: e.to_string(),
105 |                         span: Some(span),
106 |                         help: None,
107 |                         inner: vec![],
108 |                     })
109 |                 })?;
110 | 
111 |                 let hash = writer.commit().map_err(|e| {
112 |                     Box::new(ShellError::GenericError {
113 |                         error: "I/O Error".into(),
114 |                         msg: e.to_string(),
115 |                         span: Some(span),
116 |                         help: None,
117 |                         inner: vec![],
118 |                     })
119 |                 })?;
120 | 
121 |                 Ok(Some(hash))
122 |             }
123 |             Value::Binary { val, .. } => {
124 |                 writer.write_all(&val).map_err(|e| {
125 |                     Box::new(ShellError::GenericError {
126 |                         error: "I/O Error".into(),
127 |                         msg: e.to_string(),
128 |                         span: Some(span),
129 |                         help: None,
130 |                         inner: vec![],
131 |                     })
132 |                 })?;
133 | 
134 |                 let hash = writer.commit().map_err(|e| {
135 |                     Box::new(ShellError::GenericError {
136 |                         error: "I/O Error".into(),
137 |                         msg: e.to_string(),
138 |                         span: Some(span),
139 |                         help: None,
140 |                         inner: vec![],
141 |                     })
142 |                 })?;
143 | 
144 |                 Ok(Some(hash))
145 |             }
146 |             Value::Record { ..
} => { 147 | let json = value_to_json(&value); 148 | let json_string = serde_json::to_string(&json).map_err(|e| { 149 | Box::new(ShellError::GenericError { 150 | error: "I/O Error".into(), 151 | msg: e.to_string(), 152 | span: Some(span), 153 | help: None, 154 | inner: vec![], 155 | }) 156 | })?; 157 | 158 | writer.write_all(json_string.as_bytes()).map_err(|e| { 159 | Box::new(ShellError::GenericError { 160 | error: "I/O Error".into(), 161 | msg: e.to_string(), 162 | span: Some(span), 163 | help: None, 164 | inner: vec![], 165 | }) 166 | })?; 167 | 168 | let hash = writer.commit().map_err(|e| { 169 | Box::new(ShellError::GenericError { 170 | error: "I/O Error".into(), 171 | msg: e.to_string(), 172 | span: Some(span), 173 | help: None, 174 | inner: vec![], 175 | }) 176 | })?; 177 | 178 | Ok(Some(hash)) 179 | } 180 | _ => Err(Box::new(ShellError::PipelineMismatch { 181 | exp_input_type: format!( 182 | "expected: string, binary, record, or nothing :: received: {:?}", 183 | value.get_type() 184 | ), 185 | dst_span: span, 186 | src_span: value.span(), 187 | })), 188 | }, 189 | PipelineData::ListStream(_stream, ..) => { 190 | panic!("ListStream handling is not yet implemented"); 191 | } 192 | PipelineData::ByteStream(stream, ..) => { 193 | if let Some(mut reader) = stream.reader() { 194 | let mut buffer = [0; 8192]; 195 | loop { 196 | let bytes_read = reader.read(&mut buffer).map_err(|e| { 197 | Box::new(ShellError::GenericError { 198 | error: "I/O Error".into(), 199 | msg: e.to_string(), 200 | span: Some(span), 201 | help: None, 202 | inner: vec![], 203 | }) 204 | })?; 205 | 206 | if bytes_read == 0 { 207 | break; 208 | } 209 | 210 | writer.write_all(&buffer[..bytes_read]).map_err(|e| { 211 | Box::new(ShellError::GenericError { 212 | error: "I/O Error".into(), 213 | msg: e.to_string(), 214 | span: Some(span), 215 | help: None, 216 | inner: vec![], 217 | }) 218 | })?; 219 | } 220 | } 221 | 222 | let hash = writer.commit().map_err(|e| { 223 | Box::new(ShellError::GenericError { 224 | error: "I/O Error".into(), 225 | msg: e.to_string(), 226 | span: Some(span), 227 | help: None, 228 | inner: vec![], 229 | }) 230 | })?; 231 | 232 | Ok(Some(hash)) 233 | } 234 | PipelineData::Empty => Ok(None), 235 | } 236 | } 237 | -------------------------------------------------------------------------------- /src/store/ttl.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::time::Duration; 3 | 4 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 5 | 6 | /// Enum representing the TTL (Time-To-Live) for an event. 7 | #[derive(Default, PartialEq, Eq, Clone, Debug)] 8 | pub enum TTL { 9 | #[default] 10 | Forever, // Event is kept indefinitely. 11 | Ephemeral, // Event is not stored; only active subscribers can see it. 12 | Time(Duration), // Event is kept for a custom duration 13 | Head(u32), // Retains only the last n events for a topic (n >= 1). 14 | } 15 | 16 | impl TTL { 17 | /// Converts a `TTL` into its query string representation. 18 | pub fn to_query(&self) -> String { 19 | match self { 20 | TTL::Forever => "ttl=forever".to_string(), 21 | TTL::Ephemeral => "ttl=ephemeral".to_string(), 22 | TTL::Time(duration) => format!("ttl=time:{}", duration.as_millis()), 23 | TTL::Head(n) => format!("ttl=head:{}", n), 24 | } 25 | } 26 | 27 | /// Parses a `TTL` from a query string. 
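    /// Illustrative examples, mirroring the forms produced by `to_query` above:
    /// `Some("ttl=forever")` -> `TTL::Forever`
    /// `Some("ttl=time:5000")` -> `TTL::Time(Duration::from_millis(5000))`
    /// `Some("ttl=head:3")` -> `TTL::Head(3)`
    /// `None` -> `TTL::default()` (i.e. `TTL::Forever`)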
28 |     pub fn from_query(query: Option<&str>) -> Result<TTL, String> {
29 |         // Parse query string into key-value pairs
30 |         let params = match query {
31 |             None => return Ok(TTL::default()), // Use default TTL if query is None
32 |             Some(q) => serde_urlencoded::from_str::<HashMap<String, String>>(q)
33 |                 .map_err(|_| "invalid query string".to_string())?,
34 |         };
35 | 
36 |         // Extract the `ttl` parameter if it exists
37 |         if let Some(ttl_str) = params.get("ttl") {
38 |             parse_ttl(ttl_str)
39 |         } else {
40 |             Ok(TTL::default()) // Use default TTL if `ttl` is not present
41 |         }
42 |     }
43 | }
44 | 
45 | impl Serialize for TTL {
46 |     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
47 |     where
48 |         S: Serializer,
49 |     {
50 |         match self {
51 |             TTL::Forever => serializer.serialize_str("forever"),
52 |             TTL::Ephemeral => serializer.serialize_str("ephemeral"),
53 |             TTL::Time(duration) => {
54 |                 serializer.serialize_str(&format!("time:{}", duration.as_millis()))
55 |             }
56 |             TTL::Head(n) => serializer.serialize_str(&format!("head:{}", n)),
57 |         }
58 |     }
59 | }
60 | 
61 | impl<'de> Deserialize<'de> for TTL {
62 |     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
63 |     where
64 |         D: Deserializer<'de>,
65 |     {
66 |         let s: String = Deserialize::deserialize(deserializer)?;
67 |         parse_ttl(&s).map_err(serde::de::Error::custom)
68 |     }
69 | }
70 | 
71 | /// Parses a raw TTL string and converts it to the `TTL` enum.
72 | pub fn parse_ttl(s: &str) -> Result<TTL, String> {
73 |     match s {
74 |         "forever" => Ok(TTL::Forever),
75 |         "ephemeral" => Ok(TTL::Ephemeral),
76 |         _ if s.starts_with("time:") => {
77 |             let duration_str = &s[5..];
78 |             let duration = duration_str
79 |                 .parse::<u64>()
80 |                 .map_err(|_| "Invalid duration for 'time' TTL".to_string())?;
81 |             Ok(TTL::Time(Duration::from_millis(duration)))
82 |         }
83 |         _ if s.starts_with("head:") => {
84 |             let n_str = &s[5..];
85 |             let n = n_str
86 |                 .parse::<u32>()
87 |                 .map_err(|_| "Invalid 'n' value for 'head' TTL".to_string())?;
88 |             if n < 1 {
89 |                 Err("'n' must be >= 1 for 'head' TTL".to_string())
90 |             } else {
91 |                 Ok(TTL::Head(n))
92 |             }
93 |         }
94 |         _ => Err("Invalid TTL format".to_string()),
95 |     }
96 | }
97 | -------------------------------------------------------------------------------- /xs.nu: --------------------------------------------------------------------------------
1 | export alias "h. get" = h. request get
2 | export alias "h. post" = h. request post
3 | 
4 | export const XS_CONTEXT_SYSTEM = "0000000000000000000000000"
5 | 
6 | def and-then [next: closure --else: closure] {
7 |     if ($in | is-not-empty) { do $next } else {
8 |         if $else != null { do $else }
9 |     }
10 | }
11 | 
12 | def or-else [or_else: closure] {
13 |     if ($in | is-not-empty) { $in } else { do $or_else }
14 | }
15 | 
16 | def conditional-pipe [
17 |     condition: bool
18 |     action: closure
19 | ] {
20 |     if $condition { do $action } else { }
21 | }
22 | 
23 | export def xs-addr [] {
24 |     $env | get XS_ADDR? | or-else { try { open ~/.config/cross.stream/XS_ADDR | str trim | path expand } } | or-else { "~/.local/share/cross.stream/store" | path expand }
25 | }
26 | 
27 | export def xs-context-collect [] {
28 |     _cat {context: $XS_CONTEXT_SYSTEM} | reduce --fold {} {|frame, acc|
29 |         match $frame.topic {
30 |             "xs.context" => ($acc | insert $frame.id $frame.meta?.name?)
31 |             "xs.annotate" => (
32 |                 if $frame.meta?.updates? in $acc {
33 |                     $acc | update $frame.meta.updates $frame.meta?.name?
34 | } else { 35 | $acc 36 | } 37 | ) 38 | _ => $acc 39 | } 40 | } | transpose id name | prepend { 41 | id: $XS_CONTEXT_SYSTEM 42 | name: "system" 43 | } 44 | } 45 | 46 | export def xs-context [selected?: string span?] { 47 | if $selected == null { 48 | return ($env | get XS_CONTEXT?) 49 | } 50 | 51 | xs-context-collect | where id == $selected or name == $selected | try { first | get id } catch { 52 | if $span != null { 53 | error make { 54 | msg: $"context not found: ($selected)" 55 | label: {text: "provided span" span: $span} 56 | } 57 | } else { 58 | error make -u {msg: $"context not found: ($selected)"} 59 | } 60 | } 61 | } 62 | 63 | def _cat [options: record] { 64 | let params = [ 65 | (if ($options | get follow? | default false) { "--follow" }) 66 | (if ($options | get tail? | default false) { "--tail" }) 67 | (if ($options | get all? | default false) { "--all" }) 68 | 69 | (if $options.last_id? != null { ["--last-id" $options.last_id] }) 70 | 71 | (if $options.limit? != null { ["--limit" $options.limit] }) 72 | (if $options.pulse? != null { ["--pulse" $options.pulse] }) 73 | (if $options.context? != null { ["--context" $options.context] }) 74 | (if $options.topic? != null { ["--topic" $options.topic] }) 75 | ] | compact | flatten 76 | 77 | xs cat (xs-addr) ...$params | lines | each {|x| $x | from json } 78 | } 79 | 80 | export def .cat [ 81 | --follow (-f) # long poll for new events 82 | --pulse (-p): int # specifies the interval (in milliseconds) to receive a synthetic "xs.pulse" event 83 | --tail (-t) # begin long after the end of the stream 84 | --detail (-d) # include all frame fields in the output 85 | --last-id (-l): string 86 | --limit: int 87 | --context (-c): string # the context to read from 88 | --all (-a) # cat across all contexts 89 | --topic (-T): string # filter by topic 90 | ] { 91 | _cat { 92 | follow: $follow 93 | pulse: $pulse 94 | tail: $tail 95 | last_id: $last_id 96 | limit: $limit 97 | context: (if not $all { (xs-context $context (metadata $context).span) }) 98 | all: $all 99 | topic: $topic 100 | } | conditional-pipe (not ($detail or $all)) { each { reject context_id ttl } } 101 | } 102 | 103 | def read_hash [hash?: any] { 104 | match ($hash | describe -d | get type) { 105 | "string" => $hash 106 | "record" => ($hash | get hash?) 107 | _ => null 108 | } 109 | } 110 | 111 | export def .cas [hash?: any] { 112 | let alt = $in 113 | let hash = read_hash (if $hash != null { $hash } else { $alt }) 114 | if $hash == null { return } 115 | xs cas (xs-addr) $hash 116 | } 117 | 118 | export def .get [id: string] { 119 | xs get (xs-addr) $id | from json 120 | } 121 | 122 | export def .head [ 123 | topic: string 124 | --follow (-f) 125 | --context (-c): string 126 | ] { 127 | let params = [ 128 | (xs-context $context (metadata $context).span | and-then { ["--context" $in] }) 129 | ] | compact | flatten 130 | 131 | if $follow { 132 | xs head (xs-addr) $topic ...($params) --follow | lines | each {|x| $x | from json } 133 | } else { 134 | xs head (xs-addr) $topic ...($params) | from json 135 | } 136 | } 137 | 138 | # Append an event to the stream 139 | export def .append [ 140 | topic: string # The topic to append the event to 141 | --meta: record # Optional metadata to include with the event, provided as a record 142 | --context (-c): string # the context to append to 143 | --ttl: string # Optional Time-To-Live for the event. Supported formats: 144 | # - "forever": The event is kept indefinitely. 
145 | # - "ephemeral": The event is not stored; only active subscribers can see it.
146 | # - "time:<milliseconds>": The event is kept for a custom duration in milliseconds.
147 | # - "head:<n>": Retains only the last n events for the topic (n must be >= 1).
148 | ] {
149 |     xs append (xs-addr) $topic ...(
150 |         [
151 |             (if $meta != null { ["--meta" ($meta | to json -r)] })
152 |             (if $ttl != null { ["--ttl" $ttl] })
153 |             (xs-context $context (metadata $context).span | and-then { ["--context" $in] })
154 |         ] | compact | flatten
155 |     ) | from json
156 | }
157 | 
158 | export def .remove [id: string] {
159 |     xs remove (xs-addr) $id
160 | }
161 | 
162 | export alias .rm = .remove
163 | 
164 | export def ".ctx" [
165 |     --detail (-d) # return a record with id and name fields
166 | ] {
167 |     let id = xs-context | or-else { $XS_CONTEXT_SYSTEM }
168 |     let name = xs-context-collect | where id == $id | get name.0
169 |     if $detail {
170 |         {id: $id} | if $name != null { insert name $name } else { $in }
171 |     } else {
172 |         $name | default $id
173 |     }
174 | }
175 | 
176 | export def ".ctx list" [] {
177 |     let active = .ctx -d | get id
178 |     xs-context-collect | insert active {
179 |         $in.id == $active
180 |     }
181 | }
182 | 
183 | export alias ".ctx ls" = .ctx list
184 | 
185 | export def --env ".ctx switch" [id?: string] {
186 |     $env.XS_CONTEXT = $id | or-else { .ctx select }
187 |     .ctx --detail | get id
188 | }
189 | 
190 | export def --env ".ctx new" [name: string] {
191 |     .append "xs.context" -c $XS_CONTEXT_SYSTEM --meta {name: $name} | .ctx switch $in.id
192 | }
193 | 
194 | export def --env ".ctx rename" [id: string name: string] {
195 |     .append "xs.annotate" -c $XS_CONTEXT_SYSTEM --meta {
196 |         updates: (xs-context $id (metadata $id).span)
197 |         name: $name
198 |     }
199 | }
200 | 
201 | export def --env ".ctx select" [] {
202 |     .ctx list | input list | get id
203 | }
204 | 
205 | export def .export [path: string] {
206 |     if ($path | path exists) {
207 |         print "path exists"
208 |         return
209 |     }
210 |     mkdir ($path | path join "cas")
211 | 
212 |     xs cat (xs-addr) | save ($path | path join "frames.jsonl")
213 | 
214 |     open ($path | path join "frames.jsonl") | lines | each { from json | get hash } | uniq | each {|hash|
215 |         let hash_64 = $hash | encode base64
216 |         let out_path = $"($path)/cas/($hash_64)"
217 |         print $out_path
218 |         .cas $hash | save $out_path
219 |     }
220 | }
221 | 
222 | export def .import [path: string] {
223 |     glob ([$path "cas"] | path join "*") | each {|x|
224 |         let want = ($x | path basename | decode base64 | decode)
225 |         let got = cat $x | xs cas-post (xs-addr)
226 |         if $got != $want {
227 |             return (
228 |                 error make {
229 |                     msg: $"hash mismatch got=($got) want=($want)"
230 |                 }
231 |             )
232 |         }
233 |         $got
234 |     }
235 | 
236 |     open ($path | path join "frames.jsonl") | lines | each {
237 |         from json | default "0000000000000000000000000" context_id | to json -r | xs import (xs-addr)
238 |     }
239 | }
240 | --------------------------------------------------------------------------------
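A minimal usage sketch of the xs.nu wrappers above, assuming a store is being served at the address returned by `xs-addr` and that `xs.nu` has been sourced; the topic name and metadata values are illustrative only:

# append a note, keeping only the 10 most recent frames for the topic
"hello world" | .append notes --meta {source: "editor"} --ttl "head:10"

# fetch the newest frame for the topic and resolve its content from the CAS
.head notes | .cas

# list the last five frames in the current context
.cat --limit 5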