├── .github └── workflows │ ├── backup.yml │ ├── client.yml │ ├── feldera.yml │ ├── restore.yml │ └── server.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── backup ├── backup.sh └── restore.sh ├── client ├── .gitignore ├── Cargo.toml ├── Trunk.toml ├── assets │ ├── egui.ico │ ├── favicon.ico │ ├── feldera-dark.svg │ ├── feldera-light.svg │ ├── ferris.png │ ├── manifest.json │ ├── nextjs-icon.svg │ └── sw.js ├── index.html ├── rust-toolchain └── src │ ├── app.rs │ ├── cell_cache.rs │ ├── debouncer.rs │ ├── http.rs │ ├── lib.rs │ ├── main.rs │ └── reference.rs ├── feldera ├── deploy.sh ├── program.sql └── udf │ ├── Cargo.toml │ ├── src │ └── lib.rs │ └── udf.toml ├── loadtest ├── img2xls.py └── loadtest.py └── server ├── .dockerignore ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── fly.toml └── src ├── feldera.rs ├── main.rs ├── spreadsheet.rs └── stats.rs /.github/workflows/backup.yml: -------------------------------------------------------------------------------- 1 | name: S3 Backup 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | workflow_dispatch: 7 | 8 | jobs: 9 | backup: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | # Checkout the repository to get access to the backup script 14 | - name: Checkout repository 15 | uses: actions/checkout@v3 16 | 17 | # Set up AWS CLI 18 | - name: Configure AWS CLI 19 | uses: aws-actions/configure-aws-credentials@v2 20 | with: 21 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 22 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 23 | aws-region: ${{ secrets.AWS_DEFAULT_REGION }} 24 | 25 | # Ensure the backup script is executable 26 | - name: Make backup script executable 27 | run: chmod +x ./backup/backup.sh 28 | 29 | # Run the backup script 30 | - name: Run backup script 31 | env: 32 | FELDERA_HOST: ${{ secrets.FELDERA_HOST }} 33 | FELDERA_API_KEY: ${{ secrets.FELDERA_API_KEY }} 34 | run: ./backup/backup.sh 35 | -------------------------------------------------------------------------------- /.github/workflows/client.yml: -------------------------------------------------------------------------------- 1 | name: Host Website on Github Pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'client/**' 9 | - '.github/workflows/client.yml' 10 | - 'Cargo.lock' 11 | - 'Cargo.toml' 12 | 13 | permissions: 14 | contents: write 15 | pages: write 16 | id-token: write 17 | 18 | env: 19 | public_url: "https://xls.feldera.io/" 20 | 21 | jobs: 22 | build-github-pages: 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 # repo checkout 26 | - name: Setup toolchain for wasm 27 | run: | 28 | rustup update stable 29 | rustup default stable 30 | rustup set profile minimal 31 | rustup target add wasm32-unknown-unknown 32 | - name: Rust Cache # cache the rust build artefacts 33 | uses: Swatinem/rust-cache@v2 34 | - name: Download and install Trunk binary 35 | run: wget -qO- https://github.com/thedodd/trunk/releases/latest/download/trunk-x86_64-unknown-linux-gnu.tar.gz | tar -xzf- 36 | - name: Build # build 37 | # Environment $public_url resolves to the github project page. 38 | # If using a user/organization page, remove the `${{ github.event.repository.name }}` part. 39 | # using --public-url something will allow trunk to modify all the href paths like from favicon.ico to repo_name/favicon.ico . 
40 | # this is necessary for github pages where the site is deployed to username.github.io/repo_name and all files must be requested 41 | # relatively as eframe_template/favicon.ico. if we skip public-url option, the href paths will instead request username.github.io/favicon.ico which 42 | # will obviously return error 404 not found. 43 | run: ./trunk build --release --public-url $public_url --config client/Trunk.toml 44 | env: 45 | API_HOST: ${{ secrets.API_HOST }} 46 | - name: Upload WASM build artifact 47 | uses: actions/upload-pages-artifact@v1 48 | with: 49 | path: client/dist 50 | 51 | # Deployment job 52 | deploy: 53 | # Only deploy on main 54 | if: ${{ github.ref == 'refs/heads/main' }} 55 | runs-on: ubuntu-latest 56 | needs: build-github-pages 57 | steps: 58 | - name: Deploy to GitHub Pages 59 | id: deployment 60 | uses: actions/deploy-pages@v2 61 | -------------------------------------------------------------------------------- /.github/workflows/feldera.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Feldera Pipeline 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'feldera/**' 9 | - '.github/workflows/feldera.yml' 10 | - 'Cargo.lock' 11 | - 'Cargo.toml' 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | deploy-pipeline: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v4 # repo checkout 21 | - name: Setup rust toolchain 22 | run: | 23 | rustup update stable 24 | rustup default stable 25 | rustup set profile minimal 26 | - name: Rust Cache # cache the rust build artefacts 27 | uses: Swatinem/rust-cache@v2 28 | - name: Download and install fda binary 29 | run: cargo install fda 30 | - name: Deploy Feldera Pipeline 31 | run: cd feldera && bash deploy.sh 32 | env: 33 | FELDERA_API_KEY: ${{ secrets.FELDERA_API_KEY }} 34 | FELDERA_HOST: ${{ secrets.FELDERA_HOST }} -------------------------------------------------------------------------------- /.github/workflows/restore.yml: -------------------------------------------------------------------------------- 1 | name: Restore from S3 Backup 2 | 3 | on: 4 | workflow_run: 5 | workflows: ["Deploy Feldera Pipeline"] 6 | types: 7 | - completed 8 | workflow_dispatch: 9 | 10 | jobs: 11 | restore: 12 | runs-on: ubuntu-latest 13 | if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }} 14 | 15 | steps: 16 | - uses: actions/checkout@v4 # repo checkout 17 | - uses: actions-rust-lang/setup-rust-toolchain@v1 18 | - name: Rust Cache # cache the rust build artefacts 19 | uses: Swatinem/rust-cache@v2 20 | - name: Download and install fda binary 21 | run: cargo install fda 22 | 23 | # Set up AWS CLI 24 | - name: Configure AWS CLI 25 | uses: aws-actions/configure-aws-credentials@v2 26 | with: 27 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} 28 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 29 | aws-region: ${{ secrets.AWS_DEFAULT_REGION }} 30 | 31 | # Ensure the restore script is executable 32 | - name: Make backup script executable 33 | run: chmod +x ./backup/restore.sh 34 | 35 | # Run the restore script 36 | - name: Run restore script 37 | env: 38 | FELDERA_HOST: ${{ secrets.FELDERA_HOST }} 39 | FELDERA_API_KEY: ${{ secrets.FELDERA_API_KEY }} 40 | run: ./backup/restore.sh 41 | 42 | # Restart the backend *this invalidates the cell cache just in case* 43 | - uses: superfly/flyctl-actions/setup-flyctl@master 44 | - run: cd server && flyctl apps restart 45 | env: 46 | FLY_API_TOKEN: 
${{ secrets.FLY_API_TOKEN }} 47 | -------------------------------------------------------------------------------- /.github/workflows/server.yml: -------------------------------------------------------------------------------- 1 | # See https://fly.io/docs/app-guides/continuous-deployment-with-github-actions/ 2 | 3 | name: Fly Deploy Backend 4 | on: 5 | push: 6 | branches: 7 | - main 8 | paths: 9 | - 'server/**' 10 | - '.github/workflows/server.yml' 11 | - 'Cargo.lock' 12 | - 'Cargo.toml' 13 | 14 | jobs: 15 | deploy: 16 | name: Deploy app 17 | runs-on: ubuntu-latest 18 | concurrency: deploy-group # optional: ensure only one action runs at a time 19 | steps: 20 | - uses: actions/checkout@v4 21 | - uses: superfly/flyctl-actions/setup-flyctl@master 22 | - run: cd server && flyctl deploy --remote-only 23 | env: 24 | FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .idea 3 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "server", 4 | "client", 5 | "feldera/udf", 6 | ] 7 | resolver = "2" 8 | 9 | [profile.release] 10 | opt-level = 2 11 | 12 | [profile.dev.package."*"] 13 | opt-level = 2 14 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 1bln Cell Spreadsheet Techdemo 2 | 3 | This is a tech demo showing incremental computation for a simple, spreadsheet-like application. 4 | The application uses feldera as the DBMS/incremental compute engine, axum as the backend 5 | and egui as the frontend. 6 | 7 | The project is split into three components from the root directory: 8 | 9 | - The `feldera` directory contains the feldera pipeline (written in Feldera/SQL and some Rust UDF code). 10 | - The `server` directory contains the backend application (written in Rust using the axum webserver). 11 | - The `client` directory contains the frontend application (written in Rust using the egui UI library). 12 | 13 | ## Local Installation 14 | 15 | You'll need a working rust installation to run the project locally. 16 | 17 | ### Feldera 18 | 19 | Install a [feldera instance](https://docs.feldera.com/get-started), or alternatively use the 20 | feldera instance running on `https://try.feldera.com`. 21 | Also, you'll need to install Feldera's CLI tool [fda](https://docs.feldera.com/reference/cli). 22 | 23 | Set the `FELDERA_API_KEY` and `FELDERA_HOST` environment variables to access the right feldera instance. 24 | Finally, execute the `deploy.sh` script in the `feldera` directory to deploy the pipeline: 25 | 26 | ```bash 27 | export FELDERA_API_KEY=apikey:... 28 | export FELDERA_HOST=https://try.feldera.com 29 | cd feldera && bash deploy.sh 30 | ``` 31 | 32 | ### Server 33 | 34 | Run the `server` application with cargo: 35 | 36 | ```bash 37 | cd server 38 | cargo run 39 | ``` 40 | 41 | Now the backend should be running on `http://localhost:3000`. The backend will connect 42 | to your feldera instance to fetch the data. The server uses the `FELDERA_API_KEY` and `FELDERA_HOST` 43 | environment variables set earlier, make sure they're still set correctly. 
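To sanity-check that the backend is reachable, you can hit the stats endpoint the client streams from (`/api/stats`, taken from the client code); it should respond with a stream of newline-delimited JSON:

```bash
# Expect a continuous stream of newline-delimited JSON stats objects
curl -N http://localhost:3000/api/stats
```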
44 | 45 | ### Client 46 | 47 | Run the `client` application with trunk: 48 | 49 | ```bash 50 | cd client 51 | API_HOST=http://localhost:3000 trunk serve --port 7777 52 | ``` 53 | 54 | Now the frontend should be running on `http://localhost:7777`. The frontend will connect 55 | to the backend to fetch the data. The `API_HOST` environment variable is set to point to the 56 | backend running on `http://localhost:3000`. 57 | 58 | ## Automated Deployment with Github Actions 59 | 60 | The project is set up to deploy the server backend to [fly.io](https://fly.io/) 61 | and the client application is published using github pages. 62 | 63 | ## Feldera 64 | 65 | Make sure to set the `FELDERA_API_KEY` and `FELDERA_HOST` secrets in the github repository settings. 66 | 67 | ## Server 68 | 69 | Get a fly.io account and install the [fly CLI tool](https://fly.io/docs/flyctl/install/). 70 | 71 | Next make sure to set the `FELDERA_API_KEY` and `FELDERA_HOST` secrets also in your fly.io application. 72 | 73 | ```bash 74 | cd server 75 | fly login 76 | fly secrets set FELDERA_HOST=https://try.feldera.com 77 | fly secrets set FELDERA_API_KEY=apikey:... 78 | ``` 79 | 80 | Finally, you'll need to get an API token from fly.io and set it as a secret named `FLY_API_TOKEN` in the github 81 | repository settings. 82 | 83 | ## Client 84 | 85 | Make sure to set the `API_HOST` secret in the github repository settings to point to your fly.io application URL. 86 | Enable github pages, set the source to `Github Actions`. Then adjust the `public_url` env variable in the `client.yml` 87 | github action file to point to your github pages URL. -------------------------------------------------------------------------------- /backup/backup.sh: -------------------------------------------------------------------------------- 1 | set -ex 2 | FILE_PATH="/tmp/spreadsheet_data.parquet" 3 | BUCKET_NAME="spreadsheet-backups" 4 | 5 | curl -L "${FELDERA_HOST}/v0/pipelines/xls/query?sql=SELECT%20*%20FROM%20spreadsheet_data%3B&format=parquet" -H "Authorization: Bearer ${FELDERA_API_KEY}" > ${FILE_PATH} 6 | TIMESTAMP=$(date -u +"%Y%m%d%H%M%S") 7 | 8 | # Extract the base filename and append the timestamp 9 | BASE_FILENAME=$(basename "$FILE_PATH") 10 | RENAMED_FILENAME="${BASE_FILENAME/spreadsheet_data/spreadsheet_data_$TIMESTAMP}" 11 | S3_KEY="$RENAMED_FILENAME" 12 | 13 | # Check if the file exists 14 | if [[ ! -f "$FILE_PATH" ]]; then 15 | echo "Error: File $FILE_PATH does not exist." 16 | exit 1 17 | fi 18 | 19 | # Upload the file to S3 20 | echo "Uploading $FILE_PATH as s3://$BUCKET_NAME/$S3_KEY" 21 | aws s3 cp "$FILE_PATH" "s3://$BUCKET_NAME/$S3_KEY" 22 | 23 | # Verify the upload 24 | if [[ $? -eq 0 ]]; then 25 | echo "File uploaded successfully to s3://$BUCKET_NAME/$S3_KEY" 26 | else 27 | echo "Error: Upload failed." 
28 | exit 1 29 | fi -------------------------------------------------------------------------------- /backup/restore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -ex 3 | 4 | FILE_PATH="/tmp/spreadsheet_data_restore.parquet" 5 | BUCKET_NAME="spreadsheet-backups" 6 | 7 | # Get the most recent file from the S3 bucket 8 | echo "Fetching the most recent backup file from s3://$BUCKET_NAME" 9 | LATEST_FILE=$(aws s3 ls "s3://$BUCKET_NAME/" | sort | tail -1 | awk '{print $4}') 10 | 11 | # Check if a file was found 12 | if [[ -z "$LATEST_FILE" ]]; then 13 | echo "Error: No files found in s3://$BUCKET_NAME/" 14 | exit 1 15 | fi 16 | 17 | echo "Most recent backup file: $LATEST_FILE" 18 | 19 | # Download the file 20 | aws s3 cp "s3://$BUCKET_NAME/$LATEST_FILE" "$FILE_PATH" 21 | 22 | # Verify the download 23 | if [[ $? -ne 0 || ! -f "$FILE_PATH" ]]; then 24 | echo "Error: Failed to download the file from s3://$BUCKET_NAME/$LATEST_FILE" 25 | exit 1 26 | fi 27 | 28 | echo "File downloaded successfully to $FILE_PATH" 29 | 30 | fda restart xls 31 | sleep 5 32 | 33 | # Insert the data back into Feldera 34 | echo "Inserting data back into Feldera" 35 | curl -X POST \ 36 | --data-binary @$FILE_PATH \ 37 | "${FELDERA_HOST}/v0/pipelines/xls/ingress/spreadsheet_data?format=parquet" \ 38 | -H "Authorization: Bearer ${FELDERA_API_KEY}" 39 | 40 | # Verify the insertion 41 | if [[ $? -eq 0 ]]; then 42 | echo "Data successfully inserted back into Feldera." 43 | else 44 | echo "Error: Data insertion failed." 45 | exit 1 46 | fi 47 | -------------------------------------------------------------------------------- /client/.gitignore: -------------------------------------------------------------------------------- 1 | # Mac stuff: 2 | .DS_Store 3 | 4 | # trunk output folder 5 | dist 6 | 7 | # Rust compile target directories: 8 | target 9 | target_ra 10 | target_wasm 11 | 12 | # https://github.com/lycheeverse/lychee 13 | .lycheecache 14 | -------------------------------------------------------------------------------- /client/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "spreadsheet-techdemo" 3 | version = "0.1.0" 4 | authors = ["Gerd Zellweger "] 5 | edition = "2021" 6 | include = ["LICENSE-APACHE", "LICENSE-MIT", "**/*.rs", "Cargo.toml"] 7 | rust-version = "1.76" 8 | 9 | [package.metadata.docs.rs] 10 | all-features = true 11 | targets = ["wasm32-unknown-unknown"] 12 | 13 | [dependencies] 14 | egui = "0.30" 15 | egui_extras = { version = "0.30", features = ["all_loaders", "svg"] } 16 | eframe = { version = "0.30", default-features = false, features = ["default_fonts", "glow"] } 17 | log = "0.4" 18 | ehttp = { version = "0.5", features = ["streaming", "json"] } 19 | ewebsock = "0.8.0" 20 | lru = "0.12.5" 21 | gloo-timers = "0.3.0" 22 | serde = { version = "1.0.210", features = ["derive"] } 23 | serde_json = "1.0.128" 24 | 25 | [target.'cfg(not(target_arch = "wasm32"))'.dependencies] 26 | env_logger = "0.11" 27 | 28 | [target.'cfg(target_arch = "wasm32")'.dependencies] 29 | wasm-logger = "0.2.0" 30 | wasm-bindgen-futures = "0.4" 31 | web-sys = { version = "0.3.70", features = ["console"] } 32 | 33 | -------------------------------------------------------------------------------- /client/Trunk.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | -------------------------------------------------------------------------------- 
/client/assets/egui.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/feldera/techdemo-spreadsheet/09a8afd446c84664fb009f092f8a2a6c2917bd28/client/assets/egui.ico -------------------------------------------------------------------------------- /client/assets/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/feldera/techdemo-spreadsheet/09a8afd446c84664fb009f092f8a2a6c2917bd28/client/assets/favicon.ico -------------------------------------------------------------------------------- /client/assets/feldera-dark.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /client/assets/feldera-light.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /client/assets/ferris.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/feldera/techdemo-spreadsheet/09a8afd446c84664fb009f092f8a2a6c2917bd28/client/assets/ferris.png -------------------------------------------------------------------------------- /client/assets/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Spreadsheet TechDemo PWA", 3 | "short_name": "spreadsheet-techdemo-pwa", 4 | "icons": [ 5 | { 6 | "src": "./feldera-dark.svg", 7 | "sizes": "654x285", 8 | "type": "image/svg" 9 | }, 10 | { 11 | "src": "./feldera-light.svg", 12 | "sizes": "654x285", 13 | "type": "image/svg" 14 | }, 15 | { 16 | "src": "./ferris.png", 17 | "sizes": "460x307", 18 | "type": "image/png" 19 | } 20 | ], 21 | "lang": "en-US", 22 | "id": "/index.html", 23 | "start_url": "./index.html", 24 | "display": "standalone", 25 | "background_color": "white", 26 | "theme_color": "white" 27 | } 28 | -------------------------------------------------------------------------------- /client/assets/nextjs-icon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /client/assets/sw.js: -------------------------------------------------------------------------------- 1 | var cacheName = 'spreadsheet-techdemo-pwa'; 2 | var filesToCache = [ 3 | './', 4 | './index.html', 5 | './spreadsheet_techdemo.js', 6 | './spreadsheet_techdemo_bg.wasm', 7 | ]; 8 | 9 | /* Start the service worker and cache all of the app's content */ 10 | self.addEventListener('install', function (e) { 11 | e.waitUntil( 12 | caches.open(cacheName).then(function (cache) { 13 | return cache.addAll(filesToCache); 14 | }) 15 | ); 16 | }); 17 | 18 | /* Serve cached content when offline */ 19 | self.addEventListener('fetch', function (e) { 20 | e.respondWith( 21 | caches.match(e.request).then(function (response) { 22 | return response || fetch(e.request); 23 | }) 24 | ); 25 | }); 26 | -------------------------------------------------------------------------------- /client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Billion Cell Spreadsheet 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 |
118 | 119 | Loading… 120 | 121 | 122 |
123 | 124 | 125 | 126 | 134 | 135 | 136 | 137 | -------------------------------------------------------------------------------- /client/rust-toolchain: -------------------------------------------------------------------------------- 1 | # If you see this, run "rustup self update" to get rustup 1.23 or newer. 2 | 3 | # NOTE: above comment is for older `rustup` (before TOML support was added), 4 | # which will treat the first line as the toolchain name, and therefore show it 5 | # to the user in the error, instead of "error: invalid channel name '[toolchain]'". 6 | 7 | [toolchain] 8 | channel = "1.80" # Avoid specifying a patch version here; see https://github.com/emilk/eframe_template/issues/145 9 | components = [ "rustfmt", "clippy" ] 10 | targets = [ "wasm32-unknown-unknown" ] 11 | -------------------------------------------------------------------------------- /client/src/app.rs: -------------------------------------------------------------------------------- 1 | use std::ops::ControlFlow; 2 | use std::sync::atomic::Ordering; 3 | use std::sync::Arc; 4 | 5 | use egui::color_picker::Alpha; 6 | use egui::mutex::RwLock; 7 | use egui::special_emojis::GITHUB; 8 | use egui::{Color32, Key, OpenUrl, Pos2, Rect, RichText, ScrollArea, Sense, Ui, Vec2, Window}; 9 | use egui_extras::{Column, TableBuilder}; 10 | use ewebsock::{WsEvent, WsMessage, WsReceiver}; 11 | use log::{error, trace}; 12 | use serde_json::Deserializer; 13 | 14 | use crate::cell_cache::{Cell, CellCache, Loader}; 15 | use crate::http::streaming_request; 16 | use crate::reference::ReferenceWindow; 17 | 18 | #[derive(serde::Deserialize, Default, Debug, Clone)] 19 | pub struct Stats { 20 | pub filled_total: u64, 21 | pub filled_this_hour: u64, 22 | pub filled_today: u64, 23 | pub filled_this_week: u64, 24 | pub currently_active_users: u64, 25 | } 26 | 27 | pub struct SpreadsheetApp { 28 | focused_row: usize, 29 | focused_col: usize, 30 | bg_color_picked: Color32, 31 | last_key_time: f64, 32 | num_cols: usize, 33 | num_rows: usize, 34 | loader: Arc, 35 | ws_receiver: WsReceiver, 36 | stats: Arc>, 37 | cell_cache: CellCache, 38 | editing_cell: Option, 39 | reference_open: bool, 40 | } 41 | 42 | pub fn is_mobile(ctx: &egui::Context) -> bool { 43 | let screen_size = ctx.screen_rect().size(); 44 | screen_size.x < 550.0 45 | } 46 | 47 | impl SpreadsheetApp { 48 | const DEFAULT_COLS: usize = 26; 49 | const DEFAULT_ROWS: usize = 40_000_000; // 26*40_000_000 = 1_040_000_000 cells 50 | const DEFAULT_ROW_HEIGHT: f32 = 18.0; 51 | 52 | /// Called once before the first frame. 53 | pub fn new(cc: &eframe::CreationContext<'_>) -> Self { 54 | // This is also where you can customize the look and feel of egui using 55 | // `cc.egui_ctx.set_visuals` and `cc.egui_ctx.set_fonts`. 
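// Overview of the setup below: resolve the backend host from the compile-time
// `API_HOST` value (falling back to http://localhost:3000), start a streaming
// request that keeps `stats` updated from `/api/stats`, and open the
// `/api/spreadsheet` websocket whose sender is handed to the `Loader` backing
// the cell cache.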
56 | egui_extras::install_image_loaders(&cc.egui_ctx); 57 | let server = CellCache::API_HOST.unwrap_or("http://localhost:3000"); 58 | 59 | // Refresh stats 60 | let stats = Arc::new(RwLock::new(Stats::default())); 61 | { 62 | let egui_ctx = cc.egui_ctx.clone(); 63 | let stats = stats.clone(); 64 | let handle_chunk = Arc::new(move |current_chunk: String| { 65 | let stream = Deserializer::from_str(¤t_chunk).into_iter::(); 66 | for maybe_value in stream { 67 | match maybe_value { 68 | Ok(value) => { 69 | *stats.write() = value; 70 | } 71 | Err(err) => { 72 | error!("an error occurred while reading stats: {err}"); 73 | return ControlFlow::Break(()); 74 | } 75 | } 76 | } 77 | egui_ctx.request_repaint(); 78 | ControlFlow::Continue(()) 79 | }); 80 | streaming_request(format!("{}/api/stats", server), handle_chunk); 81 | } 82 | 83 | // Change stream connection 84 | let (ws_sender, ws_receiver) = { 85 | let egui_ctx = cc.egui_ctx.clone(); 86 | let wakeup = move || egui_ctx.request_repaint(); 87 | let url = format!("{}/api/spreadsheet", server); 88 | ewebsock::connect_with_wakeup(&url, Default::default(), wakeup).unwrap() 89 | }; 90 | let loader = Arc::new(Loader::new(ws_sender)); 91 | 92 | SpreadsheetApp { 93 | focused_row: 0, 94 | focused_col: 0, 95 | bg_color_picked: Color32::TRANSPARENT, 96 | last_key_time: 0.0, 97 | num_cols: Self::DEFAULT_COLS, 98 | num_rows: Self::DEFAULT_ROWS, 99 | stats, 100 | loader: loader.clone(), 101 | ws_receiver, 102 | cell_cache: CellCache::new(loader, Self::DEFAULT_COLS, Self::DEFAULT_ROWS), 103 | editing_cell: None, 104 | reference_open: false, 105 | } 106 | } 107 | } 108 | 109 | impl eframe::App for SpreadsheetApp { 110 | /// Called each time the UI needs repainting, which may be many times per second. 111 | fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) { 112 | while let Some(event) = self.ws_receiver.try_recv() { 113 | match event { 114 | WsEvent::Message(WsMessage::Text(update)) => { 115 | let parsed = serde_json::from_str::(&update); 116 | match parsed { 117 | Ok(cell) => { 118 | self.cell_cache.set(cell.id, cell.into()); 119 | } 120 | Err(e) => { 121 | trace!("error parsing cell update: {:?} {:?}", update, e); 122 | } 123 | } 124 | } 125 | WsEvent::Opened => { 126 | self.loader.is_open.store(true, Ordering::Relaxed); 127 | self.loader.fetch(0..2600); 128 | } 129 | WsEvent::Closed => { 130 | self.loader.is_open.store(false, Ordering::Relaxed); 131 | } 132 | _ => { 133 | error!("unexpected event: {:?}", event); 134 | } 135 | } 136 | } 137 | 138 | egui::TopBottomPanel::top("top_panel").show(ctx, |ui| { 139 | egui::menu::bar(ui, |ui| { 140 | ui.horizontal_wrapped(|ui| { 141 | egui::widgets::global_theme_preference_buttons(ui); 142 | if ui.button("📖 Read The Blog Post").clicked() { 143 | ctx.output_mut(|o| { 144 | o.open_url = Some(OpenUrl::new_tab( 145 | "https://docs.feldera.com/use_cases/real_time_apps/part1", 146 | )) 147 | }); 148 | } 149 | if ui.button("📺 Video Tutorial").clicked() { 150 | ctx.output_mut(|o| { 151 | o.open_url = Some(OpenUrl::new_tab( 152 | "https://www.youtube.com/watch?v=ROa4duVqoOs", 153 | )) 154 | }); 155 | } 156 | if ui.button(format!("{GITHUB} Fork me on Github")).clicked() { 157 | ctx.output_mut(|o| { 158 | o.open_url = Some(OpenUrl::new_tab( 159 | "https://github.com/feldera/techdemo-spreadsheet", 160 | )) 161 | }); 162 | } 163 | Window::new("Formula Reference") 164 | .open(&mut self.reference_open) 165 | .show(ctx, |ui| { 166 | let mut rw = ReferenceWindow {}; 167 | rw.ui(ui); 168 | }); 169 | if ui.button("? 
Help").clicked() { 170 | self.reference_open = true; 171 | } 172 | }); 173 | }); 174 | }); 175 | 176 | egui::CentralPanel::default().show(ctx, |ui| { 177 | ui.heading(RichText::new("Billion Cell Spreadsheet").strong()); 178 | ui.add_space(20.0); 179 | 180 | fn active_users(ui: &mut Ui, stats: &Stats) { 181 | ui.label(RichText::new("Currently Active Users:").strong()); 182 | let icon_size = Vec2::splat(10.0); 183 | ui.horizontal(|ui| { 184 | for _ in 0..stats.currently_active_users.min(10) { 185 | ui.painter().circle_filled( 186 | Pos2::new( 187 | ui.cursor().min.x + icon_size.x, 188 | ui.cursor().center().y, 189 | ), 190 | icon_size.x / 2.0, 191 | Color32::LIGHT_GREEN, 192 | ); 193 | ui.add_space(12.0); 194 | } 195 | if stats.currently_active_users > 10 { 196 | ui.label(format!( 197 | "+{} more", 198 | stats.currently_active_users - 10 199 | )); 200 | } 201 | }); 202 | } 203 | 204 | fn cells_with_content(ui: &mut Ui, stats: &Stats) { 205 | ui.label(RichText::new("Cells With Content:").strong()); 206 | let max_cells = (SpreadsheetApp::DEFAULT_COLS as u64 207 | * SpreadsheetApp::DEFAULT_ROWS as u64) 208 | as f64; 209 | let filled_ratio = (stats.filled_total as f64 / max_cells) as f32; 210 | let filled_color = if filled_ratio < 0.5 { 211 | Color32::from_rgb(100, 150, 250) 212 | } else { 213 | Color32::from_rgb(250, 100, 100) 214 | }; 215 | ui.painter().rect_filled( 216 | Rect::from_min_size( 217 | ui.cursor().min, 218 | Vec2::new(filled_ratio * 150.0, 15.0), 219 | ), 220 | 4.0, 221 | filled_color, 222 | ); 223 | ui.label(format!("{}/{}", stats.filled_total, max_cells)); 224 | ui.label(format!("{}%", filled_ratio * 100.0)); 225 | } 226 | 227 | fn timed_stats(ui: &mut Ui, stats: &Stats) { 228 | ui.horizontal(|ui| { 229 | ui.label(RichText::new("Cells Edited This Hour: ").strong()); 230 | ui.label(format!("{}", stats.filled_this_hour)); 231 | }); 232 | ui.horizontal(|ui| { 233 | ui.label(RichText::new("Cells Edited Today: ").strong()); 234 | ui.label(format!("{}", stats.filled_today)); 235 | }); 236 | ui.horizontal(|ui| { 237 | ui.label(RichText::new("Cells Edited This Week: ").strong()); 238 | ui.label(format!("{}", stats.filled_this_week)); 239 | }); 240 | } 241 | 242 | fn built_with(ui: &mut Ui) { 243 | ui.heading("Built with"); 244 | 245 | ui.horizontal(|ui| { 246 | ui.add( 247 | egui::Hyperlink::from_label_and_url( 248 | format!("{GITHUB} feldera"), 249 | "https://github.com/feldera/feldera", 250 | ) 251 | .open_in_new_tab(true), 252 | ); 253 | ui.add_space(10.0); 254 | ui.add( 255 | egui::Hyperlink::from_label_and_url( 256 | format!("{GITHUB} axum"), 257 | "https://github.com/tokio-rs/axum", 258 | ) 259 | .open_in_new_tab(true), 260 | ); 261 | ui.add_space(10.0); 262 | ui.add( 263 | egui::Hyperlink::from_label_and_url( 264 | format!("{GITHUB} egui"), 265 | "https://github.com/emilk/egui", 266 | ) 267 | .open_in_new_tab(true), 268 | ); 269 | ui.add_space(10.0); 270 | ui.add( 271 | egui::Hyperlink::from_label_and_url( 272 | format!("{GITHUB} XLFormula Engine"), 273 | "https://github.com/jiradaherbst/XLFormula-Engine", 274 | ) 275 | .open_in_new_tab(true), 276 | ); 277 | ui.add_space(10.0); 278 | }); 279 | } 280 | 281 | let stats = self.stats.read().clone(); 282 | if !is_mobile(ctx) { 283 | ui.vertical(|ui| { 284 | ui.horizontal(|ui| { 285 | ui.with_layout(egui::Layout::left_to_right(egui::Align::TOP), |ui| { 286 | // Active users section with icons 287 | ui.vertical(|ui| { 288 | active_users(ui, &stats); 289 | }); 290 | ui.separator(); 291 | // Meter for total filled cells 292 | 
ui.vertical(|ui| { 293 | cells_with_content(ui, &stats); 294 | }); 295 | ui.separator(); 296 | timed_stats(ui, &stats); 297 | }); 298 | 299 | ui.add_space(50.0); 300 | 301 | ui.with_layout( 302 | egui::Layout::left_to_right(egui::Align::TOP).with_main_wrap(true), 303 | |ui| { 304 | ui.vertical(|ui| { 305 | built_with(ui) 306 | }); 307 | }, 308 | ); 309 | }); 310 | }); 311 | } 312 | else { 313 | ui.vertical(|ui| { 314 | active_users(ui, &stats); 315 | ui.add_space(20.0); 316 | cells_with_content(ui, &stats); 317 | timed_stats(ui, &stats); 318 | ui.add_space(20.0); 319 | 320 | ui.with_layout( 321 | egui::Layout::left_to_right(egui::Align::TOP).with_main_wrap(true), 322 | |ui| { 323 | ui.vertical(|ui| { 324 | built_with(ui) 325 | }); 326 | }, 327 | ); 328 | }); 329 | } 330 | 331 | 332 | ui.add_space(20.0); 333 | 334 | ui.vertical(|ui| { 335 | ui.horizontal(|ui| { 336 | let original_spacing = { 337 | let style = ui.style_mut(); 338 | let original_spacing = style.spacing.item_spacing; 339 | style.spacing.item_spacing.x = 2.0; 340 | original_spacing 341 | }; 342 | 343 | ui.label("Set Background Color"); 344 | ui.colored_label(Color32::LIGHT_BLUE, RichText::new("[?]")).on_hover_text( 345 | "By default colors are at 0 alpha (fully transparent).\nMove the bottom slider in the widget to decrease the transparency if yo want\nto set a color on a new transparent cell.", 346 | ); 347 | let style = ui.style_mut(); 348 | style.spacing.item_spacing = original_spacing; 349 | }); 350 | 351 | let id = self.focused_row as u64 * self.num_cols as u64 + self.focused_col as u64; 352 | let cell = self.cell_cache.get(id); 353 | let color_response = egui::widgets::color_picker::color_edit_button_srgba( 354 | ui, 355 | &mut self.bg_color_picked, 356 | Alpha::BlendOrAdditive, 357 | ); 358 | if color_response.changed() { 359 | cell.set_background(self.bg_color_picked); 360 | } 361 | }); 362 | 363 | ScrollArea::horizontal().show(ui, |ui| { 364 | TableBuilder::new(ui) 365 | .striped(true) 366 | .resizable(true) 367 | .cell_layout(egui::Layout::left_to_right(egui::Align::Center)) 368 | .column(Column::remainder()) 369 | .columns(Column::initial(100.0).at_least(25.0).resizable(true).clip(true), self.num_cols) 370 | .header(Self::DEFAULT_ROW_HEIGHT + 3.0, |mut header| { 371 | let col_idx_to_label = |idx: usize| { 372 | if idx < 26 { 373 | format!("{}", (b'A' + idx as u8) as char) 374 | } else { 375 | format!( 376 | "{}{}", 377 | (b'A' + (idx / 26 - 1) as u8) as char, 378 | (b'A' + (idx % 26) as u8) as char 379 | ) 380 | } 381 | }; 382 | 383 | header.col(|ui| { 384 | ui.strong(""); 385 | }); 386 | 387 | for col_index in 0..self.num_cols { 388 | header.col(|ui| { 389 | ui.strong(col_idx_to_label(col_index)); 390 | }); 391 | } 392 | }) 393 | .body(|body| { 394 | body.rows(Self::DEFAULT_ROW_HEIGHT, self.num_rows, |mut row| { 395 | let row_index = row.index(); 396 | row.col(|ui| { 397 | ui.strong(row_index.to_string()); 398 | }); 399 | 400 | for col_index in 0..self.num_cols { 401 | let id = row_index as u64 * self.num_cols as u64 + col_index as u64; 402 | let cell = self.cell_cache.get(id); 403 | row.col(|ui| { 404 | let has_focus = row_index == self.focused_row 405 | && col_index == self.focused_col; 406 | let rect = ui.available_rect_before_wrap(); 407 | let resp = ui.interact( 408 | ui.available_rect_before_wrap(), 409 | ui.make_persistent_id(id), 410 | Sense::click(), 411 | ); 412 | ui.painter().rect_filled(rect, 0.0, cell.background_color()); 413 | let cell_response = cell.ui(ui); 414 | 415 | // Adjust cell focus based on 
the new coordinates 416 | if has_focus { 417 | ui.painter().rect_stroke( 418 | rect, 419 | 0.0, 420 | egui::Stroke::new(1.0, Color32::LIGHT_BLUE), 421 | ); 422 | } 423 | 424 | ui.input(|i| { 425 | const KEY_DELAY: f64 = 0.01; 426 | let now = i.time; 427 | i.events.iter().for_each(|i| { 428 | if let egui::Event::Key { key, pressed, .. } = i { 429 | if now - self.last_key_time > KEY_DELAY && *pressed 430 | { 431 | match key { 432 | Key::Escape => { 433 | if self.editing_cell.is_some() { 434 | cell.disable_edit(true); 435 | } 436 | } 437 | Key::Enter => { 438 | self.focused_row = (self.focused_row 439 | + 1) 440 | .min(self.num_rows - 1); 441 | self.last_key_time = now; 442 | } 443 | Key::ArrowDown => { 444 | if self.editing_cell.is_none() { 445 | self.focused_row = 446 | (self.focused_row + 1) 447 | .min(self.num_rows - 1); 448 | self.last_key_time = now; 449 | } 450 | } 451 | Key::ArrowUp => { 452 | if self.editing_cell.is_none() { 453 | self.focused_row = self 454 | .focused_row 455 | .saturating_sub(1); 456 | self.last_key_time = now; 457 | } 458 | } 459 | Key::ArrowRight => { 460 | if self.editing_cell.is_none() { 461 | self.focused_col = 462 | (self.focused_col + 1) 463 | .min(self.num_cols - 1); 464 | self.last_key_time = now; 465 | } 466 | } 467 | Key::ArrowLeft => { 468 | if self.editing_cell.is_none() { 469 | self.focused_col = self 470 | .focused_col 471 | .saturating_sub(1); 472 | self.last_key_time = now; 473 | } 474 | } 475 | Key::PageDown => { 476 | if self.editing_cell.is_none() { 477 | self.focused_row = 478 | (self.focused_row + 10) 479 | .min(self.num_rows - 1); 480 | self.last_key_time = now; 481 | } 482 | } 483 | Key::PageUp => { 484 | if self.editing_cell.is_none() { 485 | self.focused_row = self 486 | .focused_row 487 | .saturating_sub(10); 488 | self.last_key_time = now; 489 | } 490 | } 491 | _ => {} 492 | } 493 | } 494 | } 495 | }); 496 | }); 497 | 498 | // Set focus on the cell 499 | if resp.clicked() 500 | || (cell_response.clicked() && !cell_response.has_focus()) 501 | { 502 | self.focused_row = row_index; 503 | self.focused_col = col_index; 504 | self.bg_color_picked = cell.background_color(); 505 | } 506 | 507 | // Done with editing 508 | if self.editing_cell.is_some() && cell_response.lost_focus() { 509 | cell.disable_edit(false); 510 | cell.save(); 511 | self.editing_cell = None; 512 | } 513 | 514 | // Edit the current cell 515 | if self.editing_cell.is_none() 516 | && (resp.double_clicked() 517 | || cell_response.double_clicked() 518 | || (resp.has_focus() 519 | && ui.input(|i| i.key_pressed(Key::Enter)))) 520 | { 521 | cell_response.request_focus(); 522 | cell.edit(); 523 | self.editing_cell = Some(id); 524 | } 525 | }); 526 | } 527 | }); 528 | }); 529 | }); 530 | }); 531 | } 532 | } 533 | -------------------------------------------------------------------------------- /client/src/cell_cache.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::fmt::Display; 3 | use std::num::NonZeroUsize; 4 | use std::ops::Range; 5 | use std::rc::Rc; 6 | use std::sync::atomic::{AtomicBool, AtomicI32, Ordering}; 7 | use std::sync::Arc; 8 | use std::time::Duration; 9 | 10 | use egui::mutex::{Mutex, RwLock}; 11 | use egui::widgets::TextEdit; 12 | use egui::{Color32, Label, Response, Sense, Ui}; 13 | use ehttp::Request; 14 | use ewebsock::{WsMessage, WsSender}; 15 | use log::{debug, trace, warn}; 16 | use lru::LruCache; 17 | use serde_json::json; 18 | 19 | use crate::debouncer::Debouncer; 20 | 21 | /// The 
cell as it comes from the backend. 22 | #[derive(Debug, Clone, Eq, PartialEq, serde::Deserialize)] 23 | pub(crate) struct Cell { 24 | pub(crate) id: u64, 25 | pub(crate) raw_value: String, 26 | pub(crate) computed_value: String, 27 | pub(crate) background: i32, 28 | } 29 | 30 | /// A request to update a cell. 31 | #[derive(Debug, Clone, Eq, PartialEq, serde::Serialize)] 32 | pub(crate) struct UpdateCellRequest { 33 | pub(crate) id: u64, 34 | pub(crate) raw_value: String, 35 | pub(crate) background: i32, 36 | } 37 | 38 | impl From<&CellContent> for UpdateCellRequest { 39 | fn from(cell: &CellContent) -> Self { 40 | Self { 41 | id: cell.id, 42 | raw_value: cell.write_buffer.read().clone(), 43 | background: cell.background.load(Ordering::Relaxed), 44 | } 45 | } 46 | } 47 | 48 | /// A Cell that we currently track as part of the spreadsheet. 49 | pub(crate) struct CellContent { 50 | pub(crate) id: u64, 51 | pub(crate) content: RwLock, 52 | pub(crate) write_buffer: RwLock, 53 | pub(crate) old_write_buffer: Mutex, 54 | pub(crate) background: AtomicI32, 55 | pub(crate) is_editing: AtomicBool, 56 | debounce_bg_change: Rc>, 57 | } 58 | 59 | /// We convert Cells from the backend into CellContent that we can edit. 60 | impl From for CellContent { 61 | fn from(cell: Cell) -> Self { 62 | Self { 63 | id: cell.id, 64 | content: RwLock::new(cell.computed_value), 65 | write_buffer: RwLock::new(cell.raw_value.clone()), 66 | old_write_buffer: Mutex::new(cell.raw_value), 67 | is_editing: AtomicBool::new(false), 68 | background: AtomicI32::new(cell.background), 69 | debounce_bg_change: Rc::new(Mutex::new(Debouncer::new())), 70 | } 71 | } 72 | } 73 | 74 | impl CellContent { 75 | /// A new empty cell. 76 | pub(crate) fn empty(id: u64) -> Self { 77 | Self { 78 | id, 79 | write_buffer: RwLock::new(String::new()), 80 | old_write_buffer: Mutex::new(String::new()), 81 | content: RwLock::new(String::new()), 82 | is_editing: AtomicBool::new(false), 83 | background: AtomicI32::new(i32::from_le_bytes(Color32::TRANSPARENT.to_array())), 84 | debounce_bg_change: Rc::new(Mutex::new(Debouncer::new())), 85 | } 86 | } 87 | 88 | pub(crate) fn background_color(&self) -> Color32 { 89 | let rgba_premultiplied = i32::to_le_bytes(self.background.load(Ordering::Relaxed)); 90 | Color32::from_rgba_premultiplied( 91 | rgba_premultiplied[0], 92 | rgba_premultiplied[1], 93 | rgba_premultiplied[2], 94 | rgba_premultiplied[3], 95 | ) 96 | } 97 | 98 | pub(crate) fn is_editing(&self) -> bool { 99 | self.is_editing.load(Ordering::SeqCst) 100 | } 101 | 102 | /// We set the cell into edit mode -- if the user clicks it. 103 | pub(crate) fn edit(&self) { 104 | let mut old_value = self.old_write_buffer.lock(); 105 | old_value.clear(); 106 | old_value.push_str(&self.write_buffer.read()); 107 | self.is_editing.store(true, Ordering::SeqCst); 108 | } 109 | 110 | /// We disable editing mode -- if the user clicks elsewhere. 
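/// If `revert` is true, the edit buffer is rolled back to the value captured when
/// editing started (this is what pressing Escape does in the app).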
111 | pub(crate) fn disable_edit(&self, revert: bool) { 112 | if revert { 113 | let old_value = self.old_write_buffer.lock(); 114 | let mut write_buffer = self.write_buffer.write(); 115 | write_buffer.clear(); 116 | write_buffer.push_str(&old_value); 117 | } 118 | self.is_editing.store(false, Ordering::SeqCst); 119 | } 120 | 121 | pub(crate) fn set_background(&self, color: Color32) { 122 | self.background 123 | .store(i32::from_le_bytes(color.to_array()), Ordering::Relaxed); 124 | let mut debouncer = self.debounce_bg_change.lock(); 125 | let cell_update = self.into(); 126 | debouncer.debounce(Duration::from_millis(350), move || { 127 | update_cell( 128 | format!( 129 | "{}/api/spreadsheet", 130 | CellCache::API_HOST.unwrap_or("http://localhost:3000") 131 | ), 132 | cell_update, 133 | ); 134 | }); 135 | } 136 | 137 | pub(crate) fn save(&self) { 138 | let mut old_value = self.old_write_buffer.lock(); 139 | let new_value = self.write_buffer.read(); 140 | if *old_value != *new_value { 141 | update_cell( 142 | format!( 143 | "{}/api/spreadsheet", 144 | CellCache::API_HOST.unwrap_or("http://localhost:3000") 145 | ), 146 | self.into(), 147 | ); 148 | old_value.clear(); 149 | old_value.push_str(&new_value); 150 | } 151 | } 152 | 153 | /// We render the cell in the UI/Table. 154 | pub fn ui(&self, ui: &mut Ui) -> Response { 155 | if self.is_editing() { 156 | let mut content = self.write_buffer.write(); 157 | ui.add(TextEdit::singleline(&mut *content)) 158 | } else { 159 | let content = self.content.read().to_string(); 160 | ui.add(Label::new(&content).sense(Sense::click())) 161 | } 162 | } 163 | } 164 | 165 | /// Sends a PATCH request to the server to update a cell. 166 | fn update_cell(url: String, data: UpdateCellRequest) { 167 | let request = Request::json(url, &data).unwrap(); 168 | ehttp::fetch(request, move |response| { 169 | if let Ok(response) = response { 170 | if !response.ok { 171 | warn!("POST request failed: {:?}", response.text()); 172 | } 173 | } else { 174 | debug!("No response received"); 175 | } 176 | }); 177 | } 178 | 179 | /// Helper to display CellContent. 180 | impl Display for CellContent { 181 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 182 | write!(f, "{}", self.content.read()) 183 | } 184 | } 185 | 186 | pub(crate) struct Loader { 187 | pub(crate) is_open: AtomicBool, 188 | ws_sender: Mutex, 189 | } 190 | 191 | impl Loader { 192 | pub(crate) fn new(ws_sender: WsSender) -> Self { 193 | Self { 194 | ws_sender: Mutex::new(ws_sender), 195 | is_open: AtomicBool::new(false), 196 | } 197 | } 198 | 199 | pub(crate) fn fetch(&self, range: Range) -> bool { 200 | if !self.is_open.load(Ordering::Relaxed) { 201 | return false; 202 | } 203 | 204 | let mut sender = self.ws_sender.lock(); 205 | sender.send(WsMessage::Text( 206 | json!({"from": range.start, "to": range.end}).to_string(), 207 | )); 208 | true 209 | } 210 | } 211 | 212 | /// The CellCache stores a fixed number of cells in memory. 213 | /// 214 | /// - It fetches cells from the backend as needed. 215 | /// - It always contains the cells that the user is currently looking at (and some more 216 | /// since it also prefetches cells around the current view to make scrolling smooth). 217 | /// - It debounces fetching of new rows to avoid fetching too many cells at once. 
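/// For example, with the default 26 columns a cache miss schedules (after a
/// 100 ms debounce) a websocket fetch for roughly 100 rows of cells above and
/// below the requested id, while the LRU keeps about 200 rows' worth in memory.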
218 | pub(crate) struct CellCache {
219 |     cells: Rc<Mutex<LruCache<u64, Rc<CellContent>>>>,
220 |     fetcher: Arc<Loader>,
221 |     debouncer: Rc<RefCell<Debouncer>>,
222 |     current_range: Option<Range<u64>>,
223 |     prefetch_before_after_id: u64,
224 |     max_cells: usize,
225 | }
226 | 
227 | impl CellCache {
228 |     pub(crate) const API_HOST: Option<&'static str> = option_env!("API_HOST");
229 | 
230 |     pub fn new(fetcher: Arc<Loader>, width: usize, height: usize) -> Self {
231 |         let prefetch_before_after_id = 100 * width as u64;
232 |         let lru_cache_size = NonZeroUsize::new(200 * width).unwrap();
233 | 
234 |         Self {
235 |             fetcher,
236 |             cells: Rc::new(Mutex::new(LruCache::new(lru_cache_size))),
237 |             debouncer: Rc::new(RefCell::new(Debouncer::new())),
238 |             current_range: None,
239 |             prefetch_before_after_id,
240 |             max_cells: width * height,
241 |         }
242 |     }
243 | 
244 |     pub fn set(&mut self, id: u64, c: CellContent) {
245 |         let mut cells = self.cells.lock();
246 |         cells.push(id, Rc::new(c));
247 |     }
248 | 
249 |     pub fn get(&mut self, id: u64) -> Rc<CellContent> {
250 |         let mut cells = self.cells.lock();
251 | 
252 |         if let Some(c) = cells.get(&id) {
253 |             c.clone()
254 |         } else {
255 |             let c = Rc::new(CellContent::empty(id));
256 |             cells.push(id, c.clone());
257 | 
258 |             if let Some(current_range) = &self.current_range {
259 |                 if current_range.contains(&id) {
260 |                     // Already fetching this range...
261 |                     return c;
262 |                 }
263 |             }
264 | 
265 |             let start = id.saturating_sub(self.prefetch_before_after_id);
266 |             let end = std::cmp::min(
267 |                 id.saturating_add(self.prefetch_before_after_id),
268 |                 self.max_cells as u64,
269 |             );
270 |             let current_range = start..end;
271 |             self.current_range = Some(current_range.clone());
272 |             trace!("fetching range: {:?}", current_range);
273 |             let fetcher = self.fetcher.clone();
274 | 
275 |             let debouncer_clone = self.debouncer.clone();
276 |             debouncer_clone
277 |                 .borrow_mut()
278 |                 .debounce(Duration::from_millis(100), move || {
279 |                     let mut max_retry = 10;
280 |                     while !fetcher.fetch(current_range.clone()) && max_retry > 0 {
281 |                         max_retry -= 1;
282 |                     }
283 |                 });
284 | 
285 |             c
286 |         }
287 |     }
288 | }
289 | 
-------------------------------------------------------------------------------- /client/src/debouncer.rs: --------------------------------------------------------------------------------
1 | use gloo_timers::callback::Timeout;
2 | use std::time::Duration;
3 | 
4 | pub(crate) struct Debouncer {
5 |     timeout: Option<Timeout>,
6 | }
7 | 
8 | impl Debouncer {
9 |     pub(crate) fn new() -> Self {
10 |         Self { timeout: None }
11 |     }
12 | 
13 |     pub(crate) fn debounce<F>(&mut self, delay: Duration, callback: F)
14 |     where
15 |         F: 'static + FnOnce(),
16 |     {
17 |         if let Some(timeout) = self.timeout.take() {
18 |             timeout.cancel();
19 |         }
20 | 
21 |         self.timeout = Some(Timeout::new(delay.as_millis() as u32, callback));
22 |     }
23 | }
24 | 
-------------------------------------------------------------------------------- /client/src/http.rs: --------------------------------------------------------------------------------
1 | use std::ops::ControlFlow;
2 | use std::sync::Arc;
3 | 
4 | use egui::mutex::Mutex;
5 | use ehttp::{streaming, Request};
6 | use log::error;
7 | 
8 | pub fn streaming_request(
9 |     url: String,
10 |     handle_data: Arc<dyn Fn(String) -> ControlFlow<()> + Send + Sync>,
11 | ) {
12 |     let remainder = Arc::new(Mutex::new(String::new()));
13 | 
14 |     // Handle a chunk of data received from the server, this might not be a complete JSON object
15 |     // so we need to store the remainder of the last chunk and append it to the next chunk
16 |     let handle_chunk: Arc<dyn Fn(Vec<u8>) -> ControlFlow<()> + Send + Sync> =
17 |         Arc::new(move |chunk: Vec<u8>| {
18 |             if chunk.is_empty() {
19 |                 return ControlFlow::Break(());
20 |             }
21 |             let mut remainder = remainder.lock();
22 | 
23 |             let mut current_chunk = remainder.to_string();
24 |             current_chunk.extend(String::from_utf8_lossy(chunk.as_slice()).chars());
25 | 
26 |             // For ndjson, needs to end with a newline, if not it's an incomplete chunk
27 |             // store the last bit in the remainder
28 |             if !current_chunk.ends_with('\n') {
29 |                 // split off the last chunk that doesn't end with a newline
30 |                 let (chunk_str, new_remainder) = match current_chunk.rfind('\n') {
31 |                     Some(idx) => current_chunk.split_at(idx + 1),
32 |                     None => {
33 |                         *remainder = current_chunk;
34 |                         return ControlFlow::Continue(());
35 |                     }
36 |                 };
37 |                 *remainder = new_remainder.to_string();
38 |                 current_chunk = chunk_str.to_string();
39 |             } else {
40 |                 *remainder = String::new();
41 |             }
42 | 
43 |             handle_data(current_chunk)
44 |         });
45 | 
46 |     let request = Request::get(url.clone());
47 |     streaming::fetch(request, move |result: ehttp::Result<streaming::Part>| {
48 |         let part = match result {
49 |             Ok(part) => part,
50 |             Err(err) => {
51 |                 error!("an error occurred while streaming `{url}`: {err}");
52 |                 return ControlFlow::Break(());
53 |             }
54 |         };
55 | 
56 |         match part {
57 |             streaming::Part::Response(response) => {
58 |                 if response.ok {
59 |                     ControlFlow::Continue(())
60 |                 } else {
61 |                     ControlFlow::Break(())
62 |                 }
63 |             }
64 |             streaming::Part::Chunk(chunk) => handle_chunk(chunk),
65 |         }
66 |     });
67 | }
68 | 
-------------------------------------------------------------------------------- /client/src/lib.rs: --------------------------------------------------------------------------------
1 | #![warn(clippy::all, rust_2018_idioms)]
2 | mod app;
3 | mod cell_cache;
4 | mod debouncer;
5 | mod http;
6 | mod reference;
7 | 
8 | pub use app::SpreadsheetApp;
9 | 
-------------------------------------------------------------------------------- /client/src/main.rs: --------------------------------------------------------------------------------
1 | #![warn(clippy::all, rust_2018_idioms)]
2 | #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
3 | 
4 | // When compiling natively:
5 | #[cfg(not(target_arch = "wasm32"))]
6 | fn main() -> eframe::Result {
7 |     env_logger::init(); // Log to stderr (if you run with `RUST_LOG=debug`).
8 | 
9 |     let native_options = eframe::NativeOptions {
10 |         viewport: egui::ViewportBuilder::default()
11 |             .with_inner_size([400.0, 300.0])
12 |             .with_min_inner_size([300.0, 220.0]),
13 |         ..Default::default()
14 |     };
15 |     eframe::run_native(
16 |         "Spreadsheet Techdemo",
17 |         native_options,
18 |         Box::new(|cc| Ok(Box::new(spreadsheet_techdemo::SpreadsheetApp::new(cc)))),
19 |     )
20 | }
21 | 
22 | // When compiling to web using trunk:
23 | #[cfg(target_arch = "wasm32")]
24 | fn main() {
25 |     wasm_logger::init(wasm_logger::Config::new(log::Level::Debug));
26 |     use eframe::wasm_bindgen::JsCast as _;
27 | 
28 |     // Redirect `log` message to `console.log` and friends:
29 |     eframe::WebLogger::init(log::LevelFilter::Debug).ok();
30 | 
31 |     let web_options = eframe::WebOptions::default();
32 | 
33 |     wasm_bindgen_futures::spawn_local(async {
34 |         let document = web_sys::window()
35 |             .expect("No window")
36 |             .document()
37 |             .expect("No document");
38 | 
39 |         let canvas = document
40 |             .get_element_by_id("the_canvas_id")
41 |             .expect("Failed to find the_canvas_id")
42 |             .dyn_into::<web_sys::HtmlCanvasElement>()
43 |             .expect("the_canvas_id was not a HtmlCanvasElement");
44 | 
45 |         let start_result = eframe::WebRunner::new()
46 |             .start(
47 |                 canvas,
48 |                 web_options,
49 |                 Box::new(|cc| Ok(Box::new(spreadsheet_techdemo::SpreadsheetApp::new(cc)))),
50 |             )
51 |             .await;
52 | 
53 |         // Remove the loading text and spinner:
54 |         if let Some(loading_text) = document.get_element_by_id("loading_text") {
55 |             match start_result {
56 |                 Ok(_) => {
57 |                     loading_text.remove();
58 |                 }
59 |                 Err(e) => {
60 |                     loading_text.set_inner_html(
61 |                         "

<p> The app has crashed. See the developer console for details. </p>

", 62 | ); 63 | panic!("Failed to start eframe: {e:?}"); 64 | } 65 | } 66 | } 67 | }); 68 | } 69 | -------------------------------------------------------------------------------- /client/src/reference.rs: -------------------------------------------------------------------------------- 1 | use egui::{CollapsingHeader, Ui}; 2 | 3 | pub struct ReferenceWindow {} 4 | 5 | impl ReferenceWindow { 6 | pub(crate) fn ui(&mut self, ui: &mut Ui) { 7 | ui.set_min_width(250.0); 8 | 9 | // Title 10 | ui.heading("Formula Help"); 11 | 12 | // Features section 13 | CollapsingHeader::new("Features") 14 | .default_open(true) 15 | .show(ui, |ui| { 16 | ui.label("The formula engine support:"); 17 | ui.label("• Any numbers, negative and positive, as float or integer."); 18 | ui.label("• Arithmetic operations: +, -, /, *, ^"); 19 | ui.label("• Logical operations: AND(), OR(), NOT(), XOR()."); 20 | ui.label("• Comparison operations: =, >, >=, <, <=, <>."); 21 | ui.label("• String operation: & (concatenation)."); 22 | ui.label("• Built-in variables: TRUE, FALSE."); 23 | ui.label("• Excel functions: ABS(), SUM(), PRODUCT(), AVERAGE(), RIGHT(), LEFT(), IF(), ISBLANK()."); 24 | ui.label("• Operations on lists of values (one-dimensional range)."); 25 | ui.label("• Add or subtract dates and Excel function DAYS()."); 26 | ui.label("• Custom functions with number arguments."); 27 | }); 28 | 29 | CollapsingHeader::new("Examples") 30 | .default_open(false) 31 | .show(ui, |ui| { 32 | self.add_examples(ui); 33 | }); 34 | 35 | // Logical Expressions section 36 | CollapsingHeader::new("Logical Expressions") 37 | .default_open(false) 38 | .show(ui, |ui| { 39 | ui.label("Supports logical expressions like AND(), OR(), and more:"); 40 | ui.monospace("=2>=1"); 41 | ui.monospace("=OR(1>1,1<>1)"); 42 | ui.monospace("=AND(\"test\",\"True\", 1, true)"); 43 | }); 44 | 45 | // Date Handling section 46 | CollapsingHeader::new("Handling Dates") 47 | .default_open(false) 48 | .show(ui, |ui| { 49 | ui.label("Supports adding, subtracting, and calculating days between dates:"); 50 | ui.label("• Dates must be written in the RFC 3339: e.g., 2019-03-01T02:00:00.000Z"); 51 | ui.monospace("=DAYS(A12, A32)"); 52 | }); 53 | 54 | CollapsingHeader::new("References") 55 | .default_open(false) 56 | .show(ui, |ui| { 57 | ui.label("Supports referencing other cells:"); 58 | ui.monospace("=A12"); 59 | ui.label("• The demo limits the number of allowed references per cell to 1000."); 60 | }); 61 | } 62 | 63 | fn add_examples(&self, ui: &mut Ui) { 64 | CollapsingHeader::new("Parsing and Evaluating Formulas") 65 | .default_open(true) 66 | .show(ui, |ui| { 67 | ui.monospace("=1+2"); 68 | ui.monospace("=(1*(2+3))*2"); 69 | ui.monospace("=1+3/0"); 70 | }); 71 | 72 | CollapsingHeader::new("Concatenating Strings") 73 | .default_open(true) 74 | .show(ui, |ui| { 75 | ui.monospace(r#"="Hello " & " World!""#); 76 | ui.label("• Concatenating number and string results in a #CAST! 
error."); 77 | }); 78 | 79 | CollapsingHeader::new("Excel Functions") 80 | .default_open(true) 81 | .show(ui, |ui| { 82 | ui.monospace("=ABS(-1)"); 83 | ui.monospace(r#"=SUM(1,2,"3")"#); 84 | ui.monospace("=PRODUCT(ABS(1),2*1, 3,4*1)"); 85 | ui.monospace("=RIGHT(\"apple\", 3)"); 86 | ui.monospace("=LEFT(\"apple\", 3)"); 87 | ui.monospace("=LEFT(\"apple\")"); 88 | ui.monospace("=IF(TRUE,1,0)"); 89 | }); 90 | 91 | CollapsingHeader::new("Working with Lists") 92 | .default_open(true) 93 | .show(ui, |ui| { 94 | ui.monospace("={1,2,3}+{1,2,3}"); 95 | }); 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /feldera/deploy.sh: -------------------------------------------------------------------------------- 1 | fda create xls || true 2 | fda stop xls || true 3 | fda program set xls program.sql --udf-rs udf/src/lib.rs --udf-toml udf/udf.toml 4 | fda set-config xls workers 8 5 | fda restart --recompile xls -------------------------------------------------------------------------------- /feldera/program.sql: -------------------------------------------------------------------------------- 1 | -- Given a cell value as a formula (e.g., =A0+B0), and a context with cell values 2 | -- referenced in the formula, returns the computed value of the cell 3 | create function cell_value(cell varchar(64), mentions_ids bigint array, mentions_values varchar(64) array) returns varchar(64); 4 | 5 | -- Given a cell value e.g., =A0+B0, returns an array of cell ids that were mentioned in the formula 6 | create function mentions(cell varchar(64)) returns bigint array; 7 | 8 | -- Forward declaration of spreadsheet view 9 | declare recursive view spreadsheet_view ( 10 | id bigint not null, 11 | background integer not null, 12 | raw_value varchar(64) not null, 13 | computed_value varchar(64) 14 | ); 15 | 16 | -- Raw spreadsheet cell data coming from backend/user, updates 17 | -- are inserted as new entries with newer timestamps 18 | create table spreadsheet_data ( 19 | id bigint not null, 20 | ip varchar(45) not null, 21 | ts timestamp not null, 22 | raw_value varchar(64) not null, 23 | background integer not null 24 | ) with ( 25 | 'materialized' = 'true', 26 | 'connectors' = '[{ 27 | "transport": { 28 | "name": "datagen", 29 | "config": { 30 | "workers": 1, 31 | "plan": [{ 32 | "limit": 23, 33 | "fields": { 34 | "id": { "values": [1039999974, 0, 1, 2, 12, 14, 40, 66, 92, 118, 170, 196, 222, 13, 65, 91, 117, 15, 41, 67, 93, 119, 39, 144] }, 35 | "ip": { "values": ["0"] }, 36 | "raw_value": { "values": ["42", "=A39999999", "=A0", "=A0+B0", "Reference", "Functions", "=ABS(-1)", "=AVERAGE(1,2,3,1,2,3)", "={1,2,3}+{1,2,3}", "=SUM(1,2,3)", "=PRODUCT(ABS(1),2*1, 3,4*1)", "=RIGHT(\"apple\", 3)", "=LEFT(\"apple\", 3)", "Logic", "=2>=1", "=OR(1>1,1<>1)", "=AND(\"test\",\"True\", 1, true)", "Datetime", "2019-03-01T02:00:00.000Z", "2019-08-30T02:00:00.000Z", "=DAYS(P1, P2)", "=P1+5", "=XOR(0,1)", "=IF(TRUE,1,0)"] }, 37 | "background": { "strategy": "uniform", "range": [0, 1] } 38 | } 39 | }] 40 | } 41 | } 42 | }]' 43 | ); 44 | 45 | -- Get the latest cell value for the spreadsheet. 
46 | -- (By finding the one with the highest `ts` for a given `id`) 47 | create view latest_cells as with 48 | max_ts_per_cell as ( 49 | select 50 | id, 51 | max(ts) as max_ts 52 | from 53 | spreadsheet_data 54 | group by 55 | id 56 | ) 57 | select 58 | s.id, 59 | s.raw_value, 60 | s.background, 61 | -- The append with null is silly but crucial to ensure that the 62 | -- cross join in `latest_cells_with_mention` returns all cells 63 | -- not just those that reference another cell 64 | ARRAY_APPEND(mentions(s.raw_value), null) as mentioned_cell_ids 65 | from 66 | spreadsheet_data s 67 | join max_ts_per_cell mt on s.id = mt.id and s.ts = mt.max_ts; 68 | 69 | -- List all mentioned ids per latest cell 70 | create view latest_cells_with_mentions as 71 | select 72 | s.id, 73 | s.raw_value, 74 | s.background, 75 | m.mentioned_id 76 | from 77 | latest_cells s, unnest(s.mentioned_cell_ids) as m(mentioned_id); 78 | 79 | -- Like latest_cells_with_mentions, but enrich it with values of mentioned cells 80 | create local view mentions_with_values as 81 | select 82 | m.id, 83 | m.raw_value, 84 | m.background, 85 | m.mentioned_id, 86 | sv.computed_value as mentioned_value 87 | from 88 | latest_cells_with_mentions m 89 | left join 90 | spreadsheet_view sv on m.mentioned_id = sv.id; 91 | 92 | -- We aggregate mentioned values and ids back into arrays 93 | create local view mentions_aggregated as 94 | select 95 | id, 96 | raw_value, 97 | background, 98 | ARRAY_AGG(mentioned_id) as mentions_ids, 99 | ARRAY_AGG(mentioned_value) as mentions_values 100 | from 101 | mentions_with_values 102 | group by 103 | id, 104 | raw_value, 105 | background; 106 | 107 | -- Calculate the final spreadsheet by executing the UDF for the formula 108 | create materialized view spreadsheet_view as 109 | select 110 | id, 111 | background, 112 | raw_value, 113 | cell_value(raw_value, mentions_ids, mentions_values) AS computed_value 114 | from 115 | mentions_aggregated; 116 | 117 | -- Figure out which IPs currently reached their API limit 118 | create materialized view api_limit_reached as 119 | select 120 | ip 121 | from 122 | spreadsheet_data 123 | where 124 | ts >= NOW() - INTERVAL 60 MINUTES 125 | group by 126 | ip 127 | having 128 | count(*) > 100; 129 | 130 | -- Compute statistics 131 | create materialized view spreadsheet_statistics as 132 | with filled_total as ( 133 | select 134 | count(distinct id) as filled_total 135 | from 136 | spreadsheet_data 137 | ), 138 | filled_this_hour as ( 139 | select 140 | count(*) as filled_this_hour 141 | from 142 | spreadsheet_data 143 | where 144 | ts >= NOW() - INTERVAL 1 HOUR 145 | ), 146 | filled_today as ( 147 | select 148 | count(*) as filled_today 149 | from 150 | spreadsheet_data 151 | where 152 | ts >= NOW() - INTERVAL 1 DAY 153 | ), 154 | filled_this_week as ( 155 | select 156 | count(*) as filled_this_week 157 | from 158 | spreadsheet_data 159 | where 160 | ts >= NOW() - INTERVAL 1 WEEK 161 | ), 162 | currently_active_users as ( 163 | select 164 | count(distinct ip) as currently_active_users 165 | from 166 | spreadsheet_data 167 | where 168 | ts >= NOW() - INTERVAL 5 MINUTE 169 | ) 170 | select 171 | (select filled_total from filled_total) as filled_total, 172 | (select filled_this_hour from filled_this_hour) as filled_this_hour, 173 | (select filled_today from filled_today) as filled_today, 174 | (select filled_this_week from filled_this_week) as filled_this_week, 175 | (select currently_active_users from currently_active_users) as currently_active_users; 
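
Note: the materialized views defined above (`spreadsheet_view`, `spreadsheet_statistics`, `api_limit_reached`) are what the backend reads. As a rough illustration, the sketch below runs the same kind of ad-hoc query the server issues against the pipeline's HTTP API; the host, the pipeline name `xls`, and the use of reqwest's blocking client (with the "blocking" feature) are assumptions made for this example rather than anything taken from this file.

    // Illustrative only: fetch the current statistics snapshot as JSON lines.
    fn fetch_statistics() -> Result<String, reqwest::Error> {
        let client = reqwest::blocking::Client::new();
        client
            .get("http://localhost:8080/v0/pipelines/xls/query")
            .query(&[("sql", "SELECT * FROM spreadsheet_statistics"), ("format", "json")])
            .send()?
            .text()
    }
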
-------------------------------------------------------------------------------- /feldera/udf/Cargo.toml: --------------------------------------------------------------------------------
1 | [package]
2 | name = "udf"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | [dependencies]
7 | xlformula_engine = "0.1.18"
8 | chrono = "0.4"
9 | log = "0.4"
10 | env_logger = "0.11.5"
11 | 
-------------------------------------------------------------------------------- /feldera/udf/src/lib.rs: --------------------------------------------------------------------------------
1 | use feldera_sqllib::*;
2 | 
3 | use std::sync::Arc;
4 | use std::collections::{BTreeMap, VecDeque};
5 | use xlformula_engine::calculate;
6 | use xlformula_engine::parse_formula;
7 | use xlformula_engine::NoCustomFunction;
8 | use xlformula_engine::types::{Formula, Value, Error, Boolean};
9 | use chrono::DateTime;
10 | 
11 | fn parse_as_value(input: SqlString) -> Value {
12 |     if let Ok(number) = input.str().parse() {
13 |         return Value::Number(number);
14 |     }
15 |     if let Ok(boolean) = input.str().parse::<bool>() {
16 |         return Value::Boolean(if boolean { Boolean::True } else { Boolean::False });
17 |     }
18 |     if let Ok(date) = DateTime::parse_from_rfc3339(input.str()) {
19 |         return Value::Date(date);
20 |     }
21 |     Value::Text(String::from(input.str()))
22 | }
23 | 
24 | pub fn cell_value(raw_content: Option<SqlString>, mentions_ids: Option<Arc<Vec<Option<i64>>>>, mentions_values: Option<Arc<Vec<Option<SqlString>>>>) -> Result<Option<SqlString>, Box<dyn std::error::Error>> {
25 |     let cell_content = raw_content.unwrap_or_else(|| SqlString::new());
26 |     let formula = parse_formula::parse_string_to_formula(cell_content.str(), None::<NoCustomFunction>);
27 | 
28 |     let mentions_ids = mentions_ids.map(Arc::unwrap_or_clone).unwrap_or_else(|| vec![]);
29 |     let mentions_values = mentions_values.map(Arc::unwrap_or_clone).unwrap_or_else(|| vec![]);
30 |     assert_eq!(mentions_ids.len(), mentions_values.len());
31 |     let mut context = BTreeMap::new();
32 |     for (id, value) in mentions_ids.into_iter().zip(mentions_values.into_iter()) {
33 |         if let (Some(id), Some(value)) = (id, value) {
34 |             context.insert(id_to_cell_reference(id), parse_as_value(value));
35 |         }
36 |     }
37 |     let data_function = |s: String| context.get(&s).cloned().unwrap_or_else(|| Value::Error(Error::Value));
38 | 
39 |     let result = calculate::calculate_formula(formula, Some(&data_function));
40 |     let result_str = calculate::result_to_string(result);
41 |     Ok(Some(SqlString::from(result_str)))
42 | }
43 | 
44 | fn cell_references_to_ids(crf: &str) -> Option<i64> {
45 |     let mut col = 0;
46 |     let mut row = 0;
47 |     for c in crf.chars() {
48 |         if c.is_ascii_alphabetic() {
49 |             col = col * 26 + (c.to_ascii_uppercase() as i64 - 'A' as i64);
50 |         } else if c.is_ascii_digit() {
51 |             row = row * 10 + (c as i64 - '0' as i64);
52 |         } else {
53 |             return None;
54 |         }
55 |     }
56 |     Some(col + row * 26)
57 | }
58 | 
59 | fn id_to_cell_reference(id: i64) -> String {
60 |     let mut col = id % 26;
61 |     let row = id / 26;
62 |     let mut result = String::new();
63 |     while col >= 0 {
64 |         result.push((col as u8 + 'A' as u8) as char);
65 |         col = col / 26 - 1;
66 |     }
67 |     result.push_str(&row.to_string());
68 |     result
69 | }
70 | 
71 | pub fn mentions(raw_content: Option<SqlString>) -> Result<Option<Arc<Vec<Option<i64>>>>, Box<dyn std::error::Error>> {
72 |     let cell_content = raw_content.unwrap_or_else(|| SqlString::new());
73 |     let formula = parse_formula::parse_string_to_formula(cell_content.str(), None::<NoCustomFunction>);
74 | 
75 |     let mut formulas = VecDeque::from(vec![formula]);
76 |     let mut references = vec![];
77 | 
78 |     while !formulas.is_empty() {
79 |         let formula = formulas.pop_front().unwrap();
80 |         match formula {
81 |             Formula::Reference(reference) => {
82 |                 references.push(reference);
83 |             },
84 |             Formula::Iterator(iterator) => {
85 |                 formulas.extend(iterator);
86 |             },
87 |             Formula::Operation(expression) => {
88 |                 formulas.extend(expression.values);
89 |             },
90 |             _ => {}
91 |         }
92 |     }
93 |     let mut cell_ids: Vec<Option<i64>> = references.iter().map(|r| cell_references_to_ids(r)).collect();
94 |     cell_ids.sort_unstable();
95 | 
96 |     Ok(Some(Arc::new(cell_ids)))
97 | }
98 | 
99 | #[cfg(test)]
100 | mod tests {
101 |     use super::*;
102 | 
103 |     #[test]
104 |     fn cell_ref_id() {
105 |         assert_eq!(cell_references_to_ids("A0"), Some(0));
106 |         assert_eq!(cell_references_to_ids("A1"), Some(26));
107 |         assert_eq!(cell_references_to_ids("A2"), Some(52));
108 |         assert_eq!(cell_references_to_ids("B0"), Some(1));
109 |         assert_eq!(cell_references_to_ids("C0"), Some(2));
110 |         assert_eq!(cell_references_to_ids("Z0"), Some(25));
111 |         assert_eq!(cell_references_to_ids("Z100"), Some(100*26 + 25));
112 |         assert_eq!(cell_references_to_ids("Z100"), Some(100*26 + 25));
113 |         assert_eq!(cell_references_to_ids("Z10000000"), Some(260000025));
114 | 
115 |         assert_eq!(id_to_cell_reference(0), "A0".to_string());
116 |         assert_eq!(id_to_cell_reference(26), "A1".to_string());
117 |         assert_eq!(id_to_cell_reference(52), "A2".to_string());
118 |         assert_eq!(id_to_cell_reference(1), "B0".to_string());
119 |         assert_eq!(id_to_cell_reference(2), "C0".to_string());
120 |         assert_eq!(id_to_cell_reference(25), "Z0".to_string());
121 |         assert_eq!(id_to_cell_reference(100*26 + 25), "Z100".to_string());
122 |         assert_eq!(id_to_cell_reference(100*26 + 25), "Z100".to_string());
123 |         assert_eq!(id_to_cell_reference(260000025), "Z10000000".to_string());
124 |         assert_eq!(id_to_cell_reference(1_040_000_000-1), "Z39999999".to_string());
125 |     }
126 | 
127 |     #[test]
128 |     fn mentions_empty() {
129 |         let _r = env_logger::try_init();
130 | 
131 |         let result = mentions(Some("".to_string())).unwrap().unwrap();
132 |         assert_eq!(result, vec![]);
133 |     }
134 | 
135 |     #[test]
136 |     fn mentions_one() {
137 |         let _r = env_logger::try_init();
138 |         let result = mentions(Some("=A1".to_string())).unwrap().unwrap();
139 |         assert_eq!(result, vec![Some(26)]);
140 |     }
141 | 
142 |     #[test]
143 |     fn mentions_two() {
144 |         let _r = env_logger::try_init();
145 |         let result = mentions(Some("=A1+A2".to_string())).unwrap().unwrap();
146 |         assert_eq!(result, vec![Some(26), Some(52)]);
147 |     }
148 | 
149 |     #[test]
150 |     fn mentions_set() {
151 |         let _r = env_logger::try_init();
152 |         let result = mentions(Some("=SUM(A0, A10)".to_string())).unwrap().unwrap();
153 |         assert_eq!(result, vec![Some(0), Some(26*10)]);
154 |     }
155 | 
156 | 
157 |     #[test]
158 |     fn empty() {
159 |         let result = cell_value(Some("".to_string()), None, None).unwrap().unwrap();
160 |         assert_eq!(result, String::new());
161 |     }
162 | 
163 |     #[test]
164 |     fn non_formula() {
165 |         let result = cell_value(Some("just a text".to_string()), None, None).unwrap().unwrap();
166 |         assert_eq!(result, "just a text".to_string());
167 |     }
168 | 
169 |     #[test]
170 |     fn math() {
171 |         let result = cell_value(Some("=(1*(2+3))*2".to_string()), None, None).unwrap().unwrap();
172 |         assert_eq!(result, "10");
173 |     }
174 | }
175 | 
-------------------------------------------------------------------------------- /feldera/udf/udf.toml: --------------------------------------------------------------------------------
1 | xlformula_engine = { git = "https://github.com/gz/XLFormula-Engine.git", rev = "ba11e1f" }
2 | log = "0.4"
3 | chrono = "0.4"
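
To make the contract between `mentions()` and `cell_value()` above concrete: `mentions()` extracts the referenced cell ids from a formula, the pipeline looks up their computed values, and `cell_value()` then receives both arrays in matching order. A rough sketch of a single evaluation (illustrative only; it relies just on the signatures above and on `SqlString::from(String)`, and the expected output is what the formula engine should produce):

    // Evaluate "=A0+B0" with A0 = 1 and B0 = 2 (ids 0 and 1, cf. cell_references_to_ids).
    let ids = Some(Arc::new(vec![Some(0i64), Some(1i64)]));
    let values = Some(Arc::new(vec![
        Some(SqlString::from("1".to_string())),
        Some(SqlString::from("2".to_string())),
    ]));
    let computed = cell_value(Some(SqlString::from("=A0+B0".to_string())), ids, values)
        .unwrap()
        .unwrap();
    // The engine resolves A0 and B0 from the context; `computed` should hold "3".
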
-------------------------------------------------------------------------------- /loadtest/img2xls.py: -------------------------------------------------------------------------------- 1 | from PIL import Image 2 | import requests 3 | import struct 4 | import sys 5 | 6 | def make_cell(ide: int, raw_value: str, background: int): 7 | return { 8 | "id": ide, 9 | "raw_value": raw_value, 10 | "background": background, 11 | } 12 | 13 | def image_to_excel( 14 | input_image_path: str, 15 | max_width: int = 26, 16 | row_start: int = 0 17 | ): 18 | # 1. Load the image 19 | img = Image.open(input_image_path).convert("RGBA") 20 | 21 | # 2. Calculate the new height keeping aspect ratio 22 | width_percent = max_width / float(img.width) 23 | print(width_percent) 24 | new_height = int(float(img.height*2) * width_percent) 25 | 26 | # 3. Resize the image 27 | img = img.resize((max_width, new_height), Image.Resampling.LANCZOS) 28 | 29 | 30 | with requests.Session() as session: 31 | try: 32 | headers = {"Content-Type": "application/json"} 33 | # 5. For each pixel, create a cell with the corresponding fill color 34 | error_count = 0 35 | for row in range(new_height): 36 | for col in range(max_width): 37 | r, g, b, a = img.getpixel((col, row)) 38 | rgba_32 = struct.unpack('i', struct.pack('>4B', r, g, b, a))[0] 39 | idx = row_start*26+row*26+col 40 | cell = make_cell(idx, "", rgba_32) 41 | response = session.post("https://xls.fly.dev/api/spreadsheet", json=cell, headers=headers) 42 | if response.status_code != 200: 43 | error_count += 1 44 | except requests.RequestException as e: 45 | raise e 46 | if error_count > 0: 47 | print(f"Failed to send {error_count} cells") 48 | 49 | 50 | 51 | 52 | if __name__ == "__main__": 53 | img = sys.argv[1] 54 | max_width = int(sys.argv[2]) 55 | row_start = int(sys.argv[3]) 56 | 57 | # Example usage: 58 | image_to_excel( 59 | input_image_path=img, 60 | max_width=max_width, 61 | row_start=row_start 62 | ) 63 | -------------------------------------------------------------------------------- /loadtest/loadtest.py: -------------------------------------------------------------------------------- 1 | import time 2 | import random 3 | from pip._vendor import requests 4 | import string 5 | 6 | def generate_random_value(): 7 | """ 8 | Generate a random value that can be: 9 | 1. A random text string (alphanumeric). 10 | 2. A random number. 11 | 3. A formula in the format `=A1000`. 12 | """ 13 | choice = random.choice(["text", "number", "formula"]) 14 | 15 | if choice == "text": 16 | # Generate a random alphanumeric text string 17 | length = random.randint(5, 15) # Random length between 5 and 15 18 | return ''.join(random.choices(string.ascii_letters + string.digits, k=length)) 19 | 20 | elif choice == "number": 21 | # Generate a random integer 22 | return str(random.randint(1, 10000)) 23 | 24 | elif choice == "formula": 25 | # Generate a formula in the format =A1000 26 | letter = random.choice(string.ascii_uppercase) # Random uppercase letter A-Z 27 | number = random.randint(1, 10000) # Random number 28 | return f"={letter}{number}" 29 | 30 | def make_cell(ide: int, raw_value: str, background: int): 31 | return { 32 | "id": ide, 33 | "raw_value": raw_value, 34 | "background": background, 35 | } 36 | 37 | def lambda_handler(event, context): 38 | """ 39 | Lambda function handler that sends POST requests to a given URL 40 | for a limited duration. 
41 | 42 | Event structure: 43 | { 44 | "url": "http://localhost:3000/api/spreadsheet", 45 | "duration": 10, # Duration of load-test in seconds 46 | "interval": 0.1 # Time between requests in seconds 47 | cell_start: 0, # Start cell id for range 48 | cell_end: 100 # End cell id for range 49 | } 50 | """ 51 | # Read parameters from the event 52 | url = event["url"] 53 | duration = event.get("duration", 10) 54 | interval = event.get("interval", 0.1) 55 | cell_start = event.get("cell_start", 0) 56 | cell_end = event.get("cell_end", 10000) 57 | 58 | headers = {"Content-Type": "application/json"} 59 | 60 | # Validate URL 61 | if not url: 62 | return {"status": "error", "message": "URL is required"} 63 | 64 | start_time = time.time() 65 | responses = [] 66 | 67 | with requests.Session() as session: 68 | # Perform POST requests in a loop for the given duration 69 | while time.time() - start_time < duration: 70 | try: 71 | data = generate_random_value() 72 | cell = make_cell(random.randint(cell_start, cell_end), data, random.randint(0, 16777215)) 73 | 74 | response = session.post(url, json=cell, headers=headers) 75 | responses.append({ 76 | "status_code": response.status_code, 77 | "body": response.text, 78 | }) 79 | except requests.RequestException as e: 80 | responses.append({"error": str(e)}) 81 | 82 | if interval > 0: 83 | time.sleep(interval) 84 | 85 | return { 86 | "requests_made": len(responses), 87 | "responses": responses, 88 | } 89 | 90 | if __name__ == '__main__': 91 | total = 0 92 | resp = lambda_handler({ 93 | "url": "http://localhost:3000/api/spreadsheet", 94 | "duration": 10, 95 | "cell_start": 0, 96 | "cell_end": 1000, 97 | "interval": 0}, None) 98 | 99 | failed = len(list(filter(lambda x: x["status_code"] != 200 or 'error' in x, resp["responses"]))) 100 | total += resp["requests_made"] 101 | if failed > 0: 102 | print(resp["responses"]) 103 | print("Total {} req completed ({} failures)".format(total, failed)) 104 | -------------------------------------------------------------------------------- /server/.dockerignore: -------------------------------------------------------------------------------- 1 | fly.toml 2 | .git/ 3 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .envrc 3 | .bin -------------------------------------------------------------------------------- /server/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "addr2line" 7 | version = "0.21.0" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" 10 | dependencies = [ 11 | "gimli", 12 | ] 13 | 14 | [[package]] 15 | name = "adler" 16 | version = "1.0.2" 17 | source = "registry+https://github.com/rust-lang/crates.io-index" 18 | checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" 19 | 20 | [[package]] 21 | name = "async-trait" 22 | version = "0.1.80" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" 25 | dependencies = [ 26 | "proc-macro2", 27 | "quote", 28 | "syn", 29 | ] 30 | 31 | [[package]] 32 | name = "axum" 33 | version = "0.7.5" 34 | source = "registry+https://github.com/rust-lang/crates.io-index" 35 | checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" 36 | dependencies = [ 37 | "async-trait", 38 | "axum-core", 39 | "bytes", 40 | "futures-util", 41 | "http", 42 | "http-body", 43 | "http-body-util", 44 | "hyper", 45 | "hyper-util", 46 | "itoa", 47 | "matchit", 48 | "memchr", 49 | "mime", 50 | "percent-encoding", 51 | "pin-project-lite", 52 | "rustversion", 53 | "serde", 54 | "serde_json", 55 | "serde_path_to_error", 56 | "serde_urlencoded", 57 | "sync_wrapper 1.0.1", 58 | "tokio", 59 | "tower", 60 | "tower-layer", 61 | "tower-service", 62 | "tracing", 63 | ] 64 | 65 | [[package]] 66 | name = "axum-core" 67 | version = "0.4.3" 68 | source = "registry+https://github.com/rust-lang/crates.io-index" 69 | checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" 70 | dependencies = [ 71 | "async-trait", 72 | "bytes", 73 | "futures-util", 74 | "http", 75 | "http-body", 76 | "http-body-util", 77 | "mime", 78 | "pin-project-lite", 79 | "rustversion", 80 | "sync_wrapper 0.1.2", 81 | "tower-layer", 82 | "tower-service", 83 | "tracing", 84 | ] 85 | 86 | [[package]] 87 | name = "backtrace" 88 | version = "0.3.71" 89 | source = "registry+https://github.com/rust-lang/crates.io-index" 90 | checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" 91 | dependencies = [ 92 | "addr2line", 93 | "cc", 94 | "cfg-if", 95 | "libc", 96 | "miniz_oxide", 97 | "object", 98 | "rustc-demangle", 99 | ] 100 | 101 | [[package]] 102 | name = "bytes" 103 | version = "1.6.0" 104 | source = "registry+https://github.com/rust-lang/crates.io-index" 105 | checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" 106 | 107 | [[package]] 108 | name = "cc" 109 | version = "1.0.97" 110 | source = "registry+https://github.com/rust-lang/crates.io-index" 111 | checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" 112 | 113 | [[package]] 114 | name = "cfg-if" 115 | version = "1.0.0" 116 | source = "registry+https://github.com/rust-lang/crates.io-index" 117 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 118 | 119 | [[package]] 120 | name = "fnv" 121 | version = "1.0.7" 122 | source = "registry+https://github.com/rust-lang/crates.io-index" 123 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 124 | 125 | [[package]] 126 | name = "form_urlencoded" 127 | version = "1.2.1" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" 130 | dependencies = [ 131 | 
"percent-encoding", 132 | ] 133 | 134 | [[package]] 135 | name = "futures-channel" 136 | version = "0.3.30" 137 | source = "registry+https://github.com/rust-lang/crates.io-index" 138 | checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" 139 | dependencies = [ 140 | "futures-core", 141 | ] 142 | 143 | [[package]] 144 | name = "futures-core" 145 | version = "0.3.30" 146 | source = "registry+https://github.com/rust-lang/crates.io-index" 147 | checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" 148 | 149 | [[package]] 150 | name = "futures-task" 151 | version = "0.3.30" 152 | source = "registry+https://github.com/rust-lang/crates.io-index" 153 | checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" 154 | 155 | [[package]] 156 | name = "futures-util" 157 | version = "0.3.30" 158 | source = "registry+https://github.com/rust-lang/crates.io-index" 159 | checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" 160 | dependencies = [ 161 | "futures-core", 162 | "futures-task", 163 | "pin-project-lite", 164 | "pin-utils", 165 | ] 166 | 167 | [[package]] 168 | name = "generic-rust" 169 | version = "0.1.0" 170 | dependencies = [ 171 | "axum", 172 | "tokio", 173 | ] 174 | 175 | [[package]] 176 | name = "gimli" 177 | version = "0.28.1" 178 | source = "registry+https://github.com/rust-lang/crates.io-index" 179 | checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" 180 | 181 | [[package]] 182 | name = "hermit-abi" 183 | version = "0.3.9" 184 | source = "registry+https://github.com/rust-lang/crates.io-index" 185 | checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" 186 | 187 | [[package]] 188 | name = "http" 189 | version = "1.1.0" 190 | source = "registry+https://github.com/rust-lang/crates.io-index" 191 | checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" 192 | dependencies = [ 193 | "bytes", 194 | "fnv", 195 | "itoa", 196 | ] 197 | 198 | [[package]] 199 | name = "http-body" 200 | version = "1.0.0" 201 | source = "registry+https://github.com/rust-lang/crates.io-index" 202 | checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" 203 | dependencies = [ 204 | "bytes", 205 | "http", 206 | ] 207 | 208 | [[package]] 209 | name = "http-body-util" 210 | version = "0.1.1" 211 | source = "registry+https://github.com/rust-lang/crates.io-index" 212 | checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" 213 | dependencies = [ 214 | "bytes", 215 | "futures-core", 216 | "http", 217 | "http-body", 218 | "pin-project-lite", 219 | ] 220 | 221 | [[package]] 222 | name = "httparse" 223 | version = "1.8.0" 224 | source = "registry+https://github.com/rust-lang/crates.io-index" 225 | checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" 226 | 227 | [[package]] 228 | name = "httpdate" 229 | version = "1.0.3" 230 | source = "registry+https://github.com/rust-lang/crates.io-index" 231 | checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" 232 | 233 | [[package]] 234 | name = "hyper" 235 | version = "1.3.1" 236 | source = "registry+https://github.com/rust-lang/crates.io-index" 237 | checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" 238 | dependencies = [ 239 | "bytes", 240 | "futures-channel", 241 | "futures-util", 242 | "http", 243 | "http-body", 244 | "httparse", 245 | "httpdate", 246 | "itoa", 247 | "pin-project-lite", 248 | 
"smallvec", 249 | "tokio", 250 | ] 251 | 252 | [[package]] 253 | name = "hyper-util" 254 | version = "0.1.3" 255 | source = "registry+https://github.com/rust-lang/crates.io-index" 256 | checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" 257 | dependencies = [ 258 | "bytes", 259 | "futures-util", 260 | "http", 261 | "http-body", 262 | "hyper", 263 | "pin-project-lite", 264 | "socket2", 265 | "tokio", 266 | ] 267 | 268 | [[package]] 269 | name = "itoa" 270 | version = "1.0.11" 271 | source = "registry+https://github.com/rust-lang/crates.io-index" 272 | checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" 273 | 274 | [[package]] 275 | name = "libc" 276 | version = "0.2.154" 277 | source = "registry+https://github.com/rust-lang/crates.io-index" 278 | checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" 279 | 280 | [[package]] 281 | name = "log" 282 | version = "0.4.21" 283 | source = "registry+https://github.com/rust-lang/crates.io-index" 284 | checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" 285 | 286 | [[package]] 287 | name = "matchit" 288 | version = "0.7.3" 289 | source = "registry+https://github.com/rust-lang/crates.io-index" 290 | checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" 291 | 292 | [[package]] 293 | name = "memchr" 294 | version = "2.7.2" 295 | source = "registry+https://github.com/rust-lang/crates.io-index" 296 | checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" 297 | 298 | [[package]] 299 | name = "mime" 300 | version = "0.3.17" 301 | source = "registry+https://github.com/rust-lang/crates.io-index" 302 | checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" 303 | 304 | [[package]] 305 | name = "miniz_oxide" 306 | version = "0.7.2" 307 | source = "registry+https://github.com/rust-lang/crates.io-index" 308 | checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" 309 | dependencies = [ 310 | "adler", 311 | ] 312 | 313 | [[package]] 314 | name = "mio" 315 | version = "0.8.11" 316 | source = "registry+https://github.com/rust-lang/crates.io-index" 317 | checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" 318 | dependencies = [ 319 | "libc", 320 | "wasi", 321 | "windows-sys 0.48.0", 322 | ] 323 | 324 | [[package]] 325 | name = "num_cpus" 326 | version = "1.16.0" 327 | source = "registry+https://github.com/rust-lang/crates.io-index" 328 | checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" 329 | dependencies = [ 330 | "hermit-abi", 331 | "libc", 332 | ] 333 | 334 | [[package]] 335 | name = "object" 336 | version = "0.32.2" 337 | source = "registry+https://github.com/rust-lang/crates.io-index" 338 | checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" 339 | dependencies = [ 340 | "memchr", 341 | ] 342 | 343 | [[package]] 344 | name = "once_cell" 345 | version = "1.19.0" 346 | source = "registry+https://github.com/rust-lang/crates.io-index" 347 | checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" 348 | 349 | [[package]] 350 | name = "percent-encoding" 351 | version = "2.3.1" 352 | source = "registry+https://github.com/rust-lang/crates.io-index" 353 | checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" 354 | 355 | [[package]] 356 | name = "pin-project" 357 | version = "1.1.5" 358 | source = "registry+https://github.com/rust-lang/crates.io-index" 359 | 
checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" 360 | dependencies = [ 361 | "pin-project-internal", 362 | ] 363 | 364 | [[package]] 365 | name = "pin-project-internal" 366 | version = "1.1.5" 367 | source = "registry+https://github.com/rust-lang/crates.io-index" 368 | checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" 369 | dependencies = [ 370 | "proc-macro2", 371 | "quote", 372 | "syn", 373 | ] 374 | 375 | [[package]] 376 | name = "pin-project-lite" 377 | version = "0.2.14" 378 | source = "registry+https://github.com/rust-lang/crates.io-index" 379 | checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" 380 | 381 | [[package]] 382 | name = "pin-utils" 383 | version = "0.1.0" 384 | source = "registry+https://github.com/rust-lang/crates.io-index" 385 | checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" 386 | 387 | [[package]] 388 | name = "proc-macro2" 389 | version = "1.0.82" 390 | source = "registry+https://github.com/rust-lang/crates.io-index" 391 | checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" 392 | dependencies = [ 393 | "unicode-ident", 394 | ] 395 | 396 | [[package]] 397 | name = "quote" 398 | version = "1.0.36" 399 | source = "registry+https://github.com/rust-lang/crates.io-index" 400 | checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" 401 | dependencies = [ 402 | "proc-macro2", 403 | ] 404 | 405 | [[package]] 406 | name = "rustc-demangle" 407 | version = "0.1.24" 408 | source = "registry+https://github.com/rust-lang/crates.io-index" 409 | checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" 410 | 411 | [[package]] 412 | name = "rustversion" 413 | version = "1.0.17" 414 | source = "registry+https://github.com/rust-lang/crates.io-index" 415 | checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" 416 | 417 | [[package]] 418 | name = "ryu" 419 | version = "1.0.18" 420 | source = "registry+https://github.com/rust-lang/crates.io-index" 421 | checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" 422 | 423 | [[package]] 424 | name = "serde" 425 | version = "1.0.202" 426 | source = "registry+https://github.com/rust-lang/crates.io-index" 427 | checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" 428 | dependencies = [ 429 | "serde_derive", 430 | ] 431 | 432 | [[package]] 433 | name = "serde_derive" 434 | version = "1.0.202" 435 | source = "registry+https://github.com/rust-lang/crates.io-index" 436 | checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" 437 | dependencies = [ 438 | "proc-macro2", 439 | "quote", 440 | "syn", 441 | ] 442 | 443 | [[package]] 444 | name = "serde_json" 445 | version = "1.0.117" 446 | source = "registry+https://github.com/rust-lang/crates.io-index" 447 | checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" 448 | dependencies = [ 449 | "itoa", 450 | "ryu", 451 | "serde", 452 | ] 453 | 454 | [[package]] 455 | name = "serde_path_to_error" 456 | version = "0.1.16" 457 | source = "registry+https://github.com/rust-lang/crates.io-index" 458 | checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" 459 | dependencies = [ 460 | "itoa", 461 | "serde", 462 | ] 463 | 464 | [[package]] 465 | name = "serde_urlencoded" 466 | version = "0.7.1" 467 | source = "registry+https://github.com/rust-lang/crates.io-index" 468 | checksum = 
"d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" 469 | dependencies = [ 470 | "form_urlencoded", 471 | "itoa", 472 | "ryu", 473 | "serde", 474 | ] 475 | 476 | [[package]] 477 | name = "smallvec" 478 | version = "1.13.2" 479 | source = "registry+https://github.com/rust-lang/crates.io-index" 480 | checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" 481 | 482 | [[package]] 483 | name = "socket2" 484 | version = "0.5.7" 485 | source = "registry+https://github.com/rust-lang/crates.io-index" 486 | checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" 487 | dependencies = [ 488 | "libc", 489 | "windows-sys 0.52.0", 490 | ] 491 | 492 | [[package]] 493 | name = "syn" 494 | version = "2.0.63" 495 | source = "registry+https://github.com/rust-lang/crates.io-index" 496 | checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704" 497 | dependencies = [ 498 | "proc-macro2", 499 | "quote", 500 | "unicode-ident", 501 | ] 502 | 503 | [[package]] 504 | name = "sync_wrapper" 505 | version = "0.1.2" 506 | source = "registry+https://github.com/rust-lang/crates.io-index" 507 | checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" 508 | 509 | [[package]] 510 | name = "sync_wrapper" 511 | version = "1.0.1" 512 | source = "registry+https://github.com/rust-lang/crates.io-index" 513 | checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" 514 | 515 | [[package]] 516 | name = "tokio" 517 | version = "1.37.0" 518 | source = "registry+https://github.com/rust-lang/crates.io-index" 519 | checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" 520 | dependencies = [ 521 | "backtrace", 522 | "libc", 523 | "mio", 524 | "num_cpus", 525 | "pin-project-lite", 526 | "socket2", 527 | "tokio-macros", 528 | "windows-sys 0.48.0", 529 | ] 530 | 531 | [[package]] 532 | name = "tokio-macros" 533 | version = "2.2.0" 534 | source = "registry+https://github.com/rust-lang/crates.io-index" 535 | checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" 536 | dependencies = [ 537 | "proc-macro2", 538 | "quote", 539 | "syn", 540 | ] 541 | 542 | [[package]] 543 | name = "tower" 544 | version = "0.4.13" 545 | source = "registry+https://github.com/rust-lang/crates.io-index" 546 | checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" 547 | dependencies = [ 548 | "futures-core", 549 | "futures-util", 550 | "pin-project", 551 | "pin-project-lite", 552 | "tokio", 553 | "tower-layer", 554 | "tower-service", 555 | "tracing", 556 | ] 557 | 558 | [[package]] 559 | name = "tower-layer" 560 | version = "0.3.2" 561 | source = "registry+https://github.com/rust-lang/crates.io-index" 562 | checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" 563 | 564 | [[package]] 565 | name = "tower-service" 566 | version = "0.3.2" 567 | source = "registry+https://github.com/rust-lang/crates.io-index" 568 | checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" 569 | 570 | [[package]] 571 | name = "tracing" 572 | version = "0.1.40" 573 | source = "registry+https://github.com/rust-lang/crates.io-index" 574 | checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" 575 | dependencies = [ 576 | "log", 577 | "pin-project-lite", 578 | "tracing-core", 579 | ] 580 | 581 | [[package]] 582 | name = "tracing-core" 583 | version = "0.1.32" 584 | source = "registry+https://github.com/rust-lang/crates.io-index" 585 | 
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" 586 | dependencies = [ 587 | "once_cell", 588 | ] 589 | 590 | [[package]] 591 | name = "unicode-ident" 592 | version = "1.0.12" 593 | source = "registry+https://github.com/rust-lang/crates.io-index" 594 | checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" 595 | 596 | [[package]] 597 | name = "wasi" 598 | version = "0.11.0+wasi-snapshot-preview1" 599 | source = "registry+https://github.com/rust-lang/crates.io-index" 600 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 601 | 602 | [[package]] 603 | name = "windows-sys" 604 | version = "0.48.0" 605 | source = "registry+https://github.com/rust-lang/crates.io-index" 606 | checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" 607 | dependencies = [ 608 | "windows-targets 0.48.5", 609 | ] 610 | 611 | [[package]] 612 | name = "windows-sys" 613 | version = "0.52.0" 614 | source = "registry+https://github.com/rust-lang/crates.io-index" 615 | checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" 616 | dependencies = [ 617 | "windows-targets 0.52.5", 618 | ] 619 | 620 | [[package]] 621 | name = "windows-targets" 622 | version = "0.48.5" 623 | source = "registry+https://github.com/rust-lang/crates.io-index" 624 | checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" 625 | dependencies = [ 626 | "windows_aarch64_gnullvm 0.48.5", 627 | "windows_aarch64_msvc 0.48.5", 628 | "windows_i686_gnu 0.48.5", 629 | "windows_i686_msvc 0.48.5", 630 | "windows_x86_64_gnu 0.48.5", 631 | "windows_x86_64_gnullvm 0.48.5", 632 | "windows_x86_64_msvc 0.48.5", 633 | ] 634 | 635 | [[package]] 636 | name = "windows-targets" 637 | version = "0.52.5" 638 | source = "registry+https://github.com/rust-lang/crates.io-index" 639 | checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" 640 | dependencies = [ 641 | "windows_aarch64_gnullvm 0.52.5", 642 | "windows_aarch64_msvc 0.52.5", 643 | "windows_i686_gnu 0.52.5", 644 | "windows_i686_gnullvm", 645 | "windows_i686_msvc 0.52.5", 646 | "windows_x86_64_gnu 0.52.5", 647 | "windows_x86_64_gnullvm 0.52.5", 648 | "windows_x86_64_msvc 0.52.5", 649 | ] 650 | 651 | [[package]] 652 | name = "windows_aarch64_gnullvm" 653 | version = "0.48.5" 654 | source = "registry+https://github.com/rust-lang/crates.io-index" 655 | checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" 656 | 657 | [[package]] 658 | name = "windows_aarch64_gnullvm" 659 | version = "0.52.5" 660 | source = "registry+https://github.com/rust-lang/crates.io-index" 661 | checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" 662 | 663 | [[package]] 664 | name = "windows_aarch64_msvc" 665 | version = "0.48.5" 666 | source = "registry+https://github.com/rust-lang/crates.io-index" 667 | checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" 668 | 669 | [[package]] 670 | name = "windows_aarch64_msvc" 671 | version = "0.52.5" 672 | source = "registry+https://github.com/rust-lang/crates.io-index" 673 | checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" 674 | 675 | [[package]] 676 | name = "windows_i686_gnu" 677 | version = "0.48.5" 678 | source = "registry+https://github.com/rust-lang/crates.io-index" 679 | checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" 680 | 681 | [[package]] 682 | name = "windows_i686_gnu" 683 | version = "0.52.5" 684 | 
source = "registry+https://github.com/rust-lang/crates.io-index" 685 | checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" 686 | 687 | [[package]] 688 | name = "windows_i686_gnullvm" 689 | version = "0.52.5" 690 | source = "registry+https://github.com/rust-lang/crates.io-index" 691 | checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" 692 | 693 | [[package]] 694 | name = "windows_i686_msvc" 695 | version = "0.48.5" 696 | source = "registry+https://github.com/rust-lang/crates.io-index" 697 | checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" 698 | 699 | [[package]] 700 | name = "windows_i686_msvc" 701 | version = "0.52.5" 702 | source = "registry+https://github.com/rust-lang/crates.io-index" 703 | checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" 704 | 705 | [[package]] 706 | name = "windows_x86_64_gnu" 707 | version = "0.48.5" 708 | source = "registry+https://github.com/rust-lang/crates.io-index" 709 | checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" 710 | 711 | [[package]] 712 | name = "windows_x86_64_gnu" 713 | version = "0.52.5" 714 | source = "registry+https://github.com/rust-lang/crates.io-index" 715 | checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" 716 | 717 | [[package]] 718 | name = "windows_x86_64_gnullvm" 719 | version = "0.48.5" 720 | source = "registry+https://github.com/rust-lang/crates.io-index" 721 | checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" 722 | 723 | [[package]] 724 | name = "windows_x86_64_gnullvm" 725 | version = "0.52.5" 726 | source = "registry+https://github.com/rust-lang/crates.io-index" 727 | checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" 728 | 729 | [[package]] 730 | name = "windows_x86_64_msvc" 731 | version = "0.48.5" 732 | source = "registry+https://github.com/rust-lang/crates.io-index" 733 | checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" 734 | 735 | [[package]] 736 | name = "windows_x86_64_msvc" 737 | version = "0.52.5" 738 | source = "registry+https://github.com/rust-lang/crates.io-index" 739 | checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" 740 | -------------------------------------------------------------------------------- /server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "generic-rust" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | axum = { version = "0.7.5", features = ["ws"] } 10 | tokio = { version = "1.37.0", features = ["macros", "rt-multi-thread"] } 11 | futures = "0.3.31" 12 | tokio-util = { version = "0.7.12", features = ["codec", "io"] } 13 | reqwest = { version = "0.12.9", features = ["stream", "json"] } 14 | tokio-stream = { version = "0.1.16", features = ["sync"] } 15 | serde_json = "1.0.128" 16 | serde = { version = "1.0.210", features = ["derive"] } 17 | log = "0.4.22" 18 | env_logger = "0.11.5" 19 | chrono = "0.4.38" 20 | dashmap = "6.1.0" 21 | tower-http = { version = "0.6.2", features = ["cors"] } 22 | rustrict = "0.7.33" 23 | regex = "1.10.2" -------------------------------------------------------------------------------- /server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM 
lukemathwalker/cargo-chef:latest-rust-1 AS chef 2 | WORKDIR /app 3 | 4 | FROM chef AS planner 5 | COPY . . 6 | RUN cargo chef prepare --recipe-path recipe.json 7 | 8 | FROM chef AS builder 9 | COPY --from=planner /app/recipe.json recipe.json 10 | # Build dependencies - this is the caching Docker layer! 11 | RUN cargo chef cook --release --recipe-path recipe.json 12 | # Build application 13 | COPY . . 14 | RUN cargo build --release --bin generic-rust 15 | 16 | # We do not need the Rust toolchain to run the binary! 17 | FROM debian:bookworm-slim AS runtime 18 | RUN apt-get update && apt install -y openssl ca-certificates 19 | WORKDIR /app 20 | COPY --from=builder /app/target/release/generic-rust /usr/local/bin 21 | ENTRYPOINT ["/usr/local/bin/generic-rust"] 22 | -------------------------------------------------------------------------------- /server/fly.toml: -------------------------------------------------------------------------------- 1 | # fly.toml app configuration file generated for xls on 2024-11-26T10:22:31-08:00 2 | # 3 | # See https://fly.io/docs/reference/configuration/ for information about how to use this file. 4 | # 5 | 6 | app = 'xls' 7 | primary_region = 'sjc' 8 | 9 | [build] 10 | 11 | [env] 12 | PORT = '8080' 13 | 14 | [http_service] 15 | internal_port = 3000 16 | force_https = true 17 | auto_stop_machines = 'stop' 18 | auto_start_machines = true 19 | min_machines_running = 0 20 | processes = ['app'] 21 | 22 | [[vm]] 23 | memory = '1gb' 24 | cpu_kind = 'shared' 25 | cpus = 1 26 | -------------------------------------------------------------------------------- /server/src/feldera.rs: -------------------------------------------------------------------------------- 1 | //! Helper functions for the Feldera API 2 | 3 | use std::env::var; 4 | use std::io; 5 | use std::sync::{Arc, LazyLock}; 6 | use std::time::Duration; 7 | 8 | use crate::stats::XlsError; 9 | use axum::http::StatusCode; 10 | use axum::Json; 11 | use dashmap::DashSet; 12 | use futures::{StreamExt, TryStreamExt}; 13 | use log::{error, warn}; 14 | use reqwest::Client; 15 | use serde::Serialize; 16 | use serde_json::Value; 17 | use tokio::sync::broadcast::Sender; 18 | 19 | const PIPELINE_NAME: &str = "xls"; 20 | const FELDERA_HOST: LazyLock = 21 | LazyLock::new(|| var("FELDERA_HOST").unwrap_or_else(|_| String::from("http://localhost:8080"))); 22 | static FELDERA_API_KEY: LazyLock = 23 | LazyLock::new(|| var("FELDERA_API_KEY").unwrap_or_else(|_| String::new())); 24 | 25 | pub(crate) async fn adhoc_query(client: Client, sql: &str) -> Result { 26 | let url = format!("{}/v0/pipelines/{PIPELINE_NAME}/query", &*FELDERA_HOST); 27 | let response = client 28 | .get(url) 29 | .bearer_auth(&*FELDERA_API_KEY) 30 | .query(&[("sql", sql), ("format", "json")]) 31 | .send() 32 | .await 33 | .map_err(XlsError::from)?; 34 | 35 | if !response.status().is_success() { 36 | return Err(XlsError::from(format!( 37 | "Failed to fetch data: HTTP {}: {:?}", 38 | response.status(), 39 | response.text().await.unwrap_or_else(|e| e.to_string()) 40 | ))); 41 | } 42 | 43 | let body = response.text().await.map_err(XlsError::from)?; 44 | 45 | Ok(body) 46 | } 47 | 48 | /// Parses feldera change format inside of json_data 49 | /// 50 | /// `{"sequence_number": ...,"json_data":[{"delete": {...} },{"insert": {...} }]}` 51 | #[derive(serde::Deserialize)] 52 | #[allow(dead_code)] 53 | enum Change { 54 | #[serde(rename = "insert")] 55 | Insert(Value), 56 | #[serde(rename = "delete")] 57 | Delete(Value), 58 | } 59 | 60 | /// Parses a record from the feldera 
change stream. 61 | #[derive(serde::Deserialize)] 62 | #[allow(dead_code)] 63 | struct Record { 64 | sequence_number: i64, 65 | json_data: Option>, 66 | } 67 | 68 | pub(crate) fn subscribe_change_stream( 69 | client: Client, 70 | view_name: &str, 71 | capacity: usize, 72 | ) -> Sender> { 73 | let (tx, _) = tokio::sync::broadcast::channel(capacity); 74 | let subscribe = tx.clone(); 75 | let url = format!( 76 | "{}/v0/pipelines/{PIPELINE_NAME}/egress/{view_name}", 77 | &*FELDERA_HOST 78 | ); 79 | let view = String::from(view_name); 80 | 81 | tokio::spawn(async move { 82 | loop { 83 | let response = client 84 | .post(url.clone()) 85 | .bearer_auth(&*FELDERA_API_KEY) 86 | .header("Content-Type", "application/json") 87 | .query(&[ 88 | ("format", "json"), 89 | ("backpressure", "false"), 90 | ("array", "false"), 91 | ]) 92 | .send() 93 | .await; 94 | 95 | match response { 96 | Ok(resp) if resp.status().is_success() => { 97 | let stream = resp 98 | .bytes_stream() 99 | .map_err(|e| io::Error::new(io::ErrorKind::Other, e)); 100 | let reader = tokio_util::io::StreamReader::new(stream); 101 | let mut decoder = tokio_util::codec::FramedRead::new( 102 | reader, 103 | tokio_util::codec::LinesCodec::new(), 104 | ); 105 | 106 | while let Some(line) = decoder.next().await { 107 | match line { 108 | Ok(line) => { 109 | //log::debug!("Received change: {line}"); 110 | match serde_json::from_str::(&line) { 111 | Ok(record) => { 112 | // walk record.json_data in reverse and return first `insert` 113 | 'inner: for change in 114 | record.json_data.unwrap_or_else(|| vec![]).iter().rev() 115 | { 116 | if let Change::Insert(value) = change { 117 | let mut value_str = value.to_string(); 118 | value_str.push('\n'); 119 | //log::debug!("broadcasting change: {value_str}"); 120 | if tx.send(Ok(value_str)).is_err() { 121 | // A send operation can only fail if there are no active receivers, 122 | // implying that the message could never be received. 123 | // The error contains the message being sent as a payload so it can be recovered. 
124 |                                                     break 'inner;
125 |                                                 }
126 |                                             }
127 |                                         }
128 |                                     }
129 |                                     Err(e) => {
130 |                                         error!("Failed to parse change record from {view}: {}", e);
131 |                                         break;
132 |                                     }
133 |                                 }
134 |                             }
135 |                             Err(e) => {
136 |                                 error!("Failed to decode line from {view}: {:?}", e);
137 |                                 let _ = tx.send(Err(XlsError::from(e)));
138 |                                 break;
139 |                             }
140 |                         }
141 |                     }
142 |                 }
143 |                 _ => {
144 |                     error!("Failed to fetch change stream at {url}: {:?}", response);
145 |                     let _ = tx.send(Err(XlsError::from("Failed to fetch change stream")));
146 |                 }
147 |             }
148 | 
149 |             warn!("Lost connection to change stream at {url}, waiting 10 seconds before retrying");
150 |             tokio::time::sleep(Duration::from_secs(10)).await;
151 |         }
152 |     });
153 | 
154 |     subscribe
155 | }
156 | 
157 | pub(crate) async fn insert<T: Serialize>(
158 |     client: Client,
159 |     table_name: &str,
160 |     data: T,
161 | ) -> (StatusCode, Json<Value>) {
162 |     let url = format!(
163 |         "{}/v0/pipelines/{PIPELINE_NAME}/ingress/{table_name}",
164 |         &*FELDERA_HOST
165 |     );
166 | 
167 |     let response = client
168 |         .post(url.clone())
169 |         .bearer_auth(&*FELDERA_API_KEY)
170 |         .header("Content-Type", "application/json")
171 |         .query(&[("format", "json"), ("update_format", "raw")])
172 |         .json(&data)
173 |         .send()
174 |         .await;
175 | 
176 |     match response {
177 |         Ok(resp) if resp.status().is_success() => {
178 |             (StatusCode::OK, Json(serde_json::json!({"success": true})))
179 |         }
180 |         Ok(resp) => {
181 |             let body = resp.text().await.unwrap_or_else(|e| e.to_string());
182 |             (
183 |                 StatusCode::INTERNAL_SERVER_ERROR,
184 |                 Json(serde_json::json!({"error": body})),
185 |             )
186 |         }
187 |         Err(e) => (
188 |             StatusCode::INTERNAL_SERVER_ERROR,
189 |             Json(serde_json::json!({"error": format!("Failed to update cell: {:?}", e)})),
190 |         ),
191 |     }
192 | }
193 | 
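For illustration only, a hypothetical caller of `insert` (the `ExampleRow` type and `example_ingress` function are made up, not part of the original file): any `Serialize` payload is posted to the pipeline's ingress endpoint with `format=json&update_format=raw`, and the Feldera response is mapped to an axum status/JSON pair.

// Hypothetical usage sketch, not part of the original file.
#[derive(serde::Serialize)]
struct ExampleRow {
    id: i64,
    raw_value: String,
    background: i32,
}

async fn example_ingress(client: Client) {
    let row = ExampleRow { id: 42, raw_value: "hello".to_string(), background: 0 };
    // Posts to {FELDERA_HOST}/v0/pipelines/xls/ingress/spreadsheet_data?format=json&update_format=raw
    let (status, body) = insert(client, "spreadsheet_data", row).await;
    log::debug!("ingress returned {}: {}", status, body.0);
}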
194 | #[derive(serde::Deserialize, Debug)]
195 | struct ApiLimitRecord {
196 |     ip: String,
197 | }
198 | 
199 | pub(crate) fn api_limit_table(client: Client) -> Arc<DashSet<String>> {
200 |     let ds = Arc::new(DashSet::new());
201 |     let ds_clone = ds.clone();
202 |     let url = format!(
203 |         "{}/v0/pipelines/{PIPELINE_NAME}/egress/api_limit_reached",
204 |         &*FELDERA_HOST
205 |     );
206 | 
207 |     tokio::spawn(async move {
208 |         loop {
209 |             ds.clear();
210 |             let snapshot = adhoc_query(client.clone(), "SELECT * FROM api_limit_reached")
211 |                 .await
212 |                 .unwrap_or_else(|e| {
213 |                     error!("Failed to fetch initial api_limit data: {}", e);
214 |                     String::new()
215 |                 });
216 |             for line in snapshot.trim().lines() {
217 |                 if line.is_empty() {
218 |                     continue;
219 |                 }
220 |                 match serde_json::from_str::<ApiLimitRecord>(line) {
221 |                     Ok(record) => {
222 |                         log::debug!("Initial api limit: {record:?}");
223 |                         ds.insert(record.ip);
224 |                     }
225 |                     Err(e) => {
226 |                         error!("Failed to parse ApiLimitRecord: {}", e);
227 |                     }
228 |                 }
229 |             }
230 | 
231 |             let response = client
232 |                 .post(url.clone())
233 |                 .bearer_auth(&*FELDERA_API_KEY)
234 |                 .header("Content-Type", "application/json")
235 |                 .query(&[
236 |                     ("format", "json"),
237 |                     ("backpressure", "true"),
238 |                     ("array", "false"),
239 |                 ])
240 |                 .send()
241 |                 .await;
242 | 
243 |             match response {
244 |                 Ok(resp) if resp.status().is_success() => {
245 |                     let stream = resp
246 |                         .bytes_stream()
247 |                         .map_err(|e| io::Error::new(io::ErrorKind::Other, e));
248 |                     let reader = tokio_util::io::StreamReader::new(stream);
249 |                     let mut decoder = tokio_util::codec::FramedRead::new(
250 |                         reader,
251 |                         tokio_util::codec::LinesCodec::new(),
252 |                     );
253 | 
254 |                     while let Some(line) = decoder.next().await {
255 |                         match line {
256 |                             Ok(line) => {
257 |                                 match serde_json::from_str::<Record>(&line) {
258 |                                     Ok(record) => {
259 |                                         // walk record.json_data and apply inserts and deletes
260 |                                         for change in
261 |                                             record.json_data.unwrap_or_else(|| vec![]).into_iter()
262 |                                         {
263 |                                             match change {
264 |                                                 Change::Insert(value) => {
265 |                                                     let record =
266 |                                                         serde_json::from_value::<ApiLimitRecord>(
267 |                                                             value,
268 |                                                         );
269 |                                                     match record {
270 |                                                         Ok(record) => {
271 |                                                             log::debug!(
272 |                                                                 "Received api limit for: {record:?}"
273 |                                                             );
274 |                                                             ds.insert(record.ip);
275 |                                                         }
276 |                                                         Err(e) => {
277 |                                                             error!("Failed to parse ApiLimitRecord: {}", e);
278 |                                                         }
279 |                                                     }
280 |                                                 }
281 |                                                 Change::Delete(value) => {
282 |                                                     let record =
283 |                                                         serde_json::from_value::<ApiLimitRecord>(
284 |                                                             value,
285 |                                                         );
286 |                                                     match record {
287 |                                                         Ok(record) => {
288 |                                                             log::debug!(
289 |                                                                 "Received api limit removal for: {record:?}"
290 |                                                             );
291 |                                                             ds.remove(&record.ip);
292 |                                                         }
293 |                                                         Err(e) => {
294 |                                                             error!("Failed to parse ApiLimitRecord: {}", e);
295 |                                                         }
296 |                                                     }
297 |                                                 }
298 |                                             }
299 |                                         }
300 |                                     }
301 |                                     Err(e) => {
302 |                                         error!(
303 |                                             "Failed to parse change record from api_limit_reached: {}",
304 |                                             e
305 |                                         );
306 |                                         break;
307 |                                     }
308 |                                 }
309 |                             }
310 |                             Err(e) => {
311 |                                 error!("Failed to decode line from api_limit_reached: {:?}", e);
312 |                                 break;
313 |                             }
314 |                         }
315 |                     }
316 |                 }
317 |                 _ => {
318 |                     error!("Failed to fetch change stream at {url}: {:?}", response);
319 |                 }
320 |             }
321 | 
322 |             warn!("Lost connection to change stream at {url}, waiting 10 seconds before retrying");
323 |             tokio::time::sleep(Duration::from_secs(10)).await;
324 |         }
325 |     });
326 | 
327 |     ds_clone
328 | }
329 | 
--------------------------------------------------------------------------------
/server/src/main.rs:
--------------------------------------------------------------------------------
 1 | use crate::spreadsheet::SpreadSheetView;
 2 | use crate::stats::XlsError;
 3 | use axum::http::Method;
 4 | use axum::{routing::get, routing::post, Router};
 5 | use dashmap::DashSet;
 6 | use reqwest::Client;
 7 | use std::net::SocketAddr;
 8 | use std::sync::Arc;
 9 | use tokio::sync::broadcast::Sender;
10 | use tower_http::cors::{AllowMethods, Any, CorsLayer};
11 | 
12 | mod feldera;
13 | mod spreadsheet;
14 | mod stats;
15 | #[derive(Clone)]
16 | struct AppState {
17 |     stats_subscription: Sender<Result<String, XlsError>>,
18 |     xls_subscription: Sender<Result<String, XlsError>>,
19 |     spreadsheet_view: Arc<SpreadSheetView>,
20 |     api_limits: Arc<DashSet<String>>,
21 |     http_client: Client,
22 | }
23 | 
24 | #[tokio::main]
25 | async fn main() {
26 |     let _r = env_logger::try_init();
27 | 
28 |     let http_client = Client::new();
29 |     let stats_subscription =
30 |         feldera::subscribe_change_stream(http_client.clone(), "spreadsheet_statistics", 128);
31 |     let xls_subscription =
32 |         feldera::subscribe_change_stream(http_client.clone(), "spreadsheet_view", 4096);
33 |     let api_limits = feldera::api_limit_table(http_client.clone());
34 |     let spreadsheet_view =
35 |         Arc::new(SpreadSheetView::new(http_client.clone(), xls_subscription.subscribe()).await);
36 | 
37 |     let state = AppState {
38 |         stats_subscription,
39 |         xls_subscription,
40 |         spreadsheet_view,
41 |         api_limits,
42 |         http_client,
43 |     };
44 | 
45 |     let cors = CorsLayer::new()
46 |         .allow_methods(AllowMethods::list(vec![Method::GET, Method::POST]))
47 |         .allow_origin([
48 |             "https://xls.feldera.io".parse().unwrap(),
49 |             "http://localhost:7777".parse().unwrap(),
50 |             "http://127.0.0.1:7777".parse().unwrap(),
51 |             "http://localhost:3000".parse().unwrap(),
52 |         ])
53 |         .allow_headers(Any);
54 | 
55 |     let app = Router::new()
56 |         .route("/", get(|| async { "xls app!"
})) 57 | .route("/api/stats", get(stats::stats)) 58 | .route("/api/spreadsheet", get(spreadsheet::ws_handler)) 59 | .route("/api/spreadsheet", post(spreadsheet::post_handler)) 60 | .layer(cors) 61 | .with_state(state); 62 | let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap(); 63 | axum::serve( 64 | listener, 65 | app.into_make_service_with_connect_info::(), 66 | ) 67 | .await 68 | .unwrap(); 69 | } 70 | -------------------------------------------------------------------------------- /server/src/spreadsheet.rs: -------------------------------------------------------------------------------- 1 | use axum::http::HeaderMap; 2 | use axum::{ 3 | extract::ws::{Message, WebSocket, WebSocketUpgrade}, 4 | extract::{connect_info::ConnectInfo, Json, State}, 5 | response::IntoResponse, 6 | }; 7 | use chrono::Utc; 8 | use futures::{sink::SinkExt, stream::StreamExt}; 9 | use log::{debug, error, trace, warn}; 10 | use regex::Regex; 11 | use reqwest::Client; 12 | use rustrict::Censor; 13 | use serde::{Deserialize, Serialize}; 14 | use std::collections::BTreeMap; 15 | use std::net::SocketAddr; 16 | use std::ops::{ControlFlow, Range}; 17 | use std::sync::Arc; 18 | use tokio::sync::{broadcast::Receiver, mpsc, watch, RwLock}; 19 | 20 | use crate::feldera::{adhoc_query, insert}; 21 | use crate::stats::XlsError; 22 | use crate::AppState; 23 | 24 | pub(crate) struct SpreadSheetView { 25 | client: Client, 26 | cells: Arc>>, 27 | } 28 | 29 | impl SpreadSheetView { 30 | const CACHE_FRONT: Range = 0..100_000; 31 | const CACHE_BACK: Range = 1_039_900_000..1_040_000_000; 32 | 33 | pub(crate) async fn new( 34 | client: Client, 35 | xls_subscription: Receiver>, 36 | ) -> Self { 37 | let cells = Arc::new(RwLock::new(BTreeMap::new())); 38 | Self::spawn_update_cache_task(xls_subscription, cells.clone()); 39 | Self::initialize_cache(client.clone(), cells.clone(), Self::CACHE_FRONT).await; 40 | Self::initialize_cache(client.clone(), cells.clone(), Self::CACHE_BACK).await; 41 | SpreadSheetView { client, cells } 42 | } 43 | 44 | fn id_is_cached(id: i64) -> bool { 45 | Self::CACHE_FRONT.contains(&id) || Self::CACHE_BACK.contains(&id) 46 | } 47 | 48 | async fn initialize_cache( 49 | client: Client, 50 | cells: Arc>>, 51 | range: Range, 52 | ) { 53 | let sql = format!( 54 | "SELECT * FROM spreadsheet_view WHERE id >= {} and id < {}", 55 | range.start, range.end 56 | ); 57 | match adhoc_query(client, sql.as_str()).await { 58 | Ok(snapshot) => { 59 | for line in snapshot.trim().split('\n') { 60 | if line.is_empty() { 61 | continue; 62 | } 63 | match serde_json::from_str::(&line) { 64 | Ok(cell) => { 65 | cells.write().await.insert(cell.id, cell); 66 | } 67 | Err(e) => { 68 | warn!("Error parsing change: {e} (change {line})"); 69 | } 70 | } 71 | } 72 | } 73 | Err(e) => { 74 | panic!("Error filling spreadsheet cache: {e}"); 75 | } 76 | } 77 | } 78 | 79 | fn spawn_update_cache_task( 80 | mut xls_subscription: Receiver>, 81 | cells: Arc>>, 82 | ) { 83 | tokio::spawn(async move { 84 | loop { 85 | match xls_subscription.recv().await { 86 | Ok(Ok(change)) => match serde_json::from_str::(&change) { 87 | Ok(cell) => { 88 | if Self::id_is_cached(cell.id) { 89 | cells.write().await.insert(cell.id, cell); 90 | } 91 | } 92 | Err(e) => { 93 | error!("Error parsing change: {e} (change {change})"); 94 | } 95 | }, 96 | Ok(Err(e)) => { 97 | warn!("Error receiving change: {e}"); 98 | } 99 | Err(e) => { 100 | warn!("Error receiving change: {e}"); 101 | break; 102 | } 103 | } 104 | } 105 | }); 106 | } 107 | 108 | async fn 
query(&self, region: Region) -> Result { 109 | if Self::id_is_cached(region.from) && Self::id_is_cached(region.to - 1) { 110 | let mut snapshot = String::new(); 111 | for (_id, cell) in self.cells.read().await.range(region.from..region.to) { 112 | snapshot.push_str(&serde_json::to_string(cell).unwrap()); 113 | snapshot.push('\n'); 114 | } 115 | return Ok(snapshot); 116 | } 117 | 118 | let sql = format!( 119 | "SELECT * FROM spreadsheet_view WHERE id >= {} and id < {}", 120 | region.from, region.to 121 | ); 122 | adhoc_query(self.client.clone(), sql.as_str()).await 123 | } 124 | } 125 | 126 | #[derive(serde::Deserialize, serde::Serialize, Debug)] 127 | #[allow(dead_code)] 128 | struct Cell { 129 | id: i64, 130 | background: i32, 131 | raw_value: String, 132 | computed_value: String, 133 | } 134 | 135 | #[derive(serde::Deserialize, Debug, Copy, Clone)] 136 | struct Region { 137 | from: i64, 138 | to: i64, 139 | } 140 | 141 | impl Default for Region { 142 | fn default() -> Self { 143 | Region { from: 0, to: 2500 } 144 | } 145 | } 146 | 147 | /// The handler for the HTTP request (this gets called when the HTTP request lands at the start 148 | /// of websocket negotiation). After this completes, the actual switching from HTTP to 149 | /// websocket protocol will occur. 150 | /// This is the last point where we can extract TCP/IP metadata such as IP address of the client 151 | /// as well as things from HTTP headers such as user-agent of the browser etc. 152 | pub(crate) async fn ws_handler( 153 | ws: WebSocketUpgrade, 154 | ConnectInfo(addr): ConnectInfo, 155 | State(state): State, 156 | ) -> impl IntoResponse { 157 | debug!("{addr} connected."); 158 | ws.on_upgrade(move |socket| { 159 | handle_socket( 160 | state.spreadsheet_view.clone(), 161 | state.xls_subscription.subscribe(), 162 | socket, 163 | addr, 164 | ) 165 | }) 166 | } 167 | 168 | /// Actual websocket state-machine (one will be spawned per connection) 169 | async fn handle_socket( 170 | spreadsheet_view: Arc, 171 | mut xls_changes: Receiver>, 172 | socket: WebSocket, 173 | who: SocketAddr, 174 | ) { 175 | let (mut sender, mut receiver) = socket.split(); 176 | let (region_tx, mut region_rx) = watch::channel(Region::default()); 177 | let (change_sender, mut change_receiver) = mpsc::channel::(128); 178 | 179 | // spawn a task that forwards messages from the mpsc to the sink 180 | tokio::spawn(async move { 181 | while let Some(message) = change_receiver.recv().await { 182 | match sender.send(Message::Text(message.trim().to_string())).await { 183 | Ok(_) => { 184 | trace!("{message} sent to {who}"); 185 | } 186 | Err(e) => { 187 | warn!("Error sending change to client: {e}"); 188 | } 189 | } 190 | } 191 | }); 192 | 193 | // Spawn a task that will push spreadsheet view changes to the client 194 | let change_fwder = change_sender.clone(); 195 | let mut change_task = tokio::spawn(async move { 196 | let mut cnt = 0; 197 | loop { 198 | cnt += 1; 199 | match xls_changes.recv().await { 200 | Ok(Ok(change)) => match serde_json::from_str::(&change) { 201 | Ok(cell) => { 202 | let region = { *region_rx.borrow_and_update() }; 203 | if cell.id >= region.from && cell.id < region.to { 204 | match change_fwder.send(change).await { 205 | Ok(_) => {} 206 | Err(e) => { 207 | warn!("Error sending change to sender task: {e}"); 208 | return cnt; 209 | } 210 | } 211 | } 212 | } 213 | Err(e) => { 214 | error!("Error parsing change: {e} (change {change})"); 215 | } 216 | }, 217 | Ok(Err(e)) => { 218 | warn!("Error receiving change: {e}"); 219 | return cnt; 220 
| } 221 | Err(e) => { 222 | warn!("Error receiving change: {e}"); 223 | return cnt; 224 | } 225 | } 226 | } 227 | }); 228 | 229 | // This second task will receive messages from the client and push snapshots 230 | let change_fwder = change_sender.clone(); 231 | let mut recv_task = tokio::spawn(async move { 232 | let mut cnt = 0; 233 | while let Some(Ok(msg)) = receiver.next().await { 234 | cnt += 1; 235 | match process_message(msg, who) { 236 | ControlFlow::Continue(Some(region)) => match spreadsheet_view.query(region).await { 237 | Ok(snapshot) => { 238 | region_tx.send_replace(region); 239 | for line in snapshot.split('\n') { 240 | match change_fwder.send(line.to_string()).await { 241 | Ok(_) => {} 242 | Err(e) => { 243 | warn!("Error sending change to sender task: {e}"); 244 | return cnt; 245 | } 246 | } 247 | } 248 | } 249 | Err(e) => { 250 | warn!("Error querying spreadsheet_view: {e}"); 251 | return cnt; 252 | } 253 | }, 254 | ControlFlow::Continue(None) => {} 255 | ControlFlow::Break(_) => { 256 | break; 257 | } 258 | } 259 | } 260 | cnt 261 | }); 262 | 263 | // If any one of the tasks exit, abort the other. 264 | tokio::select! { 265 | rv_a = &mut change_task => { 266 | match rv_a { 267 | Ok(a) => debug!("{a} messages sent to {who}"), 268 | Err(a) => warn!("Error sending messages {a:?}") 269 | } 270 | recv_task.abort(); 271 | }, 272 | rv_b = &mut recv_task => { 273 | match rv_b { 274 | Ok(b) => debug!("Received {b} messages from {who}"), 275 | Err(b) => warn!("Error receiving messages {b:?}") 276 | } 277 | change_task.abort(); 278 | } 279 | } 280 | 281 | trace!("Websocket context {who} destroyed"); 282 | } 283 | 284 | /// helper to print contents of messages to stdout. Has special treatment for Close. 285 | fn process_message(msg: Message, who: SocketAddr) -> ControlFlow<(), Option> { 286 | match msg { 287 | Message::Text(t) => match serde_json::from_str::(&t) { 288 | Ok(region) => { 289 | debug!("{who} sent range: {region:?}"); 290 | ControlFlow::Continue(Some(region)) 291 | } 292 | Err(e) => { 293 | warn!("{who} sent invalid region JSON: {t:?} {e}"); 294 | ControlFlow::Continue(None) 295 | } 296 | }, 297 | Message::Close(c) => { 298 | debug!("{who} closed connection: {c:?}"); 299 | ControlFlow::Break(()) 300 | } 301 | _ => ControlFlow::Continue(None), 302 | } 303 | } 304 | 305 | // Insert/Update a cell 306 | 307 | // Data structure to represent incoming JSON payload 308 | #[derive(Deserialize, Debug)] 309 | pub(crate) struct UpdateRequest { 310 | id: i64, 311 | raw_value: String, 312 | background: i32, 313 | } 314 | 315 | impl UpdateRequest { 316 | const ID_RANGE: Range = 0i64..1_040_000_000i64; 317 | } 318 | 319 | // Data structure to represent outgoing JSON payload 320 | #[derive(Serialize, Debug)] 321 | struct UpdatePayload { 322 | id: i64, 323 | raw_value: String, 324 | background: i32, 325 | ip: String, 326 | ts: String, 327 | } 328 | 329 | fn replace_domain_in_urls(input: &str, new_domain: &str) -> String { 330 | // Regex breakdown: 331 | // (https?://) captures the protocol (http or https) 332 | // ([^/\s]+) captures the domain portion (everything until a slash or whitespace) 333 | // ([^\s]*) captures the remainder of the URL (path/query/etc. 
until whitespace)
334 |     let url_regex = Regex::new(r"(https?://)([^/\s]+)([^\s]*)").unwrap();
335 | 
336 |     url_regex
337 |         .replace_all(input, |caps: &regex::Captures| {
338 |             // caps[1] is the scheme+://, caps[2] is the original domain, caps[3] is the path/query
339 |             format!("{}{}{}", &caps[1], new_domain, &caps[3])
340 |         })
341 |         .to_string()
342 | }
343 | 
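A quick check of the helper above (a hypothetical test, not in the original source): only the host portion is replaced, while scheme, path, and query survive.

#[cfg(test)]
mod url_redaction_tests {
    use super::*;

    #[test]
    fn redacts_only_the_domain() {
        let input = "see https://example.com/page?x=1 and http://foo.bar/baz";
        let out = replace_domain_in_urls(input, "*REDACTED*");
        assert_eq!(out, "see https://*REDACTED*/page?x=1 and http://*REDACTED*/baz");
    }
}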
344 | pub(crate) async fn post_handler(
345 |     headers: HeaderMap,
346 |     ConnectInfo(addr): ConnectInfo<SocketAddr>,
347 |     State(state): State<AppState>,
348 |     Json(update_request): Json<UpdateRequest>,
349 | ) -> impl IntoResponse {
350 |     // Load balancer puts the client IP in the HTTP header
351 |     const CLIENT_IP_HEADER: &str = "Fly-Client-IP";
352 |     let client_ip = headers
353 |         .get(CLIENT_IP_HEADER)
354 |         .map(|ip| {
355 |             String::from_utf8_lossy(ip.as_bytes())
356 |                 .chars()
357 |                 .take(45)
358 |                 .collect::<String>()
359 |         })
360 |         .unwrap_or(addr.ip().to_string().chars().take(45).collect::<String>());
361 | 
362 |     if state.api_limits.contains(&client_ip) {
363 |         return (
364 |             axum::http::StatusCode::INTERNAL_SERVER_ERROR,
365 |             Json(serde_json::json!({"error": "API limit exceeded"})),
366 |         );
367 |     }
368 |     if !UpdateRequest::ID_RANGE.contains(&update_request.id) {
369 |         return (
370 |             axum::http::StatusCode::BAD_REQUEST,
371 |             Json(serde_json::json!({"error": "Invalid cell ID"})),
372 |         );
373 |     }
374 |     let user_value = update_request
375 |         .raw_value
376 |         .chars()
377 |         .take(64)
378 |         .collect::<String>();
379 |     let censored_urls = replace_domain_in_urls(&user_value, "*REDACTED*");
380 |     let censored_input = Censor::new(censored_urls.chars()).censor();
381 |     let payload = UpdatePayload {
382 |         id: update_request.id,
383 |         raw_value: censored_input,
384 |         background: update_request.background,
385 |         ip: client_ip,
386 |         ts: Utc::now().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
387 |     };
388 | 
389 |     insert(state.http_client, "spreadsheet_data", payload).await
390 | }
391 | 
--------------------------------------------------------------------------------
/server/src/stats.rs:
--------------------------------------------------------------------------------
  1 | use std::fmt::Display;
  2 | use std::io;
  3 | 
  4 | use axum::extract::State;
  5 | use axum::{body::Body, response::IntoResponse, response::Response};
  6 | use futures::StreamExt;
  7 | use log::debug;
  8 | use serde::de::StdError;
  9 | use tokio::sync::broadcast::error::SendError;
 10 | use tokio_util::codec::LinesCodecError;
 11 | 
 12 | use crate::feldera::adhoc_query;
 13 | use crate::AppState;
 14 | 
 15 | #[derive(Clone, Debug)]
 16 | pub(crate) struct XlsError {
 17 |     message: String,
 18 | }
 19 | 
 20 | impl Display for XlsError {
 21 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 22 |         write!(f, "{}", self.message)
 23 |     }
 24 | }
 25 | 
 26 | impl From<io::Error> for XlsError {
 27 |     fn from(e: io::Error) -> Self {
 28 |         XlsError {
 29 |             message: e.to_string(),
 30 |         }
 31 |     }
 32 | }
 33 | 
 34 | impl From<SendError<Result<String, XlsError>>> for XlsError {
 35 |     fn from(e: SendError<Result<String, XlsError>>) -> Self {
 36 |         XlsError {
 37 |             message: e.to_string(),
 38 |         }
 39 |     }
 40 | }
 41 | 
 42 | impl From<LinesCodecError> for XlsError {
 43 |     fn from(e: LinesCodecError) -> Self {
 44 |         XlsError {
 45 |             message: e.to_string(),
 46 |         }
 47 |     }
 48 | }
 49 | 
 50 | impl From<&str> for XlsError {
 51 |     fn from(e: &str) -> Self {
 52 |         XlsError {
 53 |             message: e.to_string(),
 54 |         }
 55 |     }
 56 | }
 57 | 
 58 | impl From<String> for XlsError {
 59 |     fn from(e: String) -> Self {
 60 |         XlsError { message: e }
 61 |     }
 62 | }
 63 | 
 64 | impl From<reqwest::Error> for XlsError {
 65 |     fn from(e: reqwest::Error) -> Self {
 66 |         XlsError {
 67 |             message: e.to_string(),
 68 |         }
 69 |     }
 70 | }
 71 | 
 72 | impl StdError for XlsError {
 73 |     fn source(&self) -> Option<&(dyn StdError + 'static)> {
 74 |         None
 75 |     }
 76 | }
 77 | 
 78 | pub(crate) async fn stats(State(state): State<AppState>) -> impl IntoResponse {
 79 |     let initial_data = adhoc_query(state.http_client, "SELECT * FROM spreadsheet_statistics").await;
 80 | 
 81 |     if let Err(e) = initial_data {
 82 |         return Response::builder()
 83 |             .status(500)
 84 |             .body(Body::from(format!(
 85 |                 "{{\"error\": \"{}\"}}",
 86 |                 e.message.trim()
 87 |             )))
 88 |             .unwrap();
 89 |     }
 90 | 
 91 |     let initial_stream = futures::stream::once(async move { initial_data });
 92 | 
 93 |     let change_stream_rx = state.stats_subscription.subscribe();
 94 |     let change_stream = tokio_stream::wrappers::BroadcastStream::new(change_stream_rx);
 95 |     let stream = initial_stream.chain(change_stream.filter_map(|result| async move {
 96 |         match result {
 97 |             Ok(value) => Some(value),
 98 |             Err(e) => {
 99 |                 debug!("BroadcastStream error: {:?}", e);
100 |                 None // Discard errors
101 |             }
102 |         }
103 |     }));
104 | 
105 |     Response::builder()
106 |         .status(200)
107 |         .header("Content-Type", "application/json")
108 |         .header("Transfer-Encoding", "chunked")
109 |         .body(Body::from_stream(stream))
110 |         .unwrap()
111 | }
112 | 
--------------------------------------------------------------------------------
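Taken together, the server exposes three endpoints: GET /api/stats (a chunked stream of statistics JSON), GET /api/spreadsheet (a WebSocket that accepts {"from", "to"} region messages and pushes one cell JSON per text frame), and POST /api/spreadsheet (a cell update). A minimal client-side sketch against a locally running instance, assuming reqwest with the `json` and `stream` features plus tokio, futures, and serde_json; none of this is part of the repository:

// Hypothetical client sketch against a locally running server; not part of the repository.
use futures::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();

    // Update cell 0 with the same JSON shape post_handler expects (UpdateRequest).
    let resp = client
        .post("http://127.0.0.1:3000/api/spreadsheet")
        .json(&serde_json::json!({ "id": 0, "raw_value": "=1+1", "background": 0 }))
        .send()
        .await?;
    println!("update status: {}", resp.status());

    // The stats endpoint streams JSON chunks; print the first one.
    let mut stats = client
        .get("http://127.0.0.1:3000/api/stats")
        .send()
        .await?
        .bytes_stream();
    if let Some(chunk) = stats.next().await {
        println!("stats: {}", String::from_utf8_lossy(&chunk?));
    }
    Ok(())
}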