├── .gitbook.yaml
├── .gitbook
└── assets
│ ├── image.png
│ └── logo_bubbaloop.png
├── .gitignore
├── Cargo.toml
├── Cross.toml
├── LICENSE
├── README.md
├── build.rs
├── docker
└── aarch64.Dockerfile
├── docs
├── .gitbook
│ └── assets
│ │ └── image.png
├── README.md
├── SUMMARY.md
├── examples
│ ├── camera-recording.md
│ └── hello-world.md
├── model-inference-experimental.md
├── pipelines.md
├── quickstart.md
├── tutorials
│ └── security-camera.md
└── usage.md
├── examples
├── python-inference
│ ├── client.py
│ └── requirements.txt
├── python-rerun-files
│ ├── main.py
│ └── requirements.txt
├── python-streaming
│ ├── client.py
│ └── requirements.txt
└── react-app
│ ├── .gitignore
│ ├── README.md
│ ├── eslint.config.js
│ ├── index.html
│ ├── package-lock.json
│ ├── package.json
│ ├── src
│ ├── App.css
│ ├── App.tsx
│ ├── components
│ │ ├── ConnectionSettings.css
│ │ ├── ConnectionSettings.tsx
│ │ ├── InferenceDisplay.css
│ │ ├── InferenceDisplay.tsx
│ │ ├── InferenceInstruction.css
│ │ ├── InferenceInstruction.tsx
│ │ ├── StreamViewerWebsocket.css
│ │ └── StreamViewerWebsocket.tsx
│ ├── index.css
│ ├── main.tsx
│ └── vite-env.d.ts
│ ├── tsconfig.app.json
│ ├── tsconfig.json
│ ├── tsconfig.node.json
│ └── vite.config.ts
├── justfile
├── package-lock.json
├── package.json
├── scripts
├── cross_deploy.sh
├── install_deps.sh
├── install_libssl1.1.sh
├── install_linux.sh
├── run_serve.sh
└── uninstall_linux.sh
└── src
├── api
├── handles
│ ├── inference.rs
│ ├── mod.rs
│ ├── pipeline.rs
│ ├── recording.rs
│ ├── stats
│ │ ├── mod.rs
│ │ ├── sysinfo.rs
│ │ └── whoami.rs
│ └── streaming.rs
├── mod.rs
├── models
│ ├── inference.rs
│ ├── mod.rs
│ ├── pipeline.rs
│ ├── recording.rs
│ └── streaming.rs
└── server.rs
├── bin
├── bubbaloop.rs
└── serve.rs
├── cu29
├── mod.rs
├── msgs.rs
├── pipelines
│ ├── cameras.rs
│ ├── cameras_1.ron
│ ├── cameras_2.ron
│ ├── cameras_3.ron
│ ├── cameras_4.ron
│ ├── inference.ron
│ ├── inference.rs
│ └── mod.rs
└── tasks
│ ├── broadcast.rs
│ ├── image_encoder.rs
│ ├── inference.rs
│ ├── mod.rs
│ ├── recorder.rs
│ ├── video_capture.rs
│ └── video_writer.rs
├── lib.rs
└── pipeline.rs
/.gitbook.yaml:
--------------------------------------------------------------------------------
1 | root: ./docs/
2 |
3 | structure:
4 | readme: ./docs/README.md
5 | summary: ./docs/SUMMARY.md
6 |
--------------------------------------------------------------------------------
/.gitbook/assets/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kornia/bubbaloop/ba29fcdd84dd71f40e842c10db6236929efdb086/.gitbook/assets/image.png
--------------------------------------------------------------------------------
/.gitbook/assets/logo_bubbaloop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kornia/bubbaloop/ba29fcdd84dd71f40e842c10db6236929efdb086/.gitbook/assets/logo_bubbaloop.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated by Cargo
2 | # will have compiled files and executables
3 | debug/
4 | target/
5 |
6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
8 | Cargo.lock
9 |
10 | # These are backup files generated by rustfmt
11 | **/*.rs.bk
12 |
13 | # MSVC Windows builds of rustc generate these, which store debugging information
14 | *.pdb
15 |
16 | # RustRover
17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
19 | # and can be added to the global gitignore or merged into this file. For a more nuclear
20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
21 | #.idea/
22 |
23 | # venv files from python examples
24 | examples/python-*/.venv/
25 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "bubbaloop"
3 | categories = ["computer-vision", "science::robotics"]
4 | description = "Serving library for computer vision and AI Robotics"
5 | edition = "2021"
6 | homepage = "http://kornia.org"
7 | include = ["Cargo.toml"]
8 | license = "Apache-2.0"
9 | repository = "https://github.com/kornia/bubbaloop"
10 | rust-version = "1.86"
11 | version = "0.0.1-rc.1"
12 |
13 | [dependencies]
14 | argh = "0.1"
15 | axum = { version = "0.8", features = ["ws"] }
16 | bincode = "2.0.0"
17 | env_logger = "0.11"
18 | once_cell = "1.21"
19 | log = "0.4"
20 | reqwest = { version = "0.12", features = ["json"] }
21 | rerun = "0.23.2"
22 | serde = { version = "1.0", features = ["derive"] }
23 | serde_json = "1.0"
24 | sysinfo = "0.35"
25 | tokio = { version = "1", features = ["full"] }
26 | tower-http = { version = "0.6", features = ["cors"] }
27 | whoami = "1.5"
28 |
29 | # message passing framework
30 | # cu29 = { version = "0.7.0" }
31 | # cu29-helpers = { version = "0.7.0" }
32 | # TODO: fixes ron file connections order issues
33 | cu29 = { git = "https://github.com/copper-project/copper-rs.git", branch = "master" }
34 | cu29-helpers = { git = "https://github.com/copper-project/copper-rs.git", branch = "master" }
35 |
36 | kornia-image = "0.1.9"
37 | kornia-io = { version = "0.1.9", features = ["gstreamer", "turbojpeg"] }
38 | kornia-paligemma = { git = "https://github.com/kornia/kornia-paligemma.git", tag = "v0.1.0", features = [] }
39 | kornia-infernum = { git = "https://github.com/kornia/kornia-infernum.git", tag = "v0.1.0" }
40 |
41 | [features]
42 | cuda = ["kornia-paligemma/cuda"]
--------------------------------------------------------------------------------
/Cross.toml:
--------------------------------------------------------------------------------
1 | [build]
2 | default-target = "aarch64-unknown-linux-gnu"
3 |
4 | [target.aarch64-unknown-linux-gnu]
5 | dockerfile = "docker/aarch64.Dockerfile"
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ./docs/README.md
--------------------------------------------------------------------------------
/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 | println!(
3 | "cargo:rustc-env=LOG_INDEX_DIR={}",
4 | std::env::var("OUT_DIR").unwrap()
5 | );
6 | }
7 |
--------------------------------------------------------------------------------
/docker/aarch64.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/cross-rs/aarch64-unknown-linux-gnu:edge
2 |
3 | RUN apt-get update && apt-get install --assume-yes \
4 | cmake \
5 | curl \
6 | gdb \
7 | pkg-config \
8 | software-properties-common \
9 | wget \
10 | && \
11 | apt-get clean
12 |
13 | ENV DEBIAN_FRONTEND=noninteractive
14 |
15 | RUN dpkg --add-architecture arm64
16 |
17 | # Install dependencies
18 | RUN apt-get update && apt-get install --assume-yes \
19 | nasm \
20 | libgstreamer1.0-dev:arm64 \
21 | libgstreamer-plugins-base1.0-dev:arm64 \
22 | libssl-dev:arm64 \
23 | libglib2.0-dev:arm64 \
24 | libudev-dev:arm64 \
25 | && \
26 | apt-get clean
--------------------------------------------------------------------------------
/docs/.gitbook/assets/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kornia/bubbaloop/ba29fcdd84dd71f40e842c10db6236929efdb086/docs/.gitbook/assets/image.png
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # 🦄 Bubbaloop
2 |
3 | :earth\_africa: Serve local models for Spatial and AI Robotics Openly.
4 |
5 | * :robot: **AI & Robotics Ready**: Ideal for integrated AI, Robotics, and IoT applications.
6 | * :rocket: **REST API**: Effortlessly manage data pipelines for recording and inference.
7 | * :crab: **Rust Framework**: Offers both efficiency and safety for diverse applications.
8 |
9 | ### 👥 **Join Our Community**
10 |
11 | Connect with like-minded innovators and developers!
12 |
13 | * 💬 Discord Server: [https://discord.com/invite/HfnywwpBnD](https://discord.com/invite/HfnywwpBnD)
14 | * :book: Checkout our live [documentation](https://kornia.gitbook.io/bubbaloop)
15 | * :woman\_technologist: Give me the code: [https://github.com/kornia/bubbaloop](https://github.com/kornia/bubbaloop)
16 |
17 | ## [📢](https://emojipedia.org/loudspeaker) News
18 |
19 | * \[2025-05-11] Added tutorial for Home Security App
20 | * \[2025-04-06] Added support to inference with Google Paligemma
21 | * \[2025-03-01] Initial prototype of Inference engine pipeline
22 | * \[2025-02-01] Added recording pipeline to store rerun files and viz
23 | * \[2025-01-02] Added RTSP and Webcam support with kornia-rs
24 | * \[2024-12-28] Initial push with the basics for pipeline management
25 |
--------------------------------------------------------------------------------
/docs/SUMMARY.md:
--------------------------------------------------------------------------------
1 | # Table of contents
2 |
3 | * [🦄 Bubbaloop](README.md)
4 | * [🚀 Quickstart](quickstart.md)
5 | * [💊 Stats API](usage.md)
6 | * [🍰 Pipeline API](pipelines.md)
7 |
8 | ## Examples
9 |
10 | * [🌈 Hello World](examples/hello-world.md)
11 | * [📷 Camera Recording](examples/camera-recording.md)
12 |
13 | ***
14 |
15 | * [🍄 Model Inference (experimental)](model-inference-experimental.md)
16 |
17 | ## Tutorials
18 |
19 | * [Home Security App](tutorials/security-camera.md)
20 |
--------------------------------------------------------------------------------
/docs/examples/camera-recording.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Example showing how to stream data from cameras and log into disk
3 | ---
4 |
5 | # 📷 Camera Recording
6 |
7 | The Bubbaloop platform includes a `cameras` pipeline which allows you to stream and record data from multiple camera streams and serialize it to disk, including video frame metadata such as timestamps.
8 |
9 | ## Edit the pipeline file
10 |
11 | In order to customize the recording pipeline we need to follow the steps below, e.g. to adjust our RTSP stream configuration:
12 |
13 | {% stepper %}
14 | {% step %}
15 | #### Update the pipeline in [cameras.rs](../../src/cu29/pipelines/cameras.rs)
16 |
17 | Go to [`cameras.rs`](../../src/cu29/pipelines/cameras.rs) and update the `config` parameter by specifying the path to the pipeline `ron` file that you want to use for the recording task.
18 |
19 | As an example, we provide a couple of pipelines to record from one or multiple cameras. See: `cameras_1.ron` , `cameras_2.ron` , etc.
20 |
21 | ```rust
22 | #[copper_runtime(config = "src/cu29/pipelines/cameras_1.ron")]
23 | struct CamerasApp {}
24 | ```
25 | {% endstep %}
26 |
27 | {% step %}
28 | #### Customize the pipeline file
29 |
30 | You can definitely customize the `ron` file, e.g. to update the camera parameters like the `source_uri` to point to your RTSP camera, or to enable/disable the broadcasting.
31 |
32 | {% hint style="info" %}
33 | The RTSP URL is expected to be in the following format:
34 |
35 |
"rtsp://<username>:<password>@<ip>:<port>/<stream>"
36 |
37 | {% endhint %}
38 |
39 | {% hint style="danger" %}
40 | The `channel_id` must be a valid `usize` number and must not be repeated.
41 | {% endhint %}
42 | {% endstep %}
43 | {% endstepper %}
44 |
45 | These are example `ron` files for single-camera and multi-camera setups, with broadcasting included
46 |
47 | {% tabs %}
48 | {% tab title="RTSP (single)" %}
49 | ```json
50 | (
51 | tasks: [
52 | (
53 | id: "cam0",
54 | type: "crate::cu29::tasks::VideoCapture",
55 | config: {
56 | "source_type": "rtsp",
57 | // URL of the RTSP camera
58 | // rtsp://:@:/
59 | "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
60 | "channel_id": 0,
61 | }
62 | ),
63 | (
64 | id: "enc0",
65 | type: "crate::cu29::tasks::ImageEncoder",
66 | ),
67 | (
68 | id: "logger",
69 | type: "crate::cu29::tasks::RerunLoggerOne",
70 | config: {
71 | // Path to the directory where the recordings will be stored
72 | "path": "/tmp/",
73 | }
74 | ),
75 | (
76 | id: "bcast0",
77 | type: "crate::cu29::tasks::ImageBroadcast",
78 | ),
79 | ],
80 | cnx: [
81 | (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
82 | (src: "enc0", dst: "logger", msg: "crate::cu29::msgs::EncodedImage"),
83 | (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
84 | ]
85 | ,
86 | logging: (
87 | slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
88 | section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
89 | enable_task_logging: false,
90 | ),
91 | )
92 |
93 | ```
94 | {% endtab %}
95 |
96 | {% tab title="RTSP (multi)" %}
97 | ```json
98 | (
99 | tasks: [
100 | (
101 | id: "cam0",
102 | type: "crate::cu29::tasks::VideoCapture",
103 | config: {
104 | "source_type": "rtsp",
105 | // URL of the RTSP camera
106 | // rtsp://:@:/
107 | "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
108 | "channel_id": 0,
109 | }
110 | ),
111 | (
112 | id: "cam1",
113 | type: "crate::cu29::tasks::VideoCapture",
114 | config: {
115 | "source_type": "rtsp",
116 | // URL of the RTSP camera
117 | // rtsp://:@:/
118 | "source_uri": "rtsp://tapo_terrace:123456789@192.168.1.151:554/stream2",
119 | "channel_id": 1,
120 | }
121 | ),
122 | (
123 | id: "enc0",
124 | type: "crate::cu29::tasks::ImageEncoder",
125 | ),
126 | (
127 | id: "enc1",
128 | type: "crate::cu29::tasks::ImageEncoder",
129 | ),
130 | (
131 | id: "bcast0",
132 | type: "crate::cu29::tasks::ImageBroadcast",
133 | ),
134 | (
135 | id: "bcast1",
136 | type: "crate::cu29::tasks::ImageBroadcast",
137 | ),
138 | (
139 | id: "logger",
140 | type: "crate::cu29::tasks::RerunLoggerTwo",
141 | config: {
142 | // Path to the directory where the logs will be stored
143 | "path": "/tmp/",
144 | }
145 | ),
146 | ],
147 | cnx: [
148 | (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
149 | (src: "cam1", dst: "enc1", msg: "crate::cu29::msgs::ImageRgb8Msg"),
150 | (src: "enc0", dst: "logger", msg: "crate::cu29::msgs::EncodedImage"),
151 | (src: "enc1", dst: "logger", msg: "crate::cu29::msgs::EncodedImage"),
152 | (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
153 | (src: "enc1", dst: "bcast1", msg: "crate::cu29::msgs::EncodedImage"),
154 | ]
155 | ,
156 | logging: (
157 | slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
158 | section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
159 | enable_task_logging: false,
160 | ),
161 | )
162 |
163 | ```
164 | {% endtab %}
165 |
166 | {% tab title="Webcam" %}
167 | ```json
168 | (
169 | tasks: [
170 | (
171 | id: "cam0",
172 | type: "crate::cu29::tasks::VideoCapture",
173 | config: {
174 | "source_type": "v4l2",
175 | "source_uri": "/dev/video0",
176 | "source_fps": 30,
177 | "image_cols": 640,
178 | "image_rows": 480,
179 | }
180 | ),
181 | (
182 | id: "rerun",
183 | type: "crate::cu29::tasks::RerunLogger",
184 | config: {
185 | // Path to the directory where the logs will be stored
186 | "path": "/tmp/",
187 | // IP address of the rerun server
188 | "ip": "192.168.1.144",
189 | // Port of the rerun server
190 | "port": 9876,
191 | }
192 | )
193 | ],
194 | cnx: [
195 | (src: "cam0", dst: "rerun", msg: "crate::cu29::msgs::ImageRgb8Msg"),
196 | ]
197 | ,
198 | logging: (
199 | slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
200 | section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
201 | ),
202 | )
203 | ```
204 | {% endtab %}
205 | {% endtabs %}
206 |
207 | ## Start the server
208 |
209 | ```
210 | just serve
211 | ```
212 |
213 | ```bash
214 | [2025-04-13T12:22:53Z INFO bubbaloop::api::server] 🚀 Starting the server
215 | [2025-04-13T12:22:53Z INFO bubbaloop::api::server] 🔥 Listening on: 0.0.0.0:3000
216 | [2025-04-13T12:22:53Z INFO bubbaloop::api::server] 🔧 Press Ctrl+C to stop the server
217 | ```
218 |
219 | ## Start streaming
220 |
221 | Start the camera pipeline and log using [rerun.io](https://www.rerun.io).
222 |
223 | ```
224 | just start-pipeline cameras 0.0.0.0 3000
225 | ```
226 |
227 | ```bash
228 | Result: {
229 | "message": "Pipeline recording started"
230 | }
231 | ```
232 |
233 | ## Visualize the streaming
234 |
235 | You can use the example [`python-streaming`](https://github.com/kornia/bubbaloop/tree/main/examples/python-streaming) to visualize the streams in real-time using Rerun.
236 |
237 | ```bash
238 | python examples/python-streaming/client.py \
239 | --host 0.0.0.0 --port 3000 --cameras 0 # 1 (for multi cam)
240 | ```
241 |
242 | {% tabs %}
243 | {% tab title="Single Camera" %}
244 |
245 | {% endtab %}
246 |
247 | {% tab title="Multi Camera" %}
248 |
249 | {% endtab %}
250 | {% endtabs %}
251 |
252 | ## Start Recording
253 |
254 | Send a request to the server to start recording from the cameras
255 |
256 | ```bash
257 | just start-recording 0.0.0.0 3000
258 | ```
259 |
260 | #### Client terminal
261 |
262 | ```
263 | Result: {
264 | "message": "Pipeline recording started"
265 | }
266 | ```
267 |
268 | ## Stop recording
269 |
270 | To stop the pipeline, use the `stop-pipeline` command:
271 |
272 | ```bash
273 | just stop-pipeline recording 0.0.0.0 3000
274 | ```
275 |
276 | #### **Client terminal**
277 |
278 | ```
279 | Result: {
280 | "message": "Pipeline recording stopped"
281 | }
282 | ```
283 |
284 | #### **Server terminal**
285 |
286 | ```bash
287 | [2025-04-13T12:10:45Z DEBUG bubbaloop::api::handles::pipeline] Request to stop pipeline: recording
288 | [2025-04-13T12:10:45Z DEBUG bubbaloop::cu29::pipelines::recording] Recording pipeline stopped
289 | [2025-04-13T12:10:45Z DEBUG re_log_encoding::file_sink] Log stream written to /tmp/1744545975.rrd
290 | ```
291 |
292 | ## Get the recorded data and Visualize
293 |
294 | You can copy the recorded files to your computer (e.g. into your home directory, or via ssh).
295 |
296 | ```bash
297 | scp bubbaloop777:/home/nvidia/1735941642.rrd ~/data
298 | ```
299 |
300 | Open the file directly with rerun to introspect the recording
301 |
302 | ```bash
303 | rerun 1735941642.rrd
304 | ```
305 |
--------------------------------------------------------------------------------
/docs/examples/hello-world.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Your first Bubbaloop service experience
3 | ---
4 |
5 | # 🌈 Hello World
6 |
7 | ## Start the server
8 |
9 | ```
10 | just serve
11 | ```
12 |
13 | ## Request to start the task
14 |
15 | Send a HTTP request to the server to start the background task
16 |
17 | ```
18 | just start-pipeline bubbaloop 0.0.0.0 3000
19 | ```
20 |
21 | From the server side you will see the following
22 |
23 | ```bash
24 | [2025-01-05T15:51:33Z DEBUG bubbaloop::pipeline] | Hello !! This is a Bubbaloop !!! 🎮
25 | [2025-01-05T15:51:34Z DEBUG bubbaloop::pipeline] / Hello !! This is a Bubbaloop !!! 🌈
26 | [2025-01-05T15:51:35Z DEBUG bubbaloop::pipeline] - Hello !! This is a Bubbaloop !!! 😊
27 | [2025-01-05T15:51:36Z DEBUG bubbaloop::pipeline] \ Hello !! This is a Bubbaloop !!! 🚀
28 | [2025-01-05T15:51:37Z DEBUG bubbaloop::pipeline] | Hello !! This is a Bubbaloop !!! 🦀
29 | [2025-01-05T15:51:38Z DEBUG bubbaloop::pipeline] / Hello !! This is a Bubbaloop !!! 🎉
30 | ```
31 |
32 | ## Stop the task
33 |
34 | To stop the pipeline, use the `stop-pipeline` command:
35 |
36 | ```
37 | just stop-pipeline bubbaloop 0.0.0.0 3000
38 | ```
39 |
40 | #### From client
41 |
42 | ```
43 | Result: {
44 | "message": "Pipeline bubbaloop stopped"
45 | }
46 | ```
47 |
48 | #### From server
49 |
50 | ```bash
51 | [2025-01-05T15:51:39Z DEBUG bubbaloop::pipeline] Request to stop pipeline: bubbaloop
52 | [2025-01-05T15:51:40Z DEBUG bubbaloop::pipeline] Pipeline bubbaloop stopped after 155 iterations
53 | ```
54 |
--------------------------------------------------------------------------------
/docs/model-inference-experimental.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Example showing how to use the models inference functionality.
3 | ---
4 |
5 | # 🍄 Model Inference (experimental)
6 |
7 | The **Bubbaloop** server is able to run **inference** with Visual Language Models (VLM) efficiently, consuming the camera streams directly in the same process without added latency, and broadcast the results.
8 |
9 | Supported models (via [Kornia](https://github.com/kornia/kornia-paligemma) / [Candle](https://github.com/huggingface/candle))
10 |
11 | * PaliGemma: [https://ai.google.dev/gemma/docs/paligemma](https://ai.google.dev/gemma/docs/paligemma/prompt-system-instructions)
12 |
13 | ## Edit the pipeline
14 |
15 | Similar to the [Camera Recording](examples/camera-recording.md) pipeline, we can customize the `inference.ron` pipeline to adjust to our system setup. This will require compiling every time you modify your config.
16 |
17 | ```json
18 | (
19 | tasks: [
20 | // NOTE: Modify this block to customize
21 | (
22 | id: "cam0",
23 | type: "crate::cu29::tasks::VideoCapture",
24 | config: {
26 | // URL of the RTSP camera
27 | "source_type": "rtsp",
28 | "source_uri": "rtsp://:@:/"
29 | }
30 | ),
31 | (
32 | id: "inference",
33 | type: "crate::cu29::tasks::Inference",
34 | ),
35 | (
36 | id: "bcast_text",
37 | type: "crate::cu29::tasks::BroadcastChat",
38 | ),
39 | (
40 | id: "bcast_image",
41 | type: "crate::cu29::tasks::BroadcastImage",
42 | ),
43 | ],
44 | cnx: [
45 | (src: "cam0", dst: "inference", msg: "crate::cu29::msgs::ImageRgb8Msg"),
46 | (src: "cam0", dst: "bcast_image", msg: "crate::cu29::msgs::ImageRgb8Msg"),
47 | (src: "inference", dst: "bcast_text", msg: "crate::cu29::msgs::PromptResponseMsg"),
48 | ],
49 | logging: (
50 | slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
51 | section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
52 | enable_task_logging: false,
53 | ),
54 | )
55 | ```
56 |
57 | ## Start the server
58 |
59 | ```
60 | just serve
61 | ```
62 |
63 | ## Start the inference
64 |
65 | ```
66 | just start-pipeline inference 0.0.0.0 3000
67 | ```
68 |
69 | By default, this command will start the inference engine using the prompt "cap en" — to generate a short caption for each frame.
70 |
71 | {% hint style="info" %}
72 | Check the supported prompts: [https://ai.google.dev/gemma/docs/paligemma/prompt-system-instructions](https://ai.google.dev/gemma/docs/paligemma/prompt-system-instructions)
73 | {% endhint %}
74 |
75 | In your terminal you should be able to get something similar
76 |
77 | ```
78 | [2025-04-06T14:20:13Z INFO bubbaloop::api::server] 🚀 Starting the server
79 | [2025-04-06T14:20:13Z INFO bubbaloop::api::server] 🔥 Listening on: 0.0.0.0:3000
80 | [2025-04-06T14:20:13Z INFO bubbaloop::api::server] 🔧 Press Ctrl+C to stop the server
81 | [2025-04-06T14:20:31Z DEBUG bubbaloop::cu29::tasks::inference] Received response from inference thread: PromptResponseMsg { prompt: "cap en", response: " Two people are sitting on the bed. In-front of them there is a table with some objects and other things on it. On top of them there is roof, light and we can see trees and sky in the background is sunny." }
82 | ```
83 |
84 | ## Inference settings
85 |
86 | We expose some settings via a REST API at the following endpoint.
87 |
88 | ```
89 | curl -X POST "http://localhost:3000/api/v0/inference/settings" \
90 | -H "Content-Type: application/json" \
91 | -d '{"prompt": "answer Is there any human?"}'
92 | ```
93 |
94 | This will fix the prompt to run inference on to detect people
95 |
96 | ## Broadcast
97 |
98 | You can also access the image streams and prompt results via the following APIs, including their timestamps.
99 |
100 | #### **Jpeg encoded images**
101 |
102 | ```html
103 | http://localhost:3000/api/v0/streaming/image
104 | ```
105 |
106 | #### **Model inference results**
107 |
108 | ```
109 | http://localhost:3000/api/v0/inference/results
110 | ```
111 |
112 | #### Visualize streams with inference results
113 |
114 | We provide a small Python script that calls the above endpoints and visualizes the results with [Rerun](https://rerun.io/)
115 |
116 | {% hint style="info" %}
117 | [https://github.com/kornia/bubbaloop/blob/main/examples/python-inference/client.py](../examples/python-inference/client.py)
118 | {% endhint %}
119 |
120 |
121 |
122 | ## Stop inference
123 |
124 | To stop the pipeline, use the `stop-pipeline` command:
125 |
126 | ```
127 | just stop-pipeline inference 0.0.0.0 3000
128 | ```
129 |
--------------------------------------------------------------------------------
/docs/pipelines.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Basic usage and pipeline management with Bubbaloop
3 | ---
4 |
5 | # 🍰 Pipeline API
6 |
7 | **Bubbaloop** is a Rust-based server application that orchestrates computational pipelines using the Cu29 ([copper-rs](https://github.com/copper-project/copper-rs)) framework. It provides both an HTTP API and CLI for managing these pipelines.
8 |
9 | ## Core Concepts
10 |
11 | * Pipeline Management: The system dynamically manages multiple pipeline types (bubbaloop, inference, recording, streaming) that process data through connected tasks.
12 | * Cu29/Copper Framework: Pipelines are built using the Cu29 framework ([copper-rs](https://github.com/copper-project/copper-rs)), which provides a task-based computation model with message passing between components.
13 | * RON Configuration: Pipelines are defined in [RON](https://github.com/ron-rs/ron) (Rusty Object Notation) files that specify:
14 | * Tasks: Individual processing components with unique IDs and configurations
15 | * Connections: Message flows between tasks with specific message types
16 |
17 | ## Architecture
18 |
19 | * API Server: An Axum-based HTTP server that exposes endpoints for pipeline management
20 | * Pipeline Store: Central registry tracking all running pipelines with their statuses
21 | * Result Store: Maintains processing results and enables streaming of data between components
22 |
23 | ## Pipeline Types
24 |
25 | * `bubbaloop` — Our hello-world simple demo pipeline
26 | * `cameras` — Captures and records video streams from a single camera or multiple cameras
27 | * `inference` — Processes video streams for inference using computer vision models
28 |
29 | ## Available API
30 |
31 | * `POST /api/v0/pipeline/start` Start a pipeline with specified ID
32 | * `POST /api/v0/pipeline/stop` Stop a running pipeline
33 | * `GET /api/v0/pipeline/list` List all available pipelines with their statuses
34 |
35 | ## Usage
36 |
37 | ### Start pipeline
38 |
39 | Create and register a pipeline given its name. This will spawn a background task.
40 |
41 | ```
42 | just start-pipeline HOST IP PIPE_NAME
43 | ```
44 |
45 | ```bash
46 | Result: {
47 | "message": "Pipeline 'PIPE_NAME' started"
48 | }
49 | ```
50 |
51 | ### Stop pipeline
52 |
53 | To stop the pipeline, use the `stop-pipeline` command:
54 |
55 | ```
56 | just stop-pipeline HOST IP PIPE_NAME
57 | ```
58 |
59 | ```bash
60 | Result: {
61 | "message": "Pipeline 'PIPE_NAME' stopped"
62 | }
63 | ```
64 |
65 | ### List pipelines
66 |
67 | To list all the registered pipelines and their status, use the `list-pipeline` command:
68 |
69 | ```
70 | just pipeline-list HOST IP
71 | ```
72 |
73 | ```bash
74 | Result: [
75 | {
76 | "id": "bubbaloop",
77 | "status": "Running"
78 | }
79 | ]
80 | ```
81 |
--------------------------------------------------------------------------------
/docs/quickstart.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Get started with serving Bubbaloop serving platform
3 | ---
4 |
5 | # 🚀 Quickstart
6 |
7 | ## Setup the project
8 |
9 | {% hint style="info" %}
10 | Windows users are recommended to use Windows Subsystem for Linux by running `wsl.exe --install Ubuntu-22.04` on a Powershell.
11 | {% endhint %}
12 |
13 | {% hint style="info" %}
14 | You may need to install [rust](https://www.rust-lang.org/tools/install) if you have not.
15 | {% endhint %}
16 |
17 | {% stepper %}
18 | {% step %}
19 | **Download the project**
20 |
21 | ```
22 | git clone https://github.com/kornia/bubbaloop.git
23 | ```
24 | {% endstep %}
25 |
26 | {% step %}
27 | **Install pre-requisites**
28 |
29 | {% hint style="info" %}
30 | you need to install `cargo` in order to fetch and build necessary packages. If you don't have `cargo`, you can install it by following the instructions on the [official Rust website](https://www.rust-lang.org/tools/install).
31 | {% endhint %}
32 |
33 | Install **justfile**: [https://github.com/casey/just?tab=readme-ov-file#linux](https://github.com/casey/just?tab=readme-ov-file#linux)
34 | {% endstep %}
35 |
36 | {% step %}
37 | **Install Dependencies**
38 |
39 | To get started, ensure all necessary system dependencies
40 |
41 | ```
42 | just install_deps
43 | ```
44 | {% endstep %}
45 | {% endstepper %}
46 |
47 | ## Serve in local
48 |
49 | Launch the server via the terminal; it defaults to listening on `0.0.0.0:3000`
50 |
51 | ```
52 | just serve
53 | ```
54 |
55 | You might observe something like this:
56 |
57 | ```bash
58 | [2025-01-04T23:14:46Z INFO bubbaloop::api] 🚀 Starting the server
59 | [2025-01-04T23:14:46Z INFO bubbaloop::api] 🔥 Listening on: 0.0.0.0:3000
60 | [2025-01-04T23:14:46Z INFO bubbaloop::api] 🔧 Press Ctrl+C to stop the server
61 | ```
62 |
63 | ## Serve remotely
64 |
65 | Repeat the process above on a remote machine (e.g. an Nvidia Jetson) and give a `HOST` and an `IP` to serve remotely.
66 |
67 | ```bash
68 | just serve 192.168.1.154 3000
69 | ```
70 |
71 | ## Use the Rust CLI :crab:
72 |
73 | ```bash
74 | just help
75 | ```
76 |
77 | ```bash
78 | Usage: bubbaloop [-h ] [-p ] []
79 |
80 | Bubbaloop CLI
81 |
82 | Options:
83 | -h, --host the host to listen on
84 | -p, --port the port to listen on
85 | --help, help display usage information
86 |
87 | Commands:
88 | inference Inference management commands
89 | pipeline Pipeline management commands
90 | recording Recording management commands
91 | stats Get stats about the server
92 | ```
93 |
--------------------------------------------------------------------------------
/docs/tutorials/security-camera.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: 'Bubbaloop 101: Turn Your Phone into a Smart Security Camera in 10 Minutes'
3 | icon: house-signal
4 | cover: >-
5 | https://images.unsplash.com/photo-1520697830682-bbb6e85e2b0b?crop=entropy&cs=srgb&fm=jpg&ixid=M3wxOTcwMjR8MHwxfHNlYXJjaHw4fHxzZWN1cml0eXxlbnwwfHx8fDE3NDY4OTI5ODF8MA&ixlib=rb-4.1.0&q=85
6 | coverY: 0
7 | ---
8 |
9 | # Home Security App
10 |
11 | **Why should you care?**
12 |
13 | * **You already own the hardware.** An old iPhone or Android device on your windowsill is now your first smart security feed.
14 | * **Privacy‑first.** Everything stays local on a $249 Jetson Orin Nano or your laptop – no cloud fees, no vendor lock‑in.
15 | * **Instant insight.** Live multi‑camera visualization and local video recording with spatial intelligence built in.
16 |
17 | This guide walks you through setting up **Bubbaloop**, an open-source camera pipeline built with Rust and [kornia-rs](https://github.com/kornia/kornia-rs), to:
18 |
19 | * Ingest real-time video from your phone or IP cameras
20 | * Do high level vision tasks like question answering, object detection etc on frames
21 | * Visualize and interact with the results in real-time
22 | * All with high performance on low-cost edge hardware
23 |
24 | ⏱️ You’ll go from "unopened box" to live feed + local recording in 10–15 minutes.
25 |
26 | ***
27 |
28 | ## What You'll Need
29 |
30 | ### Your Phone or Any Camera
31 |
32 | * **iPhone** – use [RTSP Stream](https://apps.apple.com/us/app/rtsp-stream/id6474928937) or Larix Broadcaster
33 | * **Android** – use [WebCamPro](https://play.google.com/store/apps/details?id=com.shenyaocn.android.WebCamPro\&hl=en)
34 | * **Optional**: IP Cam (RTSP compatible) – e.g. TP-Link Tapo TC65 (\~£29)
35 |
36 |
37 |
38 | ### Hardware
39 |
40 | * **Jetson Orin Nano (8GB)** – [Buy here from Seeed Studio](https://www.seeedstudio.com/NVIDIAr-Jetson-Orintm-Nano-Developer-Kit-p-5617.html) (\~$249)
41 | * Or your **Linux laptop / PC**
42 |
43 |
44 |
45 | ### Software & Tools
46 |
47 | * Rust + Cargo — [https://www.rust-lang.org/](https://www.rust-lang.org/)
48 | * Kornia-rs: high-performance vision tools in Rust — [https://github.com/kornia/kornia-rs](https://github.com/kornia/kornia-rs)
49 | * Just: command runner — [https://just.systems/](https://just.systems/)
50 | * [Rerun.io](https://rerun.io/) for real-time visualization (optional but recommended)
51 |
52 | ***
53 |
54 | ## Set Up Camera Streaming First
55 |
56 | {% tabs %}
57 | {% tab title="iPhone" %}
58 | * Download [RTSP Stream](https://apps.apple.com/us/app/rtsp-stream/id6474928937)
59 | * Start a stream and take note of the RTSP URL (e.g. `rtsp://your-ip:8554/live`)
60 | {% endtab %}
61 |
62 | {% tab title="Android" %}
63 | * Install [WebCamPro](https://play.google.com/store/apps/details?id=com.shenyaocn.android.WebCamPro\&hl=en)
64 | * Enable RTSP streaming
65 | * Get your stream URL (e.g. `rtsp://192.168.1.x:8554/live`)
66 | {% endtab %}
67 | {% endtabs %}
68 |
69 | ***
70 |
71 | ## Step-by-Step Setup
72 |
73 | ### Clone the Repo
74 |
75 | ```bash
76 | git clone https://github.com/kornia/bubbaloop.git
77 | cd bubbaloop
78 | ```
79 |
80 | ## Configure Your Camera
81 |
82 | Edit `src/cu29/pipelines/cameras_1.ron`:
83 |
84 | ```json
85 | (
86 | tasks: [
87 | (
88 | id: "cam0",
89 | type: "crate::cu29::tasks::VideoCapture",
90 | config: {
91 | "source_type": "rtsp",
92 | // URL of the RTSP camera
93 | // rtsp://:@:/
94 | "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
95 | "channel_id": 0,
96 | }
97 | ),
98 | ],
99 | )
100 | ```
101 |
102 | ### Install bubbaloop
103 |
104 | ```bash
105 | sudo ./scripts/install_linux.sh
106 | ```
107 |
108 | This will install all the necessary dependencies including Rust (if not installed on your computer) and start the system process. You can check the status via
109 |
110 | ```bash
111 | systemctl status bubbaloop
112 | ```
113 |
114 | for real time logs
115 |
116 | ```bash
117 | sudo journalctl -u bubbaloop.service -f
118 | ```
119 |
120 | ## Start a Camera Pipeline
121 |
122 | ```bash
123 | bubbaloop pipeline start --name cameras
124 | ```
125 |
126 | To stop:
127 |
128 | ```bash
129 | bubbaloop pipeline stop --name cameras
130 | ```
131 |
132 | List all pipelines:
133 |
134 | ```bash
135 | bubbaloop pipeline list
136 | ```
137 |
138 | ***
139 |
140 | ## Start a recording
141 |
142 | ```bash
143 | bubbaloop recording start
144 | ```
145 |
146 | To stop:
147 |
148 | ```bash
149 | bubbaloop recording stop
150 | ```
151 |
152 | ***
153 |
154 | ## Visualize with Rerun
155 |
156 | ```bash
157 | python examples/python-streaming/client.py --host 0.0.0.0 --port 3000 --cameras 0
158 | ```
159 |
160 | Or view a recorded `.rrd` file:
161 |
162 | ```bash
163 | scp your-device:/tmp/1735941642.rrd ./
164 | rerun 1735941642.rrd
165 | ```
166 |
167 |
168 |
169 | ***
170 |
171 | ## Running Paligemma for Object Detection (Experimental)
172 |
173 | {% hint style="warning" %}
174 | For now the pipelines are mutually exclusive. This means that before starting the inference you need to stop any running pipeline.
175 | {% endhint %}
176 |
177 | Now you can start safely the inference engine
178 |
179 | ```bash
180 | bubbaloop pipeline start --name inference
181 | ```
182 |
183 | ### Customise the prompt
184 |
185 | You can change the prompt online with the following command
186 |
187 | ```bash
188 | bubbaloop inference settings --prompt "Is there any human?"
189 | ```
190 |
191 | ### Request the inference result
192 |
193 | The inference result can be obtained using the following command
194 |
195 | ```bash
196 | bubbaloop inference result
197 | ```
198 |
199 | #### Client
200 |
201 | ```bash
202 | Result: {
203 | "Success": {
204 | "channel_id": 0,
205 | "prompt": "Is there any human?",
206 | "response": "no",
207 | "stamp_ns": 141281452950
208 | }
209 | }
210 | ```
211 |
212 | ***
213 |
214 | ## Contribute / Feedback
215 |
216 | Join our [Discord server](https://discord.com/invite/HfnywwpBnD) or open issues on [GitHub](https://github.com/kornia/bubbaloop).
217 |
--------------------------------------------------------------------------------
/docs/usage.md:
--------------------------------------------------------------------------------
1 | ---
2 | description: Low level utilities with the Bubbaloop server to get system stats and metrics
3 | layout:
4 | title:
5 | visible: true
6 | description:
7 | visible: true
8 | tableOfContents:
9 | visible: true
10 | outline:
11 | visible: true
12 | pagination:
13 | visible: true
14 | ---
15 |
16 | # 💊 Stats API
17 |
18 | The **Bubbaloop** server provides a comprehensive REST API that allows users to retrieve detailed system information through the `/api/v0/stats` endpoint. This API leverages established Rust libraries to deliver accurate and extensive system data in a structured JSON format.
19 |
20 | We expose the following functionality
21 |
22 | * `whoami` [https://docs.rs/whoami/latest/whoami](https://docs.rs/whoami/latest/whoami/)
23 | * `sysinfo` [https://docs.rs/sysinfo/latest/sysinfo](https://docs.rs/sysinfo/latest/sysinfo/)
24 |
25 | ## Available API
26 |
27 | * `GET /api/v0/stats/whoami` — Provides detailed information about the system's identity
28 | * `GET /api/v0/stats/sysinfo` — Delivers comprehensive system resource metrics
29 |
30 | ## Usage
31 |
32 | ### whoami
33 |
34 | ```
35 | just whoami 0.0.0.0 3000
36 | ```
37 |
38 | #### **Server terminal**
39 |
40 | ```bash
41 | [2025-04-13T15:03:20Z DEBUG bubbaloop::api::handles::stats::whoami] 🤖 Received request for whoami
42 | ```
43 |
44 | #### **Client terminal**
45 |
46 | ```bash
47 | Result: {
48 | "arch": "Arm64",
49 | "desktop_env": "Unknown",
50 | "device_name": "nvidia-desktop",
51 | "distro": "Ubuntu 22.04.5 LTS",
52 | "hostname": "nvidia-desktop",
53 | "platform": "Linux",
54 | "realname": "nvidia",
55 | "username": "nvidia"
56 | }
57 | ```
58 |
59 | ### sysinfo
60 |
61 | ```
62 | just sysinfo 0.0.0.0 3000
63 | ```
64 |
65 | #### **Server terminal**
66 |
67 | ```bash
68 | [2025-04-13T15:03:45Z DEBUG bubbaloop::api::handles::stats::sysinfo] 🤖 Received request for sysinfo
69 | ```
70 |
71 | #### **Client terminal**
72 |
73 | ```json
74 | Result: {
75 | "available_memory": 7011606528,
76 | "cpus": [
77 | {
78 | "brand": "Cortex-A78AE",
79 | "frequency": 1113,
80 | "name": "cpu0",
81 | "usage": 0.0
82 | },
83 | {
84 | "brand": "Cortex-A78AE",
85 | "frequency": 1113,
86 | "name": "cpu1",
87 | "usage": 0.0
88 | },
89 | {
90 | "brand": "Cortex-A78AE",
91 | "frequency": 1113,
92 | "name": "cpu2",
93 | "usage": 0.0
94 | },
95 | {
96 | "brand": "Cortex-A78AE",
97 | "frequency": 1113,
98 | "name": "cpu3",
99 | "usage": 0.0
100 | },
101 | {
102 | "brand": "Cortex-A78AE",
103 | "frequency": 729,
104 | "name": "cpu4",
105 | "usage": 0.0
106 | },
107 | {
108 | "brand": "Cortex-A78AE",
109 | "frequency": 729,
110 | "name": "cpu5",
111 | "usage": 0.0
112 | }
113 | ],
114 | "disks": [
115 | {
116 | "available_space": 186810265600,
117 | "file_system": "ext4",
118 | "mount_point": "/",
119 | "name": "/dev/mmcblk0p1",
120 | "total_space": 250131267584
121 | },
122 | {
123 | "available_space": 65946624,
124 | "file_system": "vfat",
125 | "mount_point": "/boot/efi",
126 | "name": "/dev/mmcblk0p10",
127 | "total_space": 66059264
128 | }
129 | ],
130 | "free_memory": 4320612352,
131 | "global_cpu_usage": 18.697363,
132 | "host_name": "nvidia-desktop",
133 | "kernel_version": "5.15.148-tegra",
134 | "name": "Ubuntu",
135 | "os_version": "22.04",
136 | "total_memory": 7990116352,
137 | "total_swap": 3995049984,
138 | "used_memory": 978509824
139 | }
140 | ```
141 |
--------------------------------------------------------------------------------
/examples/python-inference/client.py:
--------------------------------------------------------------------------------
1 | """Example of a client that requests the inference result from the server."""
2 |
3 | import argparse
4 | import asyncio
5 | import httpx
6 | import rerun as rr
7 | import kornia_rs as kr
8 | import numpy as np
9 |
10 |
async def get_api_response(client: httpx.AsyncClient, url: str) -> dict | None:
    """Fetch ``url`` and return the decoded JSON body, or ``None`` on failure.

    Args:
        client: Shared httpx async client used for the request.
        url: Full endpoint URL to query.

    Returns:
        The parsed JSON response as a dict, or ``None`` when the request
        failed with any ``httpx.HTTPError`` (timeout, connection error, ...).
    """
    try:
        response = await client.get(url)
    except httpx.HTTPError as err:
        # Report the actual failure; the previous message claimed a timeout
        # for every kind of HTTP error, which was misleading.
        print(f"Request to {url} failed: {err!r}. Please try again.")
        return None

    # NOTE(review): no raise_for_status() here — a non-2xx body is still
    # parsed as JSON, matching the original behavior.
    return response.json()
23 |
24 |
# Reuse a single decoder instead of constructing one per polled frame; this
# avoids a per-frame allocation in the polling hot loop.
# NOTE(review): assumes kr.ImageDecoder is reusable across calls — confirm.
_IMAGE_DECODER = kr.ImageDecoder()


def response_to_image(response: dict) -> rr.Image:
    """Decode the JPEG bytes in ``response["data"]`` into a rerun image."""
    data = _IMAGE_DECODER.decode(bytes(response["data"]))
    return rr.Image(data)
30 |
31 |
def response_to_inference_result(response: dict) -> rr.TextLog:
    """Format the prompt/response pair from the server as a rerun text log."""
    message = f"prompt: {response['prompt']} -- response: {response['response']}"
    return rr.TextLog(message, level=rr.TextLogLevel.INFO)
35 |
36 |
async def poll_image(client: httpx.AsyncClient, url: str, rr):
    """Continuously poll ``url`` for JPEG frames and log them to rerun.

    Runs forever; each successful response is timestamped with the
    server-provided ``stamp_ns`` and logged under ``/cam/<channel_id>``.
    """
    while True:
        payload = await get_api_response(client, url)
        if payload is None or "Success" not in payload:
            continue

        result = payload["Success"]
        rr.set_time(
            "session",
            timestamp=np.datetime64(result["stamp_ns"], "ns"),
        )
        rr.log(f"/cam/{result['channel_id']}", response_to_image(result))
49 |
50 |
async def poll_inference_result(client: httpx.AsyncClient, url: str, rr):
    """Continuously poll ``url`` for inference results and log them to rerun.

    Runs forever; each successful response is timestamped with the
    server-provided ``stamp_ns`` and logged under ``/logs/<channel_id>``.
    """
    while True:
        payload = await get_api_response(client, url)
        if payload is None or "Success" not in payload:
            continue

        result = payload["Success"]
        rr.set_time(
            "session",
            timestamp=np.datetime64(result["stamp_ns"], "ns"),
        )
        rr.log(
            f"/logs/{result['channel_id']}",
            response_to_inference_result(result),
        )
66 |
67 |
async def main() -> None:
    """Spawn pollers for the image stream and the inference results."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", type=str, default="0.0.0.0")
    parser.add_argument("--port", type=int, default=3000)
    args = parser.parse_args()

    rr.init("rerun_inference_client", spawn=True)

    base_url = f"http://{args.host}:{args.port}/api/v0"

    # Both pollers run forever; gather wraps each coroutine in a task and
    # drives them concurrently on the same client.
    async with httpx.AsyncClient(timeout=None) as client:
        await asyncio.gather(
            poll_image(client, url=f"{base_url}/streaming/image/0", rr=rr),
            poll_inference_result(
                client, url=f"{base_url}/inference/result", rr=rr
            ),
        )
95 |
96 |
97 | if __name__ == "__main__":
98 | asyncio.run(main())
99 |
--------------------------------------------------------------------------------
/examples/python-inference/requirements.txt:
--------------------------------------------------------------------------------
1 | httpx==0.27.0
2 | rerun-sdk
3 | kornia_rs
--------------------------------------------------------------------------------
/examples/python-rerun-files/main.py:
--------------------------------------------------------------------------------
1 | """
2 | This script reads a rerun file, decodes the images and logs them to rerun again.
3 | """
4 |
5 | import argparse
6 | from pathlib import Path
7 | import kornia_rs as kr
8 | import rerun as rr
9 |
10 |
def main() -> None:
    """Read a rerun recording, decode the JPEG frames and re-log them.

    Expects a ``--log-file`` pointing at an ``.rrd`` recording containing
    ``/cam/0`` and/or ``/cam/1`` topics with JPEG-encoded blobs.
    """
    parser = argparse.ArgumentParser(
        description="Read a rerun file and print the messages"
    )
    parser.add_argument("--log-file", type=Path, required=True)
    args = parser.parse_args()

    rr.init("rerun_video_example", spawn=True)

    # load the recording
    recording = rr.dataframe.load_recording(args.log_file)

    image_decoder = kr.ImageDecoder()

    for cam_topic in ["/cam/0", "/cam/1"]:
        print(f"Processing {cam_topic} ...")
        view = recording.view(index="log_time", contents=cam_topic)
        table = view.select().read_all()

        # convert the table to a pandas dataframe to iterate over the rows
        df = table.to_pandas()

        for _, row in df.iterrows():
            _, time, blob, media_type = row
            if media_type is None:
                continue

            # decode the jpeg image to a numpy array HxWxC
            image = image_decoder.decode(blob[0].tobytes())

            # BUG FIX: `time` is a pandas Timestamp here; `.nanosecond` is
            # only the sub-microsecond component (0-999), so every frame was
            # logged at nearly the same time. `.value` is the full epoch
            # time in nanoseconds, which is what set_time_nanos expects.
            rr.set_time_nanos("timeline", time.value)
            rr.log(cam_topic, rr.Image(image))
44 |
45 |
46 | if __name__ == "__main__":
47 | main()
48 |
--------------------------------------------------------------------------------
/examples/python-rerun-files/requirements.txt:
--------------------------------------------------------------------------------
1 | rerun-sdk
2 | kornia_rs
3 | pandas
--------------------------------------------------------------------------------
/examples/python-streaming/client.py:
--------------------------------------------------------------------------------
1 | """Example of a client that requests the streaming image from the server.
2 |
3 | Usage:
4 | python examples/python-streaming/client.py --host 0.0.0.0 --port 3000 --cameras 0 1
5 | """
6 |
7 | import argparse
8 | import asyncio
9 | import httpx
10 | import rerun as rr
11 | import kornia_rs as kr
12 |
13 |
async def get_api_response(client: httpx.AsyncClient, url: str) -> dict | None:
    """Fetch ``url`` and return the decoded JSON body, or ``None`` on failure.

    Args:
        client: Shared httpx async client used for the request.
        url: Full endpoint URL to query.

    Returns:
        The parsed JSON response as a dict, or ``None`` when the request
        failed with any ``httpx.HTTPError`` (timeout, connection error, ...).
    """
    try:
        response = await client.get(url)
    except httpx.HTTPError as err:
        # Report the actual failure; the previous message claimed a timeout
        # for every kind of HTTP error, which was misleading.
        print(f"Request to {url} failed: {err!r}. Please try again.")
        return None

    # NOTE(review): no raise_for_status() here — a non-2xx body is still
    # parsed as JSON, matching the original behavior.
    return response.json()
26 |
27 |
# Reuse a single decoder instead of constructing one per polled frame; this
# avoids a per-frame allocation in the polling hot loop.
# NOTE(review): assumes kr.ImageDecoder is reusable across calls — confirm.
_IMAGE_DECODER = kr.ImageDecoder()


def response_to_image(response: dict) -> rr.Image:
    """Decode the JPEG bytes in ``response["data"]`` into a rerun image."""
    data = _IMAGE_DECODER.decode(bytes(response["data"]))
    return rr.Image(data)
33 |
34 |
async def poll_image(client: httpx.AsyncClient, url: str, rr):
    """Continuously poll ``url`` for JPEG frames and log them to rerun.

    Runs forever; each successful response sets the ``session`` sequence to
    the server-provided ``stamp_ns`` and logs under ``/cam/<channel_id>``.
    """
    while True:
        payload = await get_api_response(client, url)
        if payload is None or "Success" not in payload:
            continue

        result = payload["Success"]
        rr.set_time_sequence("session", result["stamp_ns"])
        rr.log(f"/cam/{result['channel_id']}", response_to_image(result))
44 |
45 |
async def main() -> None:
    """Spawn one image poller per requested camera and run them forever."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", type=str, default="0.0.0.0")
    parser.add_argument("--port", type=int, default=3000)
    parser.add_argument("--cameras", type=int, nargs="+", default=[0])
    args = parser.parse_args()

    rr.init("rerun_streaming_client", spawn=True)

    # One polling coroutine per camera id, all sharing a single client.
    async with httpx.AsyncClient(timeout=None) as client:
        pollers = [
            poll_image(
                client,
                url=f"http://{args.host}:{args.port}/api/v0/streaming/image/{camera_id}",
                rr=rr,
            )
            for camera_id in args.cameras
        ]
        await asyncio.gather(*pollers)
69 |
70 |
71 | if __name__ == "__main__":
72 | asyncio.run(main())
73 |
--------------------------------------------------------------------------------
/examples/python-streaming/requirements.txt:
--------------------------------------------------------------------------------
1 | httpx==0.27.0
2 | rerun-sdk
3 | kornia_rs
--------------------------------------------------------------------------------
/examples/react-app/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules/**
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/examples/react-app/README.md:
--------------------------------------------------------------------------------
1 | # Bubbaloop React App
2 |
3 | This is a React application for connecting to and displaying camera streams and inference results from a Bubbaloop backend.
4 |
5 | https://github.com/user-attachments/assets/2ba6575a-6a47-43d7-a8eb-0cdb76be1d43
6 |
7 | ## Prerequisites
8 |
9 | Before running the React app, you must have the Bubbaloop server running with the inference pipeline:
10 |
11 | 1. **Start the Bubbaloop server**
12 | ```bash
13 | # In the root of your bubbaloop project
14 |
15 | # First, start the server
16 | just serve 0.0.0.0 3000
17 |
18 | # Then, in another terminal, start the inference pipeline
19 | just start-pipeline inference 0.0.0.0 3000
20 | ```
21 |
22 | 2. **Verify the server is running**
23 | The server should start the inference pipeline and be ready to accept connections.
24 | You should see logs indicating successful initialization.
25 |
26 | ## Running the Application
27 |
28 | Once the server is running, follow these steps to run the React app:
29 |
30 | 1. **Install dependencies**
31 | ```bash
32 | cd examples/react-app
33 | npm install
34 | ```
35 |
36 | 2. **Start the development server**
37 | ```bash
38 | npm run dev
39 | ```
40 |
41 | 3. **Access the application**
42 | The app will be available at http://localhost:5173 (or another port if 5173 is in use)
43 |
44 | ## Using the Application
45 |
46 | 1. **Configure connection settings**
47 | - The app includes a connection settings panel to configure the host and port of your backend services
48 | - Default values are set to 0.0.0.0:3000
49 | - Update these values to match your backend server configuration
50 | - Click "Update Connection" to apply changes
51 |
52 | 2. **Features**
53 | - Stream viewer for camera feeds
54 | - Inference instruction panel for sending commands
55 | - Inference result display showing detection outcomes
56 |
57 | ## Requirements
58 |
59 | - Node.js (v16 or later recommended)
60 | - A running Bubbaloop backend service
61 |
--------------------------------------------------------------------------------
/examples/react-app/eslint.config.js:
--------------------------------------------------------------------------------
1 | import js from '@eslint/js'
2 | import globals from 'globals'
3 | import reactHooks from 'eslint-plugin-react-hooks'
4 | import reactRefresh from 'eslint-plugin-react-refresh'
5 | import tseslint from 'typescript-eslint'
6 |
// Flat ESLint config: base JS + TypeScript recommended rules, plus React
// hooks/refresh rules, applied to all TypeScript sources. `dist` is ignored.
const tsReactConfig = {
  extends: [js.configs.recommended, ...tseslint.configs.recommended],
  files: ['**/*.{ts,tsx}'],
  languageOptions: {
    ecmaVersion: 2020,
    globals: globals.browser,
  },
  plugins: {
    'react-hooks': reactHooks,
    'react-refresh': reactRefresh,
  },
  rules: {
    ...reactHooks.configs.recommended.rules,
    'react-refresh/only-export-components': [
      'warn',
      { allowConstantExport: true },
    ],
  },
}

export default tseslint.config({ ignores: ['dist'] }, tsReactConfig)
29 |
--------------------------------------------------------------------------------
/examples/react-app/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
9 | Bubbaloop
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/examples/react-app/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "react-app",
3 | "private": true,
4 | "version": "0.0.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite",
8 | "build": "tsc -b && vite build",
9 | "lint": "eslint .",
10 | "preview": "vite preview"
11 | },
12 | "dependencies": {
13 | "react": "^19.1.0",
14 | "react-dom": "^19.1.0",
15 | "react-json-tree": "^0.20.0"
16 | },
17 | "devDependencies": {
18 | "@eslint/js": "^9.25.0",
19 | "@types/react": "^19.1.2",
20 | "@types/react-dom": "^19.1.2",
21 | "@vitejs/plugin-react": "^4.4.1",
22 | "eslint": "^9.25.0",
23 | "eslint-plugin-react-hooks": "^5.2.0",
24 | "eslint-plugin-react-refresh": "^0.4.19",
25 | "globals": "^16.0.0",
26 | "typescript": "~5.8.3",
27 | "typescript-eslint": "^8.30.1",
28 | "vite": "^6.3.5"
29 | }
30 | }
--------------------------------------------------------------------------------
/examples/react-app/src/App.css:
--------------------------------------------------------------------------------
1 | /* App.css */
2 | .App {
3 | max-width: 900px;
4 | margin: 0 auto;
5 | padding: 20px;
6 | font-family: Arial, sans-serif;
7 | }
8 |
9 | .App-header {
10 | text-align: center;
11 | margin-bottom: 30px;
12 | }
13 |
14 | .app-content {
15 | display: flex;
16 | flex-direction: column;
17 | gap: 20px;
18 | }
19 |
20 | .app-error-log {
21 | margin-top: 20px;
22 | padding: 15px;
23 | background-color: #f8f9fa;
24 | border: 1px solid #ddd;
25 | border-radius: 8px;
26 | }
27 |
28 | .app-error-log h3 {
29 | margin-top: 0;
30 | color: #721c24;
31 | }
--------------------------------------------------------------------------------
/examples/react-app/src/App.tsx:
--------------------------------------------------------------------------------
1 | // App.tsx
2 | import React, { useState } from 'react';
3 | import './App.css';
4 | import StreamViewerWebsocket from './components/StreamViewerWebsocket';
5 | import InferenceResultDisplay from './components/InferenceDisplay';
6 | import InferenceInstruction from './components/InferenceInstruction';
7 | import ConnectionSettings from './components/ConnectionSettings';
8 |
9 | const App: React.FC = () => {
10 | const [host, setHost] = useState('0.0.0.0');
11 | const [port, setPort] = useState('3000');
12 | const [key, setKey] = useState(0);
13 |
14 | const baseUrl = `http://${host}:${port}`;
15 | const wsUrl = `ws://${host}:${port}`;
16 |
17 | const handleConnectionUpdate = (newHost: string, newPort: string) => {
18 | setHost(newHost);
19 | setPort(newPort);
20 | // Increment key to force re-mounting the components
21 | setKey(prevKey => prevKey + 1);
22 | };
23 |
24 | return (
25 |
26 |
29 |
30 |
31 |
32 |
33 |
37 |
42 |
48 |
49 |
50 | );
51 | };
52 |
53 | export default App;
--------------------------------------------------------------------------------
/examples/react-app/src/components/ConnectionSettings.css:
--------------------------------------------------------------------------------
1 | .connection-settings { /* panel wrapping the host/port form; was "s.connection-settings", which only matched <s> elements */
2 | background-color: #f5f5f5;
3 | padding: 15px;
4 | margin-bottom: 20px;
5 | border-radius: 5px;
6 | box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
7 | }
8 |
9 | .connection-settings form {
10 | display: flex;
11 | flex-wrap: wrap;
12 | gap: 10px;
13 | align-items: center;
14 | }
15 |
16 | .connection-settings .form-group {
17 | display: flex;
18 | align-items: center;
19 | gap: 5px;
20 | }
21 |
22 | .connection-settings label {
23 | font-weight: bold;
24 | margin-right: 5px;
25 | }
26 |
27 | .connection-settings input {
28 | padding: 8px;
29 | border: 1px solid #ccc;
30 | border-radius: 4px;
31 | }
32 |
33 | .connection-settings button {
34 | background-color: #4a90e2;
35 | color: white;
36 | border: none;
37 | padding: 8px 16px;
38 | border-radius: 4px;
39 | cursor: pointer;
40 | font-weight: bold;
41 | }
42 |
43 | .connection-settings button:hover {
44 | background-color: #357ab8;
45 | }
46 |
47 | @media (max-width: 600px) {
48 | .connection-settings form {
49 | flex-direction: column;
50 | align-items: stretch;
51 | }
52 | }
--------------------------------------------------------------------------------
/examples/react-app/src/components/ConnectionSettings.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react';
2 | import './ConnectionSettings.css';
3 |
4 | interface ConnectionSettingsProps {
5 | onUpdate: (host: string, port: string) => void;
6 | initialHost: string;
7 | initialPort: string;
8 | }
9 |
10 | const ConnectionSettings: React.FC = ({
11 | onUpdate,
12 | initialHost,
13 | initialPort
14 | }) => {
15 | const [host, setHost] = useState(initialHost);
16 | const [port, setPort] = useState(initialPort);
17 |
18 | const handleSubmit = (e: React.FormEvent) => {
19 | e.preventDefault();
20 | onUpdate(host, port);
21 | };
22 |
23 | return (
24 |
49 | );
50 | };
51 |
52 | export default ConnectionSettings;
--------------------------------------------------------------------------------
/examples/react-app/src/components/InferenceDisplay.css:
--------------------------------------------------------------------------------
1 | /* InferenceDisplay.css */
2 | .inference-display {
3 | border: 1px solid #ddd;
4 | border-radius: 8px;
5 | overflow: hidden;
6 | background-color: #f8f9fa;
7 | box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
8 | margin: 10px 0;
9 | }
10 |
11 | .inference-header {
12 | display: flex;
13 | justify-content: space-between;
14 | align-items: center;
15 | padding: 10px 15px;
16 | background-color: #343a40;
17 | color: white;
18 | border-bottom: 1px solid #ddd;
19 | }
20 |
21 | .inference-header h3 {
22 | margin: 0;
23 | font-size: 16px;
24 | }
25 |
26 | .loading-indicator {
27 | color: #4CAF50;
28 | animation: pulse 1.5s infinite;
29 | font-size: 14px;
30 | }
31 |
32 | .inference-content {
33 | padding: 15px;
34 | max-height: 300px;
35 | overflow-y: auto;
36 | }
37 |
38 | .inference-content pre {
39 | margin: 0;
40 | font-family: monospace;
41 | white-space: pre-wrap;
42 | word-break: break-word;
43 | }
44 |
45 | .inference-error {
46 | color: #721c24;
47 | background-color: #f8d7da;
48 | padding: 10px;
49 | border-radius: 4px;
50 | text-align: center;
51 | }
52 |
53 | .retry-button {
54 | background-color: #007bff;
55 | color: white;
56 | border: none;
57 | border-radius: 4px;
58 | padding: 5px 10px;
59 | margin-top: 10px;
60 | cursor: pointer;
61 | }
62 |
63 | .retry-button:hover {
64 | background-color: #0056b3;
65 | }
66 |
67 | @keyframes pulse {
68 | 0% {
69 | opacity: 0.4;
70 | }
71 |
72 | 50% {
73 | opacity: 1;
74 | }
75 |
76 | 100% {
77 | opacity: 0.4;
78 | }
79 | }
--------------------------------------------------------------------------------
/examples/react-app/src/components/InferenceDisplay.tsx:
--------------------------------------------------------------------------------
1 | // InferenceDisplay.tsx
2 | import React, { useState, useEffect, useRef } from 'react';
3 | import { JSONTree } from 'react-json-tree';
4 | import './InferenceDisplay.css';
5 |
6 | interface InferenceDisplayProps {
7 | inferenceUrl: string;
8 | refreshRate?: number;
9 | width?: string | number;
10 | height?: string | number;
11 | title?: string;
12 | }
13 |
14 | // Monokai theme for direct application
15 | const monokai = {
16 | scheme: 'monokai',
17 | base00: '#272822', // background
18 | base01: '#383830',
19 | base02: '#49483e',
20 | base03: '#75715e', // comments
21 | base04: '#a59f85',
22 | base05: '#f8f8f2', // text
23 | base06: '#f5f4f1',
24 | base07: '#f9f8f5',
25 | base08: '#f92672', // red
26 | base09: '#fd971f', // orange
27 | base0A: '#f4bf75', // yellow
28 | base0B: '#a6e22e', // green
29 | base0C: '#a1efe4', // aqua
30 | base0D: '#66d9ef', // blue
31 | base0E: '#ae81ff', // purple
32 | base0F: '#cc6633' // brown
33 | };
34 |
35 | const InferenceDisplay: React.FC = ({
36 | inferenceUrl,
37 | refreshRate = 1000,
38 | width = '100%',
39 | height = 'auto',
40 | title = 'Inference Results',
41 | }) => {
42 | const [inferenceData, setInferenceData] = useState(null);
43 | const [error, setError] = useState(null);
44 | const [isLoading, setIsLoading] = useState(true);
45 |
46 | const intervalRef = useRef(null);
47 | const isMountedRef = useRef(true);
48 |
49 | useEffect(() => {
50 | startFetching();
51 | isMountedRef.current = true;
52 |
53 | return () => {
54 | isMountedRef.current = false;
55 | stopFetching();
56 | };
57 | }, [inferenceUrl]);
58 |
59 | const startFetching = (): void => {
60 | stopFetching();
61 | setIsLoading(true);
62 | fetchInferenceData();
63 | intervalRef.current = window.setInterval(fetchInferenceData, refreshRate);
64 | };
65 |
66 | const stopFetching = (): void => {
67 | if (intervalRef.current) {
68 | clearInterval(intervalRef.current);
69 | intervalRef.current = null;
70 | }
71 | };
72 |
73 | const fetchInferenceData = async (): Promise => {
74 | try {
75 | const response = await fetch(inferenceUrl, {
76 | cache: 'no-store',
77 | });
78 |
79 | if (!isMountedRef.current) return;
80 |
81 | if (!response.ok) {
82 | throw new Error(`HTTP error! Status: ${response.status}`);
83 | }
84 |
85 | const jsonData = await response.json();
86 | setInferenceData(jsonData);
87 | setError(null);
88 | setIsLoading(false);
89 | } catch (err) {
90 | if (isMountedRef.current) {
91 | const message = err instanceof Error ? err.message : 'Failed to load inference data';
92 | setError(message);
93 | setIsLoading(false);
94 | console.error('Inference data fetch error:', message);
95 | }
96 | }
97 | };
98 |
99 | // Render error or loading states conditionally
100 | if (error) {
101 | return (
102 |
103 |
{error}
104 |
Retry
105 |
106 | );
107 | }
108 |
109 | if (isLoading && !inferenceData) {
110 | return Loading...
;
111 | }
112 |
113 | if (!inferenceData) {
114 | return No data available
;
115 | }
116 |
117 | return (
118 |
119 |
Inference Response
120 | true}
126 | />
127 |
128 | );
129 | };
130 |
131 | export default InferenceDisplay;
--------------------------------------------------------------------------------
/examples/react-app/src/components/InferenceInstruction.css:
--------------------------------------------------------------------------------
1 | /* InferenceInstruction.css */
2 | .inference-instruction {
3 | border: 1px solid #ddd;
4 | border-radius: 8px;
5 | overflow: hidden;
6 | background-color: #272822;
7 | margin: 10px 0;
8 | padding: 15px;
9 | display: flex;
10 | flex-direction: column;
11 | gap: 10px;
12 | }
13 |
14 | .instruction-input {
15 | width: 100%;
16 | padding: 10px;
17 | border: 1px solid #383830;
18 | border-radius: 4px;
19 | background-color: #1e1f1c;
20 | color: #f8f8f2;
21 | font-family: inherit;
22 | font-size: 14px;
23 | resize: vertical;
24 | }
25 |
26 | .instruction-input:focus {
27 | outline: none;
28 | border-color: #66d9ef;
29 | }
30 |
31 | .instruction-controls {
32 | display: flex;
33 | justify-content: space-between;
34 | align-items: center;
35 | }
36 |
37 | .instruction-error {
38 | color: #f92672;
39 | font-size: 14px;
40 | flex-grow: 1;
41 | }
42 |
43 | .instruction-button {
44 | padding: 8px 16px;
45 | border: none;
46 | border-radius: 4px;
47 | background-color: #a6e22e;
48 | color: #272822;
49 | font-weight: 500;
50 | cursor: pointer;
51 | min-width: 100px;
52 | }
53 |
54 | .instruction-button:hover:not(:disabled) {
55 | background-color: #c3f53e;
56 | }
57 |
58 | .instruction-button:disabled {
59 | background-color: #75715e;
60 | cursor: not-allowed;
61 | opacity: 0.7;
62 | }
--------------------------------------------------------------------------------
/examples/react-app/src/components/InferenceInstruction.tsx:
--------------------------------------------------------------------------------
1 | // InferenceInstruction.tsx
2 | import React, { useState } from 'react';
3 | import './InferenceInstruction.css';
4 |
5 | interface InferenceInstructionProps {
6 | settingsUrl: string;
7 | placeholder?: string;
8 | buttonText?: string;
9 | onSettingsSubmitted?: (success: boolean, response?: any) => void;
10 | }
11 |
12 | const InferenceInstruction: React.FC = ({
13 | settingsUrl,
14 | placeholder = 'cap en',
15 | buttonText = 'Apply',
16 | onSettingsSubmitted
17 | }) => {
18 | const [prompt, setPrompt] = useState('');
19 | const [isSubmitting, setIsSubmitting] = useState(false);
20 | const [error, setError] = useState(null);
21 |
22 | const handleInputChange = (e: React.ChangeEvent) => {
23 | setPrompt(e.target.value);
24 | if (error) setError(null);
25 | };
26 |
27 | const handleSubmit = async () => {
28 | if (!prompt.trim()) {
29 | setError('Please enter prompt text');
30 | return;
31 | }
32 |
33 | setIsSubmitting(true);
34 | setError(null);
35 |
36 | try {
37 | const response = await fetch(settingsUrl, {
38 | method: 'POST',
39 | headers: {
40 | 'Content-Type': 'application/json',
41 | },
42 | body: JSON.stringify({ prompt: prompt.trim() + '\n' }),
43 | });
44 |
45 | if (!response.ok) {
46 | throw new Error(`HTTP error! Status: ${response.status}`);
47 | }
48 |
49 | const result = await response.json();
50 | if (onSettingsSubmitted) {
51 | onSettingsSubmitted(true, result);
52 | }
53 |
54 | // Optional: clear the input after successful submission
55 | // setPrompt('');
56 | } catch (err) {
57 | const message = err instanceof Error ? err.message : 'Failed to submit instructions';
58 | setError(message);
59 | console.error('Instruction submission error:', message);
60 | if (onSettingsSubmitted) {
61 | onSettingsSubmitted(false);
62 | }
63 | } finally {
64 | setIsSubmitting(false);
65 | }
66 | };
67 |
68 | return (
69 |
70 |
78 |
79 |
80 | {error &&
{error}
}
81 |
86 | {isSubmitting ? 'Sending...' : buttonText}
87 |
88 |
89 |
90 | );
91 | };
92 |
93 | export default InferenceInstruction;
--------------------------------------------------------------------------------
/examples/react-app/src/components/StreamViewerWebsocket.css:
--------------------------------------------------------------------------------
1 | /* StreamViewerWebsocket.css */
2 | .stream-viewer {
3 | border: 1px solid #ddd;
4 | border-radius: 8px;
5 | overflow: hidden;
6 | display: flex;
7 | flex-direction: column;
8 | background-color: #f8f9fa;
9 | box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
10 | }
11 |
12 | .stream-display {
13 | flex-grow: 1;
14 | display: flex;
15 | align-items: center;
16 | justify-content: center;
17 | background-color: #000;
18 | min-height: 200px;
19 | position: relative;
20 | overflow: hidden;
21 | }
22 |
23 | .stream-image {
24 | max-width: 100%;
25 | max-height: 100%;
26 | object-fit: contain;
27 | }
28 |
29 | .stream-error {
30 | color: #721c24;
31 | background-color: #f8d7da;
32 | padding: 15px;
33 | border-radius: 4px;
34 | margin: 10px;
35 | max-width: 80%;
36 | }
37 |
38 | .stream-placeholder {
39 | color: #999;
40 | font-size: 16px;
41 | }
42 |
43 | .stream-status {
44 | display: flex;
45 | align-items: center;
46 | padding: 8px 12px;
47 | background-color: #fff;
48 | border-top: 1px solid #ddd;
49 | font-size: 14px;
50 | color: #666;
51 | }
52 |
53 | .status-indicator {
54 | color: #4CAF50;
55 | margin-right: 6px;
56 | animation: pulse 1.5s infinite;
57 | }
58 |
59 | @keyframes pulse {
60 | 0% {
61 | opacity: 0.4;
62 | }
63 |
64 | 50% {
65 | opacity: 1;
66 | }
67 |
68 | 100% {
69 | opacity: 0.4;
70 | }
71 | }
--------------------------------------------------------------------------------
/examples/react-app/src/components/StreamViewerWebsocket.tsx:
--------------------------------------------------------------------------------
1 | // StreamViewerWebsocket.tsx
2 | import React, { useState, useEffect, useRef } from 'react';
3 | import './StreamViewerWebsocket.css';
4 |
5 | interface StreamViewerWebsocketProps {
6 | wsUrl: string;
7 | maxWidth?: string | number;
8 | maxHeight?: string | number;
9 | initialWidth?: string | number;
10 | }
11 |
12 | const StreamViewerWebsocket: React.FC = ({
13 | wsUrl,
14 | maxWidth = '100%',
15 | maxHeight = '80vh',
16 | initialWidth = '100%',
17 | }) => {
18 | const [imageUrl, setImageUrl] = useState(null);
19 | const [error, setError] = useState(null);
20 | const [isConnecting, setIsConnecting] = useState(true);
21 | const [imageStyle, setImageStyle] = useState({
22 | maxWidth: '100%',
23 | maxHeight: '100%',
24 | objectFit: 'contain',
25 | display: 'block',
26 | margin: '0 auto'
27 | });
28 | const [containerStyle] = useState({
29 | width: initialWidth,
30 | height: typeof maxHeight === 'string' ? maxHeight : `${maxHeight}px`,
31 | backgroundColor: '#1a1a1a',
32 | overflow: 'hidden',
33 | display: 'flex',
34 | alignItems: 'center',
35 | justifyContent: 'center',
36 | position: 'relative',
37 | borderRadius: '8px'
38 | });
39 |
40 | // Use refs to track current state without triggering renders
41 | const wsRef = useRef(null);
42 | const isMountedRef = useRef(true);
43 | const containerRef = useRef(null);
44 | const imageRef = useRef(null);
45 | const frameBufferRef = useRef(null);
46 | const pendingRenderRef = useRef(false);
47 | const lastRenderTimeRef = useRef(0);
48 |
49 | // Keep image dimensions in a ref to avoid re-renders
50 | const dimensionsRef = useRef<{ width: number, height: number } | null>(null);
51 |
52 | // Performance optimization: limit frame rate
53 | const targetFps = 24;
54 | const minFrameInterval = 1000 / targetFps;
55 |
56 | useEffect(() => {
57 | connectWebSocket();
58 | isMountedRef.current = true;
59 |
60 | // Use requestAnimationFrame for smoother rendering without changing container size
61 | const renderLoop = () => {
62 | const now = performance.now();
63 | const elapsed = now - lastRenderTimeRef.current;
64 |
65 | if (pendingRenderRef.current && elapsed >= minFrameInterval) {
66 | if (frameBufferRef.current && isMountedRef.current) {
67 | // Revoke previous blob URL
68 | if (imageUrl && imageUrl.startsWith('blob:')) {
69 | URL.revokeObjectURL(imageUrl);
70 | }
71 |
72 | // Create and set new blob URL
73 | const url = URL.createObjectURL(frameBufferRef.current);
74 | setImageUrl(url);
75 |
76 | pendingRenderRef.current = false;
77 | lastRenderTimeRef.current = now;
78 | }
79 | }
80 |
81 | if (isMountedRef.current) {
82 | requestAnimationFrame(renderLoop);
83 | }
84 | };
85 |
86 | requestAnimationFrame(renderLoop);
87 |
88 | return () => {
89 | isMountedRef.current = false;
90 | disconnectWebSocket();
91 |
92 | if (imageUrl && imageUrl.startsWith('blob:')) {
93 | URL.revokeObjectURL(imageUrl);
94 | }
95 | };
96 | }, [wsUrl]);
97 |
98 | const handleImageLoad = () => {
99 | if (imageRef.current) {
100 | const { naturalWidth, naturalHeight } = imageRef.current;
101 | if (naturalWidth && naturalHeight) {
102 | dimensionsRef.current = {
103 | width: naturalWidth,
104 | height: naturalHeight
105 | };
106 | // Store dimensions but don't modify any styles
107 | }
108 | }
109 | };
110 |
111 | const connectWebSocket = () => {
112 | disconnectWebSocket();
113 | setIsConnecting(true);
114 | setError(null);
115 |
116 | try {
117 | const ws = new WebSocket(wsUrl);
118 | wsRef.current = ws;
119 |
120 | ws.binaryType = 'arraybuffer';
121 |
122 | ws.onopen = () => {
123 | if (!isMountedRef.current) return;
124 | console.log('WebSocket connection established');
125 | setIsConnecting(false);
126 | };
127 |
128 | ws.onmessage = (event) => {
129 | if (!isMountedRef.current) return;
130 |
131 | if (typeof event.data === 'string') {
132 | setError(event.data);
133 | return;
134 | }
135 |
136 | try {
137 | // Store the new frame in the buffer without immediately rendering
138 | frameBufferRef.current = new Blob([event.data], { type: 'image/jpeg' });
139 | pendingRenderRef.current = true;
140 |
141 | // Clear any previous errors
142 | if (error) setError(null);
143 | } catch (err) {
144 | console.error('Error processing image data:', err);
145 | }
146 | };
147 |
148 | ws.onerror = () => {
149 | if (!isMountedRef.current) return;
150 | setError('Connection error occurred');
151 | setIsConnecting(false);
152 | };
153 |
154 | ws.onclose = (event) => {
155 | if (!isMountedRef.current) return;
156 |
157 | // Only attempt reconnection if component is still mounted and closure wasn't intentional
158 | if (isMountedRef.current && event.code !== 1000) {
159 | setError('Connection closed. Reconnecting...');
160 | // Reconnect after a fixed 2 s delay (NOTE(review): not exponential backoff despite earlier intent — confirm whether backoff is wanted)
161 | setTimeout(() => {
162 | if (isMountedRef.current) {
163 | connectWebSocket();
164 | }
165 | }, 2000);
166 | }
167 | };
168 | } catch (err) {
169 | setError('Failed to create WebSocket connection');
170 | setIsConnecting(false);
171 | }
172 | };
173 |
174 | const disconnectWebSocket = () => {
175 | if (wsRef.current) {
176 | if (wsRef.current.readyState === WebSocket.OPEN ||
177 | wsRef.current.readyState === WebSocket.CONNECTING) {
178 | wsRef.current.close(1000, 'Disconnection requested by client');
179 | }
180 | wsRef.current = null;
181 | }
182 | };
183 |
184 | return (
185 |
190 |
198 | {imageUrl && (
199 |
207 | )}
208 | {error && (
209 |
210 |
{error}
211 |
212 | Reconnect
213 |
214 |
215 | )}
216 | {isConnecting && !imageUrl && (
217 |
Connecting to video stream...
218 | )}
219 |
220 |
221 | );
222 | };
223 |
224 | export default StreamViewerWebsocket;
--------------------------------------------------------------------------------
/examples/react-app/src/index.css:
--------------------------------------------------------------------------------
1 | :root {
2 | font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
3 | line-height: 1.5;
4 | font-weight: 400;
5 |
6 | color-scheme: light dark;
7 | color: rgba(255, 255, 255, 0.87);
8 | background-color: #242424;
9 |
10 | font-synthesis: none;
11 | text-rendering: optimizeLegibility;
12 | -webkit-font-smoothing: antialiased;
13 | -moz-osx-font-smoothing: grayscale;
14 | }
15 |
16 | a {
17 | font-weight: 500;
18 | color: #646cff;
19 | text-decoration: inherit;
20 | }
21 | a:hover {
22 | color: #535bf2;
23 | }
24 |
25 | body {
26 | margin: 0;
27 | display: flex;
28 | place-items: center;
29 | min-width: 320px;
30 | min-height: 100vh;
31 | }
32 |
33 | h1 {
34 | font-size: 3.2em;
35 | line-height: 1.1;
36 | }
37 |
38 | button {
39 | border-radius: 8px;
40 | border: 1px solid transparent;
41 | padding: 0.6em 1.2em;
42 | font-size: 1em;
43 | font-weight: 500;
44 | font-family: inherit;
45 | background-color: #1a1a1a;
46 | cursor: pointer;
47 | transition: border-color 0.25s;
48 | }
49 | button:hover {
50 | border-color: #646cff;
51 | }
52 | button:focus,
53 | button:focus-visible {
54 | outline: 4px auto -webkit-focus-ring-color;
55 | }
56 |
57 | @media (prefers-color-scheme: light) {
58 | :root {
59 | color: #213547;
60 | background-color: #ffffff;
61 | }
62 | a:hover {
63 | color: #747bff;
64 | }
65 | button {
66 | background-color: #f9f9f9;
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/examples/react-app/src/main.tsx:
--------------------------------------------------------------------------------
1 | import { StrictMode } from 'react'
2 | import { createRoot } from 'react-dom/client'
3 | import './index.css'
4 | import App from './App.tsx'
5 |
6 | createRoot(document.getElementById('root')!).render(
7 |
8 |
9 | ,
10 | )
11 |
--------------------------------------------------------------------------------
/examples/react-app/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 |
--------------------------------------------------------------------------------
/examples/react-app/tsconfig.app.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
4 | "target": "ES2020",
5 | "useDefineForClassFields": true,
6 | "lib": ["ES2020", "DOM", "DOM.Iterable"],
7 | "module": "ESNext",
8 | "skipLibCheck": true,
9 |
10 | /* Bundler mode */
11 | "moduleResolution": "bundler",
12 | "allowImportingTsExtensions": true,
13 | "verbatimModuleSyntax": true,
14 | "moduleDetection": "force",
15 | "noEmit": true,
16 | "jsx": "react-jsx",
17 |
18 | /* Linting */
19 | "strict": true,
20 | "noUnusedLocals": true,
21 | "noUnusedParameters": true,
22 | "erasableSyntaxOnly": true,
23 | "noFallthroughCasesInSwitch": true,
24 | "noUncheckedSideEffectImports": true
25 | },
26 | "include": ["src"]
27 | }
28 |
--------------------------------------------------------------------------------
/examples/react-app/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "lib": [
5 | "DOM",
6 | "DOM.Iterable",
7 | "ESNext"
8 | ],
9 | "module": "ESNext",
10 | "skipLibCheck": true,
11 | "moduleResolution": "bundler",
12 | "allowImportingTsExtensions": true,
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "noEmit": true,
16 | "jsx": "react-jsx",
17 | "strict": true,
18 | "noUnusedLocals": true,
19 | "noUnusedParameters": true,
20 | "noFallthroughCasesInSwitch": true
21 | },
22 | "include": [
23 | "src"
24 | ],
25 | "references": [
26 | {
27 | "path": "./tsconfig.node.json"
28 | }
29 | ]
30 | }
--------------------------------------------------------------------------------
/examples/react-app/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
4 | "target": "ES2022",
5 | "lib": ["ES2023"],
6 | "module": "ESNext",
7 | "skipLibCheck": true,
8 |
9 | /* Bundler mode */
10 | "moduleResolution": "bundler",
11 | "allowImportingTsExtensions": true,
12 | "verbatimModuleSyntax": true,
13 | "moduleDetection": "force",
14 | "noEmit": true,
15 |
16 | /* Linting */
17 | "strict": true,
18 | "noUnusedLocals": true,
19 | "noUnusedParameters": true,
20 | "erasableSyntaxOnly": true,
21 | "noFallthroughCasesInSwitch": true,
22 | "noUncheckedSideEffectImports": true
23 | },
24 | "include": ["vite.config.ts"]
25 | }
26 |
--------------------------------------------------------------------------------
/examples/react-app/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 | import react from '@vitejs/plugin-react'
3 |
4 | // https://vite.dev/config/
5 | export default defineConfig({
6 | plugins: [react()],
7 | })
8 |
--------------------------------------------------------------------------------
/justfile:
--------------------------------------------------------------------------------
1 | @_default:
2 | just --list
3 |
4 | build:
5 | cargo build --release
6 |
7 | test:
8 | cargo test --release
9 |
10 | format:
11 | cargo fmt
12 |
13 | clippy:
14 | cargo clippy
15 |
16 | check:
17 | cargo check
18 |
19 | lint:
20 | @echo "Running format..."
21 | just format
22 | @echo "Running clippy..."
23 | just clippy
24 | @echo "Running check..."
25 | just check
26 |
27 | install_deps:
28 | ./scripts/install_deps.sh
29 |
30 | help:
31 | cargo run --release --bin bubbaloop -- --help
32 |
33 | serve HOST="0.0.0.0" PORT="3000" FEATURES="":
34 | RUST_LOG=debug cargo run --release --bin serve {{FEATURES}} -- -h {{HOST}} -p {{PORT}}
35 |
36 | whoami HOST="0.0.0.0" PORT="3000":
37 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} stats whoami
38 |
39 | sysinfo HOST="0.0.0.0" PORT="3000":
40 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} stats sysinfo
41 |
42 | start-pipeline NAME HOST="0.0.0.0" PORT="3000":
43 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} pipeline start -n {{NAME}}
44 |
45 | stop-pipeline NAME HOST="0.0.0.0" PORT="3000":
46 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} pipeline stop -n {{NAME}}
47 |
48 | list-pipelines HOST="0.0.0.0" PORT="3000":
49 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} pipeline list
50 |
51 | start-recording HOST="0.0.0.0" PORT="3000":
52 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} recording start
53 |
54 | stop-recording HOST="0.0.0.0" PORT="3000":
55 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} recording stop
56 |
57 | inference-result HOST="0.0.0.0" PORT="3000":
58 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} inference result
59 |
60 | inference-settings PROMPT HOST="0.0.0.0" PORT="3000":
61 | RUST_LOG=info cargo run --release --bin bubbaloop -- -h {{HOST}} -p {{PORT}} inference settings --prompt "{{PROMPT}}"
62 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "bubbaloop",
3 | "lockfileVersion": 3,
4 | "requires": true,
5 | "packages": {
6 | "": {
7 | "dependencies": {
8 | "react-json-tree": "^0.20.0",
9 | "react-json-view": "^1.21.3"
10 | }
11 | },
12 | "node_modules/@babel/runtime": {
13 | "version": "7.27.1",
14 | "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.1.tgz",
15 | "integrity": "sha512-1x3D2xEk2fRo3PAhwQwu5UubzgiVWSXTBfWpVd2Mx2AzRqJuDJCsgaDVZ7HB5iGzDW1Hl1sWN2mFyKjmR9uAog==",
16 | "license": "MIT",
17 | "engines": {
18 | "node": ">=6.9.0"
19 | }
20 | },
21 | "node_modules/@types/lodash": {
22 | "version": "4.17.16",
23 | "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.16.tgz",
24 | "integrity": "sha512-HX7Em5NYQAXKW+1T+FiuG27NGwzJfCX3s1GjOa7ujxZa52kjJLOr4FUxT+giF6Tgxv1e+/czV/iTtBw27WTU9g==",
25 | "license": "MIT"
26 | },
27 | "node_modules/@types/react": {
28 | "version": "19.1.4",
29 | "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.4.tgz",
30 | "integrity": "sha512-EB1yiiYdvySuIITtD5lhW4yPyJ31RkJkkDw794LaQYrxCSaQV/47y5o1FMC4zF9ZyjUjzJMZwbovEnT5yHTW6g==",
31 | "license": "MIT",
32 | "peer": true,
33 | "dependencies": {
34 | "csstype": "^3.0.2"
35 | }
36 | },
37 | "node_modules/asap": {
38 | "version": "2.0.6",
39 | "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
40 | "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==",
41 | "license": "MIT"
42 | },
43 | "node_modules/base16": {
44 | "version": "1.0.0",
45 | "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz",
46 | "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==",
47 | "license": "MIT"
48 | },
49 | "node_modules/color": {
50 | "version": "4.2.3",
51 | "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
52 | "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
53 | "license": "MIT",
54 | "dependencies": {
55 | "color-convert": "^2.0.1",
56 | "color-string": "^1.9.0"
57 | },
58 | "engines": {
59 | "node": ">=12.5.0"
60 | }
61 | },
62 | "node_modules/color-convert": {
63 | "version": "2.0.1",
64 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
65 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
66 | "license": "MIT",
67 | "dependencies": {
68 | "color-name": "~1.1.4"
69 | },
70 | "engines": {
71 | "node": ">=7.0.0"
72 | }
73 | },
74 | "node_modules/color-name": {
75 | "version": "1.1.4",
76 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
77 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
78 | "license": "MIT"
79 | },
80 | "node_modules/color-string": {
81 | "version": "1.9.1",
82 | "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
83 | "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
84 | "license": "MIT",
85 | "dependencies": {
86 | "color-name": "^1.0.0",
87 | "simple-swizzle": "^0.2.2"
88 | }
89 | },
90 | "node_modules/cross-fetch": {
91 | "version": "3.2.0",
92 | "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.2.0.tgz",
93 | "integrity": "sha512-Q+xVJLoGOeIMXZmbUK4HYk+69cQH6LudR0Vu/pRm2YlU/hDV9CiS0gKUMaWY5f2NeUH9C1nV3bsTlCo0FsTV1Q==",
94 | "license": "MIT",
95 | "dependencies": {
96 | "node-fetch": "^2.7.0"
97 | }
98 | },
99 | "node_modules/csstype": {
100 | "version": "3.1.3",
101 | "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz",
102 | "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==",
103 | "license": "MIT"
104 | },
105 | "node_modules/fbemitter": {
106 | "version": "3.0.0",
107 | "resolved": "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz",
108 | "integrity": "sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw==",
109 | "license": "BSD-3-Clause",
110 | "dependencies": {
111 | "fbjs": "^3.0.0"
112 | }
113 | },
114 | "node_modules/fbjs": {
115 | "version": "3.0.5",
116 | "resolved": "https://registry.npmjs.org/fbjs/-/fbjs-3.0.5.tgz",
117 | "integrity": "sha512-ztsSx77JBtkuMrEypfhgc3cI0+0h+svqeie7xHbh1k/IKdcydnvadp/mUaGgjAOXQmQSxsqgaRhS3q9fy+1kxg==",
118 | "license": "MIT",
119 | "dependencies": {
120 | "cross-fetch": "^3.1.5",
121 | "fbjs-css-vars": "^1.0.0",
122 | "loose-envify": "^1.0.0",
123 | "object-assign": "^4.1.0",
124 | "promise": "^7.1.1",
125 | "setimmediate": "^1.0.5",
126 | "ua-parser-js": "^1.0.35"
127 | }
128 | },
129 | "node_modules/fbjs-css-vars": {
130 | "version": "1.0.2",
131 | "resolved": "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz",
132 | "integrity": "sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==",
133 | "license": "MIT"
134 | },
135 | "node_modules/flux": {
136 | "version": "4.0.4",
137 | "resolved": "https://registry.npmjs.org/flux/-/flux-4.0.4.tgz",
138 | "integrity": "sha512-NCj3XlayA2UsapRpM7va6wU1+9rE5FIL7qoMcmxWHRzbp0yujihMBm9BBHZ1MDIk5h5o2Bl6eGiCe8rYELAmYw==",
139 | "license": "BSD-3-Clause",
140 | "dependencies": {
141 | "fbemitter": "^3.0.0",
142 | "fbjs": "^3.0.1"
143 | },
144 | "peerDependencies": {
145 | "react": "^15.0.2 || ^16.0.0 || ^17.0.0"
146 | }
147 | },
148 | "node_modules/is-arrayish": {
149 | "version": "0.3.2",
150 | "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
151 | "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==",
152 | "license": "MIT"
153 | },
154 | "node_modules/js-tokens": {
155 | "version": "4.0.0",
156 | "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
157 | "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
158 | "license": "MIT"
159 | },
160 | "node_modules/lodash-es": {
161 | "version": "4.17.21",
162 | "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz",
163 | "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==",
164 | "license": "MIT"
165 | },
166 | "node_modules/lodash.curry": {
167 | "version": "4.1.1",
168 | "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz",
169 | "integrity": "sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==",
170 | "license": "MIT"
171 | },
172 | "node_modules/lodash.flow": {
173 | "version": "3.5.0",
174 | "resolved": "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz",
175 | "integrity": "sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw==",
176 | "license": "MIT"
177 | },
178 | "node_modules/loose-envify": {
179 | "version": "1.4.0",
180 | "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
181 | "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
182 | "license": "MIT",
183 | "dependencies": {
184 | "js-tokens": "^3.0.0 || ^4.0.0"
185 | },
186 | "bin": {
187 | "loose-envify": "cli.js"
188 | }
189 | },
190 | "node_modules/node-fetch": {
191 | "version": "2.7.0",
192 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
193 | "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
194 | "license": "MIT",
195 | "dependencies": {
196 | "whatwg-url": "^5.0.0"
197 | },
198 | "engines": {
199 | "node": "4.x || >=6.0.0"
200 | },
201 | "peerDependencies": {
202 | "encoding": "^0.1.0"
203 | },
204 | "peerDependenciesMeta": {
205 | "encoding": {
206 | "optional": true
207 | }
208 | }
209 | },
210 | "node_modules/object-assign": {
211 | "version": "4.1.1",
212 | "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
213 | "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
214 | "license": "MIT",
215 | "engines": {
216 | "node": ">=0.10.0"
217 | }
218 | },
219 | "node_modules/promise": {
220 | "version": "7.3.1",
221 | "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
222 | "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
223 | "license": "MIT",
224 | "dependencies": {
225 | "asap": "~2.0.3"
226 | }
227 | },
228 | "node_modules/pure-color": {
229 | "version": "1.3.0",
230 | "resolved": "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz",
231 | "integrity": "sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA==",
232 | "license": "MIT"
233 | },
234 | "node_modules/react": {
235 | "version": "17.0.2",
236 | "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
237 | "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
238 | "license": "MIT",
239 | "peer": true,
240 | "dependencies": {
241 | "loose-envify": "^1.1.0",
242 | "object-assign": "^4.1.1"
243 | },
244 | "engines": {
245 | "node": ">=0.10.0"
246 | }
247 | },
248 | "node_modules/react-base16-styling": {
249 | "version": "0.6.0",
250 | "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz",
251 | "integrity": "sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==",
252 | "license": "MIT",
253 | "dependencies": {
254 | "base16": "^1.0.0",
255 | "lodash.curry": "^4.0.1",
256 | "lodash.flow": "^3.3.0",
257 | "pure-color": "^1.2.0"
258 | }
259 | },
260 | "node_modules/react-dom": {
261 | "version": "17.0.2",
262 | "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz",
263 | "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==",
264 | "license": "MIT",
265 | "peer": true,
266 | "dependencies": {
267 | "loose-envify": "^1.1.0",
268 | "object-assign": "^4.1.1",
269 | "scheduler": "^0.20.2"
270 | },
271 | "peerDependencies": {
272 | "react": "17.0.2"
273 | }
274 | },
275 | "node_modules/react-json-tree": {
276 | "version": "0.20.0",
277 | "resolved": "https://registry.npmjs.org/react-json-tree/-/react-json-tree-0.20.0.tgz",
278 | "integrity": "sha512-h+f9fUNAxzBx1rbrgUF7+zSWKGHDtt2VPYLErIuB0JyKGnWgFMM21ksqQyb3EXwXNnoMW2rdE5kuAaubgGOx2Q==",
279 | "license": "MIT",
280 | "dependencies": {
281 | "@types/lodash": "^4.17.15",
282 | "react-base16-styling": "^0.10.0"
283 | },
284 | "peerDependencies": {
285 | "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0",
286 | "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
287 | }
288 | },
289 | "node_modules/react-json-tree/node_modules/react-base16-styling": {
290 | "version": "0.10.0",
291 | "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.10.0.tgz",
292 | "integrity": "sha512-H1k2eFB6M45OaiRru3PBXkuCcn2qNmx+gzLb4a9IPMR7tMH8oBRXU5jGbPDYG1Hz+82d88ED0vjR8BmqU3pQdg==",
293 | "license": "MIT",
294 | "dependencies": {
295 | "@types/lodash": "^4.17.0",
296 | "color": "^4.2.3",
297 | "csstype": "^3.1.3",
298 | "lodash-es": "^4.17.21"
299 | }
300 | },
301 | "node_modules/react-json-view": {
302 | "version": "1.21.3",
303 | "resolved": "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz",
304 | "integrity": "sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw==",
305 | "license": "MIT",
306 | "dependencies": {
307 | "flux": "^4.0.1",
308 | "react-base16-styling": "^0.6.0",
309 | "react-lifecycles-compat": "^3.0.4",
310 | "react-textarea-autosize": "^8.3.2"
311 | },
312 | "peerDependencies": {
313 | "react": "^17.0.0 || ^16.3.0 || ^15.5.4",
314 | "react-dom": "^17.0.0 || ^16.3.0 || ^15.5.4"
315 | }
316 | },
317 | "node_modules/react-lifecycles-compat": {
318 | "version": "3.0.4",
319 | "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz",
320 | "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==",
321 | "license": "MIT"
322 | },
323 | "node_modules/react-textarea-autosize": {
324 | "version": "8.5.9",
325 | "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz",
326 | "integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==",
327 | "license": "MIT",
328 | "dependencies": {
329 | "@babel/runtime": "^7.20.13",
330 | "use-composed-ref": "^1.3.0",
331 | "use-latest": "^1.2.1"
332 | },
333 | "engines": {
334 | "node": ">=10"
335 | },
336 | "peerDependencies": {
337 | "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
338 | }
339 | },
340 | "node_modules/scheduler": {
341 | "version": "0.20.2",
342 | "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz",
343 | "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==",
344 | "license": "MIT",
345 | "peer": true,
346 | "dependencies": {
347 | "loose-envify": "^1.1.0",
348 | "object-assign": "^4.1.1"
349 | }
350 | },
351 | "node_modules/setimmediate": {
352 | "version": "1.0.5",
353 | "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz",
354 | "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==",
355 | "license": "MIT"
356 | },
357 | "node_modules/simple-swizzle": {
358 | "version": "0.2.2",
359 | "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
360 | "integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
361 | "license": "MIT",
362 | "dependencies": {
363 | "is-arrayish": "^0.3.1"
364 | }
365 | },
366 | "node_modules/tr46": {
367 | "version": "0.0.3",
368 | "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
369 | "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
370 | "license": "MIT"
371 | },
372 | "node_modules/ua-parser-js": {
373 | "version": "1.0.40",
374 | "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.40.tgz",
375 | "integrity": "sha512-z6PJ8Lml+v3ichVojCiB8toQJBuwR42ySM4ezjXIqXK3M0HczmKQ3LF4rhU55PfD99KEEXQG6yb7iOMyvYuHew==",
376 | "funding": [
377 | {
378 | "type": "opencollective",
379 | "url": "https://opencollective.com/ua-parser-js"
380 | },
381 | {
382 | "type": "paypal",
383 | "url": "https://paypal.me/faisalman"
384 | },
385 | {
386 | "type": "github",
387 | "url": "https://github.com/sponsors/faisalman"
388 | }
389 | ],
390 | "license": "MIT",
391 | "bin": {
392 | "ua-parser-js": "script/cli.js"
393 | },
394 | "engines": {
395 | "node": "*"
396 | }
397 | },
398 | "node_modules/use-composed-ref": {
399 | "version": "1.4.0",
400 | "resolved": "https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.4.0.tgz",
401 | "integrity": "sha512-djviaxuOOh7wkj0paeO1Q/4wMZ8Zrnag5H6yBvzN7AKKe8beOaED9SF5/ByLqsku8NP4zQqsvM2u3ew/tJK8/w==",
402 | "license": "MIT",
403 | "peerDependencies": {
404 | "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
405 | },
406 | "peerDependenciesMeta": {
407 | "@types/react": {
408 | "optional": true
409 | }
410 | }
411 | },
412 | "node_modules/use-isomorphic-layout-effect": {
413 | "version": "1.2.0",
414 | "resolved": "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.2.0.tgz",
415 | "integrity": "sha512-q6ayo8DWoPZT0VdG4u3D3uxcgONP3Mevx2i2b0434cwWBoL+aelL1DzkXI6w3PhTZzUeR2kaVlZn70iCiseP6w==",
416 | "license": "MIT",
417 | "peerDependencies": {
418 | "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
419 | },
420 | "peerDependenciesMeta": {
421 | "@types/react": {
422 | "optional": true
423 | }
424 | }
425 | },
426 | "node_modules/use-latest": {
427 | "version": "1.3.0",
428 | "resolved": "https://registry.npmjs.org/use-latest/-/use-latest-1.3.0.tgz",
429 | "integrity": "sha512-mhg3xdm9NaM8q+gLT8KryJPnRFOz1/5XPBhmDEVZK1webPzDjrPk7f/mbpeLqTgB9msytYWANxgALOCJKnLvcQ==",
430 | "license": "MIT",
431 | "dependencies": {
432 | "use-isomorphic-layout-effect": "^1.1.1"
433 | },
434 | "peerDependencies": {
435 | "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
436 | },
437 | "peerDependenciesMeta": {
438 | "@types/react": {
439 | "optional": true
440 | }
441 | }
442 | },
443 | "node_modules/webidl-conversions": {
444 | "version": "3.0.1",
445 | "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
446 | "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
447 | "license": "BSD-2-Clause"
448 | },
449 | "node_modules/whatwg-url": {
450 | "version": "5.0.0",
451 | "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
452 | "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
453 | "license": "MIT",
454 | "dependencies": {
455 | "tr46": "~0.0.3",
456 | "webidl-conversions": "^3.0.0"
457 | }
458 | }
459 | }
460 | }
461 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "react-json-tree": "^0.20.0",
4 | "react-json-view": "^1.21.3"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/scripts/cross_deploy.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Cross-compile the `serve` binary for aarch64 with `cross` and deploy it,
# together with the run script, to a remote target over ssh/rsync.
#
# Usage: cross_deploy.sh -r <target_ip> -u <target_user>

# Stop the script if any command fails
set -e

# Parse command line arguments
while getopts "r:u:" opt; do
    case $opt in
        r) TARGET_IP="$OPTARG"   # Target IP
        ;;
        u) TARGET_USER="$OPTARG" # Target user
        ;;
    esac
done

# Check if required arguments are provided
if [ -z "$TARGET_IP" ] || [ -z "$TARGET_USER" ]; then
    echo "Usage: $0 -r <target_ip> -u <target_user>"
    exit 1
fi

# Configuration
BINARY_NAME="serve"
DEPLOY_ARCH="aarch64-unknown-linux-gnu"
LOCAL_FOLDER="/tmp/deploy"
TARGET_PATH="/home/$TARGET_USER/deploy"

# Colors for output
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Function to print status
print_status() {
    echo -e "${GREEN}==> ${1}${NC}"
}

print_error() {
    echo -e "${RED}==> ERROR: ${1}${NC}"
    exit 1
}

# Check if cross is installed
if ! command -v cross &> /dev/null; then
    print_error "cross is not installed. Install it with: cargo install cross"
fi

# Start from a clean staging folder
rm -rf "$LOCAL_FOLDER"
mkdir -p "$LOCAL_FOLDER"

# Build the release binary
print_status "Building release binary for aarch64..."
cross build --target "$DEPLOY_ARCH" --release -v --bin "$BINARY_NAME" || print_error "Build failed"

# Check that the binary exists BEFORE staging it for deployment
# (previously the check ran after the copy, so a missing binary could
# still have been staged/deployed from a stale build)
if [ ! -f "target/$DEPLOY_ARCH/release/$BINARY_NAME" ]; then
    print_error "Binary not found after build"
fi
rsync -a "target/$DEPLOY_ARCH/release/$BINARY_NAME" "$LOCAL_FOLDER"

# copy useful scripts
cp -p scripts/run_serve.sh "$LOCAL_FOLDER"

# Copy to remote machine
print_status "Copying to $TARGET_USER@$TARGET_IP:$TARGET_PATH..."

ssh "$TARGET_USER@$TARGET_IP" "mkdir -p $TARGET_PATH"
rsync -a --delete "$LOCAL_FOLDER" "$TARGET_USER@$TARGET_IP":~/

print_status "Deploy completed successfully!"
--------------------------------------------------------------------------------
/scripts/install_deps.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -eu

# System packages required to build the project: compiler toolchain,
# OpenSSL headers, GStreamer development files, and nasm.
PACKAGES=(
    build-essential
    cmake
    curl
    libssl-dev
    libgstreamer1.0-dev
    libgstreamer-plugins-base1.0-dev
    nasm
)

sudo apt-get update && sudo apt-get install -y "${PACKAGES[@]}"
13 |
--------------------------------------------------------------------------------
/scripts/install_libssl1.1.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Install the OpenSSL 1.1 runtime and development packages on arm64 systems
# where the distribution no longer ships libssl1.1 (binaries from Ubuntu ports).
# Previously this script had no shebang and no error handling, so a failed
# download would still attempt the dpkg installs.

set -e

# download binary openssl packages from Impish builds
wget http://ports.ubuntu.com/pool/main/o/openssl/openssl_1.1.1f-1ubuntu2_arm64.deb
wget http://ports.ubuntu.com/pool/main/o/openssl/libssl-dev_1.1.1f-1ubuntu2_arm64.deb
wget http://ports.ubuntu.com/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2_arm64.deb

# install downloaded binary packages (runtime lib first, then -dev, then the CLI)
sudo dpkg -i libssl1.1_1.1.1f-1ubuntu2_arm64.deb
sudo dpkg -i libssl-dev_1.1.1f-1ubuntu2_arm64.deb
sudo dpkg -i openssl_1.1.1f-1ubuntu2_arm64.deb
--------------------------------------------------------------------------------
/scripts/install_linux.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Build Bubbaloop from source, install the `serve` and `bubbaloop` binaries
# into /usr/local/bin, and register `serve` as a systemd service.
# NOTE(review): requires sudo and assumes it is run from the repository root.

# Colors for better readability
GREEN='\033[0;32m'
NC='\033[0m' # No Color
RED='\033[0;31m'

echo -e "${GREEN}Installing dependencies...${NC}"

# Use the existing install_deps.sh script
./scripts/install_deps.sh

# Install Rust if not already installed
if ! command -v rustc &> /dev/null; then
    echo -e "${GREEN}Installing Rust...${NC}"
    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
    source $HOME/.cargo/env
else
    echo -e "${GREEN}Rust is already installed.${NC}"
fi

# Install Just if not already installed
if ! command -v just &> /dev/null; then
    echo -e "${GREEN}Installing Just...${NC}"
    cargo install just
else
    echo -e "${GREEN}Just is already installed.${NC}"
fi

# Assume we're already in the project directory
REPO_DIR=$(pwd)
echo -e "${GREEN}Building the project...${NC}"
if ! just build; then
    echo -e "${RED}Build failed! Aborting installation.${NC}"
    exit 1
fi

# Remove previous service if it exists
# (stop/disable before deleting the unit file so systemd state stays clean)
SERVICE_FILE="/etc/systemd/system/bubbaloop.service"
if [ -f "$SERVICE_FILE" ]; then
    echo -e "${GREEN}Stopping and removing previous service...${NC}"
    sudo systemctl stop bubbaloop.service
    sudo systemctl disable bubbaloop.service
    sudo rm $SERVICE_FILE
    sudo systemctl daemon-reload
fi

# Install binaries in a loop
BUBBALOOP_INSTALL_DIR=/usr/local/bin
BINARIES=("serve" "bubbaloop")

for binary in "${BINARIES[@]}"; do
    echo -e "${GREEN}Installing $binary binary to $BUBBALOOP_INSTALL_DIR...${NC}"
    if [ -f "$REPO_DIR/target/release/$binary" ]; then
        sudo cp "$REPO_DIR/target/release/$binary" "$BUBBALOOP_INSTALL_DIR/"
        sudo chmod +x "$BUBBALOOP_INSTALL_DIR/$binary"
        echo -e "${GREEN}$binary installed successfully.${NC}"
    else
        echo -e "${RED}$binary not found in target/release! Aborting installation.${NC}"
        exit 1
    fi
done

# Create a systemd service file
# (the heredoc delimiter is unquoted, so $BUBBALOOP_INSTALL_DIR, $REPO_DIR and
# $USER are expanded NOW and baked into the generated unit file)
echo -e "${GREEN}Creating systemd service...${NC}"
sudo tee $SERVICE_FILE > /dev/null << EOL
[Unit]
Description=Bubbaloop - AI & Robotics Service
After=network.target

[Service]
ExecStart=$BUBBALOOP_INSTALL_DIR/serve
WorkingDirectory=$REPO_DIR
User=$USER
Restart=on-failure
RestartSec=5
Environment=RUST_LOG=debug

# Ensure access to /tmp directory
PrivateTmp=false
ReadWritePaths=/tmp
ProtectSystem=false

[Install]
WantedBy=default.target
EOL

# Enable and start the service
echo -e "${GREEN}Enabling and starting the service...${NC}"
sudo systemctl daemon-reload
sudo systemctl enable bubbaloop.service
sudo systemctl start bubbaloop.service

echo -e "${GREEN}Installation complete!${NC}"
echo -e "${GREEN}Service status:${NC}"
sudo systemctl status bubbaloop.service --no-pager

echo -e "${GREEN}You can check the logs with: sudo journalctl -u bubbaloop.service -f${NC}"
--------------------------------------------------------------------------------
/scripts/run_serve.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Launch the deployed `serve` binary, bound to this machine's primary
# IP address on port 3000.

# Default to debug logging unless the caller already set RUST_LOG.
: "${RUST_LOG:=debug}"
export RUST_LOG

# The first address printed by `hostname -I` is the primary local IP.
BIND_HOST=$(hostname -I | awk '{print $1}')
BIND_PORT=3000

# Run the serve binary from the deploy directory on the target machine.
./serve --host $BIND_HOST --port $BIND_PORT
--------------------------------------------------------------------------------
/scripts/uninstall_linux.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Remove the Bubbaloop systemd service and the installed binaries.

# ANSI colors for readable output
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # No Color

echo -e "${GREEN}Uninstalling Bubbaloop...${NC}"

# Tear down the systemd unit if present; otherwise just report and move on.
SERVICE_FILE="/etc/systemd/system/bubbaloop.service"
if [ ! -f "$SERVICE_FILE" ]; then
    echo -e "${RED}Service not found, skipping service removal.${NC}"
else
    echo -e "${GREEN}Stopping and removing service...${NC}"
    sudo systemctl stop bubbaloop.service
    sudo systemctl disable bubbaloop.service
    sudo rm "$SERVICE_FILE"
    sudo systemctl daemon-reload
    echo -e "${GREEN}Service removed successfully.${NC}"
fi

# Delete each installed binary from the install prefix.
BUBBALOOP_INSTALL_DIR=/usr/local/bin

for bin in bubbaloop serve; do
    if [ ! -f "$BUBBALOOP_INSTALL_DIR/$bin" ]; then
        echo -e "${RED}$bin binary not found, skipping removal.${NC}"
    else
        echo -e "${GREEN}Removing $bin binary...${NC}"
        sudo rm "$BUBBALOOP_INSTALL_DIR/$bin"
        echo -e "${GREEN}$bin binary removed successfully.${NC}"
    fi
done

echo -e "${GREEN}Uninstallation complete!${NC}"
--------------------------------------------------------------------------------
/src/api/handles/inference.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::inference::{
3 | InferenceResponse, InferenceSettingsQuery, InferenceSettingsResponse,
4 | },
5 | pipeline::ServerGlobalState,
6 | };
7 | use axum::{extract::State, response::IntoResponse, Json};
8 | use reqwest::StatusCode;
9 |
10 | pub async fn get_inference_result(State(state): State) -> impl IntoResponse {
11 | log::trace!("Request to get inference result");
12 |
13 | if !state.pipeline_store.is_inference_pipeline_running() {
14 | return (
15 | StatusCode::SERVICE_UNAVAILABLE,
16 | Json(InferenceResponse::Error {
17 | error: "Inference pipeline not running. Please start the inference pipeline first."
18 | .to_string(),
19 | }),
20 | );
21 | }
22 |
23 | let Ok(result) = state.result_store.inference.tx.subscribe().recv().await else {
24 | return (
25 | StatusCode::INTERNAL_SERVER_ERROR,
26 | Json(InferenceResponse::Error {
27 | error: "Failed to get inference result: `just start-pipeline inference`"
28 | .to_string(),
29 | }),
30 | );
31 | };
32 | (StatusCode::OK, Json(InferenceResponse::Success(result)))
33 | }
34 |
35 | pub async fn post_inference_settings(
36 | State(state): State,
37 | Json(query): Json,
38 | ) -> impl IntoResponse {
39 | log::debug!("Request to post inference settings: {}", query.prompt);
40 |
41 | if !state.pipeline_store.is_inference_pipeline_running() {
42 | return (
43 | StatusCode::SERVICE_UNAVAILABLE,
44 | Json(InferenceSettingsResponse::Error {
45 | error: "Inference pipeline not running. Please start the inference pipeline first."
46 | .to_string(),
47 | }),
48 | );
49 | }
50 |
51 | let Ok(_) = state.result_store.inference_settings.tx.send(query.prompt) else {
52 | return (
53 | StatusCode::INTERNAL_SERVER_ERROR,
54 | Json(InferenceSettingsResponse::Error {
55 | error: "Failed to send inference settings".to_string(),
56 | }),
57 | );
58 | };
59 |
60 | (StatusCode::OK, Json(InferenceSettingsResponse::Success))
61 | }
62 |
--------------------------------------------------------------------------------
/src/api/handles/mod.rs:
--------------------------------------------------------------------------------
//! HTTP request handlers for the REST API, grouped by feature area.

pub mod inference;
pub mod pipeline;
pub mod recording;
pub mod stats;
pub mod streaming;
6 |
--------------------------------------------------------------------------------
/src/api/handles/pipeline.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::pipeline::{PipelineStartRequest, PipelineStopRequest},
3 | cu29,
4 | pipeline::{self, PipelineHandle, PipelineStatus, ServerGlobalState},
5 | };
6 | use axum::{
7 | extract::State,
8 | response::{IntoResponse, Json},
9 | };
10 | use reqwest::StatusCode;
11 | use serde_json::json;
12 | use std::{sync::atomic::AtomicBool, sync::Arc};
13 |
14 | /// Start a pipeline given its id
15 | pub async fn start_pipeline(
16 | State(state): State,
17 | Json(request): Json,
18 | ) -> impl IntoResponse {
19 | log::debug!("Request to start pipeline: {}", request.name);
20 |
21 | const SUPPORTED_PIPELINES: [&str; 3] = ["bubbaloop", "cameras", "inference"];
22 | if !SUPPORTED_PIPELINES.contains(&request.name.as_str()) {
23 | log::error!(
24 | "Pipeline {} not supported. Try 'bubbaloop', 'cameras', 'inference', instead",
25 | request.name
26 | );
27 | return (
28 | StatusCode::BAD_REQUEST,
29 | Json(json!({
30 | "error": "Pipeline not supported. Try 'bubbaloop', 'cameras', 'inference' instead",
31 | })),
32 | );
33 | }
34 |
35 | // check if the pipeline id is already in the store
36 | let pipeline_name = request.name;
37 | let mut pipeline_store = state
38 | .pipeline_store
39 | .0
40 | .lock()
41 | .expect("Failed to lock pipeline store");
42 |
43 | if pipeline_store.contains_key(&pipeline_name) {
44 | log::error!("Pipeline {} already exists", pipeline_name);
45 | return (
46 | StatusCode::BAD_REQUEST,
47 | Json(json!({
48 | "error": "Pipeline already exists",
49 | })),
50 | );
51 | }
52 |
53 | // the stop signal to kill the pipeline thread
54 | let stop_signal = Arc::new(AtomicBool::new(false));
55 |
56 | let handle = match pipeline_name.as_str() {
57 | "bubbaloop" => pipeline::spawn_bubbaloop_thread(stop_signal.clone()),
58 | "cameras" => cu29::pipelines::spawn_cameras_pipeline(stop_signal.clone()),
59 | "inference" => cu29::pipelines::spawn_inference_pipeline(stop_signal.clone()),
60 | _ => {
61 | log::error!("Pipeline {} not supported", pipeline_name);
62 | return (
63 | StatusCode::BAD_REQUEST,
64 | Json(json!({ "error": "Pipeline not supported" })),
65 | );
66 | }
67 | };
68 |
69 | // add the pipeline handle to the store
70 | pipeline_store.insert(
71 | pipeline_name.clone(),
72 | PipelineHandle {
73 | id: pipeline_name.clone(),
74 | handle,
75 | status: PipelineStatus::Running,
76 | stop_signal,
77 | },
78 | );
79 |
80 | log::debug!("Pipeline {} started", pipeline_name);
81 |
82 | (
83 | StatusCode::OK,
84 | Json(json!({
85 | "message": format!("Pipeline {} started", pipeline_name),
86 | })),
87 | )
88 | }
89 |
90 | // Stop a pipeline given its id
91 | pub async fn stop_pipeline(
92 | State(state): State,
93 | Json(request): Json,
94 | ) -> impl IntoResponse {
95 | log::debug!("Request to stop pipeline: {}", request.name);
96 | if !state.pipeline_store.unregister_pipeline(&request.name) {
97 | log::error!("Pipeline {} not found", request.name);
98 | return (
99 | StatusCode::BAD_REQUEST,
100 | Json(json!({
101 | "error": "Pipeline not found",
102 | })),
103 | );
104 | }
105 |
106 | log::debug!("Pipeline {} stopped", request.name);
107 |
108 | (
109 | StatusCode::OK,
110 | Json(json!({ "message": format!("Pipeline {} stopped", request.name) })),
111 | )
112 | }
113 |
114 | // List all pipelines and return their status
115 | pub async fn list_pipelines(State(state): State) -> impl IntoResponse {
116 | log::debug!("Request to list pipelines");
117 | Json(state.pipeline_store.list_pipelines())
118 | }
119 |
--------------------------------------------------------------------------------
/src/api/handles/recording.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::recording::{RecordingQuery, RecordingResponse},
3 | pipeline::ServerGlobalState,
4 | };
5 | use axum::{
6 | extract::State,
7 | response::{IntoResponse, Json},
8 | };
9 |
10 | pub async fn post_recording_command(
11 | State(state): State,
12 | Json(query): Json,
13 | ) -> impl IntoResponse {
14 | log::debug!("Request to post recording command: {:?}", query.command);
15 |
16 | if !state.pipeline_store.is_cameras_pipeline_running() {
17 | return Json(RecordingResponse::Error {
18 | error: "Cameras pipeline not started. Please start the cameras pipeline first."
19 | .to_string(),
20 | });
21 | }
22 |
23 | log::debug!("Request to post recording command: {:?}", query.command);
24 |
25 | if let Err(e) = state.result_store.recording.request.tx.send(query.command) {
26 | return Json(RecordingResponse::Error {
27 | error: format!("Failed to send command to recording: {}", e),
28 | });
29 | }
30 |
31 | match state.result_store.recording.reply.rx.lock().unwrap().recv() {
32 | Ok(reply) => Json(reply),
33 | Err(e) => Json(RecordingResponse::Error {
34 | error: format!("Failed to receive reply from recording: {}", e),
35 | }),
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/api/handles/stats/mod.rs:
--------------------------------------------------------------------------------
//! System statistics handlers: host/system info and current-user info.

mod sysinfo;
pub use sysinfo::get_sysinfo;

mod whoami;
pub use whoami::get_whoami;
6 |
--------------------------------------------------------------------------------
/src/api/handles/stats/sysinfo.rs:
--------------------------------------------------------------------------------
1 | use axum::response::{IntoResponse, Json};
2 | use serde::Serialize;
3 | use sysinfo::{Disks, System};
4 |
5 | #[derive(Debug, Serialize)]
6 | struct Sysinfo {
7 | // memory
8 | total_memory: u64,
9 | free_memory: u64,
10 | used_memory: u64,
11 | available_memory: u64,
12 | total_swap: u64,
13 | // system
14 | name: String,
15 | kernel_version: String,
16 | os_version: String,
17 | host_name: String,
18 | cpus: Vec,
19 | disks: Vec,
20 | global_cpu_usage: f32,
21 | }
22 |
/// A single logical CPU as reported by the `sysinfo` crate.
#[derive(Debug, Serialize)]
struct Cpu {
    // core identifier as reported by the OS
    name: String,
    // CPU vendor/brand string
    brand: String,
    // frequency from `cpu.frequency()` — presumably MHz, TODO confirm against sysinfo docs
    frequency: u64,
    // per-core usage from `cpu.cpu_usage()` — presumably a percentage, TODO confirm
    usage: f32,
}
30 |
/// A mounted disk as reported by the `sysinfo` crate.
#[derive(Debug, Serialize)]
struct Disk {
    // device name (lossy UTF-8 conversion of the OS string)
    name: String,
    // filesystem type, e.g. ext4/ntfs — taken from `file_system()`
    file_system: String,
    // mount point path (lossy UTF-8 conversion)
    mount_point: String,
    // sizes from `total_space()` / `available_space()` — presumably bytes, TODO confirm
    total_space: u64,
    available_space: u64,
}
39 |
40 | pub async fn get_sysinfo() -> impl IntoResponse {
41 | log::debug!("🤖 Received request for sysinfo");
42 |
43 | // TODO: implement this into a background task
44 | let mut sys = System::new_all();
45 | sys.refresh_all();
46 | sys.refresh_cpu_usage();
47 |
48 | let mut disks = Vec::new();
49 | for sys_disk in &Disks::new_with_refreshed_list() {
50 | disks.push(Disk {
51 | name: sys_disk.name().to_string_lossy().to_string(),
52 | file_system: sys_disk.file_system().to_string_lossy().to_string(),
53 | mount_point: sys_disk.mount_point().to_string_lossy().to_string(),
54 | total_space: sys_disk.total_space(),
55 | available_space: sys_disk.available_space(),
56 | });
57 | }
58 |
59 | let mut cpus = Vec::new();
60 | for cpu in sys.cpus() {
61 | cpus.push(Cpu {
62 | name: cpu.name().to_string(),
63 | brand: cpu.brand().to_string(),
64 | frequency: cpu.frequency(),
65 | usage: cpu.cpu_usage(),
66 | });
67 | }
68 |
69 | Json(Sysinfo {
70 | total_memory: sys.total_memory(),
71 | free_memory: sys.free_memory(),
72 | used_memory: sys.used_memory(),
73 | available_memory: sys.available_memory(),
74 | total_swap: sys.total_swap(),
75 | name: System::name().unwrap_or_default(),
76 | kernel_version: System::kernel_version().unwrap_or_default(),
77 | os_version: System::os_version().unwrap_or_default(),
78 | host_name: System::host_name().unwrap_or_default(),
79 | cpus,
80 | disks,
81 | global_cpu_usage: sys.global_cpu_usage(),
82 | })
83 | }
84 |
--------------------------------------------------------------------------------
/src/api/handles/stats/whoami.rs:
--------------------------------------------------------------------------------
1 | use axum::response::{IntoResponse, Json};
2 | use serde::Serialize;
3 |
/// Identity of the machine and the user running the server, serialized
/// for the `/whoami` endpoint. All values come from the `whoami` crate.
#[derive(Debug, Serialize)]
struct Whoami {
    // CPU architecture, mirrored into our own serializable enum
    arch: WhoamiArch,
    // OS distribution string, e.g. "Ubuntu 22.04"
    distro: String,
    desktop_env: WhoamiDesktopEnv,
    device_name: String,
    // falls back to "unknown" when the hostname cannot be resolved
    hostname: String,
    platform: WhoamiPlatform,
    realname: String,
    username: String,
}
16 |
17 | /// Get the current user's information
18 | pub async fn get_whoami() -> impl IntoResponse {
19 | log::debug!("🤖 Received request for whoami");
20 | Json(Whoami {
21 | realname: whoami::realname(),
22 | username: whoami::username(),
23 | hostname: match whoami::fallible::hostname() {
24 | Ok(hostname) => hostname,
25 | Err(_) => "unknown".to_string(),
26 | },
27 | platform: WhoamiPlatform::from_whoami_platform(whoami::platform()),
28 | arch: WhoamiArch::from_whoami_arch(whoami::arch()),
29 | distro: whoami::distro(),
30 | device_name: whoami::devicename(),
31 | desktop_env: WhoamiDesktopEnv::from_whoami_desktop_env(whoami::desktop_env()),
32 | })
33 | }
34 |
35 | /// The architecture of the current system
36 | #[derive(Debug, Serialize)]
37 | enum WhoamiArch {
38 | Arm64,
39 | Armv5,
40 | Armv6,
41 | Armv7,
42 | I386,
43 | I586,
44 | I686,
45 | X64,
46 | Mips,
47 | Mipsel,
48 | Mips64,
49 | Mips64el,
50 | Powerpc,
51 | Powerpc64,
52 | Powerpc64le,
53 | Riscv32,
54 | Riscv64,
55 | S390x,
56 | Sparc,
57 | Sparc64,
58 | Wasm32,
59 | Wasm64,
60 | Unknown,
61 | }
62 |
63 | impl WhoamiArch {
64 | fn from_whoami_arch(arch: whoami::Arch) -> Self {
65 | match arch {
66 | whoami::Arch::Arm64 => Self::Arm64,
67 | whoami::Arch::ArmV5 => Self::Armv5,
68 | whoami::Arch::ArmV6 => Self::Armv6,
69 | whoami::Arch::ArmV7 => Self::Armv7,
70 | whoami::Arch::I386 => Self::I386,
71 | whoami::Arch::I586 => Self::I586,
72 | whoami::Arch::I686 => Self::I686,
73 | whoami::Arch::X64 => Self::X64,
74 | whoami::Arch::Mips => Self::Mips,
75 | whoami::Arch::MipsEl => Self::Mipsel,
76 | whoami::Arch::Mips64 => Self::Mips64,
77 | whoami::Arch::Mips64El => Self::Mips64el,
78 | whoami::Arch::PowerPc => Self::Powerpc,
79 | whoami::Arch::PowerPc64 => Self::Powerpc64,
80 | whoami::Arch::PowerPc64Le => Self::Powerpc64le,
81 | whoami::Arch::Riscv32 => Self::Riscv32,
82 | whoami::Arch::Riscv64 => Self::Riscv64,
83 | whoami::Arch::S390x => Self::S390x,
84 | whoami::Arch::Sparc => Self::Sparc,
85 | whoami::Arch::Sparc64 => Self::Sparc64,
86 | whoami::Arch::Wasm32 => Self::Wasm32,
87 | whoami::Arch::Wasm64 => Self::Wasm64,
88 | whoami::Arch::Unknown(_) => Self::Unknown,
89 | _ => Self::Unknown,
90 | }
91 | }
92 | }
93 |
94 | /// The platform of the current system
95 | #[derive(Debug, Serialize)]
96 | enum WhoamiPlatform {
97 | Linux,
98 | Bsd,
99 | Windows,
100 | Macos,
101 | Illumos,
102 | Ios,
103 | Android,
104 | Nintendo,
105 | Xbox,
106 | PlayStation,
107 | Fuchsia,
108 | Redox,
109 | Unknown,
110 | }
111 |
112 | impl WhoamiPlatform {
113 | fn from_whoami_platform(platform: whoami::Platform) -> Self {
114 | match platform {
115 | whoami::Platform::Linux => Self::Linux,
116 | whoami::Platform::Bsd => Self::Bsd,
117 | whoami::Platform::Windows => Self::Windows,
118 | whoami::Platform::MacOS => Self::Macos,
119 | whoami::Platform::Illumos => Self::Illumos,
120 | whoami::Platform::Ios => Self::Ios,
121 | whoami::Platform::Android => Self::Android,
122 | whoami::Platform::Nintendo => Self::Nintendo,
123 | whoami::Platform::Xbox => Self::Xbox,
124 | whoami::Platform::PlayStation => Self::PlayStation,
125 | whoami::Platform::Fuchsia => Self::Fuchsia,
126 | whoami::Platform::Redox => Self::Redox,
127 | whoami::Platform::Unknown(_) => Self::Unknown,
128 | _ => Self::Unknown,
129 | }
130 | }
131 | }
132 |
133 | /// The desktop environment of the current system
134 | #[derive(Debug, Serialize)]
135 | enum WhoamiDesktopEnv {
136 | Gnome,
137 | Windows,
138 | Lxde,
139 | Openbox,
140 | Mate,
141 | Xfce,
142 | Kde,
143 | Cinnamon,
144 | I3,
145 | Aqua,
146 | Ios,
147 | Android,
148 | WebBrowser,
149 | Console,
150 | Ubuntu,
151 | Ermine,
152 | Orbital,
153 | Unknown,
154 | }
155 |
156 | impl WhoamiDesktopEnv {
157 | fn from_whoami_desktop_env(desktop_env: whoami::DesktopEnv) -> Self {
158 | match desktop_env {
159 | whoami::DesktopEnv::Gnome => Self::Gnome,
160 | whoami::DesktopEnv::Windows => Self::Windows,
161 | whoami::DesktopEnv::Lxde => Self::Lxde,
162 | whoami::DesktopEnv::Openbox => Self::Openbox,
163 | whoami::DesktopEnv::Mate => Self::Mate,
164 | whoami::DesktopEnv::Xfce => Self::Xfce,
165 | whoami::DesktopEnv::Kde => Self::Kde,
166 | whoami::DesktopEnv::Cinnamon => Self::Cinnamon,
167 | whoami::DesktopEnv::I3 => Self::I3,
168 | whoami::DesktopEnv::Aqua => Self::Aqua,
169 | whoami::DesktopEnv::Ios => Self::Ios,
170 | whoami::DesktopEnv::Android => Self::Android,
171 | whoami::DesktopEnv::WebBrowser => Self::WebBrowser,
172 | whoami::DesktopEnv::Console => Self::Console,
173 | whoami::DesktopEnv::Ubuntu => Self::Ubuntu,
174 | whoami::DesktopEnv::Ermine => Self::Ermine,
175 | whoami::DesktopEnv::Orbital => Self::Orbital,
176 | whoami::DesktopEnv::Unknown(_) => Self::Unknown,
177 | _ => Self::Unknown,
178 | }
179 | }
180 | }
181 |
--------------------------------------------------------------------------------
/src/api/handles/streaming.rs:
--------------------------------------------------------------------------------
1 | use crate::{api::models::streaming::StreamingQuery, pipeline::ServerGlobalState};
2 | use axum::{
3 | body::Body,
4 | extract::{ws::WebSocketUpgrade, Path, State},
5 | http::{header, StatusCode},
6 | response::IntoResponse,
7 | };
8 |
9 | pub async fn get_streaming_image(
10 | Path(query): Path,
11 | State(state): State,
12 | ) -> impl IntoResponse {
13 | log::trace!("Request to get streaming image: {}", query.channel_id);
14 |
15 | // TODO: need to improve later to make sure that the right pipeline is running
16 | if !state.pipeline_store.is_cameras_pipeline_running()
17 | && !state.pipeline_store.is_inference_pipeline_running()
18 | {
19 | return (
20 | StatusCode::SERVICE_UNAVAILABLE,
21 | [(header::CONTENT_TYPE, "text/plain")],
22 | Body::from("Cameras pipeline not started. Please start the cameras pipeline first."),
23 | );
24 | }
25 |
26 | match state.result_store.images[query.channel_id as usize]
27 | .tx
28 | .subscribe()
29 | .recv()
30 | .await
31 | {
32 | Ok(result) => {
33 | // Return the JPEG image data directly with proper headers
34 | (
35 | StatusCode::OK,
36 | [(header::CONTENT_TYPE, "image/jpeg")],
37 | Body::from(result.data),
38 | )
39 | }
40 | Err(e) => {
41 | log::error!("Failed to get streaming image: {}", e);
42 | (
43 | StatusCode::INTERNAL_SERVER_ERROR,
44 | [(header::CONTENT_TYPE, "text/plain")],
45 | Body::from("Failed to get streaming image: `just start-pipeline streaming`"),
46 | )
47 | }
48 | }
49 | }
50 |
51 | /// Handler for WebSocket connections
52 | pub async fn websocket_streaming_image(
53 | ws: WebSocketUpgrade,
54 | Path(query): Path,
55 | State(state): State,
56 | ) -> impl IntoResponse {
57 | // Accept the WebSocket connection
58 | ws.on_upgrade(|socket| handle_socket(socket, query, state))
59 | }
60 |
61 | async fn handle_socket(
62 | mut socket: axum::extract::ws::WebSocket,
63 | query: StreamingQuery,
64 | state: ServerGlobalState,
65 | ) {
66 | log::info!(
67 | "WebSocket connection established for channel {}",
68 | query.channel_id
69 | );
70 |
71 | if !state.pipeline_store.is_cameras_pipeline_running()
72 | && !state.pipeline_store.is_inference_pipeline_running()
73 | {
74 | log::error!("Cameras pipeline not started");
75 | // Send an error message and close the connection
76 | let _ = socket
77 | .send(axum::extract::ws::Message::Text(
78 | "Cameras pipeline not started. Please start the cameras pipeline first.".into(),
79 | ))
80 | .await;
81 | return;
82 | }
83 |
84 | // Subscribe to the broadcast channel for this camera
85 | let mut rx = state.result_store.images[query.channel_id as usize]
86 | .tx
87 | .subscribe();
88 |
89 | // Stream images until the client disconnects
90 | while let Ok(result) = rx.recv().await {
91 | if let Err(e) = socket
92 | .send(axum::extract::ws::Message::Binary(result.data.into()))
93 | .await
94 | {
95 | log::error!("Failed to send WebSocket message: {}", e);
96 | break;
97 | }
98 | }
99 |
100 | log::info!(
101 | "WebSocket connection closed for channel {}",
102 | query.channel_id
103 | );
104 | }
105 |
--------------------------------------------------------------------------------
/src/api/mod.rs:
--------------------------------------------------------------------------------
1 | mod server;
2 | pub use server::ApiServer;
3 |
4 | pub mod handles;
5 | pub mod models;
6 |
--------------------------------------------------------------------------------
/src/api/models/inference.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
/// The query for the inference request
#[derive(Debug, Deserialize, Serialize)]
pub struct InferenceSettingsQuery {
    // free-form prompt forwarded to the inference pipeline
    pub prompt: String,
}

/// The result of the inference request
#[derive(Clone, Debug, Serialize)]
pub struct InferenceResult {
    // capture timestamp in nanoseconds
    pub stamp_ns: u64,
    // camera channel the frame came from
    pub channel_id: u8,
    // prompt that produced this response
    pub prompt: String,
    pub response: String,
}

/// The response of the inference request
#[derive(Debug, Serialize)]
pub enum InferenceResponse {
    Success(InferenceResult),
    Error { error: String },
}

/// The response of the inference settings request
// NOTE(review): the original doc comment said "query", but this type is
// the settings *response* returned by the POST /settings handler.
#[derive(Debug, Deserialize, Serialize)]
pub enum InferenceSettingsResponse {
    Success,
    Error { error: String },
}
31 |
--------------------------------------------------------------------------------
/src/api/models/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod inference;
2 | pub mod pipeline;
3 | pub mod recording;
4 | pub mod streaming;
5 |
--------------------------------------------------------------------------------
/src/api/models/pipeline.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
/// Request body for `POST /api/v0/pipeline/start`.
#[derive(Debug, Serialize, Deserialize)]
pub struct PipelineStartRequest {
    // the name of the pipeline to start
    pub name: String,
}

/// Request body for `POST /api/v0/pipeline/stop`.
#[derive(Debug, Serialize, Deserialize)]
pub struct PipelineStopRequest {
    // the name of the pipeline to stop
    pub name: String,
}
14 |
--------------------------------------------------------------------------------
/src/api/models/recording.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
/// The command for the recording request
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum RecordingCommand {
    // begin writing the recording log
    Start,
    // finish and close the recording log
    Stop,
}

/// The query for the recording request
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct RecordingQuery {
    // command forwarded to the recording task over the request channel
    pub command: RecordingCommand,
}

/// The response for the recording request
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum RecordingResponse {
    Success,
    Error { error: String },
}
22 |
--------------------------------------------------------------------------------
/src/api/models/streaming.rs:
--------------------------------------------------------------------------------
1 | use crate::cu29::msgs::EncodedImage;
2 | use serde::{Deserialize, Serialize};
3 |
/// The query for the streaming request
// NOTE(review): the original comment said "inference" — copy-paste from
// models/inference.rs; this is the path parameter of the streaming routes.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct StreamingQuery {
    // index into the server's per-camera image broadcast channels
    pub channel_id: u8,
}

/// The response of the streaming request
// NOTE(review): same copy-paste — this wraps an encoded camera frame.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum StreamingResponse {
    Success(EncodedImage),
    Error { error: String },
}
16 |
--------------------------------------------------------------------------------
/src/api/server.rs:
--------------------------------------------------------------------------------
1 | use crate::{api::handles, pipeline::ServerGlobalState};
2 | use axum::{
3 | routing::{get, post},
4 | Router,
5 | };
6 | use tower_http::cors::{Any, CorsLayer};
7 |
/// Zero-sized handle that builds the axum router and serves the HTTP API.
#[derive(Default)]
pub struct ApiServer;
10 |
11 | impl ApiServer {
12 | pub async fn start(
13 | &self,
14 | addr: String,
15 | state: ServerGlobalState,
16 | ) -> Result<(), Box> {
17 | log::info!("🚀 Starting the server");
18 | log::info!("🔥 Listening on: {}", addr);
19 | log::info!("🔧 Press Ctrl+C to stop the server");
20 |
21 | // Configure CORS to allow requests from your frontend app
22 | let cors = CorsLayer::new()
23 | // Allow requests from any origin
24 | .allow_origin(Any)
25 | // Allow common HTTP methods
26 | .allow_methods(Any)
27 | // Allow common headers
28 | .allow_headers(Any);
29 |
30 | let app = Router::new()
31 | .route("/", get(|| async { "Welcome to Bubbaloop!" }))
32 | .nest(
33 | "/api/v0/stats",
34 | Router::new()
35 | .route("/whoami", get(handles::stats::get_whoami))
36 | .route("/sysinfo", get(handles::stats::get_sysinfo)),
37 | )
38 | .nest(
39 | "/api/v0/streaming",
40 | Router::new()
41 | .route(
42 | "/image/{channel_id}",
43 | get(handles::streaming::get_streaming_image),
44 | )
45 | .route(
46 | "/ws/{channel_id}",
47 | get(handles::streaming::websocket_streaming_image),
48 | ),
49 | )
50 | .nest(
51 | "/api/v0/recording",
52 | Router::new().route("/command", post(handles::recording::post_recording_command)),
53 | )
54 | .nest(
55 | "/api/v0/inference",
56 | Router::new()
57 | .route("/result", get(handles::inference::get_inference_result))
58 | .route(
59 | "/settings",
60 | post(handles::inference::post_inference_settings),
61 | ),
62 | )
63 | .nest(
64 | "/api/v0/pipeline",
65 | Router::new()
66 | .route("/start", post(handles::pipeline::start_pipeline))
67 | .route("/stop", post(handles::pipeline::stop_pipeline))
68 | .route("/list", get(handles::pipeline::list_pipelines)),
69 | )
70 | .layer(cors) // Add the CORS middleware
71 | .with_state(state);
72 |
73 | let listener = tokio::net::TcpListener::bind(addr).await?;
74 | axum::serve(listener, app).await?;
75 | Ok(())
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/bin/bubbaloop.rs:
--------------------------------------------------------------------------------
1 | use argh::FromArgs;
2 |
3 | // defaults for the server
4 | const DEFAULT_HOST: &str = "0.0.0.0";
5 | const DEFAULT_PORT: u16 = 3000;
6 |
// NOTE: argh turns the `///` comments below into user-facing CLI help
// text, so they are left byte-identical; review notes use `//` only.
#[derive(FromArgs)]
/// Bubbaloop CLI
struct CLIArgs {
    #[argh(subcommand)]
    commands: Commands,

    // host/port identify the remote bubbaloop server to talk to
    #[argh(option, short = 'h', default = "DEFAULT_HOST.to_string()")]
    /// the host to listen on
    host: String,

    #[argh(option, short = 'p', default = "DEFAULT_PORT")]
    /// the port to listen on
    port: u16,
}
21 |
// Top-level subcommands of the CLI; each variant wraps its own arg struct.
#[derive(FromArgs)]
#[argh(subcommand)]
enum Commands {
    Inference(InferenceCommand),
    Pipeline(PipelineCommand),
    Recording(RecordingCommand),
    Stats(StatsCommand),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "stats")]
/// Get stats about the server
struct StatsCommand {
    #[argh(subcommand)]
    mode: StatsMode,
}

// `bubbaloop stats {whoami|sysinfo}` — hits /api/v0/stats/* endpoints.
#[derive(FromArgs)]
#[argh(subcommand)]
enum StatsMode {
    Whoami(StatsWhoamiCommand),
    Sysinfo(StatsSysinfoCommand),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "whoami")]
/// Print the whoami
struct StatsWhoamiCommand {}

#[derive(FromArgs)]
#[argh(subcommand, name = "sysinfo")]
/// Print the sysinfo
struct StatsSysinfoCommand {}
55 |
#[derive(FromArgs)]
#[argh(subcommand, name = "recording")]
/// Recording management commands
struct RecordingCommand {
    #[argh(subcommand)]
    mode: RecordingMode,
}

// `bubbaloop recording {start|stop}` — posts to /api/v0/recording/command.
#[derive(FromArgs)]
#[argh(subcommand)]
enum RecordingMode {
    Start(RecordingStartCommand),
    Stop(RecordingStopCommand),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "start")]
/// Start recording
struct RecordingStartCommand {}

#[derive(FromArgs)]
#[argh(subcommand, name = "stop")]
/// Stop recording
struct RecordingStopCommand {}
80 |
#[derive(FromArgs)]
#[argh(subcommand, name = "pipeline")]
/// Pipeline management commands
struct PipelineCommand {
    #[argh(subcommand)]
    mode: PipelineMode,
}

// `bubbaloop pipeline {start|stop|list}` — hits /api/v0/pipeline/*.
#[derive(FromArgs)]
#[argh(subcommand)]
enum PipelineMode {
    Start(PipelineStartCommand),
    Stop(PipelineStopCommand),
    List(PipelineListCommand),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "start")]
/// Start a pipeline
struct PipelineStartCommand {
    // name must match a pipeline known to the server (e.g. "cameras")
    #[argh(option, short = 'n')]
    /// the pipeline name
    name: String,
}

#[derive(FromArgs)]
#[argh(subcommand, name = "stop")]
/// Stop a pipeline
struct PipelineStopCommand {
    #[argh(option, short = 'n')]
    /// the pipeline name
    name: String,
}

#[derive(FromArgs)]
#[argh(subcommand, name = "list")]
/// List pipelines
struct PipelineListCommand {}
119 |
#[derive(FromArgs)]
#[argh(subcommand, name = "inference")]
/// Inference management commands
struct InferenceCommand {
    #[argh(subcommand)]
    mode: InferenceMode,
}

// `bubbaloop inference {result|settings}` — hits /api/v0/inference/*.
#[derive(FromArgs)]
#[argh(subcommand)]
enum InferenceMode {
    Result(InferenceResultCommand),
    Settings(InferenceSettingsCommand),
}

#[derive(FromArgs)]
#[argh(subcommand, name = "result")]
/// Get the latest inference result
struct InferenceResultCommand {}

#[derive(FromArgs)]
#[argh(subcommand, name = "settings")]
/// Get the inference settings
struct InferenceSettingsCommand {
    #[argh(option)]
    /// the prompt to configure during the inference
    prompt: String,
}
148 |
149 | #[tokio::main]
150 | async fn main() -> Result<(), Box> {
151 | let args: CLIArgs = argh::from_env();
152 |
153 | let client = reqwest::Client::new();
154 |
155 | // format the host and port
156 | let addr = format!("{}:{}", args.host, args.port);
157 |
158 | match args.commands {
159 | Commands::Stats(stats_command) => match stats_command.mode {
160 | StatsMode::Whoami(_) => {
161 | let response = client
162 | .get(format!("http://{}/api/v0/stats/whoami", addr))
163 | .send()
164 | .await?;
165 |
166 | let result = response.json::().await?;
167 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
168 | }
169 | StatsMode::Sysinfo(_) => {
170 | let response = client
171 | .get(format!("http://{}/api/v0/stats/sysinfo", addr))
172 | .send()
173 | .await?;
174 |
175 | let result = response.json::().await?;
176 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
177 | }
178 | },
179 | Commands::Recording(recording_command) => match recording_command.mode {
180 | RecordingMode::Start(_) => {
181 | let response = client
182 | .post(format!("http://{}/api/v0/recording/command", addr))
183 | .json(&bubbaloop::api::models::recording::RecordingQuery {
184 | command: bubbaloop::api::models::recording::RecordingCommand::Start,
185 | })
186 | .send()
187 | .await?;
188 |
189 | let result = response.json::().await?;
190 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
191 | }
192 | RecordingMode::Stop(_) => {
193 | let response = client
194 | .post(format!("http://{}/api/v0/recording/command", addr))
195 | .json(&bubbaloop::api::models::recording::RecordingQuery {
196 | command: bubbaloop::api::models::recording::RecordingCommand::Stop,
197 | })
198 | .send()
199 | .await?;
200 |
201 | let result = response.json::().await?;
202 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
203 | }
204 | },
205 | Commands::Pipeline(pipeline_command) => match pipeline_command.mode {
206 | PipelineMode::Start(pipeline_start_command) => {
207 | let response = client
208 | .post(format!("http://{}/api/v0/pipeline/start", addr))
209 | .json(&bubbaloop::api::models::pipeline::PipelineStartRequest {
210 | name: pipeline_start_command.name,
211 | })
212 | .send()
213 | .await?;
214 |
215 | let result = response.json::().await?;
216 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
217 | }
218 | PipelineMode::Stop(pipeline_stop_command) => {
219 | let response = client
220 | .post(format!("http://{}/api/v0/pipeline/stop", addr))
221 | .json(&bubbaloop::api::models::pipeline::PipelineStopRequest {
222 | name: pipeline_stop_command.name,
223 | })
224 | .send()
225 | .await?;
226 |
227 | let result = response.json::().await?;
228 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
229 | }
230 | PipelineMode::List(_pipeline_list_command) => {
231 | let response = client
232 | .get(format!("http://{}/api/v0/pipeline/list", addr))
233 | .send()
234 | .await?;
235 |
236 | let result = response.json::().await?;
237 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
238 | }
239 | },
240 | Commands::Inference(inference_command) => match inference_command.mode {
241 | InferenceMode::Result(_) => {
242 | let response = client
243 | .get(format!("http://{}/api/v0/inference/result", addr))
244 | .send()
245 | .await?;
246 |
247 | let result = response.json::().await?;
248 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
249 | }
250 | InferenceMode::Settings(inference_settings_command) => {
251 | let response = client
252 | .post(format!("http://{}/api/v0/inference/settings", addr))
253 | .json(&bubbaloop::api::models::inference::InferenceSettingsQuery {
254 | prompt: inference_settings_command.prompt,
255 | })
256 | .send()
257 | .await?;
258 |
259 | let result = response.json::().await?;
260 | println!("Result: {}", serde_json::to_string_pretty(&result)?);
261 | }
262 | },
263 | }
264 | Ok(())
265 | }
266 |
--------------------------------------------------------------------------------
/src/bin/serve.rs:
--------------------------------------------------------------------------------
1 | use argh::FromArgs;
2 |
3 | // defaults for the server
4 | const DEFAULT_HOST: &str = "0.0.0.0";
5 | const DEFAULT_PORT: u16 = 3000;
6 |
// NOTE: argh turns the `///` comments below into user-facing CLI help
// text, so they are left byte-identical; review notes use `//` only.
#[derive(FromArgs)]
#[argh(description = "Bubbaloop server")]
struct CLIArgs {
    #[argh(option, short = 'h', default = "DEFAULT_HOST.to_string()")]
    /// the host to listen on
    host: String,

    #[argh(option, short = 'p', default = "DEFAULT_PORT")]
    /// the port to listen on
    port: u16,
}
18 |
19 | fn main() -> Result<(), Box> {
20 | env_logger::init();
21 |
22 | let args: CLIArgs = argh::from_env();
23 |
24 | // format the host and port
25 | let addr = format!("{}:{}", args.host, args.port);
26 |
27 | let global_state = bubbaloop::pipeline::SERVER_GLOBAL_STATE.clone();
28 |
29 | // start the api server
30 | let api = bubbaloop::api::ApiServer;
31 | let runtime = tokio::runtime::Runtime::new()?;
32 | runtime.block_on(async move {
33 | api.start(addr, global_state).await.unwrap();
34 | });
35 |
36 | Ok(())
37 | }
38 |
--------------------------------------------------------------------------------
/src/cu29/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod msgs;
2 | pub mod pipelines;
3 | pub mod tasks;
4 |
--------------------------------------------------------------------------------
/src/cu29/msgs.rs:
--------------------------------------------------------------------------------
1 | use serde::{ser::SerializeStruct, Deserialize, Serialize};
2 |
3 | type ImageRgb8 = kornia_image::Image;
4 |
/// A raw RGB8 camera frame flowing through the copper pipeline.
#[derive(Clone)]
pub struct ImageRgb8Msg {
    // capture timestamp in nanoseconds
    pub stamp_ns: u64,
    // camera channel index the frame came from
    pub channel_id: u8,
    // the decoded pixel data
    pub image: ImageRgb8,
}
11 |
// TODO: implement in kornia-image
// Manual Debug: prints metadata and image size only, never the pixel
// buffer (which would flood logs). Keep the output format stable — it is
// what appears in pipeline debug logs.
impl std::fmt::Debug for ImageRgb8Msg {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "ImageRgb8Msg(stamp_ns: {}, channel_id: {}, size: {:?})",
            self.stamp_ns,
            self.channel_id,
            self.image.size()
        )
    }
}
24 |
// TODO: implement Image::empty()
// Default is required by the copper task machinery; it produces a 0x0
// frame with an empty pixel buffer.
impl Default for ImageRgb8Msg {
    fn default() -> Self {
        Self {
            stamp_ns: 0,
            channel_id: 0,
            // presumably `new` cannot fail for a 0x0 size with an empty
            // buffer, making the unwrap safe — confirm in kornia-image
            image: ImageRgb8::new([0, 0].into(), vec![]).unwrap(),
        }
    }
}
35 |
36 | // TODO: implement in kornia-image
37 | impl bincode::enc::Encode for ImageRgb8Msg {
38 | fn encode(
39 | &self,
40 | encoder: &mut E,
41 | ) -> Result<(), bincode::error::EncodeError> {
42 | bincode::Encode::encode(&self.stamp_ns, encoder)?;
43 | bincode::Encode::encode(&self.channel_id, encoder)?;
44 | // TODO: support image encoding in kornia_rs::Image
45 | bincode::Encode::encode(&self.image.rows(), encoder)?;
46 | bincode::Encode::encode(&self.image.cols(), encoder)?;
47 | bincode::Encode::encode(&self.image.as_slice(), encoder)?;
48 | Ok(())
49 | }
50 | }
51 |
52 | // TODO: implement in kornia-image
53 | impl bincode::de::Decode for ImageRgb8Msg {
54 | fn decode>(
55 | decoder: &mut D,
56 | ) -> Result {
57 | let stamp_ns = bincode::Decode::decode(decoder)?;
58 | let channel_id = bincode::Decode::decode(decoder)?;
59 | // TODO: support image encoding in kornia_rs::Image
60 | let rows = bincode::Decode::decode(decoder)?;
61 | let cols = bincode::Decode::decode(decoder)?;
62 | let data = bincode::Decode::decode(decoder)?;
63 | let image = ImageRgb8::new([rows, cols].into(), data)
64 | .map_err(|e| bincode::error::DecodeError::OtherString(e.to_string()))?;
65 | Ok(Self {
66 | stamp_ns,
67 | channel_id,
68 | image,
69 | })
70 | }
71 | }
72 |
73 | impl Serialize for ImageRgb8Msg {
74 | fn serialize(&self, serializer: S) -> Result
75 | where
76 | S: serde::Serializer,
77 | {
78 | let mut s = serializer.serialize_struct("ImageRgb8Msg", 3)?;
79 | s.serialize_field("stamp_ns", &self.stamp_ns)?;
80 | s.serialize_field("channel_id", &self.channel_id)?;
81 | // TODO: support image encoding in kornia_rs::Image
82 | s.serialize_field("rows", &self.image.rows())?;
83 | s.serialize_field("cols", &self.image.cols())?;
84 | s.serialize_field("data", &self.image.as_slice())?;
85 | s.end()
86 | }
87 | }
88 |
89 | impl<'de> Deserialize<'de> for ImageRgb8Msg {
90 | fn deserialize(deserializer: D) -> Result
91 | where
92 | D: serde::Deserializer<'de>,
93 | {
94 | #[derive(Deserialize)]
95 | struct SerializedImage {
96 | stamp_ns: u64,
97 | channel_id: u8,
98 | rows: usize,
99 | cols: usize,
100 | data: Vec,
101 | }
102 |
103 | let data = SerializedImage::deserialize(deserializer)?;
104 | Ok(Self {
105 | stamp_ns: data.stamp_ns,
106 | channel_id: data.channel_id,
107 | // TODO: support image encoding in kornia_rs::Image
108 | image: ImageRgb8::new([data.rows, data.cols].into(), data.data)
109 | .map_err(serde::de::Error::custom)?,
110 | })
111 | }
112 | }
113 |
114 | #[derive(Clone, Debug, Default, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
115 | pub struct EncodedImage {
116 | pub stamp_ns: u64,
117 | pub channel_id: u8,
118 | pub data: Vec,
119 | pub encoding: String,
120 | }
121 |
/// Prompt/response pair produced by the inference pipeline for one frame.
#[derive(Clone, Debug, Default, Serialize, Deserialize, bincode::Encode, bincode::Decode)]
pub struct PromptResponseMsg {
    // capture timestamp in nanoseconds of the frame that was analyzed
    pub stamp_ns: u64,
    // camera channel index the frame came from
    pub channel_id: u8,
    // prompt that was submitted to the model
    pub prompt: String,
    // model's textual answer
    pub response: String,
}
129 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/cameras.rs:
--------------------------------------------------------------------------------
1 | use crate::pipeline::PipelineResult;
2 | use cu29::prelude::*;
3 | use cu29_helpers::basic_copper_setup;
4 | use std::{
5 | path::PathBuf,
6 | sync::{atomic::AtomicBool, Arc},
7 | };
8 |
9 | const SLAB_SIZE: Option = Some(150 * 1024 * 1024);
10 |
// NOTE: this will use the default config file in the current directory during compilation
// however, it will be overridden by the ron config string when the pipeline is started
// The `copper_runtime` macro generates the whole task graph (fields,
// builder, run loop) from the RON file at compile time.
#[copper_runtime(config = "src/cu29/pipelines/cameras_1.ron")]
struct CamerasApp {}
15 |
16 | pub struct CamerasPipeline(pub CamerasApp);
17 |
impl CamerasPipeline {
    /// Sets up the copper logger and builds the cameras task graph.
    ///
    /// Returns an error if the copper context or the application graph
    /// cannot be constructed.
    pub fn new() -> CuResult {
        // Copper's structured log file; overwritten on each run.
        let logger_path = PathBuf::from("/tmp/cameras.copper");
        debug!("Logger path: {}", path = &logger_path);

        let copper_ctx = basic_copper_setup(&logger_path, SLAB_SIZE, true, None)?;
        let application = CamerasAppBuilder::new().with_context(&copper_ctx).build()?;

        Ok(Self(application))
    }
}
29 |
30 | /// Spawns a new thread for the pipeline
31 | ///
32 | /// This function is used to spawn a new thread for the pipeline
33 | /// and to pass the stop signal to the pipeline
34 | ///
35 | /// # Arguments
36 | ///
37 | /// * `pipeline_id` - The id of the pipeline
38 | /// * `stop_signal` - The stop signal to stop the pipeline
39 | ///
40 | /// # Returns
41 | ///
42 | /// A handle to the thread that runs the pipeline
43 | pub fn spawn_cameras_pipeline(
44 | stop_signal: Arc,
45 | ) -> std::thread::JoinHandle {
46 | std::thread::spawn({
47 | let stop_signal = stop_signal.clone();
48 | move || -> PipelineResult {
49 | // parse the ron config string and create the pipeline
50 | let mut app = CamerasPipeline::new()?;
51 |
52 | // create the pipeline and start the tasks
53 | app.start_all_tasks()?;
54 |
55 | while !stop_signal.load(std::sync::atomic::Ordering::Relaxed) {
56 | // we run the pipeline iteration step by step
57 | app.run_one_iteration()?;
58 | }
59 |
60 | // stop the pipeline and wait for the tasks to finish
61 | app.stop_all_tasks()?;
62 |
63 | log::debug!("Cameras pipeline stopped");
64 |
65 | Ok(())
66 | }
67 | })
68 | }
69 |
// Deref/DerefMut let callers invoke the generated `CamerasApp` methods
// (start_all_tasks, run_one_iteration, ...) directly on the wrapper.
impl std::ops::Deref for CamerasPipeline {
    type Target = CamerasApp;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for CamerasPipeline {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
83 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/cameras_1.ron:
--------------------------------------------------------------------------------
1 | (
2 | tasks: [
3 | (
4 | id: "cam0",
5 | type: "crate::cu29::tasks::VideoCapture",
6 | config: {
7 | "source_type": "rtsp",
            // URL of the RTSP camera
            // rtsp://<username>:<password>@<host>:<port>/<stream>
10 | "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
11 | "channel_id": 0,
12 | }
13 | ),
14 | (
15 | id: "enc0",
16 | type: "crate::cu29::tasks::ImageEncoder",
17 | ),
18 | (
19 | id: "bcast0",
20 | type: "crate::cu29::tasks::ImageBroadcast",
21 | ),
22 | (
23 | id: "recorder",
24 | type: "crate::cu29::tasks::RecorderOne",
25 | config: {
26 | // Path to the directory where the logs will be stored
27 | "path": "/tmp/",
28 | }
29 | ),
30 | ],
31 | cnx: [
32 | (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
33 | (src: "enc0", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
34 | (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
35 | ],
36 | logging: (
37 | slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
38 | section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
39 | enable_task_logging: false,
40 | ),
41 | )
42 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/cameras_2.ron:
--------------------------------------------------------------------------------
// Copper pipeline graph: two RTSP cameras, each with its own JPEG encoder and
// broadcaster, feeding a shared two-input recorder.
(
    tasks: [
        (
            id: "cam0",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                // NOTE(review): credentials are committed in plaintext here.
                "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
                "channel_id": 0,
            }
        ),
        (
            id: "cam1",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                "source_uri": "rtsp://tapo_terrace:123456789@192.168.1.151:554/stream2",
                "channel_id": 1,
            }
        ),
        (
            id: "enc0",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc1",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "bcast0",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast1",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            // single recorder sink taking both encoded streams
            id: "recorder",
            type: "crate::cu29::tasks::RecorderTwo",
            config: {
                // Path to the directory where the logs will be stored
                "path": "/tmp/",
            }
        ),
    ],
    cnx: [
        (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam1", dst: "enc1", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "enc0", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "bcast1", msg: "crate::cu29::msgs::EncodedImage"),
    ],
    logging: (
        slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
        section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
        enable_task_logging: false,
    ),
)
64 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/cameras_3.ron:
--------------------------------------------------------------------------------
// Copper pipeline graph: three cameras (two RTSP + one local RTSP relay), each
// with its own JPEG encoder and broadcaster, feeding a three-input recorder.
(
    tasks: [
        (
            id: "cam0",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                // NOTE(review): credentials are committed in plaintext here.
                "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
                "channel_id": 0,
            }
        ),
        (
            id: "cam1",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                "source_uri": "rtsp://tapo_terrace:123456789@192.168.1.151:554/stream2",
                "channel_id": 1,
            }
        ),
        (
            id: "cam2",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                "source_uri": "rtsp://edgar:edgar@192.168.1.134:8554/live",
                "channel_id": 2,
            }
        ),
        (
            id: "enc0",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc1",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc2",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "bcast0",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast1",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast2",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            // single recorder sink taking all three encoded streams
            id: "recorder",
            type: "crate::cu29::tasks::RecorderThree",
            config: {
                // Path to the directory where the logs will be stored
                "path": "/tmp/",
            }
        ),
    ],
    cnx: [
        (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam1", dst: "enc1", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam2", dst: "enc2", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "enc0", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc2", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "bcast1", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc2", dst: "bcast2", msg: "crate::cu29::msgs::EncodedImage"),
    ],
    logging: (
        slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
        section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
        enable_task_logging: false,
    ),
)
86 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/cameras_4.ron:
--------------------------------------------------------------------------------
// Copper pipeline graph: three RTSP cameras plus one local V4L2 device, each
// with its own JPEG encoder and broadcaster, feeding a four-input recorder.
(
    tasks: [
        (
            id: "cam0",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                // NOTE(review): credentials are committed in plaintext here.
                "source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
                "channel_id": 0,
            }
        ),
        (
            id: "cam1",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                "source_uri": "rtsp://tapo_terrace:123456789@192.168.1.151:554/stream2",
                "channel_id": 1,
            }
        ),
        (
            id: "cam2",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "rtsp",
                // URL of the RTSP camera
                // rtsp://<username>:<password>@<host>:<port>/<stream>
                "source_uri": "rtsp://edgar:edgar@192.168.1.134:8554/live",
                "channel_id": 2,
            }
        ),
        (
            // local webcam via V4L2 (device path instead of URL)
            id: "cam3",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "v4l2",
                "source_uri": "/dev/video0",
                "source_fps": 30,
                "image_cols": 640,
                "image_rows": 480,
                "channel_id": 3,

            }
        ),
        (
            id: "enc0",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc1",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc2",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "enc3",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            id: "bcast0",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast1",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast2",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            id: "bcast3",
            type: "crate::cu29::tasks::ImageBroadcast",
        ),
        (
            // single recorder sink taking all four encoded streams
            id: "recorder",
            type: "crate::cu29::tasks::RecorderFour",
            config: {
                // Path to the directory where the logs will be stored
                "path": "/tmp/",
            }
        ),
    ],
    cnx: [
        (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam1", dst: "enc1", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam2", dst: "enc2", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam3", dst: "enc3", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "enc0", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc2", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc3", dst: "recorder", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc0", dst: "bcast0", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc1", dst: "bcast1", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc2", dst: "bcast2", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "enc3", dst: "bcast3", msg: "crate::cu29::msgs::EncodedImage"),
    ],
    logging: (
        slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
        section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
        enable_task_logging: false,
    ),
)
110 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/inference.ron:
--------------------------------------------------------------------------------
// EXPERIMENTAL Copper pipeline: one V4L2 camera feeding both a JPEG encoder
// (for streaming) and the Paligemma inference task; both results are broadcast.
(
    tasks: [
        (
            id: "cam0",
            type: "crate::cu29::tasks::VideoCapture",
            config: {
                "source_type": "v4l2",
                "source_uri": "/dev/video0",
                "source_fps": 30,
                "image_cols": 640,
                "image_rows": 480,
                "channel_id": 0,
            }
        ),
        // NOTE: uncomment to use this camera
        //(
        //    id: "cam0",
        //    type: "crate::cu29::tasks::VideoCapture",
        //    config: {
        //        "source_type": "rtsp",
        //        // URL of the RTSP camera
        //        // rtsp://<username>:<password>@<host>:<port>/<stream>
        //        //"source_uri": "rtsp://tapo_entrance:123456789@192.168.1.141:554/stream2",
        //        "source_uri": "rtsp://tapo_terrace:123456789@192.168.1.151:554/stream2",
        //        "channel_id": 0,
        //    }
        //),
        (
            id: "enc0",
            type: "crate::cu29::tasks::ImageEncoder",
        ),
        (
            // Paligemma model inference on raw frames
            id: "inference",
            type: "crate::cu29::tasks::Inference",
        ),
        (
            id: "img_bcast",
            type: "crate::cu29::tasks::ImageBroadcast",
            config: {
                "channel_id": 0,
            }
        ),
        (
            id: "inference_bcast",
            type: "crate::cu29::tasks::InferenceBroadcast",
            config: {
                "channel_id": 0,
            }
        ),

    ],
    cnx: [
        (src: "cam0", dst: "enc0", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "cam0", dst: "inference", msg: "crate::cu29::msgs::ImageRgb8Msg"),
        (src: "enc0", dst: "img_bcast", msg: "crate::cu29::msgs::EncodedImage"),
        (src: "inference", dst: "inference_bcast", msg: "crate::cu29::msgs::PromptResponseMsg"),
    ],
    logging: (
        slab_size_mib: 1024, // Preallocates 1GiB of memory map file at a time
        section_size_mib: 100, // Preallocates 100MiB of memory map per section for the main logger.
        enable_task_logging: false,
    ),
)
64 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/inference.rs:
--------------------------------------------------------------------------------
1 | use crate::pipeline::PipelineResult;
2 | use cu29::prelude::*;
3 | use cu29_helpers::basic_copper_setup;
4 | use std::{
5 | path::PathBuf,
6 | sync::{atomic::AtomicBool, Arc},
7 | };
8 |
9 | const SLAB_SIZE: Option = Some(150 * 1024 * 1024);
10 |
11 | // NOTE: this will use the default config file in the current directory during compilation
12 | // however, it will be overridden by the ron config string when the pipeline is started
13 | #[copper_runtime(config = "src/cu29/pipelines/inference.ron")]
14 | struct InferenceApp {}
15 |
16 | pub struct InferencePipeline(pub InferenceApp);
17 |
18 | impl InferencePipeline {
19 | pub fn new() -> CuResult {
20 | let logger_path = PathBuf::from("/tmp/inference.copper");
21 | debug!("Logger path: {}", path = &logger_path);
22 |
23 | let copper_ctx = basic_copper_setup(&logger_path, SLAB_SIZE, true, None)?;
24 | let application = InferenceAppBuilder::new()
25 | .with_context(&copper_ctx)
26 | .build()?;
27 |
28 | Ok(Self(application))
29 | }
30 | }
31 |
32 | /// Spawns a new thread for the pipeline
33 | ///
34 | /// This function is used to spawn a new thread for the pipeline
35 | /// and to pass the stop signal to the pipeline
36 | ///
37 | /// # Arguments
38 | ///
39 | /// * `pipeline_id` - The id of the pipeline
40 | /// * `stop_signal` - The stop signal to stop the pipeline
41 | ///
42 | /// # Returns
43 | ///
44 | /// A handle to the thread that runs the pipeline
45 | pub fn spawn_inference_pipeline(
46 | stop_signal: Arc,
47 | ) -> std::thread::JoinHandle {
48 | std::thread::spawn({
49 | move || -> PipelineResult {
50 | // parse the ron config string and create the pipeline
51 | let mut app = InferencePipeline::new()?;
52 |
53 | // create the pipeline and start the tasks
54 | app.start_all_tasks()?;
55 |
56 | while !stop_signal.load(std::sync::atomic::Ordering::Relaxed) {
57 | // we run the pipeline iteration step by step
58 | app.run_one_iteration()?;
59 | }
60 |
61 | // stop the pipeline and wait for the tasks to finish
62 | app.stop_all_tasks()?;
63 |
64 | Ok(())
65 | }
66 | })
67 | }
68 |
69 | impl std::ops::Deref for InferencePipeline {
70 | type Target = InferenceApp;
71 |
72 | fn deref(&self) -> &Self::Target {
73 | &self.0
74 | }
75 | }
76 |
77 | impl std::ops::DerefMut for InferencePipeline {
78 | fn deref_mut(&mut self) -> &mut Self::Target {
79 | &mut self.0
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
/src/cu29/pipelines/mod.rs:
--------------------------------------------------------------------------------
// Camera capture pipeline (see cameras_*.ron for the 1-4 camera graphs).
mod cameras;
pub use cameras::spawn_cameras_pipeline;

// EXPERIMENTAL
// Camera + Paligemma inference pipeline (see inference.ron).
mod inference;
pub use inference::spawn_inference_pipeline;
7 |
--------------------------------------------------------------------------------
/src/cu29/tasks/broadcast.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::inference::InferenceResult,
3 | cu29::msgs::{EncodedImage, PromptResponseMsg},
4 | pipeline::SERVER_GLOBAL_STATE,
5 | };
6 | use cu29::prelude::*;
7 |
8 | pub struct ImageBroadcast;
9 |
10 | impl Freezable for ImageBroadcast {}
11 |
12 | impl<'cl> CuSinkTask<'cl> for ImageBroadcast {
13 | type Input = input_msg!('cl, EncodedImage);
14 |
15 | fn new(_config: Option<&ComponentConfig>) -> Result
16 | where
17 | Self: Sized,
18 | {
19 | Ok(Self {})
20 | }
21 |
22 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
23 | // broadcast the image
24 | if let Some(msg) = input.payload() {
25 | // send the camera image to the global state
26 | let _ = SERVER_GLOBAL_STATE.result_store.images[msg.channel_id as usize]
27 | .tx
28 | .send(msg.clone());
29 | }
30 | Ok(())
31 | }
32 | }
33 |
34 | pub struct InferenceBroadcast;
35 |
36 | impl Freezable for InferenceBroadcast {}
37 |
38 | impl<'cl> CuSinkTask<'cl> for InferenceBroadcast {
39 | type Input = input_msg!('cl, PromptResponseMsg);
40 |
41 | fn new(_config: Option<&ComponentConfig>) -> Result {
42 | Ok(Self {})
43 | }
44 |
45 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
46 | let Some(prompt) = input.payload() else {
47 | return Ok(());
48 | };
49 |
50 | let _ = SERVER_GLOBAL_STATE
51 | .result_store
52 | .inference
53 | .tx
54 | .send(InferenceResult {
55 | stamp_ns: prompt.stamp_ns,
56 | channel_id: prompt.channel_id,
57 | prompt: prompt.prompt.clone(),
58 | response: prompt.response.clone(),
59 | });
60 |
61 | Ok(())
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/src/cu29/tasks/image_encoder.rs:
--------------------------------------------------------------------------------
1 | use crate::cu29::msgs::{EncodedImage, ImageRgb8Msg};
2 | use cu29::prelude::*;
3 | use kornia_io::jpegturbo::JpegTurboEncoder;
4 |
5 | pub struct ImageEncoder {
6 | encoder: JpegTurboEncoder,
7 | }
8 |
9 | impl Freezable for ImageEncoder {}
10 |
11 | impl<'cl> CuTask<'cl> for ImageEncoder {
12 | type Input = input_msg!('cl, ImageRgb8Msg);
13 | type Output = output_msg!('cl, EncodedImage);
14 |
15 | fn new(_config: Option<&ComponentConfig>) -> Result
16 | where
17 | Self: Sized,
18 | {
19 | Ok(Self {
20 | encoder: JpegTurboEncoder::new()
21 | .map_err(|e| CuError::new_with_cause("Failed to create jpeg encoder", e))?,
22 | })
23 | }
24 |
25 | fn process(
26 | &mut self,
27 | _clock: &RobotClock,
28 | input: Self::Input,
29 | output: Self::Output,
30 | ) -> Result<(), CuError> {
31 | let Some(msg) = input.payload() else {
32 | return Ok(());
33 | };
34 |
35 | let encoded_image = self
36 | .encoder
37 | .encode_rgb8(&msg.image)
38 | .map_err(|e| CuError::new_with_cause("Failed to encode image", e))?;
39 |
40 | output.set_payload(EncodedImage {
41 | stamp_ns: msg.stamp_ns,
42 | channel_id: msg.channel_id,
43 | data: encoded_image,
44 | encoding: "jpeg".to_string(),
45 | });
46 |
47 | Ok(())
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/src/cu29/tasks/inference.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | cu29::msgs::{ImageRgb8Msg, PromptResponseMsg},
3 | pipeline::SERVER_GLOBAL_STATE,
4 | };
5 | use cu29::prelude::*;
6 | use kornia_infernum::{
7 | engine::{InfernumEngine, InfernumEngineRequest, InfernumEngineResult, InfernumEngineState},
8 | model::{InfernumModel, InfernumModelRequest, InfernumModelResponse},
9 | };
10 | use kornia_paligemma::{Paligemma, PaligemmaConfig, PaligemmaError};
11 |
12 | /// The default prompt to use if no prompt is provided
13 | // NOTE: check the original prompt instructions
14 | // https://ai.google.dev/gemma/docs/paligemma/prompt-system-instructions
15 | const DEFAULT_PROMPT: &str = "cap en\n";
16 |
17 | /// Task that runs inference on an image
18 | pub struct Inference {
19 | current_prompt: String,
20 | engine: InfernumEngine,
21 | }
22 |
23 | impl Freezable for Inference {}
24 |
25 | impl<'cl> CuTask<'cl> for Inference {
26 | type Input = input_msg!('cl, ImageRgb8Msg);
27 | type Output = output_msg!('cl, PromptResponseMsg);
28 |
29 | fn new(_config: Option<&ComponentConfig>) -> Result
30 | where
31 | Self: Sized,
32 | {
33 | let paligemma = Paligemma::new(PaligemmaConfig::default())
34 | .map_err(|e| CuError::new_with_cause("Failed to create Paligemma", e))?;
35 |
36 | let engine = InfernumEngine::new(PaligemmaModel(paligemma));
37 |
38 | Ok(Self {
39 | current_prompt: DEFAULT_PROMPT.to_string(),
40 | engine,
41 | })
42 | }
43 |
44 | fn process(
45 | &mut self,
46 | _clock: &RobotClock,
47 | input: Self::Input,
48 | output: Self::Output,
49 | ) -> Result<(), CuError> {
50 | // clear the output payload to avoid any previous payload to be forwarded
51 | output.clear_payload();
52 |
53 | // check first if we should update the prompt
54 | if let Ok(prompt) = SERVER_GLOBAL_STATE
55 | .result_store
56 | .inference_settings
57 | .rx
58 | .lock()
59 | .expect("Failed to lock inference settings")
60 | .try_recv()
61 | {
62 | log::debug!("Updating prompt to: {}", prompt);
63 | self.current_prompt = prompt;
64 | }
65 |
66 | // check if we are already processing an inference to not block the main thread
67 | if self.engine.state() == InfernumEngineState::Processing {
68 | return Ok(());
69 | }
70 |
71 | // check first if we have a response from the previous inference
72 | if let InfernumEngineResult::Success(response) = self.engine.try_poll_response() {
73 | log::debug!(
74 | "Received response from inference thread for channel: {} -- prompt: {} -- response: {}",
75 | response.id,
76 | response.prompt,
77 | response.response
78 | );
79 |
80 | output.set_payload(PromptResponseMsg {
81 | stamp_ns: response.duration.as_nanos() as u64,
82 | channel_id: response.id,
83 | prompt: response.prompt,
84 | response: response.response,
85 | });
86 | }
87 |
88 | // check if we have a new image and schedule the inference
89 | let Some(img) = input.payload() else {
90 | return Ok(());
91 | };
92 |
93 | // send the request to the thread to schedule the inference
94 | self.engine.schedule_inference(InfernumEngineRequest {
95 | id: img.channel_id,
96 | prompt: self.current_prompt.clone(),
97 | image: img.image.clone(),
98 | });
99 |
100 | Ok(())
101 | }
102 | }
103 |
104 | /// Model that uses Paligemma to run inference
105 | struct PaligemmaModel(Paligemma);
106 |
107 | impl InfernumModel for PaligemmaModel {
108 | type Error = PaligemmaError;
109 |
110 | fn run(&mut self, request: InfernumModelRequest) -> Result {
111 | let response = self
112 | .0
113 | .inference(&request.image, &request.prompt, 50, false)?;
114 |
115 | Ok(InfernumModelResponse { response })
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/src/cu29/tasks/mod.rs:
--------------------------------------------------------------------------------
// Sink tasks publishing frames / inference results to the server state.
mod broadcast;
pub use broadcast::*;

// JPEG encoding of captured frames.
mod image_encoder;
pub use image_encoder::*;

// Paligemma-based inference task (experimental).
mod inference;
pub use inference::*;

// On-demand rerun `.rrd` recorders (1-4 inputs).
mod recorder;
pub use recorder::*;

// RTSP / V4L2 camera sources.
mod video_capture;
pub use video_capture::*;

mod video_writer;
pub use video_writer::*;
--------------------------------------------------------------------------------
/src/cu29/tasks/recorder.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::recording::{RecordingCommand, RecordingResponse},
3 | cu29::msgs::EncodedImage,
4 | pipeline::SERVER_GLOBAL_STATE,
5 | };
6 | use cu29::prelude::*;
7 | use std::path::{Path, PathBuf};
8 |
/// Lifecycle of a rerun recording: either idle or streaming to an open `.rrd` file.
enum RecorderState {
    Stopped,
    Recording(rerun::RecordingStream),
}
13 |
14 | pub struct RecorderOne {
15 | state: RecorderState,
16 | path: PathBuf,
17 | }
18 |
19 | impl Freezable for RecorderOne {}
20 |
21 | impl<'cl> CuSinkTask<'cl> for RecorderOne {
22 | type Input = input_msg!('cl, EncodedImage);
23 |
24 | fn new(config: Option<&ComponentConfig>) -> Result
25 | where
26 | Self: Sized,
27 | {
28 | let config = config.expect("config is required");
29 | let path = config.get::("path").expect("path is required");
30 |
31 | Ok(Self {
32 | state: RecorderState::Stopped,
33 | path: PathBuf::from(path),
34 | })
35 | }
36 |
37 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
38 | // check if we should start or stop recording
39 | let maybe_command = SERVER_GLOBAL_STATE
40 | .result_store
41 | .recording
42 | .request
43 | .rx
44 | .lock()
45 | .expect("Failed to lock recording")
46 | .try_recv();
47 |
48 | match &mut self.state {
49 | RecorderState::Stopped => {
50 | if let Ok(RecordingCommand::Stop) = maybe_command {
51 | SERVER_GLOBAL_STATE
52 | .result_store
53 | .recording
54 | .reply
55 | .tx
56 | .send(RecordingResponse::Error {
57 | error: "Could not stop recording. Recorder is not recording"
58 | .to_string(),
59 | })
60 | .map_err(|e| CuError::new_with_cause("Failed to send reply", e))?;
61 | }
62 | if let Ok(RecordingCommand::Start) = maybe_command {
63 | let (rec, rec_path) = create_recording_stream(&self.path)?;
64 | self.state = RecorderState::Recording(rec);
65 | log::info!("Started recording to {}", rec_path.display());
66 |
67 | SERVER_GLOBAL_STATE
68 | .result_store
69 | .recording
70 | .reply
71 | .tx
72 | .send(RecordingResponse::Success)
73 | .map_err(|e| CuError::new_with_cause("Failed to send reply", e))?;
74 | }
75 | }
76 | RecorderState::Recording(rec) => {
77 | if let Ok(RecordingCommand::Start) = maybe_command {
78 | SERVER_GLOBAL_STATE
79 | .result_store
80 | .recording
81 | .reply
82 | .tx
83 | .send(RecordingResponse::Error {
84 | error: "Could not start recording. Recorder is already recording"
85 | .to_string(),
86 | })
87 | .map_err(|e| CuError::new_with_cause("Failed to send reply", e))?;
88 | }
89 | if let Ok(RecordingCommand::Stop) = maybe_command {
90 | rec.flush_blocking();
91 | self.state = RecorderState::Stopped;
92 | log::info!("Stopped recording");
93 |
94 | SERVER_GLOBAL_STATE
95 | .result_store
96 | .recording
97 | .reply
98 | .tx
99 | .send(RecordingResponse::Success)
100 | .map_err(|e| CuError::new_with_cause("Failed to send reply", e))?;
101 | return Ok(());
102 | }
103 |
104 | if let Some(image) = input.payload() {
105 | log_image_encoded(rec, &format!("/cam/{}", image.channel_id), image)?;
106 | }
107 | }
108 | }
109 |
110 | Ok(())
111 | }
112 | }
113 |
114 | pub struct RecorderTwo {
115 | state: RecorderState,
116 | path: PathBuf,
117 | }
118 |
119 | impl Freezable for RecorderTwo {}
120 |
121 | impl<'cl> CuSinkTask<'cl> for RecorderTwo {
122 | type Input = input_msg!('cl, EncodedImage, EncodedImage);
123 |
124 | fn new(config: Option<&ComponentConfig>) -> Result {
125 | let config = config.expect("config is required");
126 | let path = config.get::("path").expect("path is required");
127 |
128 | Ok(Self {
129 | state: RecorderState::Stopped,
130 | path: PathBuf::from(path),
131 | })
132 | }
133 |
134 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
135 | let maybe_command = SERVER_GLOBAL_STATE
136 | .result_store
137 | .recording
138 | .request
139 | .rx
140 | .lock()
141 | .expect("Failed to lock recording")
142 | .try_recv();
143 |
144 | match &mut self.state {
145 | RecorderState::Stopped => {
146 | if let Ok(RecordingCommand::Start) = maybe_command {
147 | let (rec, rec_path) = create_recording_stream(&self.path)?;
148 | self.state = RecorderState::Recording(rec);
149 | log::info!("Started recording to {}", rec_path.display());
150 | }
151 | }
152 | RecorderState::Recording(rec) => {
153 | if let Ok(RecordingCommand::Stop) = maybe_command {
154 | rec.flush_blocking();
155 | self.state = RecorderState::Stopped;
156 | log::info!("Stopped recording");
157 | return Ok(());
158 | } else {
159 | let (msg1, msg2) = input;
160 | if let (Some(image1), Some(image2)) = (msg1.payload(), msg2.payload()) {
161 | log_image_encoded(rec, &format!("/cam/{}", image1.channel_id), image1)?;
162 | log_image_encoded(rec, &format!("/cam/{}", image2.channel_id), image2)?;
163 | }
164 | }
165 | }
166 | }
167 |
168 | Ok(())
169 | }
170 | }
171 |
172 | pub struct RecorderThree {
173 | state: RecorderState,
174 | path: PathBuf,
175 | }
176 |
177 | impl Freezable for RecorderThree {}
178 |
179 | impl<'cl> CuSinkTask<'cl> for RecorderThree {
180 | type Input = input_msg!('cl, EncodedImage, EncodedImage, EncodedImage);
181 |
182 | fn new(config: Option<&ComponentConfig>) -> Result {
183 | let config = config.expect("config is required");
184 | let path = config.get::("path").expect("path is required");
185 |
186 | Ok(Self {
187 | state: RecorderState::Stopped,
188 | path: PathBuf::from(path),
189 | })
190 | }
191 |
192 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
193 | let maybe_command = SERVER_GLOBAL_STATE
194 | .result_store
195 | .recording
196 | .request
197 | .rx
198 | .lock()
199 | .expect("Failed to lock recording")
200 | .try_recv();
201 |
202 | match &mut self.state {
203 | RecorderState::Stopped => {
204 | if let Ok(RecordingCommand::Start) = maybe_command {
205 | let (rec, rec_path) = create_recording_stream(&self.path)?;
206 | self.state = RecorderState::Recording(rec);
207 | log::info!("Started recording to {}", rec_path.display());
208 | }
209 | }
210 | RecorderState::Recording(rec) => {
211 | if let Ok(RecordingCommand::Stop) = maybe_command {
212 | rec.flush_blocking();
213 | self.state = RecorderState::Stopped;
214 | log::info!("Stopped recording");
215 | return Ok(());
216 | } else {
217 | let (msg1, msg2, msg3) = input;
218 | if let (Some(image1), Some(image2), Some(image3)) =
219 | (msg1.payload(), msg2.payload(), msg3.payload())
220 | {
221 | log_image_encoded(rec, &format!("/cam/{}", image1.channel_id), image1)?;
222 | log_image_encoded(rec, &format!("/cam/{}", image2.channel_id), image2)?;
223 | log_image_encoded(rec, &format!("/cam/{}", image3.channel_id), image3)?;
224 | }
225 | }
226 | }
227 | }
228 |
229 | Ok(())
230 | }
231 | }
232 |
233 | pub struct RecorderFour {
234 | state: RecorderState,
235 | path: PathBuf,
236 | }
237 |
238 | impl Freezable for RecorderFour {}
239 |
240 | impl<'cl> CuSinkTask<'cl> for RecorderFour {
241 | type Input = input_msg!('cl, EncodedImage, EncodedImage, EncodedImage, EncodedImage);
242 |
243 | fn new(config: Option<&ComponentConfig>) -> Result {
244 | let config = config.expect("config is required");
245 | let path = config.get::("path").expect("path is required");
246 |
247 | Ok(Self {
248 | state: RecorderState::Stopped,
249 | path: PathBuf::from(path),
250 | })
251 | }
252 |
253 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> Result<(), CuError> {
254 | let maybe_command = SERVER_GLOBAL_STATE
255 | .result_store
256 | .recording
257 | .request
258 | .rx
259 | .lock()
260 | .expect("Failed to lock recording")
261 | .try_recv();
262 |
263 | match &mut self.state {
264 | RecorderState::Stopped => {
265 | if let Ok(RecordingCommand::Start) = maybe_command {
266 | let (rec, rec_path) = create_recording_stream(&self.path)?;
267 | self.state = RecorderState::Recording(rec);
268 | log::info!("Started recording to {}", rec_path.display());
269 | }
270 | }
271 | RecorderState::Recording(rec) => {
272 | if let Ok(RecordingCommand::Stop) = maybe_command {
273 | rec.flush_blocking();
274 | self.state = RecorderState::Stopped;
275 | log::info!("Stopped recording");
276 | return Ok(());
277 | } else {
278 | let (msg1, msg2, msg3, msg4) = input;
279 | if let (Some(image1), Some(image2), Some(image3), Some(image4)) = (
280 | msg1.payload(),
281 | msg2.payload(),
282 | msg3.payload(),
283 | msg4.payload(),
284 | ) {
285 | log_image_encoded(rec, &format!("/cam/{}", image1.channel_id), image1)?;
286 | log_image_encoded(rec, &format!("/cam/{}", image2.channel_id), image2)?;
287 | log_image_encoded(rec, &format!("/cam/{}", image3.channel_id), image3)?;
288 | log_image_encoded(rec, &format!("/cam/{}", image4.channel_id), image4)?;
289 | }
290 | }
291 | }
292 | }
293 |
294 | Ok(())
295 | }
296 | }
297 |
298 | fn create_recording_stream(path: &Path) -> Result<(rerun::RecordingStream, PathBuf), CuError> {
299 | let rec_path = {
300 | let timestamp = std::time::SystemTime::now()
301 | .duration_since(std::time::UNIX_EPOCH)
302 | .unwrap()
303 | .as_secs();
304 | path.join(format!("{}.rrd", timestamp))
305 | };
306 |
307 | let rec = rerun::RecordingStreamBuilder::new("rerun_logger")
308 | .save(&rec_path)
309 | .map_err(|e| CuError::new_with_cause("Failed to spawn rerun stream", e))?;
310 |
311 | Ok((rec, rec_path))
312 | }
313 |
314 | fn log_image_encoded(
315 | rec: &rerun::RecordingStream,
316 | name: &str,
317 | msg: &EncodedImage,
318 | ) -> Result<(), CuError> {
319 | rec.log(
320 | name,
321 | &rerun::EncodedImage::from_file_contents(msg.data.clone()),
322 | )
323 | .map_err(|e| CuError::new_with_cause("Failed to log image", e))?;
324 | Ok(())
325 | }
326 |
--------------------------------------------------------------------------------
/src/cu29/tasks/video_capture.rs:
--------------------------------------------------------------------------------
1 | use crate::cu29::msgs::ImageRgb8Msg;
2 | use cu29::prelude::*;
3 | use kornia_io::stream::{CameraCapture, RTSPCameraConfig, V4L2CameraConfig};
4 |
5 | pub struct VideoCapture {
6 | capture: CameraCapture,
7 | // TODO: remove once copper support access to the node id
8 | channel_id: u8,
9 | }
10 |
11 | impl Freezable for VideoCapture {}
12 |
13 | impl<'cl> CuSrcTask<'cl> for VideoCapture {
14 | type Output = output_msg!('cl, ImageRgb8Msg);
15 |
16 | fn new(config: Option<&ComponentConfig>) -> Result
17 | where
18 | Self: Sized,
19 | {
20 | let Some(config) = config else {
21 | return Err(CuError::from("No config provided"));
22 | };
23 |
24 | let source_type = config
25 | .get::("source_type")
26 | .ok_or(CuError::from("No source type provided"))?;
27 |
28 | let source_uri = config
29 | .get::("source_uri")
30 | .ok_or(CuError::from("No source uri provided"))?;
31 |
32 | let channel_id = config
33 | .get::("channel_id")
34 | .ok_or(CuError::from("No channel id provided"))?;
35 |
36 | let capture = match source_type.as_str() {
37 | "rtsp" => RTSPCameraConfig::new()
38 | .with_url(&source_uri)
39 | .build()
40 | .map_err(|e| CuError::new_with_cause("Failed to build camera", e))?,
41 | "v4l2" => {
42 | // parse the needed parameters from the config
43 | let image_cols = config
44 | .get::("image_cols")
45 | .ok_or(CuError::from("No image cols provided"))?;
46 | let image_rows = config
47 | .get::("image_rows")
48 | .ok_or(CuError::from("No image rows provided"))?;
49 | let source_fps = config
50 | .get::("source_fps")
51 | .ok_or(CuError::from("No source fps provided"))?;
52 |
53 | V4L2CameraConfig::new()
54 | .with_device(&source_uri)
55 | .with_fps(source_fps)
56 | .with_size([image_cols as usize, image_rows as usize].into())
57 | .build()
58 | .map_err(|e| CuError::new_with_cause("Failed to build camera", e))?
59 | }
60 | _ => return Err(CuError::from("Invalid source type")),
61 | };
62 |
63 | Ok(Self {
64 | capture,
65 | channel_id,
66 | })
67 | }
68 |
69 | fn start(&mut self, _clock: &RobotClock) -> Result<(), CuError> {
70 | self.capture
71 | .start()
72 | .map_err(|e| CuError::new_with_cause("Failed to start camera", e))
73 | }
74 |
75 | fn stop(&mut self, _clock: &RobotClock) -> Result<(), CuError> {
76 | self.capture
77 | .close()
78 | .map_err(|e| CuError::new_with_cause("Failed to stop camera", e))
79 | }
80 |
81 | fn process(&mut self, clock: &RobotClock, output: Self::Output) -> Result<(), CuError> {
82 | let Some(img) = self
83 | .capture
84 | .grab()
85 | .map_err(|e| CuError::new_with_cause("Failed to grab image", e))?
86 | else {
87 | return Ok(());
88 | };
89 |
90 | output.set_payload(ImageRgb8Msg {
91 | stamp_ns: clock.now().as_nanos(),
92 | channel_id: self.channel_id,
93 | image: img,
94 | });
95 |
96 | Ok(())
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/src/cu29/tasks/video_writer.rs:
--------------------------------------------------------------------------------
1 | use crate::cu29::msgs::ImageRgb8Msg;
2 | use cu29::prelude::*;
3 | use kornia_io::stream::video::{ImageFormat, VideoCodec, VideoWriter as KorniaVideoWriter};
4 |
5 | // default values for the video writer
6 | const DEFAULT_RES_ROWS: u32 = 480;
7 | const DEFAULT_RES_COLS: u32 = 640;
8 | const DEFAULT_FPS: u32 = 30;
9 |
10 | pub struct VideoWriter {
11 | writer: Option,
12 | }
13 |
14 | impl Freezable for VideoWriter {}
15 |
16 | impl<'cl> CuSinkTask<'cl> for VideoWriter {
17 | type Input = input_msg!('cl, ImageRgb8Msg);
18 |
19 | fn new(config: Option<&ComponentConfig>) -> CuResult
20 | where
21 | Self: Sized,
22 | {
23 | // generate path file based on the current timestamp
24 | let timestamp = std::time::SystemTime::now()
25 | .duration_since(std::time::UNIX_EPOCH)
26 | .unwrap()
27 | .as_secs();
28 | let path = format!("video_{}.mp4", timestamp);
29 |
30 | let (res_rows, res_cols, fps) = if let Some(config) = config {
31 | let res_cols = config.get::("res_cols").unwrap_or(DEFAULT_RES_COLS);
32 | let res_rows = config.get::("res_rows").unwrap_or(DEFAULT_RES_ROWS);
33 | let fps = config.get::("fps").unwrap_or(DEFAULT_FPS);
34 | (res_rows, res_cols, fps)
35 | } else {
36 | (DEFAULT_RES_ROWS, DEFAULT_RES_COLS, DEFAULT_FPS)
37 | };
38 |
39 | let writer = KorniaVideoWriter::new(
40 | path,
41 | VideoCodec::H264,
42 | ImageFormat::Rgb8,
43 | fps as i32,
44 | [res_cols as usize, res_rows as usize].into(),
45 | )
46 | .map_err(|e| CuError::new_with_cause("Failed to create video writer", e))?;
47 |
48 | Ok(Self {
49 | writer: Some(writer),
50 | })
51 | }
52 |
53 | fn start(&mut self, _clock: &RobotClock) -> CuResult<()> {
54 | let Some(writer) = self.writer.as_mut() else {
55 | return Ok(());
56 | };
57 |
58 | writer
59 | .start()
60 | .map_err(|e| CuError::new_with_cause("Failed to start video writer", e))?;
61 |
62 | Ok(())
63 | }
64 |
65 | fn stop(&mut self, _clock: &RobotClock) -> CuResult<()> {
66 | let Some(writer) = self.writer.as_mut() else {
67 | return Ok(());
68 | };
69 |
70 | writer
71 | .close()
72 | .map_err(|e| CuError::new_with_cause("Failed to close video writer", e))?;
73 |
74 | self.writer = None; // drop the writer
75 |
76 | Ok(())
77 | }
78 |
79 | fn process(&mut self, _clock: &RobotClock, input: Self::Input) -> CuResult<()> {
80 | let Some(msg) = input.payload() else {
81 | return Ok(());
82 | };
83 |
84 | let Some(writer) = self.writer.as_mut() else {
85 | return Ok(());
86 | };
87 |
88 | writer
89 | .write(&msg.image)
90 | .map_err(|e| CuError::new_with_cause("Failed to write image", e))?;
91 |
92 | Ok(())
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
// Library root: HTTP API layer, copper (cu29) pipelines, and the pipeline
// registry shared between them.
pub mod api;
pub mod cu29;
pub mod pipeline;
4 |
--------------------------------------------------------------------------------
/src/pipeline.rs:
--------------------------------------------------------------------------------
1 | use crate::{
2 | api::models::{
3 | inference::InferenceResult,
4 | recording::{RecordingCommand, RecordingResponse},
5 | },
6 | cu29::msgs::EncodedImage,
7 | };
8 | use once_cell::sync::Lazy;
9 | use serde::{Deserialize, Serialize};
10 | use std::{
11 | collections::HashMap,
12 | sync::atomic::AtomicBool,
13 | sync::{Arc, Mutex},
14 | };
15 |
16 | pub static SERVER_GLOBAL_STATE: Lazy = Lazy::new(ServerGlobalState::default);
17 |
18 | pub type PipelineResult = Result<(), Box>;
19 |
20 | /// Global store of all pipelines managed by the server
21 | #[derive(Clone, Default)]
22 | pub struct PipelineStore(pub Arc>>);
23 |
24 | #[derive(Clone)]
25 | pub struct BroadcastSender {
26 | pub tx: Arc>,
27 | }
28 |
29 | impl BroadcastSender {
30 | pub fn new() -> Self {
31 | let (tx, _) = tokio::sync::broadcast::channel(5);
32 | Self { tx: Arc::new(tx) }
33 | }
34 | }
35 |
36 | impl Default for BroadcastSender {
37 | fn default() -> Self {
38 | Self::new()
39 | }
40 | }
41 |
/// A sender and receiver for a single message
///
/// The receiver is shared behind a mutex so the pair can be cloned and
/// handed to multiple owners, while only one at a time may receive.
// NOTE(review): the generic parameter was garbled in the extracted source;
// restored from the differently-typed uses in `RequestReply`/`ResultStore` —
// confirm against upstream.
#[derive(Clone)]
pub struct SenderReceiver<T> {
    pub rx: Arc<Mutex<std::sync::mpsc::Receiver<T>>>,
    pub tx: std::sync::mpsc::Sender<T>,
}

impl<T> Default for SenderReceiver<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> SenderReceiver<T> {
    /// Create a fresh mpsc channel pair.
    pub fn new() -> Self {
        let (tx, rx) = std::sync::mpsc::channel();
        Self {
            rx: Arc::new(Mutex::new(rx)),
            tx,
        }
    }
}
64 |
65 | #[derive(Clone)]
66 | pub struct InferenceSenderReceiver {
67 | pub query: SenderReceiver,
68 | pub result: SenderReceiver,
69 | }
70 |
71 | impl Default for InferenceSenderReceiver {
72 | fn default() -> Self {
73 | Self {
74 | query: SenderReceiver::new(),
75 | result: SenderReceiver::new(),
76 | }
77 | }
78 | }
79 |
80 | /// A sender and receiver for a single message
81 | #[derive(Clone)]
82 | pub struct RequestReply {
83 | pub request: SenderReceiver,
84 | pub reply: SenderReceiver,
85 | }
86 |
87 | impl Default for RequestReply {
88 | fn default() -> Self {
89 | Self {
90 | request: SenderReceiver::new(),
91 | reply: SenderReceiver::new(),
92 | }
93 | }
94 | }
95 |
96 | /// Global store of all results managed by the server
97 | #[derive(Clone)]
98 | pub struct ResultStore {
99 | // NOTE: support a fixed number of streams
100 | pub inference: BroadcastSender,
101 | pub inference_settings: SenderReceiver,
102 | // NOTE: support a fixed number of streams
103 | pub images: [BroadcastSender; 8],
104 | pub recording: RequestReply,
105 | }
106 |
107 | impl Default for ResultStore {
108 | fn default() -> Self {
109 | Self {
110 | inference: BroadcastSender::new(),
111 | inference_settings: SenderReceiver::new(),
112 | images: std::array::from_fn(|_| BroadcastSender::new()),
113 | recording: RequestReply::default(),
114 | }
115 | }
116 | }
117 |
/// Global state of the server
#[derive(Clone, Default)]
pub struct ServerGlobalState {
    // registry of running pipelines (shared, mutex-guarded)
    pub pipeline_store: PipelineStore,
    // channels carrying results produced by the pipelines
    pub result_store: ResultStore,
}
124 |
125 | impl PipelineStore {
126 | /// Register a pipeline in the store and start it
127 | pub fn register_pipeline(
128 | &mut self,
129 | name: &str,
130 | handle: std::thread::JoinHandle,
131 | stop_signal: Arc,
132 | ) {
133 | self.0.lock().unwrap().insert(
134 | name.into(),
135 | PipelineHandle {
136 | id: name.into(),
137 | handle,
138 | status: PipelineStatus::Running,
139 | stop_signal,
140 | },
141 | );
142 | }
143 |
144 | /// Unregister a pipeline from the store and stop it
145 | pub fn unregister_pipeline(&self, name: &str) -> bool {
146 | let mut map = self.0.lock().unwrap();
147 | map.remove(name)
148 | .map(|pipeline| {
149 | pipeline
150 | .stop_signal
151 | .store(true, std::sync::atomic::Ordering::Relaxed);
152 |
153 | pipeline
154 | .handle
155 | .join()
156 | .map_err(|_| log::error!("Failed to join pipeline {}", name))
157 | .is_ok()
158 | })
159 | .unwrap_or(false)
160 | }
161 |
162 | pub fn list_pipelines(&self) -> Vec {
163 | self.0
164 | .lock()
165 | .unwrap()
166 | .values()
167 | .map(|pipeline| PipelineInfo {
168 | id: pipeline.id.clone(),
169 | status: pipeline.status.clone(),
170 | })
171 | .collect()
172 | }
173 |
174 | pub fn is_cameras_pipeline_running(&self) -> bool {
175 | self.0
176 | .lock()
177 | .unwrap()
178 | .values()
179 | .any(|pipeline| pipeline.id == "cameras" && pipeline.status == PipelineStatus::Running)
180 | }
181 |
182 | pub fn is_inference_pipeline_running(&self) -> bool {
183 | self.0.lock().unwrap().values().any(|pipeline| {
184 | pipeline.id == "inference" && pipeline.status == PipelineStatus::Running
185 | })
186 | }
187 | }
188 |
/// The current status of a pipeline
///
/// Serde-serializable; `PartialEq` lets callers test against `Running`.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub enum PipelineStatus {
    /// The pipeline is running in the background
    Running,
    /// The pipeline is stopped
    Stopped,
    /// The pipeline has encountered an error
    Error(String),
}
199 |
200 | /// An object managing a pipeline
201 | #[derive(Debug)]
202 | pub struct PipelineHandle {
203 | // a unique identifier for the pipeline
204 | // TODO: explore using a UUID
205 | pub id: String,
206 | /// the task that the pipeline is running
207 | /// TODO: create a custom error type
208 | pub handle: std::thread::JoinHandle,
209 | // the status of the pipeline
210 | pub status: PipelineStatus,
211 | // stop signal
212 | pub stop_signal: Arc,
213 | }
214 |
/// Serializable pipeline summary returned by `PipelineStore::list_pipelines`.
#[derive(Debug, Serialize)]
pub struct PipelineInfo {
    // the id of the pipeline
    pub id: String,
    // the status of the pipeline
    pub status: PipelineStatus,
}
222 |
223 | /// A dummy pipeline that runs indefinitely and prints a message every second
224 | pub fn spawn_bubbaloop_thread(
225 | stop_signal: Arc,
226 | ) -> std::thread::JoinHandle {
227 | let signs = ["|", "/", "-", "\\", "|", "/", "-", "\\"];
228 | let emojis = ["😊", "🚀", "🦀", "🎉", "✨", "🎸", "🌟", "🍕", "🎮", "🌈"];
229 | std::thread::spawn(move || {
230 | let mut counter = 0;
231 | while !stop_signal.load(std::sync::atomic::Ordering::Relaxed) {
232 | log::debug!(
233 | "{} Hello !! This is a Bubbaloop !!! {}",
234 | signs[counter % signs.len()],
235 | emojis[counter % emojis.len()]
236 | );
237 | std::thread::sleep(std::time::Duration::from_secs(1));
238 | counter += 1;
239 | }
240 | log::debug!("Bubbaloop pipeline stopped after {} iterations", counter);
241 | Ok(())
242 | })
243 | }
244 |
--------------------------------------------------------------------------------