├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── getting-started ├── README.md ├── data-flow.yaml └── nodes │ ├── python │ ├── file-writer │ │ ├── file-writer.py │ │ └── file-writer.yaml │ └── greetings-maker │ │ ├── greetings-maker.py │ │ └── greetings-maker.yaml │ └── rust │ ├── Cargo.toml │ ├── file-writer │ ├── Cargo.toml │ ├── file-writer.yaml │ └── src │ │ └── lib.rs │ └── greetings-maker │ ├── Cargo.toml │ ├── greetings-maker.yaml │ └── src │ └── lib.rs ├── montblanc ├── Cargo.toml ├── README.md ├── arequipa │ ├── Cargo.toml │ ├── arequipa.yml │ └── src │ │ └── lib.rs ├── barcelona │ ├── Cargo.toml │ ├── barcelona.yml │ └── src │ │ └── lib.rs ├── cordoba │ ├── Cargo.toml │ ├── cordoba.yml │ └── src │ │ └── lib.rs ├── datatypes │ ├── Cargo.toml │ ├── build.rs │ └── src │ │ ├── data_types.proto │ │ └── lib.rs ├── delhi │ ├── Cargo.toml │ ├── delhi.yml │ └── src │ │ └── lib.rs ├── freeport │ ├── Cargo.toml │ ├── freeport.yml │ └── src │ │ └── lib.rs ├── geneva │ ├── Cargo.toml │ ├── geneva.yml │ └── src │ │ └── lib.rs ├── georgetown │ ├── Cargo.toml │ ├── georgetown.yml │ └── src │ │ └── lib.rs ├── hamburg │ ├── Cargo.toml │ ├── hamburg.yml │ └── src │ │ └── lib.rs ├── hebron │ ├── Cargo.toml │ ├── hebron.yml │ └── src │ │ └── lib.rs ├── kingston │ ├── Cargo.toml │ ├── kingston.yml │ └── src │ │ └── lib.rs ├── lyon │ ├── Cargo.toml │ ├── lyon.yml │ └── src │ │ └── lib.rs ├── madelin │ ├── Cargo.toml │ ├── madelin.yml │ └── src │ │ └── lib.rs ├── mandalay │ ├── Cargo.toml │ ├── mandalay.yml │ └── src │ │ └── lib.rs ├── monaco │ ├── Cargo.toml │ ├── monaco.yml │ └── src │ │ └── lib.rs ├── mont_blanc.png ├── montblanc.yml ├── osaka │ ├── Cargo.toml │ ├── osaka.yml │ └── src │ │ └── lib.rs ├── ponce │ ├── Cargo.toml │ ├── ponce.yml │ └── src │ │ └── lib.rs ├── portsmouth │ ├── Cargo.toml │ ├── portsmouth.yml │ └── src │ │ └── lib.rs ├── rotterdam │ ├── Cargo.toml │ ├── rotterdam.yml │ └── src │ │ └── lib.rs ├── taipei │ ├── Cargo.toml │ ├── src │ │ └── lib.rs │ └── taipei.yml └── tripoli │ ├── Cargo.toml │ ├── src │ └── lib.rs │ └── tripoli.yml ├── period-miss-detector ├── README.md ├── data-flow.yaml └── nodes │ ├── python │ ├── file-writer │ │ ├── file-writer.py │ │ └── file-writer.yaml │ └── period-miss-detector │ │ ├── period-miss-detector.py │ │ └── period-miss-detector.yaml │ └── rust │ ├── Cargo.toml │ ├── file-writer │ ├── Cargo.toml │ ├── file-writer.yaml │ └── src │ │ └── lib.rs │ └── period-miss-detector │ ├── Cargo.toml │ ├── period-miss-detector.yaml │ └── src │ └── lib.rs ├── rust-toolchain └── transcoding ├── README.md ├── dataflow.yml ├── message.proto ├── message_pb2.py ├── pub-proto.py ├── requirements.txt ├── sub-cdr.py ├── transcoder.py ├── transcoder.yml └── zenoh-config.json /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2017, 2022 ZettaScale Technology. 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale zenoh team, 13 | # 14 | 15 | name: CI 16 | 17 | on: 18 | push: 19 | branches: [ '**' ] 20 | pull_request: 21 | branches: [ '**' ] 22 | schedule: 23 | - cron: '0 6 * * 1-5' 24 | 25 | jobs: 26 | build: 27 | name: Build on ${{ matrix.os }} 28 | runs-on: [self-hosted, "${{ matrix.os }}"] 29 | strategy: 30 | fail-fast: false 31 | matrix: 32 | os: [ubuntu-22.04] 33 | 34 | steps: 35 | - uses: actions/checkout@v2 36 | 37 | - name: Install Rust Toolchain 38 | uses: actions-rs/toolchain@v1 39 | with: 40 | components: rustfmt, clippy 41 | 42 | - name: Install Protoc 43 | uses: arduino/setup-protoc@v1 44 | 45 | - name: Code format check [getting started] 46 | uses: actions-rs/cargo@v1 47 | with: 48 | command: fmt 49 | args: --check --manifest-path ./getting-started/nodes/rust/Cargo.toml --all 50 | 51 | - name: Code format check [period miss detector] 52 | uses: actions-rs/cargo@v1 53 | with: 54 | command: fmt 55 | args: --check --manifest-path ./period-miss-detector/nodes/rust/Cargo.toml --all 56 | 57 | - name: Code format check [montblanc] 58 | uses: actions-rs/cargo@v1 59 | with: 60 | command: fmt 61 | args: --check --manifest-path ./montblanc/Cargo.toml --all 62 | 63 | - name: Clippy [getting-started] 64 | uses: actions-rs/cargo@v1 65 | with: 66 | command: clippy 67 | args: --manifest-path ./getting-started/nodes/rust/Cargo.toml --all -- -D warnings 68 | 69 | - name: Clippy [period-miss-detector] 70 | uses: actions-rs/cargo@v1 71 | with: 72 | command: clippy 73 | args: --manifest-path ./period-miss-detector/nodes/rust/Cargo.toml --all -- -D warnings 74 | 75 | - name: Clippy [montblanc] 76 | uses: actions-rs/cargo@v1 77 | with: 78 | command: clippy 79 | args: --manifest-path ./montblanc/Cargo.toml --all -- -D warnings 80 | 81 | - name: Build [getting-started] 82 | uses: actions-rs/cargo@v1 83 | with: 84 | command: build 85 | args: --manifest-path ./getting-started/nodes/rust/Cargo.toml --verbose --all-targets 86 | 87 | - name: Build [period-miss-detector] 88 | uses: actions-rs/cargo@v1 89 | with: 90 | command: build 91 | args: --manifest-path ./period-miss-detector/nodes/rust/Cargo.toml --verbose --all-targets 92 | 93 | - name: Build [montblanc] 94 | uses: actions-rs/cargo@v1 95 | with: 96 | command: build 97 | args: --manifest-path ./montblanc/Cargo.toml --verbose --all-targets 98 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | montblanc/target/ 3 | 4 | target/ 5 | Cargo.lock -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | apache-2.0 2 | epl-2.0 3 | 4 | 5 | Apache License 6 | Version 2.0, January 2004 7 | http://www.apache.org/licenses/ 8 | 9 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 10 | 11 | 1. Definitions. 12 | 13 | "License" shall mean the terms and conditions for use, reproduction, 14 | and distribution as defined by Sections 1 through 9 of this document. 15 | 16 | "Licensor" shall mean the copyright owner or entity authorized by 17 | the copyright owner that is granting the License. 18 | 19 | "Legal Entity" shall mean the union of the acting entity and all 20 | other entities that control, are controlled by, or are under common 21 | control with that entity. 
For the purposes of this definition, 22 | "control" means (i) the power, direct or indirect, to cause the 23 | direction or management of such entity, whether by contract or 24 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 25 | outstanding shares, or (iii) beneficial ownership of such entity. 26 | 27 | "You" (or "Your") shall mean an individual or Legal Entity 28 | exercising permissions granted by this License. 29 | 30 | "Source" form shall mean the preferred form for making modifications, 31 | including but not limited to software source code, documentation 32 | source, and configuration files. 33 | 34 | "Object" form shall mean any form resulting from mechanical 35 | transformation or translation of a Source form, including but 36 | not limited to compiled object code, generated documentation, 37 | and conversions to other media types. 38 | 39 | "Work" shall mean the work of authorship, whether in Source or 40 | Object form, made available under the License, as indicated by a 41 | copyright notice that is included in or attached to the work 42 | (an example is provided in the Appendix below). 43 | 44 | "Derivative Works" shall mean any work, whether in Source or Object 45 | form, that is based on (or derived from) the Work and for which the 46 | editorial revisions, annotations, elaborations, or other modifications 47 | represent, as a whole, an original work of authorship. For the purposes 48 | of this License, Derivative Works shall not include works that remain 49 | separable from, or merely link (or bind by name) to the interfaces of, 50 | the Work and Derivative Works thereof. 51 | 52 | "Contribution" shall mean any work of authorship, including 53 | the original version of the Work and any modifications or additions 54 | to that Work or Derivative Works thereof, that is intentionally 55 | submitted to Licensor for inclusion in the Work by the copyright owner 56 | or by an individual or Legal Entity authorized to submit on behalf of 57 | the copyright owner. For the purposes of this definition, "submitted" 58 | means any form of electronic, verbal, or written communication sent 59 | to the Licensor or its representatives, including but not limited to 60 | communication on electronic mailing lists, source code control systems, 61 | and issue tracking systems that are managed by, or on behalf of, the 62 | Licensor for the purpose of discussing and improving the Work, but 63 | excluding communication that is conspicuously marked or otherwise 64 | designated in writing by the copyright owner as "Not a Contribution." 65 | 66 | "Contributor" shall mean Licensor and any individual or Legal Entity 67 | on behalf of whom a Contribution has been received by Licensor and 68 | subsequently incorporated within the Work. 69 | 70 | 2. Grant of Copyright License. Subject to the terms and conditions of 71 | this License, each Contributor hereby grants to You a perpetual, 72 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 73 | copyright license to reproduce, prepare Derivative Works of, 74 | publicly display, publicly perform, sublicense, and distribute the 75 | Work and such Derivative Works in Source or Object form. 76 | 77 | 3. Grant of Patent License. 
Subject to the terms and conditions of 78 | this License, each Contributor hereby grants to You a perpetual, 79 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 80 | (except as stated in this section) patent license to make, have made, 81 | use, offer to sell, sell, import, and otherwise transfer the Work, 82 | where such license applies only to those patent claims licensable 83 | by such Contributor that are necessarily infringed by their 84 | Contribution(s) alone or by combination of their Contribution(s) 85 | with the Work to which such Contribution(s) was submitted. If You 86 | institute patent litigation against any entity (including a 87 | cross-claim or counterclaim in a lawsuit) alleging that the Work 88 | or a Contribution incorporated within the Work constitutes direct 89 | or contributory patent infringement, then any patent licenses 90 | granted to You under this License for that Work shall terminate 91 | as of the date such litigation is filed. 92 | 93 | 4. Redistribution. You may reproduce and distribute copies of the 94 | Work or Derivative Works thereof in any medium, with or without 95 | modifications, and in Source or Object form, provided that You 96 | meet the following conditions: 97 | 98 | (a) You must give any other recipients of the Work or 99 | Derivative Works a copy of this License; and 100 | 101 | (b) You must cause any modified files to carry prominent notices 102 | stating that You changed the files; and 103 | 104 | (c) You must retain, in the Source form of any Derivative Works 105 | that You distribute, all copyright, patent, trademark, and 106 | attribution notices from the Source form of the Work, 107 | excluding those notices that do not pertain to any part of 108 | the Derivative Works; and 109 | 110 | (d) If the Work includes a "NOTICE" text file as part of its 111 | distribution, then any Derivative Works that You distribute must 112 | include a readable copy of the attribution notices contained 113 | within such NOTICE file, excluding those notices that do not 114 | pertain to any part of the Derivative Works, in at least one 115 | of the following places: within a NOTICE text file distributed 116 | as part of the Derivative Works; within the Source form or 117 | documentation, if provided along with the Derivative Works; or, 118 | within a display generated by the Derivative Works, if and 119 | wherever such third-party notices normally appear. The contents 120 | of the NOTICE file are for informational purposes only and 121 | do not modify the License. You may add Your own attribution 122 | notices within Derivative Works that You distribute, alongside 123 | or as an addendum to the NOTICE text from the Work, provided 124 | that such additional attribution notices cannot be construed 125 | as modifying the License. 126 | 127 | You may add Your own copyright statement to Your modifications and 128 | may provide additional or different license terms and conditions 129 | for use, reproduction, or distribution of Your modifications, or 130 | for any such Derivative Works as a whole, provided Your use, 131 | reproduction, and distribution of the Work otherwise complies with 132 | the conditions stated in this License. 133 | 134 | 5. Submission of Contributions. Unless You explicitly state otherwise, 135 | any Contribution intentionally submitted for inclusion in the Work 136 | by You to the Licensor shall be under the terms and conditions of 137 | this License, without any additional terms or conditions. 
138 | Notwithstanding the above, nothing herein shall supersede or modify 139 | the terms of any separate license agreement you may have executed 140 | with Licensor regarding such Contributions. 141 | 142 | 6. Trademarks. This License does not grant permission to use the trade 143 | names, trademarks, service marks, or product names of the Licensor, 144 | except as required for reasonable and customary use in describing the 145 | origin of the Work and reproducing the content of the NOTICE file. 146 | 147 | 7. Disclaimer of Warranty. Unless required by applicable law or 148 | agreed to in writing, Licensor provides the Work (and each 149 | Contributor provides its Contributions) on an "AS IS" BASIS, 150 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 151 | implied, including, without limitation, any warranties or conditions 152 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 153 | PARTICULAR PURPOSE. You are solely responsible for determining the 154 | appropriateness of using or redistributing the Work and assume any 155 | risks associated with Your exercise of permissions under this License. 156 | 157 | 8. Limitation of Liability. In no event and under no legal theory, 158 | whether in tort (including negligence), contract, or otherwise, 159 | unless required by applicable law (such as deliberate and grossly 160 | negligent acts) or agreed to in writing, shall any Contributor be 161 | liable to You for damages, including any direct, indirect, special, 162 | incidental, or consequential damages of any character arising as a 163 | result of this License or out of the use or inability to use the 164 | Work (including but not limited to damages for loss of goodwill, 165 | work stoppage, computer failure or malfunction, or any and all 166 | other commercial damages or losses), even if such Contributor 167 | has been advised of the possibility of such damages. 168 | 169 | 9. Accepting Warranty or Additional Liability. While redistributing 170 | the Work or Derivative Works thereof, You may choose to offer, 171 | and charge a fee for, acceptance of support, warranty, indemnity, 172 | or other liability obligations and/or rights consistent with this 173 | License. However, in accepting such obligations, You may act only 174 | on Your own behalf and on Your sole responsibility, not on behalf 175 | of any other Contributor, and only if You agree to indemnify, 176 | defend, and hold each Contributor harmless for any liability 177 | incurred by, or claims asserted against, such Contributor by reason 178 | of your accepting any such warranty or additional liability. 179 | 180 | OR 181 | 182 | Eclipse Public License - v 2.0 183 | 184 | THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE 185 | PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION 186 | OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. 187 | 188 | 1. DEFINITIONS 189 | 190 | "Contribution" means: 191 | 192 | a) in the case of the initial Contributor, the initial content 193 | Distributed under this Agreement, and 194 | 195 | b) in the case of each subsequent Contributor: 196 | i) changes to the Program, and 197 | ii) additions to the Program; 198 | where such changes and/or additions to the Program originate from 199 | and are Distributed by that particular Contributor. A Contribution 200 | "originates" from a Contributor if it was added to the Program by 201 | such Contributor itself or anyone acting on such Contributor's behalf. 
202 | Contributions do not include changes or additions to the Program that 203 | are not Modified Works. 204 | 205 | "Contributor" means any person or entity that Distributes the Program. 206 | 207 | "Licensed Patents" mean patent claims licensable by a Contributor which 208 | are necessarily infringed by the use or sale of its Contribution alone 209 | or when combined with the Program. 210 | 211 | "Program" means the Contributions Distributed in accordance with this 212 | Agreement. 213 | 214 | "Recipient" means anyone who receives the Program under this Agreement 215 | or any Secondary License (as applicable), including Contributors. 216 | 217 | "Derivative Works" shall mean any work, whether in Source Code or other 218 | form, that is based on (or derived from) the Program and for which the 219 | editorial revisions, annotations, elaborations, or other modifications 220 | represent, as a whole, an original work of authorship. 221 | 222 | "Modified Works" shall mean any work in Source Code or other form that 223 | results from an addition to, deletion from, or modification of the 224 | contents of the Program, including, for purposes of clarity any new file 225 | in Source Code form that contains any contents of the Program. Modified 226 | Works shall not include works that contain only declarations, 227 | interfaces, types, classes, structures, or files of the Program solely 228 | in each case in order to link to, bind by name, or subclass the Program 229 | or Modified Works thereof. 230 | 231 | "Distribute" means the acts of a) distributing or b) making available 232 | in any manner that enables the transfer of a copy. 233 | 234 | "Source Code" means the form of a Program preferred for making 235 | modifications, including but not limited to software source code, 236 | documentation source, and configuration files. 237 | 238 | "Secondary License" means either the GNU General Public License, 239 | Version 2.0, or any later versions of that license, including any 240 | exceptions or additional permissions as identified by the initial 241 | Contributor. 242 | 243 | 2. GRANT OF RIGHTS 244 | 245 | a) Subject to the terms of this Agreement, each Contributor hereby 246 | grants Recipient a non-exclusive, worldwide, royalty-free copyright 247 | license to reproduce, prepare Derivative Works of, publicly display, 248 | publicly perform, Distribute and sublicense the Contribution of such 249 | Contributor, if any, and such Derivative Works. 250 | 251 | b) Subject to the terms of this Agreement, each Contributor hereby 252 | grants Recipient a non-exclusive, worldwide, royalty-free patent 253 | license under Licensed Patents to make, use, sell, offer to sell, 254 | import and otherwise transfer the Contribution of such Contributor, 255 | if any, in Source Code or other form. This patent license shall 256 | apply to the combination of the Contribution and the Program if, at 257 | the time the Contribution is added by the Contributor, such addition 258 | of the Contribution causes such combination to be covered by the 259 | Licensed Patents. The patent license shall not apply to any other 260 | combinations which include the Contribution. No hardware per se is 261 | licensed hereunder. 262 | 263 | c) Recipient understands that although each Contributor grants the 264 | licenses to its Contributions set forth herein, no assurances are 265 | provided by any Contributor that the Program does not infringe the 266 | patent or other intellectual property rights of any other entity. 
267 | Each Contributor disclaims any liability to Recipient for claims 268 | brought by any other entity based on infringement of intellectual 269 | property rights or otherwise. As a condition to exercising the 270 | rights and licenses granted hereunder, each Recipient hereby 271 | assumes sole responsibility to secure any other intellectual 272 | property rights needed, if any. For example, if a third party 273 | patent license is required to allow Recipient to Distribute the 274 | Program, it is Recipient's responsibility to acquire that license 275 | before distributing the Program. 276 | 277 | d) Each Contributor represents that to its knowledge it has 278 | sufficient copyright rights in its Contribution, if any, to grant 279 | the copyright license set forth in this Agreement. 280 | 281 | e) Notwithstanding the terms of any Secondary License, no 282 | Contributor makes additional grants to any Recipient (other than 283 | those set forth in this Agreement) as a result of such Recipient's 284 | receipt of the Program under the terms of a Secondary License 285 | (if permitted under the terms of Section 3). 286 | 287 | 3. REQUIREMENTS 288 | 289 | 3.1 If a Contributor Distributes the Program in any form, then: 290 | 291 | a) the Program must also be made available as Source Code, in 292 | accordance with section 3.2, and the Contributor must accompany 293 | the Program with a statement that the Source Code for the Program 294 | is available under this Agreement, and informs Recipients how to 295 | obtain it in a reasonable manner on or through a medium customarily 296 | used for software exchange; and 297 | 298 | b) the Contributor may Distribute the Program under a license 299 | different than this Agreement, provided that such license: 300 | i) effectively disclaims on behalf of all other Contributors all 301 | warranties and conditions, express and implied, including 302 | warranties or conditions of title and non-infringement, and 303 | implied warranties or conditions of merchantability and fitness 304 | for a particular purpose; 305 | 306 | ii) effectively excludes on behalf of all other Contributors all 307 | liability for damages, including direct, indirect, special, 308 | incidental and consequential damages, such as lost profits; 309 | 310 | iii) does not attempt to limit or alter the recipients' rights 311 | in the Source Code under section 3.2; and 312 | 313 | iv) requires any subsequent distribution of the Program by any 314 | party to be under a license that satisfies the requirements 315 | of this section 3. 316 | 317 | 3.2 When the Program is Distributed as Source Code: 318 | 319 | a) it must be made available under this Agreement, or if the 320 | Program (i) is combined with other material in a separate file or 321 | files made available under a Secondary License, and (ii) the initial 322 | Contributor attached to the Source Code the notice described in 323 | Exhibit A of this Agreement, then the Program may be made available 324 | under the terms of such Secondary Licenses, and 325 | 326 | b) a copy of this Agreement must be included with each copy of 327 | the Program. 328 | 329 | 3.3 Contributors may not remove or alter any copyright, patent, 330 | trademark, attribution notices, disclaimers of warranty, or limitations 331 | of liability ("notices") contained within the Program from any copy of 332 | the Program which they Distribute, provided that Contributors may add 333 | their own appropriate notices. 334 | 335 | 4. 
COMMERCIAL DISTRIBUTION 336 | 337 | Commercial distributors of software may accept certain responsibilities 338 | with respect to end users, business partners and the like. While this 339 | license is intended to facilitate the commercial use of the Program, 340 | the Contributor who includes the Program in a commercial product 341 | offering should do so in a manner which does not create potential 342 | liability for other Contributors. Therefore, if a Contributor includes 343 | the Program in a commercial product offering, such Contributor 344 | ("Commercial Contributor") hereby agrees to defend and indemnify every 345 | other Contributor ("Indemnified Contributor") against any losses, 346 | damages and costs (collectively "Losses") arising from claims, lawsuits 347 | and other legal actions brought by a third party against the Indemnified 348 | Contributor to the extent caused by the acts or omissions of such 349 | Commercial Contributor in connection with its distribution of the Program 350 | in a commercial product offering. The obligations in this section do not 351 | apply to any claims or Losses relating to any actual or alleged 352 | intellectual property infringement. In order to qualify, an Indemnified 353 | Contributor must: a) promptly notify the Commercial Contributor in 354 | writing of such claim, and b) allow the Commercial Contributor to control, 355 | and cooperate with the Commercial Contributor in, the defense and any 356 | related settlement negotiations. The Indemnified Contributor may 357 | participate in any such claim at its own expense. 358 | 359 | For example, a Contributor might include the Program in a commercial 360 | product offering, Product X. That Contributor is then a Commercial 361 | Contributor. If that Commercial Contributor then makes performance 362 | claims, or offers warranties related to Product X, those performance 363 | claims and warranties are such Commercial Contributor's responsibility 364 | alone. Under this section, the Commercial Contributor would have to 365 | defend claims against the other Contributors related to those performance 366 | claims and warranties, and if a court requires any other Contributor to 367 | pay any damages as a result, the Commercial Contributor must pay 368 | those damages. 369 | 370 | 5. NO WARRANTY 371 | 372 | EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT 373 | PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" 374 | BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR 375 | IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF 376 | TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR 377 | PURPOSE. Each Recipient is solely responsible for determining the 378 | appropriateness of using and distributing the Program and assumes all 379 | risks associated with its exercise of rights under this Agreement, 380 | including but not limited to the risks and costs of program errors, 381 | compliance with applicable laws, damage to or loss of data, programs 382 | or equipment, and unavailability or interruption of operations. 383 | 384 | 6. 
DISCLAIMER OF LIABILITY 385 | 386 | EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT 387 | PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS 388 | SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 389 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST 390 | PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 391 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 392 | ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE 393 | EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE 394 | POSSIBILITY OF SUCH DAMAGES. 395 | 396 | 7. GENERAL 397 | 398 | If any provision of this Agreement is invalid or unenforceable under 399 | applicable law, it shall not affect the validity or enforceability of 400 | the remainder of the terms of this Agreement, and without further 401 | action by the parties hereto, such provision shall be reformed to the 402 | minimum extent necessary to make such provision valid and enforceable. 403 | 404 | If Recipient institutes patent litigation against any entity 405 | (including a cross-claim or counterclaim in a lawsuit) alleging that the 406 | Program itself (excluding combinations of the Program with other software 407 | or hardware) infringes such Recipient's patent(s), then such Recipient's 408 | rights granted under Section 2(b) shall terminate as of the date such 409 | litigation is filed. 410 | 411 | All Recipient's rights under this Agreement shall terminate if it 412 | fails to comply with any of the material terms or conditions of this 413 | Agreement and does not cure such failure in a reasonable period of 414 | time after becoming aware of such noncompliance. If all Recipient's 415 | rights under this Agreement terminate, Recipient agrees to cease use 416 | and distribution of the Program as soon as reasonably practicable. 417 | However, Recipient's obligations under this Agreement and any licenses 418 | granted by Recipient relating to the Program shall continue and survive. 419 | 420 | Everyone is permitted to copy and distribute copies of this Agreement, 421 | but in order to avoid inconsistency the Agreement is copyrighted and 422 | may only be modified in the following manner. The Agreement Steward 423 | reserves the right to publish new versions (including revisions) of 424 | this Agreement from time to time. No one other than the Agreement 425 | Steward has the right to modify this Agreement. The Eclipse Foundation 426 | is the initial Agreement Steward. The Eclipse Foundation may assign the 427 | responsibility to serve as the Agreement Steward to a suitable separate 428 | entity. Each new version of the Agreement will be given a distinguishing 429 | version number. The Program (including Contributions) may always be 430 | Distributed subject to the version of the Agreement under which it was 431 | received. In addition, after a new version of the Agreement is published, 432 | Contributor may elect to Distribute the Program (including its 433 | Contributions) under the new version. 434 | 435 | Except as expressly stated in Sections 2(a) and 2(b) above, Recipient 436 | receives no rights or licenses to the intellectual property of any 437 | Contributor under this Agreement, whether expressly, by implication, 438 | estoppel or otherwise. All rights in the Program not expressly granted 439 | under this Agreement are reserved. 
Nothing in this Agreement is intended
440 | to be enforceable by any entity that is not a Contributor or Recipient.
441 | No third-party beneficiary rights are created under this Agreement.
442 |
443 | Exhibit A - Form of Secondary Licenses Notice
444 |
445 | "This Source Code may also be made available under the following
446 | Secondary Licenses when the conditions for such availability set forth
447 | in the Eclipse Public License, v. 2.0 are satisfied: {name license(s),
448 | version(s), and exceptions or additional permissions here}."
449 |
450 | Simply including a copy of this Agreement, including this Exhibit A
451 | is not sufficient to license the Source Code under Secondary Licenses.
452 |
453 | If it is not possible or desirable to put the notice in a particular
454 | file, then You may include the notice in a location (such as a LICENSE
455 | file in a relevant directory) where a recipient would be likely to
456 | look for such a notice.
457 |
458 | You may add additional accurate notices of copyright ownership.
459 |
460 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Eclipse Zenoh-Flow Examples
2 |
3 | [![Join the chat at https://gitter.im/atolab/zenoh-flow](https://badges.gitter.im/atolab/zenoh-flow.svg)](https://gitter.im/atolab/zenoh-flow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
4 |
5 | Zenoh-Flow provides a zenoh-based dataflow programming framework for computations that span from the cloud to the device.
6 |
7 | :warning: **This software is still in alpha status and should _not_ be used in production. Breaking changes are likely to happen and the API is not stable.**
8 |
9 | -----------
10 | ## Description
11 |
12 | Zenoh-Flow allows users to declare a dataflow graph, via a YAML file, and use tags to express location affinity and requirements for the operators that make up the graph. When deploying the dataflow graph, Zenoh-Flow automatically deals with distribution by linking remote operators through zenoh.
13 |
14 | A dataflow is composed of a set of _sources_ — producing data, _operators_ — computing over the data, and _sinks_ — consuming the resulting data. These components are _dynamically_ loaded at runtime.
15 |
16 | Remote sources, operators, and sinks leverage zenoh to communicate in a transparent manner. In other terms, the dataflow graph retains location transparency and can be deployed in different ways depending on specific needs.
17 |
18 | Zenoh-Flow provides several working examples that illustrate how to define operators, sources and sinks, as well as how to declaratively define the dataflow graph by means of a YAML file.
19 |
20 | -----------
21 | ## Examples
22 |
23 |
24 | ### Getting Started
25 |
26 | The purpose of this example is to introduce the different concepts of Zenoh-Flow by creating a
27 | simple application that generates "Hello, World!" types of greetings.
28 |
29 | Go to the [README](./getting-started/README.md) for instructions on how to run it.
30 |
31 | ---
32 |
33 | ### Period Miss Detector
34 |
35 | The purpose of this example is to showcase how one can implement a node that (i)
36 | expects data at regular intervals and (ii) sends a default value if no data was
37 | received within an interval.
38 |
39 | Go to the [README](./period-miss-detector/README.md) for instructions on how to run it.
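At its core, such a node is a receive-with-timeout loop. The snippet below is only an illustrative sketch, not the example's actual code: the node name, the 500 ms period, the `String` payload and the `default` field are assumptions made for the illustration, and the constructor plus the `#[export_operator]` registration are omitted. It follows the same zenoh-flow 0.5 alpha API used by the Rust nodes elsewhere in this repository.

```rust
use std::time::Duration;

use zenoh_flow::prelude::*;

// Assumed period: in the real example the interval is configurable.
const PERIOD: Duration = Duration::from_millis(500);

pub struct PeriodMissDetector {
    input: Input<String>,
    output: Output<String>,
    default: String,
}

#[async_trait::async_trait]
impl Node for PeriodMissDetector {
    async fn iteration(&self) -> Result<()> {
        // Wait for the next message, but never longer than the expected period.
        match async_std::future::timeout(PERIOD, self.input.recv()).await {
            // Data arrived in time: forward it unchanged.
            Ok(Ok((Message::Data(data), _timestamp))) => {
                self.output.send((*data).clone(), None).await
            }
            // Other message kinds are ignored in this sketch.
            Ok(Ok(_)) => Ok(()),
            // The channel returned an error: propagate it.
            Ok(Err(e)) => Err(e),
            // The period elapsed without data: emit the default value instead.
            Err(_elapsed) => self.output.send(self.default.clone(), None).await,
        }
    }
}
```

The complete Rust and Python implementations, together with the flow descriptor that wires the detector to a file-writer sink, live under `period-miss-detector/`.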
40 |
41 | ### Montblanc
42 |
43 | The purpose of this example is to demonstrate how Zenoh-Flow can handle a
44 | complex dataflow graph, like a robotic application.
45 |
46 | Go to the [README](./montblanc/README.md) for instructions on how to run it.
47 |
48 |
49 | ### Transcoding
50 |
51 | The purpose of this example is to demonstrate how Zenoh-Flow can be used within a Zenoh
52 | router to transcode live data.
53 |
54 | Go to the [README](./transcoding/README.md) for instructions on how to run it.
55 |
--------------------------------------------------------------------------------
/getting-started/README.md:
--------------------------------------------------------------------------------
1 | # Getting started
2 |
3 | The purpose of this example is to introduce the different concepts of Zenoh-Flow by creating a
4 | simple application that generates "Hello, World!" types of greetings.
5 |
6 | ## How to run
7 |
8 | ### Build (for Rust nodes only)
9 |
10 | We first generate the shared libraries of the different nodes.
11 |
12 | ```shell
13 | cd ~/dev/zenoh-flow-examples/getting-started/nodes/rust/ && cargo build --workspace
14 | ```
15 |
16 | ### Update the paths
17 |
18 | For each YAML file in the list below, check that the paths and filenames are
19 | correct:
20 | - data-flow.yaml
21 | - Rust nodes:
22 |   - nodes/rust/greetings-maker/greetings-maker.yaml
23 |   - nodes/rust/file-writer/file-writer.yaml
24 | - Python nodes:
25 |   - nodes/python/greetings-maker/greetings-maker.yaml
26 |   - nodes/python/file-writer/file-writer.yaml
27 |
28 | :bulb: Note that you actually only need to update the files of the nodes you are going to use —
29 | which could be a mix of Python and Rust nodes.
30 |
31 | ### Launch
32 |
33 | #### 1st terminal: Zenoh
34 |
35 | ```shell
36 | cd ~/dev/zenoh && ./target/debug/zenohd -c ~/.config/zenoh-flow/zenoh.json
37 | ```
38 |
39 | #### 2nd terminal: Zenoh-Flow daemon
40 |
41 | ```shell
42 | cd ~/dev/zenoh-flow/ && ./target/debug/zenoh-flow-daemon -c ~/.config/zenoh-flow/runtime.yaml
43 | ```
44 |
45 | #### 3rd terminal: launch the flow
46 |
47 | ```shell
48 | cd ~/dev/zenoh-flow && ./target/debug/zfctl launch ~/dev/zenoh-flow-examples/getting-started/data-flow.yaml
49 | ```
50 |
51 | Then, if the flow was successfully launched, put values at regular intervals:
52 |
53 | ```shell
54 | # If you have compiled the `z_put` example of Zenoh in debug
55 | $ZENOH/target/debug/examples/z_put -k "zf/getting-started/hello" -v "Alice"
56 |
57 | # If you have enabled the REST plugin of Zenoh
58 | curl -X PUT -H "content-type:text/plain" -d 'Bob' http://localhost:8000/zf/getting-started/hello
59 | ```
60 |
--------------------------------------------------------------------------------
/getting-started/data-flow.yaml:
--------------------------------------------------------------------------------
1 | flow: getting-started
2 |
3 | vars:
4 |   BASE_DIR: "/path/to/zenoh-flow-examples/getting-started"
5 |
6 | sources:
7 |   - id: zenoh-sub
8 |     configuration:
9 |       key-expressions:
10 |         out: zf/getting-started/hello
11 |     descriptor: "builtin://zenoh"
12 |
13 |
14 | operators:
15 |   - id: greetings-maker
16 |     descriptor: "file://{{ BASE_DIR }}/nodes/python/greetings-maker/greetings-maker.yaml"
17 |
18 |
19 | sinks:
20 |   - id: file-writer
21 |     descriptor: "file://{{ BASE_DIR }}/nodes/python/file-writer/file-writer.yaml"
22 |
23 |   - id: zenoh-writer
24 |     configuration:
25 |       key-expressions:
26 |         in: zf/getting-started/greeting
27 |     descriptor:
"builtin://zenoh" 28 | 29 | 30 | links: 31 | - from: 32 | node: zenoh-sub 33 | output: out 34 | to: 35 | node: greetings-maker 36 | input: name 37 | 38 | - from: 39 | node: greetings-maker 40 | output: greeting 41 | to: 42 | node: file-writer 43 | input: in 44 | 45 | - from: 46 | node: greetings-maker 47 | output: greeting 48 | to: 49 | node: zenoh-writer 50 | input: in 51 | -------------------------------------------------------------------------------- /getting-started/nodes/python/file-writer/file-writer.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | from zenoh_flow.interfaces import Sink 16 | from zenoh_flow import Inputs 17 | from zenoh_flow.types import Context 18 | from typing import Dict, Any 19 | 20 | 21 | class FileWriter(Sink): 22 | def __init__( 23 | self, 24 | context: Context, 25 | configuration: Dict[str, Any], 26 | inputs: Inputs, 27 | ): 28 | self.input = inputs.take("in", str, lambda buf: buf.decode("utf-8")) 29 | if self.input is None: 30 | raise ValueError("Unable to find input") 31 | self.out_file = open("/tmp/greetings.txt", "w+") 32 | 33 | def finalize(self) -> None: 34 | self.out_file.close() 35 | 36 | async def iteration(self) -> None: 37 | message = await self.input.recv() 38 | greeting = message.get_data() 39 | if greeting is not None: 40 | self.out_file.write(greeting) 41 | self.out_file.flush() 42 | 43 | 44 | def register(): 45 | return FileWriter 46 | -------------------------------------------------------------------------------- /getting-started/nodes/python/file-writer/file-writer.yaml: -------------------------------------------------------------------------------- 1 | id: file-writer 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/getting-started" 5 | 6 | uri: "file://{{ BASE_DIR }}/nodes/python/file-writer/file-writer.py" 7 | inputs: [in] 8 | -------------------------------------------------------------------------------- /getting-started/nodes/python/greetings-maker/greetings-maker.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | from zenoh_flow.interfaces import Operator 16 | from zenoh_flow import Inputs, Outputs 17 | from zenoh_flow.types import Context 18 | from typing import Dict, Any 19 | 20 | 21 | class GreetingsMaker(Operator): 22 | def __init__( 23 | self, 24 | context: Context, 25 | configuration: Dict[str, Any], 26 | inputs: Inputs, 27 | outputs: Outputs, 28 | ): 29 | print(f"Context: {context}") 30 | self.output = outputs.take("greeting", str, lambda s: bytes(s, "utf-8")) 31 | self.in_stream = inputs.take("name", str, lambda buf: buf.decode("utf-8")) 32 | 33 | if self.in_stream is None: 34 | raise ValueError("No input 'name' found") 35 | if self.output is None: 36 | raise ValueError("No output 'greeting' found") 37 | 38 | def finalize(self) -> None: 39 | return None 40 | 41 | async def iteration(self) -> None: 42 | message = await self.in_stream.recv() 43 | name = message.get_data() 44 | if name is not None: 45 | greetings = self.generate_greetings(name) 46 | await self.output.send(greetings) 47 | 48 | return None 49 | 50 | def generate_greetings(self, name: str) -> str: 51 | greetings_dict = { 52 | "Sofia": "Ciao, {}!\n", 53 | "Leonardo": "Ciao, {}!\n", 54 | "Lucia": "¡Hola, {}!\n", 55 | "Martin": "¡Hola, {}!\n", 56 | "Jade": "Bonjour, {}!\n", 57 | "Gabriele": "Ciao, PaaS manager!\n", 58 | } 59 | 60 | greet = greetings_dict.get(name, "Hello, {}!\n") 61 | return greet.format(name) 62 | 63 | 64 | def register(): 65 | return GreetingsMaker 66 | -------------------------------------------------------------------------------- /getting-started/nodes/python/greetings-maker/greetings-maker.yaml: -------------------------------------------------------------------------------- 1 | id: greetings-maker 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/getting-started" 5 | 6 | uri: "file://{{ BASE_DIR }}/nodes/python/greetings-maker/greetings-maker.py" 7 | 8 | inputs: [name] 9 | outputs: [greeting] 10 | -------------------------------------------------------------------------------- /getting-started/nodes/rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [workspace] 16 | members = [ "greetings-maker", "file-writer" ] 17 | 18 | [workspace.dependencies] 19 | async-std = "1.12" 20 | async-trait = "0.1" 21 | prost = "0.11" 22 | zenoh-flow = { version = "0.5.0-alpha.1" } -------------------------------------------------------------------------------- /getting-started/nodes/rust/file-writer/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "file-writer" 3 | version = "0.1.0" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | zenoh-flow = { workspace = true } 8 | async-trait = { workspace = true } 9 | async-std = { workspace = true } 10 | prost = { workspace = true } 11 | 12 | [lib] 13 | crate-type=["cdylib"] 14 | -------------------------------------------------------------------------------- /getting-started/nodes/rust/file-writer/file-writer.yaml: -------------------------------------------------------------------------------- 1 | id: file-writer 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/getting-started" 5 | 6 | # Do not forget to change the extension depending on your operating system! 7 | # Linux -> .so 8 | # Windows -> .dll (and remove the "lib" in front) 9 | # MacOS -> .dylib 10 | uri: "file://{{ BASE_DIR }}/nodes/rust/target/debug/libfile_writer.dylib" 11 | # If the compilation is in release: 12 | # uri: file:///absolute/path/to/target/release/libfile_writer.so 13 | 14 | inputs: [in] 15 | 16 | -------------------------------------------------------------------------------- /getting-started/nodes/rust/file-writer/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | //
9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
10 | //
11 | // Contributors:
12 | //   ZettaScale Zenoh Team,
13 | //
14 |
15 | use async_std::{fs::File, io::WriteExt, sync::Mutex};
16 | use prost::Message as pMessage;
17 | use zenoh_flow::{anyhow, prelude::*};
18 |
19 | #[export_sink]
20 | pub struct FileWriter {
21 |     input: Input<String>,
22 |     file: Mutex<File>,
23 | }
24 |
25 | #[async_trait::async_trait]
26 | impl Node for FileWriter {
27 |     async fn iteration(&self) -> Result<()> {
28 |         let (message, _) = self.input.recv().await?;
29 |
30 |         if let Message::Data(greeting) = message {
31 |             let mut file = self.file.lock().await;
32 |             file.write_all(greeting.as_bytes())
33 |                 .await
34 |                 .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e))?;
35 |             return file
36 |                 .flush()
37 |                 .await
38 |                 .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e).into());
39 |         }
40 |
41 |         Ok(())
42 |     }
43 | }
44 |
45 | #[async_trait::async_trait]
46 | impl Sink for FileWriter {
47 |     async fn new(
48 |         _context: Context,
49 |         _configuration: Option<Configuration>,
50 |         mut inputs: Inputs,
51 |     ) -> Result<Self> {
52 |         Ok(FileWriter {
53 |             file: Mutex::new(
54 |                 File::create("/tmp/greetings.txt")
55 |                     .await
56 |                     .expect("Could not create '/tmp/greetings.txt'"),
57 |             ),
58 |             input: inputs
59 |                 .take("in")
60 |                 .expect("No Input called 'in' found")
61 |                 .typed(|bytes| String::decode(bytes).map_err(|e| anyhow!(e))),
62 |         })
63 |     }
64 | }
65 |
--------------------------------------------------------------------------------
/getting-started/nodes/rust/greetings-maker/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "greetings-maker"
3 | version = "0.1.0"
4 | edition = "2018"
5 |
6 | [dependencies]
7 | async-std = { workspace = true }
8 | async-trait = { workspace = true }
9 | prost = { workspace = true }
10 | zenoh-flow = { workspace = true }
11 |
12 | [lib]
13 | crate-type=["cdylib"]
--------------------------------------------------------------------------------
/getting-started/nodes/rust/greetings-maker/greetings-maker.yaml:
--------------------------------------------------------------------------------
1 | id: greetings-maker
2 |
3 | vars:
4 |   BASE_DIR: "/path/to/zenoh-flow-examples/getting-started"
5 |
6 | # Do not forget to change the extension depending on your operating system!
7 | #   Linux   -> .so
8 | #   Windows -> .dll (and remove the "lib" in front)
9 | #   MacOS   -> .dylib
10 | uri: "file://{{ BASE_DIR }}/nodes/rust/target/debug/libgreetings_maker.dylib"
11 | # If the compilation is in release:
12 | # uri: file:///absolute/path/to/target/release/libgreetings_maker.so
13 |
14 | inputs: [name]
15 | outputs: [greeting]
16 |
--------------------------------------------------------------------------------
/getting-started/nodes/rust/greetings-maker/src/lib.rs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) 2022 ZettaScale Technology
3 | //
4 | // This program and the accompanying materials are made available under the
5 | // terms of the Eclipse Public License 2.0 which is available at
6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | //
9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
10 | //
11 | // Contributors:
12 | //   ZettaScale Zenoh Team,
13 | //
14 |
15 | use prost::Message as pMessage;
16 | use zenoh_flow::{anyhow, prelude::*};
17 |
18 | #[export_operator]
19 | pub struct GreetingsMaker {
20 |     input: Input<String>,
21 |     output: Output<String>,
22 | }
23 |
24 | #[async_trait::async_trait]
25 | impl Operator for GreetingsMaker {
26 |     async fn new(
27 |         _context: Context,
28 |         _configuration: Option<Configuration>,
29 |         mut inputs: Inputs,
30 |         mut outputs: Outputs,
31 |     ) -> Result<Self> {
32 |         Ok(GreetingsMaker {
33 |             input: inputs
34 |                 .take("name")
35 |                 .expect("No input 'name' found")
36 |                 .typed(|bytes| String::from_utf8(bytes.into()).map_err(|e| anyhow!(e))),
37 |             output: outputs
38 |                 .take("greeting")
39 |                 .expect("No output 'greeting' found")
40 |                 .typed(|buffer, data: &String| data.encode(buffer).map_err(|e| anyhow!(e))),
41 |         })
42 |     }
43 | }
44 |
45 | #[async_trait::async_trait]
46 | impl Node for GreetingsMaker {
47 |     async fn iteration(&self) -> Result<()> {
48 |         let (message, _) = self.input.recv().await?;
49 |         if let Message::Data(characters) = message {
50 |             let name = characters.trim_end();
51 |
52 |             let greetings = match name {
53 |                 "Sofia" | "Leonardo" => format!("Ciao, {}!\n", name),
54 |                 "Lucia" | "Martin" => format!("¡Hola, {}!\n", name),
55 |                 "Jade" | "Gabriel" => format!("Bonjour, {} !\n", name),
56 |                 _ => format!("Hello, {}!\n", name),
57 |             };
58 |
59 |             return self.output.send(greetings, None).await;
60 |         }
61 |
62 |         Ok(())
63 |     }
64 | }
65 |
--------------------------------------------------------------------------------
/montblanc/Cargo.toml:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright (c) 2022 ZettaScale Technology
3 | #
4 | # This program and the accompanying materials are made available under the
5 | # terms of the Eclipse Public License 2.0 which is available at
6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
10 | #
11 | # Contributors:
12 | #   ZettaScale Zenoh Team,
13 | #
14 |
15 | [workspace]
16 | members = [
17 |     "arequipa",
18 |     "barcelona",
19 |     "cordoba",
20 |     "datatypes",
21 |     "delhi",
22 |     "freeport",
23 |     "geneva",
24 |     "georgetown",
25 |     "hamburg",
26 |     "hebron",
27 |     "kingston",
28 |     "lyon",
29 |     "madelin",
30 |     "mandalay",
31 |     "monaco",
32 |     "osaka",
33 |     "ponce",
34 |     "portsmouth",
35 |     "rotterdam",
36 |     "taipei",
37 |     "tripoli",
38 | ]
39 |
40 | [profile.dev]
41 | debug = true
42 | opt-level = 0
43 |
44 | [profile.release]
45 | debug = false
46 | lto="fat"
47 | codegen-units=1
48 | opt-level=3
49 | panic="abort"
50 |
51 | [workspace.dependencies]
52 | async-std = { version = "=1.12.0", features = ["attributes"] }
53 | async-trait = "0.1.50"
54 | futures = "0.3.28"
55 | prost = "0.11"
56 | rand = "0.8.0"
57 | zenoh-flow = "0.5.0-alpha.1"
--------------------------------------------------------------------------------
/montblanc/README.md:
--------------------------------------------------------------------------------
1 | # Montblanc
2 |
3 | The purpose of this example is to demonstrate how Zenoh-Flow can handle a
4 | complex dataflow graph, like a robotic application.
5 |
6 | ## Dataflow Graph
7 | ![mont-blanc-graph](mont_blanc.png "Montblanc dataflow graph")
8 |
9 | ## How to run
10 |
11 | ### Build
12 |
13 | We first generate the shared libraries of the different nodes.
14 |
15 | ```shell
16 | cd ~/dev/zenoh-flow-examples/montblanc && cargo build --workspace --release
17 | ```
18 |
19 | ### Update the paths
20 |
21 | For each YAML file in the list below, check that the paths and filenames are
22 | correct:
23 | - montblanc.yml
24 | - arequipa/arequipa.yml
25 | - barcelona/barcelona.yml
26 | - cordoba/cordoba.yml
27 | - delhi/delhi.yml
28 | - freeport/freeport.yml
29 | - geneva/geneva.yml
30 | - georgetown/georgetown.yml
31 | - hamburg/hamburg.yml
32 | - hebron/hebron.yml
33 | - kingston/kingston.yml
34 | - lyon/lyon.yml
35 | - madelin/madelin.yml
36 | - mandalay/mandalay.yml
37 | - monaco/monaco.yml
38 | - osaka/osaka.yml
39 | - ponce/ponce.yml
40 | - portsmouth/portsmouth.yml
41 | - rotterdam/rotterdam.yml
42 | - taipei/taipei.yml
43 | - tripoli/tripoli.yml
44 |
45 | ### Launch
46 |
47 | #### 1st terminal: Zenoh
48 |
49 | ```shell
50 | cd ~/dev/zenoh && ./target/debug/zenohd -c ~/.config/zenoh-flow/zenoh.json
51 | ```
52 |
53 | #### 2nd terminal: Zenoh-Flow daemon
54 |
55 | ```shell
56 | cd ~/dev/zenoh-flow/ && ./target/debug/zenoh-flow-daemon -c ~/.config/zenoh-flow/runtime.yaml
57 | ```
58 |
59 | #### 3rd terminal: launch the flow
60 |
61 | ```shell
62 | cd ~/dev/zenoh-flow && ./target/debug/zfctl launch ~/dev/zenoh-flow-examples/montblanc/montblanc.yml
63 | ```
64 |
65 | Then, if the flow was successfully launched, you can see its output
66 | with the command
67 |
68 | ```shell
69 | tail -f "/tmp/montblanc.out"
70 | ```
71 |
--------------------------------------------------------------------------------
/montblanc/arequipa/Cargo.toml:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright (c) 2022 ZettaScale Technology
3 | #
4 | # This program and the accompanying materials are made available under the
5 | # terms of the Eclipse Public License 2.0 which is available at
6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
10 | #
11 | # Contributors:
12 | #   ZettaScale Zenoh Team,
13 | #
14 |
15 | [package]
16 | name = "arequipa"
17 | version = "0.1.0"
18 | edition = "2018"
19 |
20 | [lib]
21 | name = "arequipa"
22 | crate-type=["cdylib"]
23 | path="src/lib.rs"
24 |
25 | [dependencies]
26 | async-std = { workspace = true }
27 | async-trait = { workspace = true }
28 | datatypes = { path = "../datatypes" }
29 | prost = { workspace = true }
30 | zenoh-flow = { workspace = true }
--------------------------------------------------------------------------------
/montblanc/arequipa/arequipa.yml:
--------------------------------------------------------------------------------
1 | id : Arequipa
2 |
3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libarequipa.so
4 |
5 | inputs: [Arkansas]
--------------------------------------------------------------------------------
/montblanc/arequipa/src/lib.rs:
--------------------------------------------------------------------------------
1 | //
2 | // Copyright (c) 2022 ZettaScale Technology
3 | //
4 | // This program and the accompanying materials are made available under the
5 | // terms of the Eclipse Public License 2.0 which is available at
6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | //
9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
10 | //
11 | // Contributors:
12 | //   ZettaScale Zenoh Team,
13 | //
14 |
15 | use async_std::{fs::File, io::WriteExt, sync::Mutex};
16 | use datatypes::ARKANSAS_PORT;
17 | use prost::Message;
18 | use zenoh_flow::prelude::*;
19 |
20 | static OUT_FILE: &str = "/tmp/montblanc.out";
21 |
22 | #[export_sink]
23 | pub struct Arequipa {
24 |     input: Input<datatypes::data_types::String>,
25 |     file: Mutex<File>,
26 | }
27 |
28 | #[async_trait::async_trait]
29 | impl Node for Arequipa {
30 |     async fn iteration(&self) -> Result<()> {
31 |         let (message, _) = self.input.recv().await?;
32 |         if let zenoh_flow::prelude::Message::Data(data) = message {
33 |             let mut file = self.file.lock().await;
34 |             let final_data = format!("{}\n", data.value);
35 |             file.write_all(final_data.as_bytes())
36 |                 .await
37 |                 .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e))?;
38 |             return file
39 |                 .flush()
40 |                 .await
41 |                 .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e).into());
42 |         }
43 |
44 |         Ok(())
45 |     }
46 | }
47 |
48 | #[async_trait::async_trait]
49 | impl Sink for Arequipa {
50 |     async fn new(
51 |         _context: Context,
52 |         _configuration: Option<Configuration>,
53 |         mut inputs: Inputs,
54 |     ) -> Result<Self> {
55 |         Ok(Self {
56 |             input: inputs
57 |                 .take(ARKANSAS_PORT)
58 |                 .unwrap_or_else(|| panic!("No Input called '{}' found", ARKANSAS_PORT))
59 |                 .typed(|d| Ok(datatypes::data_types::String::decode(d)?)),
60 |
61 |             file: Mutex::new(
62 |                 File::create(OUT_FILE)
63 |                     .await
64 |                     .unwrap_or_else(|_| panic!("Could not create {}", OUT_FILE)),
65 |             ),
66 |         })
67 |     }
68 | }
69 |
--------------------------------------------------------------------------------
/montblanc/barcelona/Cargo.toml:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright (c) 2022 ZettaScale Technology
3 | #
4 | # This program and the accompanying materials are made available under the
5 | # terms of the Eclipse Public License 2.0 which is available at
6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "barcelona" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "barcelona" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/barcelona/barcelona.yml: -------------------------------------------------------------------------------- 1 | id : Barcelona 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/release/libbarcelona.so 3 | inputs: [Mekong] 4 | outputs: [Lena] 5 | -------------------------------------------------------------------------------- /montblanc/barcelona/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::data_types; 16 | use datatypes::{LENA_PORT, MEKONG_PORT}; 17 | use futures::prelude::*; 18 | use futures::select; 19 | use prost::Message as pMessage; 20 | use rand::random; 21 | use zenoh_flow::prelude::*; 22 | 23 | #[export_operator] 24 | pub struct Barcelona { 25 | input_mekong: Input, 26 | output_lena: Output, 27 | } 28 | 29 | #[async_trait::async_trait] 30 | impl Operator for Barcelona { 31 | async fn new( 32 | _context: Context, 33 | _configuration: Option, 34 | mut inputs: Inputs, 35 | mut outputs: Outputs, 36 | ) -> Result { 37 | Ok(Self { 38 | input_mekong: inputs 39 | .take(MEKONG_PORT) 40 | .unwrap_or_else(|| panic!("No Input called '{}' found", MEKONG_PORT)) 41 | .typed(|d| Ok(data_types::TwistWithCovarianceStamped::decode(d)?)), 42 | output_lena: outputs 43 | .take(LENA_PORT) 44 | .unwrap_or_else(|| panic!("No Output called '{}' found", LENA_PORT)) 45 | .typed(|buf, v: &data_types::WrenchStamped| { 46 | buf.resize(v.encoded_len(), 0); 47 | Ok(v.encode(buf)?) 48 | }), 49 | }) 50 | } 51 | } 52 | 53 | #[async_trait::async_trait] 54 | impl Node for Barcelona { 55 | async fn iteration(&self) -> Result<()> { 56 | select! { 57 | msg = self.input_mekong.recv().fuse() => { 58 | if let Ok((Message::Data(data), _ts)) = msg { 59 | let value = data_types::WrenchStamped { 60 | header: Some(data.header.clone().unwrap_or(random())), 61 | wrench: Some(data_types::Wrench { 62 | force: data 63 | .twist 64 | .as_ref() 65 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 66 | .twist 67 | .as_ref() 68 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 69 | .linear 70 | .clone(), 71 | torque: data 72 | .twist 73 | .as_ref() 74 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 75 | .twist 76 | .as_ref() 77 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 
78 | .angular 79 | .clone(), 80 | }), 81 | }; 82 | self.output_lena.send(value, None).await?; 83 | } 84 | } 85 | } 86 | Ok(()) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /montblanc/cordoba/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "cordoba" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "cordoba" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | rand = { workspace = true } 31 | zenoh-flow = { workspace = true } 32 | -------------------------------------------------------------------------------- /montblanc/cordoba/cordoba.yml: -------------------------------------------------------------------------------- 1 | id : Cordoba 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libcordoba.so 4 | 5 | # period: 6 | # length: 100 7 | # unit: ms 8 | outputs: [Amazon] 9 | -------------------------------------------------------------------------------- /montblanc/cordoba/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::AMAZON_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | 21 | #[export_source] 22 | pub struct Cordoba { 23 | output: Output<datatypes::data_types::Float32>, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Node for Cordoba { 28 | async fn iteration(&self) -> Result<()> { 29 | async_std::task::sleep(Duration::from_millis(100)).await; 30 | let data: f32 = random::<f32>() * 1000000.0; 31 | let value = datatypes::data_types::Float32 { value: data }; 32 | self.output.send(value, None).await 33 | } 34 | } 35 | 36 | #[async_trait::async_trait] 37 | impl Source for Cordoba { 38 | async fn new( 39 | _context: Context, 40 | _configuration: Option<Configuration>, 41 | mut outputs: Outputs, 42 | ) -> Result<Self> { 43 | Ok(Self { 44 | output: outputs 45 | .take(AMAZON_PORT) 46 | .unwrap_or_else(|| panic!("No Output called '{}' found", AMAZON_PORT)) 47 | .typed(|buf, v: &datatypes::data_types::Float32| { 48 | buf.resize(v.encoded_len(), 0); 49 | Ok(v.encode(buf)?)
50 | }), 51 | }) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /montblanc/datatypes/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "datatypes" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [dependencies] 21 | async-std = { workspace = true } 22 | async-trait = { workspace = true } 23 | chrono = "0.4" 24 | futures = { workspace = true } 25 | prost = { workspace = true } 26 | prost-build = "0.11" 27 | rand = { workspace = true } 28 | zenoh-flow = { workspace = true } 29 | 30 | [build-dependencies] 31 | prost-build = "0.11" 32 | chrono = "0.4" 33 | -------------------------------------------------------------------------------- /montblanc/datatypes/build.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | extern crate prost_build; 16 | 17 | fn main() { 18 | prost_build::compile_protos(&["src/data_types.proto"], &["src/"]).unwrap(); 19 | } 20 | -------------------------------------------------------------------------------- /montblanc/datatypes/src/data_types.proto: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | syntax = "proto3"; 16 | 17 | package datatypes.data_types; 18 | 19 | message Timestamp { 20 | uint64 sec = 1; 21 | uint32 nanosec = 2; 22 | } 23 | 24 | message Header { 25 | int32 sec = 1; 26 | uint32 nanosec = 2; 27 | string frame_id = 3; 28 | } 29 | 30 | message Point { 31 | double x = 1; 32 | double y = 2; 33 | double z = 3; 34 | } 35 | 36 | message Quaternion { 37 | double x = 1; 38 | double y = 2; 39 | double z = 3; 40 | double w = 4; 41 | } 42 | 43 | message Vector3 { 44 | double x = 1; 45 | double y = 2; 46 | double z = 3; 47 | } 48 | 49 | message Vector3Stamped { 50 | Header header = 1; 51 | Vector3 vector = 2; 52 | } 53 | 54 | message Pose { 55 | Point position = 1; 56 | Quaternion orientation = 2; 57 | } 58 | 59 | message Twist { 60 | Vector3 linear = 1; 61 | Vector3 angular = 2; 62 | } 63 | 64 | message TwistWithCovariance { 65 | Twist twist = 1; 66 | repeated double covariance = 2; 67 | } 68 | 69 | message TwistWithCovarianceStamped { 70 | Header header = 1; 71 | TwistWithCovariance twist = 2; 72 | } 73 | 74 | message Wrench { 75 | Vector3 force = 1; 76 | Vector3 torque = 2; 77 | } 78 | 79 | message WrenchStamped { 80 | Header header = 1; 81 | Wrench wrench = 2; 82 | } 83 | 84 | message Image { 85 | Header header = 1; 86 | uint32 height = 2; 87 | uint32 width = 3; 88 | string encoding = 4; 89 | bool is_bigendian = 5; 90 | uint32 step = 6; 91 | bytes data = 7; 92 | } 93 | 94 | message PointCloud2 { 95 | message PointField { 96 | enum DataType { 97 | INT8 = 0; 98 | UINT8 = 1; 99 | INT16 = 2; 100 | UINT16 = 3; 101 | INT32 = 4; 102 | UINT32 = 5; 103 | FLOAT32 = 6; 104 | FLOAT64 = 7; 105 | } 106 | string name = 1; 107 | uint32 offset = 2; 108 | DataType datatype = 3; 109 | uint32 count = 4; 110 | } 111 | 112 | Header header = 1; 113 | uint32 height = 2; 114 | uint32 width = 3; 115 | repeated PointField fields = 4; 116 | bool is_bigendian = 5; 117 | uint32 point_step = 6; 118 | uint32 row_step = 7; 119 | bytes data = 8; 120 | bool is_dense = 9; 121 | } 122 | 123 | message LaserScan { 124 | Header header = 1; 125 | float angle_min = 2; 126 | float angle_max = 3; 127 | float angle_increment = 4; 128 | float time_increment = 5; 129 | float scan_time = 6; 130 | float range_min = 7; 131 | float range_max = 8; 132 | repeated float ranges = 9; 133 | repeated float intensities = 10; 134 | } 135 | 136 | message Float32 { 137 | float value = 1; 138 | } 139 | 140 | message Int64 { 141 | int64 value = 1; 142 | } 143 | 144 | message Int32 { 145 | int32 value = 1; 146 | } 147 | 148 | message Float64 { 149 | double value = 1; 150 | } 151 | 152 | message String { 153 | string value = 1; 154 | } 155 | -------------------------------------------------------------------------------- /montblanc/datatypes/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use prost::Message; 16 | use rand::distributions::{Alphanumeric, Distribution, Standard}; 17 | use rand::{random, Rng}; 18 | use std::io::Cursor; 19 | use std::time::{SystemTime, UNIX_EPOCH}; 20 | 21 | pub static AMAZON_PORT: &str = "Amazon"; 22 | pub static DANUBE_PORT: &str = "Danube"; 23 | pub static GANGES_PORT: &str = "Ganges"; 24 | pub static NILE_PORT: &str = "Nile"; 25 | pub static TIGRIS_PORT: &str = "Tigris"; 26 | pub static PARANA_PORT: &str = "Parana"; 27 | pub static COLUMBIA_PORT: &str = "Columbia"; 28 | pub static COLORADO_PORT: &str = "Colorado"; 29 | pub static SALWEEN_PORT: &str = "Salween"; 30 | pub static GODAVARI_PORT: &str = "Godavari"; 31 | pub static CHENAB_PORT: &str = "Chenab"; 32 | pub static LOIRE_PORT: &str = "Loire"; 33 | pub static YAMUNA_PORT: &str = "Yamuna"; 34 | pub static BRAZOS_PORT: &str = "Brazos"; 35 | pub static TAGUS_PORT: &str = "Tagus"; 36 | pub static MISSOURI_PORT: &str = "Missouri"; 37 | pub static CONGO_PORT: &str = "Congo"; 38 | pub static MEKONG_PORT: &str = "Mekong"; 39 | pub static ARKANSAS_PORT: &str = "Arkansas"; 40 | pub static OHIO_PORT: &str = "Ohio"; 41 | pub static VOLGA_PORT: &str = "Volga"; 42 | pub static MURRAY_PORT: &str = "Murray"; 43 | pub static LENA_PORT: &str = "Lena"; 44 | 45 | pub mod data_types { 46 | include!(concat!(env!("OUT_DIR"), "/datatypes.data_types.rs")); 47 | } 48 | 49 | pub fn random_string(length: usize) -> String { 50 | rand::thread_rng() 51 | .sample_iter(Alphanumeric) 52 | .take(length) 53 | .map(char::from) 54 | .collect() 55 | } 56 | 57 | pub fn random_bytes(length: usize) -> Vec { 58 | (0..length).map(|_| rand::random::()).collect() 59 | } 60 | 61 | pub fn empty_bytes() -> Vec { 62 | Vec::new() 63 | } 64 | 65 | pub fn random_floats(length: usize) -> Vec { 66 | (0..length).map(|_| rand::random::()).collect() 67 | } 68 | 69 | pub fn random_doubles(length: usize) -> Vec { 70 | (0..length).map(|_| rand::random::()).collect() 71 | } 72 | 73 | impl Distribution for Standard { 74 | fn sample(&self, _rng: &mut R) -> data_types::Header { 75 | let now = SystemTime::now(); 76 | let now_as_duration = now 77 | .duration_since(UNIX_EPOCH) 78 | .expect("System time went backwards"); 79 | data_types::Header { 80 | sec: now_as_duration.as_secs() as i32, 81 | nanosec: now_as_duration.subsec_nanos(), 82 | frame_id: random_string(16), 83 | } 84 | } 85 | } 86 | 87 | impl Distribution for Standard { 88 | fn sample(&self, rng: &mut R) -> data_types::Point { 89 | data_types::Point { 90 | x: rng.gen(), 91 | y: rng.gen(), 92 | z: rng.gen(), 93 | } 94 | } 95 | } 96 | 97 | pub fn serialize_point(point: &data_types::Point) -> Vec { 98 | let mut buf = Vec::new(); 99 | buf.resize(point.encoded_len(), 0); 100 | point.encode(&mut buf).unwrap(); 101 | buf 102 | } 103 | 104 | pub fn deserialize_point(buf: &[u8]) -> Result { 105 | data_types::Point::decode(&mut Cursor::new(buf)) 106 | } 107 | 108 | impl Distribution for Standard { 109 | fn sample(&self, rng: &mut R) -> data_types::Quaternion { 110 | data_types::Quaternion { 111 | x: rng.gen(), 112 | y: rng.gen(), 113 | z: rng.gen(), 114 | w: rng.gen(), 115 | } 116 | } 117 | } 118 | 119 | pub fn serialize_quaternion(quat: &data_types::Quaternion) -> Vec { 120 | let mut buf = Vec::new(); 121 | buf.resize(quat.encoded_len(), 0); 122 | quat.encode(&mut buf).unwrap(); 123 | buf 124 | } 125 | 126 | pub fn deserialize_quaternion(buf: &[u8]) -> Result { 127 
| data_types::Quaternion::decode(&mut Cursor::new(buf)) 128 | } 129 | 130 | impl Distribution for Standard { 131 | fn sample(&self, rng: &mut R) -> data_types::Vector3 { 132 | data_types::Vector3 { 133 | x: rng.gen(), 134 | y: rng.gen(), 135 | z: rng.gen(), 136 | } 137 | } 138 | } 139 | 140 | pub fn serialize_vector3(vec3: &data_types::Vector3) -> Vec { 141 | let mut buf = Vec::new(); 142 | buf.resize(vec3.encoded_len(), 0); 143 | vec3.encode(&mut buf).unwrap(); 144 | buf 145 | } 146 | 147 | pub fn deserialize_vector3(buf: &[u8]) -> Result { 148 | data_types::Vector3::decode(&mut Cursor::new(buf)) 149 | } 150 | 151 | impl Distribution for Standard { 152 | fn sample(&self, rng: &mut R) -> data_types::Vector3Stamped { 153 | data_types::Vector3Stamped { 154 | header: rng.gen(), 155 | vector: rng.gen(), 156 | } 157 | } 158 | } 159 | 160 | pub fn serialize_vector3_stamped(vec3s: &data_types::Vector3Stamped) -> Vec { 161 | let mut buf = Vec::new(); 162 | buf.resize(vec3s.encoded_len(), 0); 163 | vec3s.encode(&mut buf).unwrap(); 164 | buf 165 | } 166 | 167 | pub fn deserialize_vector3_stamped( 168 | buf: &[u8], 169 | ) -> Result { 170 | data_types::Vector3Stamped::decode(&mut Cursor::new(buf)) 171 | } 172 | 173 | impl Distribution for Standard { 174 | fn sample(&self, rng: &mut R) -> data_types::Pose { 175 | data_types::Pose { 176 | position: rng.gen(), 177 | orientation: rng.gen(), 178 | } 179 | } 180 | } 181 | 182 | pub fn serialize_pose(pose: &data_types::Pose) -> Vec { 183 | let mut buf = Vec::new(); 184 | buf.resize(pose.encoded_len(), 0); 185 | pose.encode(&mut buf).unwrap(); 186 | buf 187 | } 188 | 189 | pub fn deserialize_pose(buf: &[u8]) -> Result { 190 | data_types::Pose::decode(&mut Cursor::new(buf)) 191 | } 192 | 193 | impl Distribution for Standard { 194 | fn sample(&self, rng: &mut R) -> data_types::Twist { 195 | data_types::Twist { 196 | linear: rng.gen(), 197 | angular: rng.gen(), 198 | } 199 | } 200 | } 201 | 202 | pub fn serialize_twist(twist: &data_types::Twist) -> Vec { 203 | let mut buf = Vec::new(); 204 | buf.resize(twist.encoded_len(), 0); 205 | twist.encode(&mut buf).unwrap(); 206 | buf 207 | } 208 | 209 | pub fn deserialize_twist(buf: &[u8]) -> Result { 210 | data_types::Twist::decode(&mut Cursor::new(buf)) 211 | } 212 | 213 | impl Distribution for Standard { 214 | fn sample(&self, rng: &mut R) -> data_types::TwistWithCovariance { 215 | data_types::TwistWithCovariance { 216 | twist: rng.gen(), 217 | covariance: random_doubles(36), 218 | } 219 | } 220 | } 221 | 222 | pub fn serialize_twist_with_covariance( 223 | twist_with_cov: &data_types::TwistWithCovariance, 224 | ) -> Vec { 225 | let mut buf = Vec::new(); 226 | buf.resize(twist_with_cov.encoded_len(), 0); 227 | twist_with_cov.encode(&mut buf).unwrap(); 228 | buf 229 | } 230 | 231 | pub fn deserialize_twist_with_covariance( 232 | buf: &[u8], 233 | ) -> Result { 234 | data_types::TwistWithCovariance::decode(&mut Cursor::new(buf)) 235 | } 236 | 237 | impl Distribution for Standard { 238 | fn sample(&self, rng: &mut R) -> data_types::TwistWithCovarianceStamped { 239 | data_types::TwistWithCovarianceStamped { 240 | header: random(), 241 | twist: rng.gen(), 242 | } 243 | } 244 | } 245 | 246 | pub fn serialize_twist_with_covariance_stamped( 247 | twist_with_cov: &data_types::TwistWithCovarianceStamped, 248 | ) -> Vec { 249 | let mut buf = Vec::new(); 250 | buf.resize(twist_with_cov.encoded_len(), 0); 251 | twist_with_cov.encode(&mut buf).unwrap(); 252 | buf 253 | } 254 | 255 | pub fn deserialize_twist_with_covariance_stamped( 
256 | buf: &[u8], 257 | ) -> Result { 258 | data_types::TwistWithCovarianceStamped::decode(&mut Cursor::new(buf)) 259 | } 260 | 261 | impl Distribution for Standard { 262 | fn sample(&self, rng: &mut R) -> data_types::Wrench { 263 | data_types::Wrench { 264 | force: rng.gen(), 265 | torque: rng.gen(), 266 | } 267 | } 268 | } 269 | 270 | pub fn serialize_wrench(wrench: &data_types::Wrench) -> Vec { 271 | let mut buf = Vec::new(); 272 | buf.resize(wrench.encoded_len(), 0); 273 | wrench.encode(&mut buf).unwrap(); 274 | buf 275 | } 276 | 277 | pub fn deserialize_wrench(buf: &[u8]) -> Result { 278 | data_types::Wrench::decode(&mut Cursor::new(buf)) 279 | } 280 | 281 | impl Distribution for Standard { 282 | fn sample(&self, rng: &mut R) -> data_types::WrenchStamped { 283 | data_types::WrenchStamped { 284 | header: rng.gen(), 285 | wrench: rng.gen(), 286 | } 287 | } 288 | } 289 | 290 | pub fn serialize_wrench_stamped(wrench_stamped: &data_types::WrenchStamped) -> Vec { 291 | let mut buf = Vec::new(); 292 | buf.resize(wrench_stamped.encoded_len(), 0); 293 | wrench_stamped.encode(&mut buf).unwrap(); 294 | buf 295 | } 296 | 297 | pub fn deserialize_wrench_stamped( 298 | buf: &[u8], 299 | ) -> Result { 300 | data_types::WrenchStamped::decode(&mut Cursor::new(buf)) 301 | } 302 | 303 | impl Distribution for Standard { 304 | fn sample(&self, rng: &mut R) -> data_types::Image { 305 | data_types::Image { 306 | header: rng.gen(), 307 | height: rng.gen(), 308 | width: rng.gen(), 309 | encoding: random_string(32), 310 | is_bigendian: rng.gen(), 311 | step: rng.gen(), 312 | //data: random_bytes(1920 * 1080 * 3), 313 | data: empty_bytes(), 314 | } 315 | } 316 | } 317 | 318 | pub fn serialize_image(img: &data_types::Image) -> Vec { 319 | let mut buf = Vec::new(); 320 | buf.resize(img.encoded_len(), 0); 321 | img.encode(&mut buf).unwrap(); 322 | buf 323 | } 324 | 325 | pub fn deserialize_image(buf: &[u8]) -> Result { 326 | data_types::Image::decode(&mut Cursor::new(buf)) 327 | } 328 | 329 | impl Distribution for Standard { 330 | fn sample( 331 | &self, 332 | rng: &mut R, 333 | ) -> data_types::point_cloud2::point_field::DataType { 334 | match rng.gen_range(0..=7) { 335 | 0 => data_types::point_cloud2::point_field::DataType::Int8, 336 | 1 => data_types::point_cloud2::point_field::DataType::Uint8, 337 | 2 => data_types::point_cloud2::point_field::DataType::Int16, 338 | 3 => data_types::point_cloud2::point_field::DataType::Uint16, 339 | 4 => data_types::point_cloud2::point_field::DataType::Int32, 340 | 5 => data_types::point_cloud2::point_field::DataType::Uint32, 341 | 6 => data_types::point_cloud2::point_field::DataType::Float32, 342 | 7 => data_types::point_cloud2::point_field::DataType::Float64, 343 | _ => data_types::point_cloud2::point_field::DataType::Int8, 344 | } 345 | } 346 | } 347 | 348 | impl Distribution for Standard { 349 | fn sample(&self, rng: &mut R) -> data_types::point_cloud2::PointField { 350 | data_types::point_cloud2::PointField { 351 | name: random_string(32), 352 | offset: rng.gen(), 353 | datatype: rng.gen(), 354 | count: rng.gen(), 355 | } 356 | } 357 | } 358 | 359 | fn random_point_fields(length: usize) -> Vec { 360 | (0..length) 361 | .map(|_| rand::random::()) 362 | .collect() 363 | } 364 | 365 | impl Distribution for Standard { 366 | fn sample(&self, rng: &mut R) -> data_types::PointCloud2 { 367 | data_types::PointCloud2 { 368 | header: rng.gen(), 369 | height: rng.gen(), 370 | width: rng.gen(), 371 | fields: random_point_fields(3), 372 | is_bigendian: rng.gen(), 373 | point_step: 
rng.gen(), 374 | row_step: rng.gen(), 375 | //data: random_bytes(4 * 4 * 4 * 1280 * 960), 376 | data: empty_bytes(), 377 | is_dense: rng.gen(), 378 | } 379 | } 380 | } 381 | 382 | pub fn serialize_pointcloud2(pc: &data_types::PointCloud2) -> Vec { 383 | let mut buf = Vec::new(); 384 | buf.resize(pc.encoded_len(), 0); 385 | pc.encode(&mut buf).unwrap(); 386 | buf 387 | } 388 | 389 | pub fn deserialize_pointcloud2(buf: &[u8]) -> Result { 390 | data_types::PointCloud2::decode(&mut Cursor::new(buf)) 391 | } 392 | 393 | impl Distribution for Standard { 394 | fn sample(&self, rng: &mut R) -> data_types::LaserScan { 395 | data_types::LaserScan { 396 | header: rng.gen(), 397 | angle_min: rng.gen(), 398 | angle_max: rng.gen(), 399 | angle_increment: rng.gen(), 400 | time_increment: rng.gen(), 401 | scan_time: rng.gen(), 402 | range_min: rng.gen(), 403 | range_max: rng.gen(), 404 | ranges: random_floats(1024), 405 | intensities: random_floats(1024), 406 | } 407 | } 408 | } 409 | 410 | pub fn serialize_laserscan(ls: &data_types::LaserScan) -> Vec { 411 | let mut buf = Vec::new(); 412 | buf.resize(ls.encoded_len(), 0); 413 | ls.encode(&mut buf).unwrap(); 414 | buf 415 | } 416 | 417 | pub fn deserialize_laserscan(buf: &[u8]) -> Result { 418 | data_types::LaserScan::decode(&mut Cursor::new(buf)) 419 | } 420 | -------------------------------------------------------------------------------- /montblanc/delhi/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "delhi" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "delhi" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | 27 | async-std = { workspace = true } 28 | async-trait = { workspace = true } 29 | datatypes = { path = "../datatypes" } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/delhi/delhi.yml: -------------------------------------------------------------------------------- 1 | id : Delhi 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libdelhi.so 4 | 5 | # period: 6 | # length: 1 7 | # unit: s 8 | outputs: [Columbia] 9 | -------------------------------------------------------------------------------- /montblanc/delhi/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::COLUMBIA_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | 21 | #[export_source] 22 | pub struct Delhi { 23 | output: Output<datatypes::data_types::Image>, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Node for Delhi { 28 | async fn iteration(&self) -> Result<()> { 29 | async_std::task::sleep(Duration::from_millis(1000)).await; 30 | let value: datatypes::data_types::Image = random(); 31 | self.output.send(value, None).await 32 | } 33 | } 34 | 35 | #[async_trait::async_trait] 36 | impl Source for Delhi { 37 | async fn new( 38 | _context: Context, 39 | _configuration: Option<Configuration>, 40 | mut outputs: Outputs, 41 | ) -> Result<Self> { 42 | Ok(Self { 43 | output: outputs 44 | .take(COLUMBIA_PORT) 45 | .unwrap_or_else(|| panic!("No Output called '{}' found", COLUMBIA_PORT)) 46 | .typed(|buf, v: &datatypes::data_types::Image| { 47 | buf.resize(v.encoded_len(), 0); 48 | Ok(v.encode(buf)?) 49 | }), 50 | }) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /montblanc/freeport/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "freeport" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "freeport" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | rand = { workspace = true } 31 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/freeport/freeport.yml: -------------------------------------------------------------------------------- 1 | id : Freeport 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libfreeport.so 4 | 5 | # period: 6 | # length: 50 7 | # unit: ms 8 | outputs: [Ganges] 9 | 10 | -------------------------------------------------------------------------------- /montblanc/freeport/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::GANGES_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | 21 | #[export_source] 22 | pub struct Freeport { 23 | output: Output<datatypes::data_types::Int64>, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Node for Freeport { 28 | async fn iteration(&self) -> Result<()> { 29 | async_std::task::sleep(Duration::from_millis(50)).await; 30 | let data: i64 = random::<i64>(); 31 | let value = datatypes::data_types::Int64 { value: data }; 32 | self.output.send(value, None).await 33 | } 34 | } 35 | 36 | #[async_trait::async_trait] 37 | impl Source for Freeport { 38 | async fn new( 39 | _context: Context, 40 | _configuration: Option<Configuration>, 41 | mut outputs: Outputs, 42 | ) -> Result<Self> { 43 | Ok(Self { 44 | output: outputs 45 | .take(GANGES_PORT) 46 | .unwrap_or_else(|| panic!("No Output called '{}' found", GANGES_PORT)) 47 | .typed(|buf, v: &datatypes::data_types::Int64| { 48 | buf.resize(v.encoded_len(), 0); 49 | Ok(v.encode(buf)?) 50 | }), 51 | }) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /montblanc/geneva/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "geneva" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "geneva" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } 33 | 34 | -------------------------------------------------------------------------------- /montblanc/geneva/geneva.yml: -------------------------------------------------------------------------------- 1 | id : Geneva 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libgeneva.so 3 | inputs: 4 | - Danube 5 | - Parana 6 | - Tagus 7 | - Congo 8 | outputs: [Arkansas] 9 | -------------------------------------------------------------------------------- /montblanc/geneva/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{ARKANSAS_PORT, CONGO_PORT, DANUBE_PORT, PARANA_PORT, TAGUS_PORT}; 18 | use futures::prelude::*; 19 | use futures::select; 20 | use prost::Message as _; 21 | use rand::random; 22 | use std::sync::Arc; 23 | use zenoh_flow::prelude::*; 24 | #[derive(Debug, Clone)] 25 | struct GenevaState { 26 | danube_last_val: data_types::String, 27 | tagus_last_val: data_types::Pose, 28 | congo_last_val: data_types::Twist, 29 | } 30 | 31 | #[export_operator] 32 | pub struct Geneva { 33 | input_parana: Input, 34 | input_danube: Input, 35 | input_tagus: Input, 36 | input_congo: Input, 37 | output_arkansas: Output, 38 | state: Arc>, 39 | } 40 | 41 | #[async_trait::async_trait] 42 | impl Operator for Geneva { 43 | async fn new( 44 | _context: Context, 45 | _configuration: Option, 46 | mut inputs: Inputs, 47 | mut outputs: Outputs, 48 | ) -> Result { 49 | Ok(Self { 50 | input_parana: inputs 51 | .take(PARANA_PORT) 52 | .unwrap_or_else(|| panic!("No Input called '{}' found", PARANA_PORT)) 53 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 54 | input_danube: inputs 55 | .take(DANUBE_PORT) 56 | .unwrap_or_else(|| panic!("No Input called '{}' found", DANUBE_PORT)) 57 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 58 | input_tagus: inputs 59 | .take(TAGUS_PORT) 60 | .unwrap_or_else(|| panic!("No Input called '{}' found", TAGUS_PORT)) 61 | .typed(|buf| Ok(data_types::Pose::decode(buf)?)), 62 | input_congo: inputs 63 | .take(CONGO_PORT) 64 | .unwrap_or_else(|| panic!("No Input called '{}' found", CONGO_PORT)) 65 | .typed(|buf| Ok(data_types::Twist::decode(buf)?)), 66 | output_arkansas: outputs 67 | .take(ARKANSAS_PORT) 68 | .unwrap_or_else(|| panic!("No Output called '{}' found", ARKANSAS_PORT)) 69 | .typed(|buf, v: &data_types::String| { 70 | buf.resize(v.encoded_len(), 0); 71 | Ok(v.encode(buf)?) 72 | }), 73 | state: Arc::new(Mutex::new(GenevaState { 74 | danube_last_val: data_types::String { 75 | value: datatypes::random_string(1), 76 | }, 77 | tagus_last_val: random(), 78 | congo_last_val: random(), 79 | })), 80 | }) 81 | } 82 | } 83 | 84 | #[async_trait::async_trait] 85 | impl Node for Geneva { 86 | async fn iteration(&self) -> Result<()> { 87 | select! 
{ 88 | msg = self.input_danube.recv().fuse() => { 89 | if let Ok((Message::Data(inner_data), _ts)) = msg { 90 | self.state.lock().await.danube_last_val = (*inner_data).clone(); 91 | } 92 | }, 93 | msg = self.input_tagus.recv().fuse() => { 94 | if let Ok((Message::Data(inner_data), _ts)) = msg { 95 | self.state.lock().await.tagus_last_val = (*inner_data).clone(); 96 | } 97 | }, 98 | msg = self.input_congo.recv().fuse() => { 99 | if let Ok((Message::Data(inner_data), _ts)) = msg { 100 | self.state.lock().await.congo_last_val = (*inner_data).clone(); 101 | } 102 | }, 103 | msg = self.input_parana.recv().fuse() => { 104 | if let Ok((Message::Data(inner_data), _ts)) = msg { 105 | let value = data_types::String { 106 | value: format!("geneva/arkansas:{}", inner_data.value), 107 | }; 108 | 109 | self.output_arkansas.send(value, None).await?; 110 | } 111 | } 112 | } 113 | Ok(()) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /montblanc/georgetown/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "georgetown" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "georgetown" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/georgetown/georgetown.yml: -------------------------------------------------------------------------------- 1 | id : Georgetown 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libgeorgetown.so 3 | inputs: 4 | - Lena 5 | - Murray 6 | outputs: [Volga] 7 | -------------------------------------------------------------------------------- /montblanc/georgetown/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{LENA_PORT, MURRAY_PORT, VOLGA_PORT}; 18 | use futures::prelude::*; 19 | use futures::select; 20 | use prost::Message as pMessage; 21 | use rand::random; 22 | use std::sync::Arc; 23 | use std::time::Duration; 24 | use zenoh_flow::prelude::*; 25 | 26 | #[derive(Debug, Clone)] 27 | struct GeorgetownState { 28 | murray_last_val: data_types::Vector3Stamped, 29 | lena_last_val: data_types::WrenchStamped, 30 | 31 | f64_data: data_types::Float64, 32 | } 33 | 34 | #[export_operator] 35 | pub struct Georgetown { 36 | input_murray: Input, 37 | input_lena: Input, 38 | output_volga: Output, 39 | state: Arc>, 40 | } 41 | 42 | #[async_trait::async_trait] 43 | impl Operator for Georgetown { 44 | async fn new( 45 | _context: Context, 46 | _configuration: Option, 47 | mut inputs: Inputs, 48 | mut outputs: Outputs, 49 | ) -> Result { 50 | Ok(Self { 51 | input_murray: inputs 52 | .take(MURRAY_PORT) 53 | .unwrap_or_else(|| panic!("No Input called '{}' found", MURRAY_PORT)) 54 | .typed(|buf| Ok(data_types::Vector3Stamped::decode(buf)?)), 55 | input_lena: inputs 56 | .take(LENA_PORT) 57 | .unwrap_or_else(|| panic!("No Input called '{}' found", LENA_PORT)) 58 | .typed(|buf| Ok(data_types::WrenchStamped::decode(buf)?)), 59 | output_volga: outputs 60 | .take(VOLGA_PORT) 61 | .unwrap_or_else(|| panic!("No Output called '{}' found", VOLGA_PORT)) 62 | .typed(|buf, v: &data_types::Float64| { 63 | buf.resize(v.encoded_len(), 0); 64 | Ok(v.encode(buf)?) 65 | }), 66 | state: Arc::new(Mutex::new(GeorgetownState { 67 | murray_last_val: random(), 68 | lena_last_val: random(), 69 | f64_data: data_types::Float64 { value: random() }, 70 | })), 71 | }) 72 | } 73 | } 74 | 75 | #[async_trait::async_trait] 76 | impl Node for Georgetown { 77 | async fn iteration(&self) -> Result<()> { 78 | select! { 79 | msg = self.input_murray.recv().fuse() => { 80 | if let Ok((Message::Data(inner_data), _ts)) = msg { 81 | self.state.lock().await.murray_last_val = (*inner_data).clone(); 82 | } 83 | }, 84 | msg = self.input_lena.recv().fuse() => { 85 | if let Ok((Message::Data(inner_data), _ts)) = msg { 86 | self.state.lock().await.lena_last_val = (*inner_data).clone(); 87 | } 88 | }, 89 | // Output every 50ms 90 | _ = async_std::task::sleep(Duration::from_millis(50)).fuse() => { 91 | let guard_state = self.state.lock().await; 92 | self.output_volga.send(guard_state.f64_data.clone(), None).await?; 93 | } 94 | } 95 | Ok(()) 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /montblanc/hamburg/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "hamburg" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "hamburg" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | zenoh-flow = { workspace = true } 32 | 33 | -------------------------------------------------------------------------------- /montblanc/hamburg/hamburg.yml: -------------------------------------------------------------------------------- 1 | id : Hamburg 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libhamburg.so 3 | inputs: 4 | - Danube 5 | - Ganges 6 | - Nile 7 | - Tigris 8 | outputs: [Parana] 9 | -------------------------------------------------------------------------------- /montblanc/hamburg/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{DANUBE_PORT, GANGES_PORT, NILE_PORT, PARANA_PORT, TIGRIS_PORT}; 18 | use futures::prelude::*; 19 | use futures::select; 20 | use prost::Message as pMessage; 21 | use std::sync::Arc; 22 | use zenoh_flow::prelude::*; 23 | 24 | #[derive(Debug, Clone)] 25 | struct HamburgState { 26 | ganges_last_val: i64, 27 | nile_last_val: i32, 28 | tigris_last_val: f32, 29 | } 30 | 31 | #[export_operator] 32 | pub struct Hamburg { 33 | input_tigris: Input, 34 | input_ganges: Input, 35 | input_nile: Input, 36 | input_danube: Input, 37 | output_parana: Output, 38 | state: Arc>, 39 | } 40 | 41 | #[async_trait::async_trait] 42 | impl Operator for Hamburg { 43 | async fn new( 44 | _context: Context, 45 | _configuration: Option, 46 | mut inputs: Inputs, 47 | mut outputs: Outputs, 48 | ) -> Result { 49 | Ok(Self { 50 | input_tigris: inputs 51 | .take(TIGRIS_PORT) 52 | .unwrap_or_else(|| panic!("No Input called '{}' found", TIGRIS_PORT)) 53 | .typed(|buf| Ok(data_types::Float32::decode(buf)?)), 54 | input_ganges: inputs 55 | .take(GANGES_PORT) 56 | .unwrap_or_else(|| panic!("No Input called '{}' found", GANGES_PORT)) 57 | .typed(|buf| Ok(data_types::Int64::decode(buf)?)), 58 | input_nile: inputs 59 | .take(NILE_PORT) 60 | .unwrap_or_else(|| panic!("No Input called '{}' found", NILE_PORT)) 61 | .typed(|buf| Ok(data_types::Int32::decode(buf)?)), 62 | input_danube: inputs 63 | .take(DANUBE_PORT) 64 | .unwrap_or_else(|| panic!("No Input called '{}' found", DANUBE_PORT)) 65 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 66 | output_parana: outputs 67 | .take(PARANA_PORT) 68 | .unwrap_or_else(|| panic!("No Output called '{}' found", PARANA_PORT)) 69 | .typed(|buf, v: &data_types::String| { 70 | buf.resize(v.encoded_len(), 0); 71 | Ok(v.encode(buf)?) 
72 | }), 73 | state: Arc::new(Mutex::new(HamburgState { 74 | ganges_last_val: 0i64, 75 | nile_last_val: 0i32, 76 | tigris_last_val: 0.0f32, 77 | })), 78 | }) 79 | } 80 | } 81 | 82 | #[async_trait::async_trait] 83 | impl Node for Hamburg { 84 | async fn iteration(&self) -> Result<()> { 85 | select! { 86 | msg = self.input_tigris.recv().fuse() => { 87 | if let Ok((Message::Data(inner_data), _ts)) = msg { 88 | self.state.lock().await.tigris_last_val = inner_data.value; 89 | } 90 | }, 91 | msg = self.input_ganges.recv().fuse() => { 92 | if let Ok((Message::Data(inner_data), _ts)) = msg { 93 | self.state.lock().await.ganges_last_val = inner_data.value; 94 | } 95 | }, 96 | msg = self.input_nile.recv().fuse() => { 97 | if let Ok((Message::Data(inner_data), _ts)) = msg { 98 | self.state.lock().await.nile_last_val = inner_data.value; 99 | } 100 | }, 101 | msg = self.input_danube.recv().fuse() => { 102 | if let Ok((Message::Data(inner_data), _ts)) = msg { 103 | let new_value = data_types::String { 104 | value: format!("hamburg/parana:{}", inner_data.value) 105 | }; 106 | self.output_parana.send(new_value, None).await?; 107 | } 108 | } 109 | } 110 | Ok(()) 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /montblanc/hebron/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "hebron" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "hebron" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | rand = { workspace = true } 31 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/hebron/hebron.yml: -------------------------------------------------------------------------------- 1 | id : Hebron 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libhebron.so 4 | 5 | # period: 6 | # length: 100 7 | # unit: ms 8 | outputs: [Chenab] 9 | -------------------------------------------------------------------------------- /montblanc/hebron/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::CHENAB_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | #[export_source] 21 | pub struct Hebron { 22 | output: Output<datatypes::data_types::Quaternion>, 23 | } 24 | 25 | #[async_trait::async_trait] 26 | impl Node for Hebron { 27 | async fn iteration(&self) -> Result<()> { 28 | async_std::task::sleep(Duration::from_millis(100)).await; 29 | let value: datatypes::data_types::Quaternion = random(); 30 | self.output.send(value, None).await 31 | } 32 | } 33 | 34 | #[async_trait::async_trait] 35 | impl Source for Hebron { 36 | async fn new( 37 | _context: Context, 38 | _configuration: Option<Configuration>, 39 | mut outputs: Outputs, 40 | ) -> Result<Self> { 41 | Ok(Self { 42 | output: outputs 43 | .take(CHENAB_PORT) 44 | .unwrap_or_else(|| panic!("No Output called '{}' found", CHENAB_PORT)) 45 | .typed(|buf, v: &datatypes::data_types::Quaternion| { 46 | buf.resize(v.encoded_len(), 0); 47 | Ok(v.encode(buf)?) 48 | }), 49 | }) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /montblanc/kingston/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "kingston" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "kingston" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | rand = { workspace = true } 31 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/kingston/kingston.yml: -------------------------------------------------------------------------------- 1 | id : Kingston 2 | 3 | # period: 4 | # length: 100 5 | # unit: ms 6 | 7 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libkingston.so 8 | 9 | outputs: [Yamuna] 10 | -------------------------------------------------------------------------------- /montblanc/kingston/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::YAMUNA_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | 21 | #[export_source] 22 | pub struct Kingston { 23 | output: Output<datatypes::data_types::Vector3>, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Node for Kingston { 28 | async fn iteration(&self) -> Result<()> { 29 | async_std::task::sleep(Duration::from_millis(100)).await; 30 | let value: datatypes::data_types::Vector3 = random(); 31 | self.output.send(value, None).await 32 | } 33 | } 34 | 35 | #[async_trait::async_trait] 36 | impl Source for Kingston { 37 | async fn new( 38 | _context: Context, 39 | _configuration: Option<Configuration>, 40 | mut outputs: Outputs, 41 | ) -> Result<Self> { 42 | Ok(Self { 43 | output: outputs 44 | .take(YAMUNA_PORT) 45 | .unwrap_or_else(|| panic!("No Output called '{}' found", YAMUNA_PORT)) 46 | .typed(|buf, v: &datatypes::data_types::Vector3| { 47 | buf.resize(v.encoded_len(), 0); 48 | Ok(v.encode(buf)?) 49 | }), 50 | }) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /montblanc/lyon/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "lyon" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "lyon" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/lyon/lyon.yml: -------------------------------------------------------------------------------- 1 | id : Lyon 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/release/liblyon.so 3 | inputs: [Amazon] 4 | outputs: [Tigris] 5 | -------------------------------------------------------------------------------- /montblanc/lyon/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::{AMAZON_PORT, TIGRIS_PORT}; 16 | use prost::Message; 17 | use zenoh_flow::prelude::*; 18 | 19 | #[export_operator] 20 | pub struct Lyon { 21 | input: Input<datatypes::data_types::Float32>, 22 | output: Output<datatypes::data_types::Float32>, 23 | } 24 | 25 | #[async_trait::async_trait] 26 | impl Operator for Lyon { 27 | async fn new( 28 | _context: Context, 29 | _configuration: Option<Configuration>, 30 | mut inputs: Inputs, 31 | mut outputs: Outputs, 32 | ) -> Result<Self> { 33 | Ok(Self { 34 | input: inputs 35 | .take(AMAZON_PORT) 36 | .unwrap_or_else(|| panic!("No Input called '{}' found", AMAZON_PORT)) 37 | .typed(|buf| Ok(datatypes::data_types::Float32::decode(buf)?)), 38 | output: outputs 39 | .take(TIGRIS_PORT) 40 | .unwrap_or_else(|| panic!("No Output called '{}' found", TIGRIS_PORT)) 41 | .typed(|buf, v: &datatypes::data_types::Float32| { 42 | buf.resize(v.encoded_len(), 0); 43 | Ok(v.encode(buf)?) 44 | }), 45 | }) 46 | } 47 | } 48 | 49 | #[async_trait::async_trait] 50 | impl Node for Lyon { 51 | async fn iteration(&self) -> Result<()> { 52 | let (msg, _ts) = self.input.recv().await?; 53 | if let zenoh_flow::prelude::Message::Data(data) = msg { 54 | self.output.send(data, None).await?; 55 | } 56 | Ok(()) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /montblanc/madelin/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "madelin" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "madelin" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | rand = { workspace = true } 31 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/madelin/madelin.yml: -------------------------------------------------------------------------------- 1 | id : Madelin 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libmadelin.so 4 | 5 | # period: 6 | # length: 10 7 | # unit: ms 8 | 9 | outputs: [Nile] 10 | -------------------------------------------------------------------------------- /montblanc/madelin/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::NILE_PORT; 16 | use prost::Message; 17 | use rand::random; 18 | use std::time::Duration; 19 | use zenoh_flow::prelude::*; 20 | 21 | #[export_source] 22 | pub struct Madelin { 23 | output: Output, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Node for Madelin { 28 | async fn iteration(&self) -> Result<()> { 29 | async_std::task::sleep(Duration::from_millis(10)).await; 30 | let data: i32 = random::(); 31 | let value = datatypes::data_types::Int32 { value: data }; 32 | self.output.send(value, None).await 33 | } 34 | } 35 | 36 | #[async_trait::async_trait] 37 | impl Source for Madelin { 38 | async fn new( 39 | _context: Context, 40 | _configuration: Option, 41 | mut outputs: Outputs, 42 | ) -> Result { 43 | Ok(Self { 44 | output: outputs 45 | .take(NILE_PORT) 46 | .unwrap_or_else(|| panic!("No Output called '{}' found", NILE_PORT)) 47 | .typed(|buf, v: &datatypes::data_types::Int32| { 48 | buf.resize(v.encoded_len(), 0); 49 | Ok(v.encode(buf)?) 50 | }), 51 | }) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /montblanc/mandalay/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "mandalay" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "mandalay" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/mandalay/mandalay.yml: -------------------------------------------------------------------------------- 1 | id : Mandalay 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libmandalay.so 3 | # period: 4 | # length: 100 5 | # unit: ms 6 | inputs: 7 | - Danube 8 | - Chenab 9 | - Salween 10 | - Godavari 11 | - Loire 12 | - Yamuna 13 | outputs: 14 | - Brazos 15 | - Tagus 16 | - Missouri 17 | -------------------------------------------------------------------------------- /montblanc/mandalay/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{ 18 | BRAZOS_PORT, CHENAB_PORT, DANUBE_PORT, GODAVARI_PORT, LOIRE_PORT, MISSOURI_PORT, SALWEEN_PORT, 19 | TAGUS_PORT, YAMUNA_PORT, 20 | }; 21 | use futures::prelude::*; 22 | use futures::select; 23 | use prost::Message as _; 24 | use rand::random; 25 | use std::sync::Arc; 26 | use std::time::Duration; 27 | use zenoh_flow::prelude::*; 28 | 29 | #[derive(Debug, Clone)] 30 | struct MandalayState { 31 | danube_last_val: data_types::String, 32 | chenab_last_val: data_types::Quaternion, 33 | salween_last_val: data_types::PointCloud2, 34 | godavari_last_val: data_types::LaserScan, 35 | loire_last_val: data_types::PointCloud2, 36 | yamuna_last_val: data_types::Vector3, 37 | pointcloud2_data: data_types::PointCloud2, 38 | pose_data: data_types::Pose, 39 | img_data: data_types::Image, 40 | } 41 | 42 | #[export_operator] 43 | pub struct Mandalay { 44 | input_danube: Input, 45 | input_chenab: Input, 46 | input_salween: Input, 47 | input_godavari: Input, 48 | input_loire: Input, 49 | input_yamuna: Input, 50 | output_brazos: Output, 51 | output_tagus: Output, 52 | output_missouri: Output, 53 | state: Arc>, 54 | } 55 | 56 | #[async_trait::async_trait] 57 | impl Operator for Mandalay { 58 | async fn new( 59 | _context: Context, 60 | _configuration: Option, 61 | mut inputs: Inputs, 62 | mut outputs: Outputs, 63 | ) -> Result { 64 | Ok(Self { 65 | input_danube: inputs 66 | .take(DANUBE_PORT) 67 | .unwrap_or_else(|| panic!("No Input called '{}' found", DANUBE_PORT)) 68 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 69 | input_chenab: inputs 70 | .take(CHENAB_PORT) 71 | .unwrap_or_else(|| panic!("No Input called '{}' found", CHENAB_PORT)) 72 | .typed(|buf| Ok(data_types::Quaternion::decode(buf)?)), 73 | input_salween: inputs 74 | .take(SALWEEN_PORT) 75 | .unwrap_or_else(|| panic!("No Input called '{}' found", SALWEEN_PORT)) 76 | .typed(|buf| Ok(data_types::PointCloud2::decode(buf)?)), 77 | input_godavari: inputs 78 | .take(GODAVARI_PORT) 79 | .unwrap_or_else(|| panic!("No Input called '{}' found", GODAVARI_PORT)) 80 | .typed(|buf| Ok(data_types::LaserScan::decode(buf)?)), 81 | input_loire: inputs 82 | .take(LOIRE_PORT) 83 | .unwrap_or_else(|| panic!("No Input called '{}' found", LOIRE_PORT)) 84 | .typed(|buf| Ok(data_types::PointCloud2::decode(buf)?)), 85 | input_yamuna: inputs 86 | .take(YAMUNA_PORT) 87 | .unwrap_or_else(|| panic!("No Input called '{}' found", YAMUNA_PORT)) 88 | .typed(|buf| Ok(data_types::Vector3::decode(buf)?)), 89 | output_brazos: outputs 90 | .take(BRAZOS_PORT) 91 | .unwrap_or_else(|| panic!("No Output called '{}' found", BRAZOS_PORT)) 92 | .typed(|buf, v: &data_types::PointCloud2| { 93 | buf.resize(v.encoded_len(), 0); 94 | Ok(v.encode(buf)?) 95 | }), 96 | output_tagus: outputs 97 | .take(TAGUS_PORT) 98 | .unwrap_or_else(|| panic!("No Output called '{}' found", TAGUS_PORT)) 99 | .typed(|buf, v: &data_types::Pose| { 100 | buf.resize(v.encoded_len(), 0); 101 | Ok(v.encode(buf)?) 102 | }), 103 | output_missouri: outputs 104 | .take(MISSOURI_PORT) 105 | .unwrap_or_else(|| panic!("No Output called '{}' found", MISSOURI_PORT)) 106 | .typed(|buf, v: &data_types::Image| { 107 | buf.resize(v.encoded_len(), 0); 108 | Ok(v.encode(buf)?) 
109 | }), 110 | state: Arc::new(Mutex::new(MandalayState { 111 | danube_last_val: data_types::String { 112 | value: datatypes::random_string(1), 113 | }, 114 | chenab_last_val: random(), 115 | salween_last_val: random(), 116 | godavari_last_val: random(), 117 | loire_last_val: random(), 118 | yamuna_last_val: random(), 119 | pointcloud2_data: random(), 120 | pose_data: random(), 121 | img_data: random(), 122 | })), 123 | }) 124 | } 125 | } 126 | 127 | #[async_trait::async_trait] 128 | impl Node for Mandalay { 129 | async fn iteration(&self) -> Result<()> { 130 | select! { 131 | msg = self.input_danube.recv().fuse() => { 132 | if let Ok((Message::Data(inner_data), _ts)) = msg { 133 | self.state.lock().await.danube_last_val = (*inner_data).clone(); 134 | } 135 | }, 136 | msg = self.input_chenab.recv().fuse() => { 137 | if let Ok((Message::Data(inner_data), _ts)) = msg { 138 | self.state.lock().await.chenab_last_val = (*inner_data).clone(); 139 | } 140 | }, 141 | msg = self.input_salween.recv().fuse() => { 142 | if let Ok((Message::Data(inner_data), _ts)) = msg { 143 | self.state.lock().await.salween_last_val = (*inner_data).clone(); 144 | } 145 | }, 146 | msg = self.input_godavari.recv().fuse() => { 147 | if let Ok((Message::Data(inner_data), _ts)) = msg { 148 | self.state.lock().await.godavari_last_val = (*inner_data).clone(); 149 | } 150 | }, 151 | msg = self.input_loire.recv().fuse() => { 152 | if let Ok((Message::Data(inner_data), _ts)) = msg { 153 | self.state.lock().await.loire_last_val = (*inner_data).clone(); 154 | } 155 | }, 156 | msg = self.input_yamuna.recv().fuse() => { 157 | if let Ok((Message::Data(inner_data), _ts)) = msg { 158 | self.state.lock().await.yamuna_last_val = (*inner_data).clone(); 159 | } 160 | }, 161 | // Output every 100ms 162 | _ = async_std::task::sleep(Duration::from_millis(100)).fuse() => { 163 | 164 | let guard_state = self.state.lock().await; 165 | 166 | self.output_brazos.send(guard_state.pointcloud2_data.clone(), None).await?; 167 | self.output_tagus.send(guard_state.pose_data.clone(), None).await?; 168 | self.output_missouri.send(guard_state.img_data.clone(), None).await?; 169 | } 170 | 171 | } 172 | Ok(()) 173 | } 174 | } 175 | -------------------------------------------------------------------------------- /montblanc/monaco/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "monaco" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "monaco" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/monaco/monaco.yml: -------------------------------------------------------------------------------- 1 | id : Monaco 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/release/libmonaco.so 3 | inputs: [Congo] 4 | outputs: [Ohio] 5 | -------------------------------------------------------------------------------- /montblanc/monaco/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::data_types; 16 | use datatypes::{CONGO_PORT, OHIO_PORT}; 17 | use futures::prelude::*; 18 | use futures::select; 19 | use prost::Message as pMessage; 20 | use rand::random; 21 | use zenoh_flow::prelude::*; 22 | 23 | #[export_operator] 24 | pub struct Monaco { 25 | input_congo: Input, 26 | output_ohio: Output, 27 | } 28 | 29 | #[async_trait::async_trait] 30 | impl Operator for Monaco { 31 | async fn new( 32 | _context: Context, 33 | _configuration: Option, 34 | mut inputs: Inputs, 35 | mut outputs: Outputs, 36 | ) -> Result { 37 | Ok(Self { 38 | input_congo: inputs 39 | .take(CONGO_PORT) 40 | .unwrap_or_else(|| panic!("No Input called '{}' found", CONGO_PORT)) 41 | .typed(|buf| Ok(data_types::Twist::decode(buf)?)), 42 | output_ohio: outputs 43 | .take(OHIO_PORT) 44 | .unwrap_or_else(|| panic!("No Output called '{}' found", OHIO_PORT)) 45 | .typed(|buf, v: &data_types::Float32| { 46 | buf.resize(v.encoded_len(), 0); 47 | Ok(v.encode(buf)?) 48 | }), 49 | }) 50 | } 51 | } 52 | 53 | #[async_trait::async_trait] 54 | impl Node for Monaco { 55 | async fn iteration(&self) -> Result<()> { 56 | select! 
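// Single-branch `select!`: each Twist received on Congo triggers sending a freshly generated random Float32 on Ohio, the feedback value that loops back into Ponce (see the "feedbacks" links in montblanc.yml).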
{ 57 | msg = self.input_congo.recv().fuse() => { 58 | if let Ok((Message::Data(_inner_data), _ts)) = msg { 59 | let value = data_types::Float32 { value: random() }; 60 | self.output_ohio.send(value, None).await?; 61 | }} 62 | } 63 | Ok(()) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /montblanc/mont_blanc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZettaScaleLabs/zenoh-flow-examples/ca030254811d72bb777a944905126d4a81909566/montblanc/mont_blanc.png -------------------------------------------------------------------------------- /montblanc/montblanc.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | flow: Montblanc 16 | vars: 17 | BASE_DIR: "/home/ato/Workspace/zenoh-flow-examples/montblanc" 18 | 19 | operators: 20 | - id : Lyon 21 | descriptor: "file://{{BASE_DIR}}/lyon/lyon.yml" 22 | 23 | - id : Hamburg 24 | descriptor: "file://{{BASE_DIR}}/hamburg/hamburg.yml" 25 | 26 | - id : Taipei 27 | descriptor: "file://{{BASE_DIR}}/taipei/taipei.yml" 28 | 29 | - id : Osaka 30 | descriptor: "file://{{BASE_DIR}}/osaka/osaka.yml" 31 | 32 | - id : Tripoli 33 | descriptor: "file://{{BASE_DIR}}/tripoli/tripoli.yml" 34 | 35 | - id : Mandalay 36 | descriptor: "file://{{BASE_DIR}}/mandalay/mandalay.yml" 37 | 38 | - id : Ponce 39 | descriptor: "file://{{BASE_DIR}}/ponce/ponce.yml" 40 | 41 | - id : Geneva 42 | descriptor: "file://{{BASE_DIR}}/geneva/geneva.yml" 43 | 44 | - id : Monaco 45 | descriptor: "file://{{BASE_DIR}}/monaco/monaco.yml" 46 | 47 | - id : Rotterdam 48 | descriptor: "file://{{BASE_DIR}}/rotterdam/rotterdam.yml" 49 | 50 | - id : Barcelona 51 | descriptor: "file://{{BASE_DIR}}/barcelona/barcelona.yml" 52 | 53 | - id : Georgetown 54 | descriptor: "file://{{BASE_DIR}}/georgetown/georgetown.yml" 55 | 56 | 57 | 58 | 59 | sources: 60 | - id : Cordoba 61 | descriptor: "file://{{BASE_DIR}}/cordoba/cordoba.yml" 62 | 63 | - id : Portsmouth 64 | descriptor: "file://{{BASE_DIR}}/portsmouth/portsmouth.yml" 65 | 66 | - id : Freeport 67 | descriptor: "file://{{BASE_DIR}}/freeport/freeport.yml" 68 | 69 | - id : Madelin 70 | descriptor: "file://{{BASE_DIR}}/madelin/madelin.yml" 71 | 72 | - id : Delhi 73 | descriptor: "file://{{BASE_DIR}}/delhi/delhi.yml" 74 | 75 | - id : Hebron 76 | descriptor: "file://{{BASE_DIR}}/hebron/hebron.yml" 77 | 78 | - id : Kingston 79 | descriptor: "file://{{BASE_DIR}}/kingston/kingston.yml" 80 | 81 | sinks: 82 | - id : Arequipa 83 | descriptor: "file://{{BASE_DIR}}/arequipa/arequipa.yml" 84 | 85 | links: 86 | - from: 87 | node : Cordoba 88 | output : Amazon 89 | to: 90 | node : Lyon 91 | input : Amazon 92 | - from: 93 | node : Portsmouth 94 | output : Danube 95 | to: 96 | node : Hamburg 97 | input : Danube 98 | - from: 99 | node : Freeport 100 | output : Ganges 101 | to: 102 | node : Hamburg 103 | input : Ganges 104 | - from: 105 | node : Madelin 106 | output : Nile 107 | to: 108 | node : Hamburg 109 | input : Nile 110 | - from: 111 | 
node : Lyon 112 | output : Tigris 113 | to: 114 | node : Hamburg 115 | input : Tigris 116 | - from: 117 | node : Delhi 118 | output : Columbia 119 | to: 120 | node : Taipei 121 | input : Columbia 122 | - from: 123 | node : Delhi 124 | output : Columbia 125 | to: 126 | node : Osaka 127 | input : Columbia 128 | - from: 129 | node : Taipei 130 | output : Colorado 131 | to: 132 | node : Osaka 133 | input : Colorado 134 | - from: 135 | node : Hamburg 136 | output : Parana 137 | to: 138 | node : Osaka 139 | input : Parana 140 | - from: 141 | node : Osaka 142 | output : Godavari 143 | to: 144 | node : Tripoli 145 | input : Godavari 146 | - from: 147 | node : Delhi 148 | output : Columbia 149 | to: 150 | node : Tripoli 151 | input : Columbia 152 | - from: 153 | node : Portsmouth 154 | output : Danube 155 | to: 156 | node : Mandalay 157 | input : Danube 158 | - from: 159 | node : Osaka 160 | output : Salween 161 | to: 162 | node : Mandalay 163 | input : Salween 164 | - from: 165 | node : Hebron 166 | output : Chenab 167 | to: 168 | node : Mandalay 169 | input : Chenab 170 | - from: 171 | node : Osaka 172 | output : Godavari 173 | to: 174 | node : Mandalay 175 | input : Godavari 176 | - from: 177 | node : Tripoli 178 | output : Loire 179 | to: 180 | node : Mandalay 181 | input : Loire 182 | - from: 183 | node : Kingston 184 | output : Yamuna 185 | to: 186 | node : Mandalay 187 | input : Yamuna 188 | - from: 189 | node : Portsmouth 190 | output : Danube 191 | to: 192 | node : Ponce 193 | input : Danube 194 | - from: 195 | node : Mandalay 196 | output : Brazos 197 | to: 198 | node : Ponce 199 | input : Brazos 200 | - from: 201 | node : Mandalay 202 | output : Tagus 203 | to: 204 | node : Ponce 205 | input : Tagus 206 | - from: 207 | node : Mandalay 208 | output : Missouri 209 | to: 210 | node : Ponce 211 | input : Missouri 212 | - from: 213 | node : Tripoli 214 | output : Loire 215 | to: 216 | node : Ponce 217 | input : Loire 218 | - from: 219 | node : Kingston 220 | output : Yamuna 221 | to: 222 | node : Ponce 223 | input : Yamuna 224 | - from: 225 | node : Osaka 226 | output : Godavari 227 | to: 228 | node : Ponce 229 | input : Godavari 230 | - from: 231 | node : Ponce 232 | output : Congo 233 | to: 234 | node : Geneva 235 | input : Congo 236 | - from: 237 | node : Portsmouth 238 | output : Danube 239 | to: 240 | node : Geneva 241 | input : Danube 242 | - from: 243 | node : Hamburg 244 | output : Parana 245 | to: 246 | node : Geneva 247 | input : Parana 248 | - from: 249 | node : Mandalay 250 | output : Tagus 251 | to: 252 | node : Geneva 253 | input : Tagus 254 | - from: 255 | node : Geneva 256 | output : Arkansas 257 | to: 258 | node : Arequipa 259 | input : Arkansas 260 | - from: 261 | node : Ponce 262 | output : Congo 263 | to: 264 | node : Monaco 265 | input : Congo 266 | - from: 267 | node : Ponce 268 | output : Mekong 269 | to: 270 | node : Rotterdam 271 | input : Mekong 272 | - from: 273 | node : Ponce 274 | output : Mekong 275 | to: 276 | node : Barcelona 277 | input : Mekong 278 | - from: 279 | node : Rotterdam 280 | output : Murray 281 | to: 282 | node : Georgetown 283 | input : Murray 284 | - from: 285 | node : Barcelona 286 | output : Lena 287 | to: 288 | node : Georgetown 289 | input : Lena 290 | 291 | ### feedbacks 292 | - from: 293 | node: Monaco 294 | output: Ohio 295 | to: 296 | node: Ponce 297 | input: Ohio 298 | - from: 299 | node: Georgetown 300 | output: Volga 301 | to: 302 | node: Ponce 303 | input: Volga 304 | 305 | # mapping: 306 | # Cordoba: robot 307 | # Portsmouth: robot 
308 | # Freeport: robot 309 | # Madelin: robot 310 | # Lyon: robot 311 | # Delhi: robot 312 | # Hamburg: robot 313 | # Taipei: robot 314 | # Osaka: robot 315 | # Hebron: robot 316 | # Tripoli: robot 317 | # Kingston: robot 318 | # Mandalay: robot 319 | # Ponce: robot 320 | # Arequipa: ws 321 | # Geneva: ws 322 | # Monaco: ws 323 | # Rotterdam: ws 324 | # Barcelona: ws 325 | # Georgetown: ws -------------------------------------------------------------------------------- /montblanc/osaka/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "osaka" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "osaka" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/osaka/osaka.yml: -------------------------------------------------------------------------------- 1 | id : Osaka 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libosaka.so 3 | inputs: 4 | - Parana 5 | - Columbia 6 | - Colorado 7 | outputs: 8 | - Salween 9 | - Godavari 10 | -------------------------------------------------------------------------------- /montblanc/osaka/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{COLORADO_PORT, COLUMBIA_PORT, GODAVARI_PORT, PARANA_PORT, SALWEEN_PORT}; 18 | use futures::prelude::*; 19 | use futures::select; 20 | use prost::Message as _; 21 | use rand::random; 22 | use std::sync::Arc; 23 | use zenoh_flow::prelude::*; 24 | 25 | #[derive(Debug, Clone)] 26 | struct OsakaState { 27 | parana_last_val: data_types::String, 28 | columbia_last_val: data_types::Image, 29 | _colorado_last_val: data_types::Image, 30 | pointcloud2_data: data_types::PointCloud2, 31 | laserscan_data: data_types::LaserScan, 32 | } 33 | 34 | #[export_operator] 35 | pub struct Osaka { 36 | input_parana: Input, 37 | input_columbia: Input, 38 | input_colorado: Input, 39 | output_salween: Output, 40 | output_godavari: Output, 41 | state: Arc>, 42 | } 43 | 44 | #[async_trait::async_trait] 45 | impl Operator for Osaka { 46 | async fn new( 47 | _context: Context, 48 | _configuration: Option, 49 | mut inputs: Inputs, 50 | mut outputs: Outputs, 51 | ) -> Result { 52 | Ok(Self { 53 | input_parana: inputs 54 | .take(PARANA_PORT) 55 | .unwrap_or_else(|| panic!("No Input called '{}' found", PARANA_PORT)) 56 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 57 | input_columbia: inputs 58 | .take(COLUMBIA_PORT) 59 | .unwrap_or_else(|| panic!("No Input called '{}' found", COLUMBIA_PORT)) 60 | .typed(|buf| Ok(data_types::Image::decode(buf)?)), 61 | input_colorado: inputs 62 | .take(COLORADO_PORT) 63 | .unwrap_or_else(|| panic!("No Input called '{}' found", COLORADO_PORT)) 64 | .typed(|buf| Ok(data_types::Image::decode(buf)?)), 65 | output_salween: outputs 66 | .take(SALWEEN_PORT) 67 | .unwrap_or_else(|| panic!("No Output called '{}' found", SALWEEN_PORT)) 68 | .typed(|buf, v: &data_types::PointCloud2| { 69 | buf.resize(v.encoded_len(), 0); 70 | Ok(v.encode(buf)?) 71 | }), 72 | output_godavari: outputs 73 | .take(GODAVARI_PORT) 74 | .unwrap_or_else(|| panic!("No Output called '{}' found", GODAVARI_PORT)) 75 | .typed(|buf, v: &data_types::LaserScan| { 76 | buf.resize(v.encoded_len(), 0); 77 | Ok(v.encode(buf)?) 78 | }), 79 | state: Arc::new(Mutex::new(OsakaState { 80 | parana_last_val: data_types::String { 81 | value: datatypes::random_string(1), 82 | }, 83 | columbia_last_val: random(), 84 | _colorado_last_val: random(), 85 | pointcloud2_data: random(), 86 | laserscan_data: random(), 87 | })), 88 | }) 89 | } 90 | } 91 | 92 | #[async_trait::async_trait] 93 | impl Node for Osaka { 94 | async fn iteration(&self) -> Result<()> { 95 | select! 
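// Messages on Parana and Columbia only refresh the cached state; a message on Colorado is the trigger that publishes the pre-generated PointCloud2 on Salween and the LaserScan on Godavari.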
{ 96 | msg = self.input_parana.recv().fuse() => { 97 | if let Ok((Message::Data(inner_data), _ts)) = msg { 98 | self.state.lock().await.parana_last_val = (*inner_data).clone(); 99 | } 100 | }, 101 | msg = self.input_columbia.recv().fuse() => { 102 | if let Ok((Message::Data(inner_data), _ts)) = msg { 103 | self.state.lock().await.columbia_last_val = (*inner_data).clone(); 104 | } 105 | }, 106 | msg = self.input_colorado.recv().fuse() => { 107 | if let Ok((Message::Data(_inner_data), _ts)) = msg { 108 | let guard_state = self.state.lock().await; 109 | 110 | self.output_salween.send(guard_state.pointcloud2_data.clone(), None).await?; 111 | self.output_godavari.send(guard_state.laserscan_data.clone(), None).await?; 112 | } 113 | } 114 | } 115 | Ok(()) 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /montblanc/ponce/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "ponce" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "ponce" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/ponce/ponce.yml: -------------------------------------------------------------------------------- 1 | id : Ponce 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libponce.so 3 | inputs: 4 | - Danube 5 | - Brazos 6 | - Tagus 7 | - Missouri 8 | - Loire 9 | - Yamuna 10 | - Godavari 11 | - Ohio 12 | - Volga 13 | outputs: 14 | - Congo 15 | - Mekong 16 | -------------------------------------------------------------------------------- /montblanc/ponce/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{ 18 | BRAZOS_PORT, CONGO_PORT, DANUBE_PORT, LOIRE_PORT, MEKONG_PORT, MISSOURI_PORT, OHIO_PORT, 19 | TAGUS_PORT, VOLGA_PORT, YAMUNA_PORT, 20 | }; 21 | use futures::prelude::*; 22 | use futures::select; 23 | use prost::Message as pMessage; 24 | use rand::random; 25 | use std::sync::Arc; 26 | use zenoh_flow::prelude::*; 27 | 28 | #[derive(Debug, Clone)] 29 | struct PonceState { 30 | danube_last_val: data_types::String, 31 | tagus_last_val: data_types::Pose, 32 | missouri_last_val: data_types::Image, 33 | loire_last_val: data_types::PointCloud2, 34 | yamuna_last_val: data_types::Vector3, 35 | 36 | ohio_last_val: data_types::Float32, 37 | volga_last_val: data_types::Float64, 38 | 39 | twist_data: data_types::Twist, 40 | twist_w_cov_data: data_types::TwistWithCovarianceStamped, 41 | } 42 | 43 | #[export_operator] 44 | pub struct Ponce { 45 | input_danube: Input, 46 | input_tagus: Input, 47 | input_missouri: Input, 48 | input_loire: Input, 49 | input_yamuna: Input, 50 | input_ohio: Input, 51 | input_volga: Input, 52 | input_brazos: Input, 53 | output_congo: Output, 54 | output_mekong: Output, 55 | state: Arc>, 56 | } 57 | 58 | #[async_trait::async_trait] 59 | impl Operator for Ponce { 60 | async fn new( 61 | _context: Context, 62 | _configuration: Option, 63 | mut inputs: Inputs, 64 | mut outputs: Outputs, 65 | ) -> Result { 66 | Ok(Self { 67 | input_danube: inputs 68 | .take(DANUBE_PORT) 69 | .unwrap_or_else(|| panic!("No Input called '{}' found", DANUBE_PORT)) 70 | .typed(|buf| Ok(data_types::String::decode(buf)?)), 71 | input_tagus: inputs 72 | .take(TAGUS_PORT) 73 | .unwrap_or_else(|| panic!("No Input called '{}' found", TAGUS_PORT)) 74 | .typed(|buf| Ok(data_types::Pose::decode(buf)?)), 75 | input_missouri: inputs 76 | .take(MISSOURI_PORT) 77 | .unwrap_or_else(|| panic!("No Input called '{}' found", MISSOURI_PORT)) 78 | .typed(|buf| Ok(data_types::Image::decode(buf)?)), 79 | input_loire: inputs 80 | .take(LOIRE_PORT) 81 | .unwrap_or_else(|| panic!("No Input called '{}' found", LOIRE_PORT)) 82 | .typed(|buf| Ok(data_types::PointCloud2::decode(buf)?)), 83 | input_yamuna: inputs 84 | .take(YAMUNA_PORT) 85 | .unwrap_or_else(|| panic!("No Input called '{}' found", YAMUNA_PORT)) 86 | .typed(|buf| Ok(data_types::Vector3::decode(buf)?)), 87 | input_ohio: inputs 88 | .take(OHIO_PORT) 89 | .unwrap_or_else(|| panic!("No Input called '{}' found", OHIO_PORT)) 90 | .typed(|buf| Ok(data_types::Float32::decode(buf)?)), 91 | input_volga: inputs 92 | .take(VOLGA_PORT) 93 | .unwrap_or_else(|| panic!("No Input called '{}' found", VOLGA_PORT)) 94 | .typed(|buf| Ok(data_types::Float64::decode(buf)?)), 95 | input_brazos: inputs 96 | .take(BRAZOS_PORT) 97 | .unwrap_or_else(|| panic!("No Input called '{}' found", BRAZOS_PORT)) 98 | .typed(|buf| Ok(data_types::PointCloud2::decode(buf)?)), 99 | 100 | output_congo: outputs 101 | .take(CONGO_PORT) 102 | .unwrap_or_else(|| panic!("No Output called '{}' found", CONGO_PORT)) 103 | .typed(|buf, v: &data_types::Twist| { 104 | buf.resize(v.encoded_len(), 0); 105 | Ok(v.encode(buf)?) 106 | }), 107 | output_mekong: outputs 108 | .take(MEKONG_PORT) 109 | .unwrap_or_else(|| panic!("No Output called '{}' found", MEKONG_PORT)) 110 | .typed(|buf, v: &data_types::TwistWithCovarianceStamped| { 111 | buf.resize(v.encoded_len(), 0); 112 | Ok(v.encode(buf)?) 
113 | }), 114 | state: Arc::new(Mutex::new(PonceState { 115 | danube_last_val: data_types::String { 116 | value: datatypes::random_string(1), 117 | }, 118 | tagus_last_val: random(), 119 | missouri_last_val: random(), 120 | loire_last_val: random(), 121 | yamuna_last_val: random(), 122 | 123 | ohio_last_val: data_types::Float32 { value: random() }, 124 | volga_last_val: data_types::Float64 { value: random() }, 125 | 126 | twist_data: random(), 127 | twist_w_cov_data: random(), 128 | })), 129 | }) 130 | } 131 | } 132 | 133 | #[async_trait::async_trait] 134 | impl Node for Ponce { 135 | async fn iteration(&self) -> Result<()> { 136 | select! { 137 | msg = self.input_danube.recv().fuse() => { 138 | if let Ok((Message::Data(inner_data), _ts)) = msg { 139 | self.state.lock().await.danube_last_val = (*inner_data).clone(); 140 | } 141 | }, 142 | msg = self.input_tagus.recv().fuse() => { 143 | if let Ok((Message::Data(inner_data), _ts)) = msg { 144 | self.state.lock().await.tagus_last_val = (*inner_data).clone(); 145 | } 146 | }, 147 | msg = self.input_missouri.recv().fuse() => { 148 | if let Ok((Message::Data(inner_data), _ts)) = msg { 149 | self.state.lock().await.missouri_last_val = (*inner_data).clone(); 150 | } 151 | }, 152 | msg = self.input_loire.recv().fuse() => { 153 | if let Ok((Message::Data(inner_data), _ts)) = msg { 154 | self.state.lock().await.loire_last_val = (*inner_data).clone(); 155 | } 156 | }, 157 | msg = self.input_yamuna.recv().fuse() => { 158 | if let Ok((Message::Data(inner_data), _ts)) = msg { 159 | self.state.lock().await.yamuna_last_val = (*inner_data).clone(); 160 | } 161 | }, 162 | msg = self.input_ohio.recv().fuse() => { 163 | if let Ok((Message::Data(inner_data), _ts)) = msg { 164 | self.state.lock().await.ohio_last_val = (*inner_data).clone(); 165 | } 166 | }, 167 | msg = self.input_volga.recv().fuse() => { 168 | if let Ok((Message::Data(inner_data), _ts)) = msg { 169 | self.state.lock().await.volga_last_val = (*inner_data).clone(); 170 | } 171 | }, 172 | msg = self.input_brazos.recv().fuse() => { 173 | if let Ok((Message::Data(_inner_data), _ts)) = msg { 174 | let guard_state = self.state.lock().await; 175 | 176 | self.output_congo.send(guard_state.twist_data.clone(), None).await?; 177 | self.output_mekong.send(guard_state.twist_w_cov_data.clone(), None).await?; 178 | } 179 | } 180 | } 181 | Ok(()) 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /montblanc/portsmouth/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "portsmouth" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "portsmouth" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | prost = { workspace = true } 30 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/portsmouth/portsmouth.yml: -------------------------------------------------------------------------------- 1 | id : Portsmouth 2 | 3 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libportsmouth.so 4 | 5 | # period: 6 | # length: 500 7 | # unit: ms 8 | outputs: [Danube] 9 | -------------------------------------------------------------------------------- /montblanc/portsmouth/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::DANUBE_PORT; 16 | use prost::Message; 17 | use std::time::Duration; 18 | use zenoh_flow::prelude::*; 19 | #[export_source] 20 | pub struct Portsmouth { 21 | output: Output<datatypes::data_types::String>, 22 | } 23 | 24 | #[async_trait::async_trait] 25 | impl Node for Portsmouth { 26 | async fn iteration(&self) -> Result<()> { 27 | async_std::task::sleep(Duration::from_millis(200)).await; 28 | let value = datatypes::data_types::String { 29 | value: "portsmouth/danube".into(), 30 | }; 31 | self.output.send(value, None).await 32 | } 33 | } 34 | 35 | #[async_trait::async_trait] 36 | impl Source for Portsmouth { 37 | async fn new( 38 | _context: Context, 39 | _configuration: Option<Configuration>, 40 | mut outputs: Outputs, 41 | ) -> Result<Self> { 42 | Ok(Self { 43 | output: outputs 44 | .take(DANUBE_PORT) 45 | .unwrap_or_else(|| panic!("No Output called '{}' found", DANUBE_PORT)) 46 | .typed(|buf, v: &datatypes::data_types::String| { 47 | buf.resize(v.encoded_len(), 0); 48 | Ok(v.encode(buf)?) 49 | }), 50 | }) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /montblanc/rotterdam/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "rotterdam" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "rotterdam" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } 33 | 34 | -------------------------------------------------------------------------------- /montblanc/rotterdam/rotterdam.yml: -------------------------------------------------------------------------------- 1 | id : Rotterdam 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/release/librotterdam.so 3 | inputs: [Mekong] 4 | outputs: [Murray] 5 | -------------------------------------------------------------------------------- /montblanc/rotterdam/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::data_types; 16 | use datatypes::{MEKONG_PORT, MURRAY_PORT}; 17 | use futures::prelude::*; 18 | use futures::select; 19 | use prost::Message as pMessage; 20 | use rand::random; 21 | use zenoh_flow::prelude::*; 22 | 23 | #[export_operator] 24 | pub struct Rotterdam { 25 | input_mekong: Input, 26 | output_murray: Output, 27 | } 28 | 29 | #[async_trait::async_trait] 30 | impl Operator for Rotterdam { 31 | async fn new( 32 | _context: Context, 33 | _configuration: Option, 34 | mut inputs: Inputs, 35 | mut outputs: Outputs, 36 | ) -> Result { 37 | Ok(Self { 38 | input_mekong: inputs 39 | .take(MEKONG_PORT) 40 | .unwrap_or_else(|| panic!("No Input called '{}' found", MEKONG_PORT)) 41 | .typed(|buf| Ok(data_types::TwistWithCovarianceStamped::decode(buf)?)), 42 | output_murray: outputs 43 | .take(MURRAY_PORT) 44 | .unwrap_or_else(|| panic!("No Output called '{}' found", MURRAY_PORT)) 45 | .typed(|buf, v: &data_types::Vector3Stamped| { 46 | buf.resize(v.encoded_len(), 0); 47 | Ok(v.encode(buf)?) 48 | }), 49 | }) 50 | } 51 | } 52 | 53 | #[async_trait::async_trait] 54 | impl Node for Rotterdam { 55 | async fn iteration(&self) -> Result<()> { 56 | select! { 57 | msg = self.input_mekong.recv().fuse() => { 58 | if let Ok((Message::Data(inner_data), _ts)) = msg { 59 | let value = data_types::Vector3Stamped { 60 | header: Some(inner_data.header.clone().unwrap_or(random())), 61 | vector: inner_data 62 | .twist 63 | .as_ref() 64 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 65 | .twist 66 | .as_ref() 67 | .ok_or_else(|| zferror!(ErrorKind::Empty))? 
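// Walk the nested optional twist fields, bailing out with ErrorKind::Empty if one is unset; the innermost linear Vector3 becomes the payload forwarded on Murray.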
68 | .linear 69 | .clone(), 70 | }; 71 | self.output_murray.send(value, None).await?; 72 | } 73 | } 74 | } 75 | Ok(()) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /montblanc/taipei/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "taipei" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "taipei" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | zenoh-flow = { workspace = true } 32 | -------------------------------------------------------------------------------- /montblanc/taipei/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use datatypes::data_types; 16 | use datatypes::{COLORADO_PORT, COLUMBIA_PORT}; 17 | use prost::Message; 18 | use zenoh_flow::prelude::*; 19 | 20 | #[export_operator] 21 | pub struct Taipei { 22 | input: Input, 23 | output: Output, 24 | } 25 | 26 | #[async_trait::async_trait] 27 | impl Operator for Taipei { 28 | async fn new( 29 | _context: Context, 30 | _configuration: Option, 31 | mut inputs: Inputs, 32 | mut outputs: Outputs, 33 | ) -> Result { 34 | Ok(Self { 35 | input: inputs 36 | .take(COLUMBIA_PORT) 37 | .unwrap_or_else(|| panic!("No Input called '{}' found", COLUMBIA_PORT)) 38 | .typed(|buf| Ok(data_types::Image::decode(buf)?)), 39 | output: outputs 40 | .take(COLORADO_PORT) 41 | .unwrap_or_else(|| panic!("No Output called '{}' found", COLORADO_PORT)) 42 | .typed(|buf, v: &data_types::Image| { 43 | buf.resize(v.encoded_len(), 0); 44 | Ok(v.encode(buf)?) 
45 | }), 46 | }) 47 | } 48 | } 49 | 50 | #[async_trait::async_trait] 51 | impl Node for Taipei { 52 | async fn iteration(&self) -> Result<()> { 53 | let (msg, _ts) = self.input.recv().await?; 54 | if let zenoh_flow::prelude::Message::Data(data) = msg { 55 | self.output.send(data, None).await?; 56 | } 57 | Ok(()) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /montblanc/taipei/taipei.yml: -------------------------------------------------------------------------------- 1 | id : Taipei 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/release/libtaipei.so 3 | inputs: [Columbia] 4 | outputs: [Colorado] 5 | -------------------------------------------------------------------------------- /montblanc/tripoli/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [package] 16 | name = "tripoli" 17 | version = "0.1.0" 18 | edition = "2018" 19 | 20 | [lib] 21 | name = "tripoli" 22 | crate-type=["cdylib"] 23 | path="src/lib.rs" 24 | 25 | [dependencies] 26 | async-std = { workspace = true } 27 | async-trait = { workspace = true } 28 | datatypes = { path = "../datatypes" } 29 | futures = { workspace = true } 30 | prost = { workspace = true } 31 | rand = { workspace = true } 32 | zenoh-flow = { workspace = true } -------------------------------------------------------------------------------- /montblanc/tripoli/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::sync::Mutex; 16 | use datatypes::data_types; 17 | use datatypes::{COLUMBIA_PORT, GODAVARI_PORT, LOIRE_PORT}; 18 | use futures::prelude::*; 19 | use futures::select; 20 | use prost::Message as _; 21 | use rand::random; 22 | use std::sync::Arc; 23 | use zenoh_flow::prelude::*; 24 | 25 | #[derive(Debug, Clone)] 26 | struct TripoliState { 27 | pointcloud2_data: data_types::PointCloud2, 28 | columbia_last_val: data_types::Image, 29 | } 30 | 31 | #[export_operator] 32 | pub struct Tripoli { 33 | input_columbia: Input<data_types::Image>, 34 | input_godavari: Input<data_types::LaserScan>, 35 | output_loire: Output<data_types::PointCloud2>, 36 | state: Arc<Mutex<TripoliState>>, 37 | } 38 | 39 | #[async_trait::async_trait] 40 | impl Operator for Tripoli { 41 | async fn new( 42 | _context: Context, 43 | _configuration: Option<Configuration>, 44 | mut inputs: Inputs, 45 | mut outputs: Outputs, 46 | ) -> Result<Self> { 47 | Ok(Self { 48 | input_columbia: inputs 49 | .take(COLUMBIA_PORT) 50 | .unwrap_or_else(|| panic!("No Input called '{}' found", COLUMBIA_PORT)) 51 | .typed(|buf| Ok(data_types::Image::decode(buf)?)), 52 | input_godavari: inputs 53 | .take(GODAVARI_PORT) 54 | .unwrap_or_else(|| panic!("No Input called '{}' found", GODAVARI_PORT)) 55 | .typed(|buf| Ok(data_types::LaserScan::decode(buf)?)), 56 | output_loire: outputs 57 | .take(LOIRE_PORT) 58 | .unwrap_or_else(|| panic!("No Output called '{}' found", LOIRE_PORT)) 59 | .typed(|buf, v: &data_types::PointCloud2| { 60 | buf.resize(v.encoded_len(), 0); 61 | Ok(v.encode(buf)?) 62 | }), 63 | state: Arc::new(Mutex::new(TripoliState { 64 | pointcloud2_data: random(), 65 | columbia_last_val: random(), 66 | })), 67 | }) 68 | } 69 | } 70 | 71 | #[async_trait::async_trait] 72 | impl Node for Tripoli { 73 | async fn iteration(&self) -> Result<()> { 74 | select! { 75 | msg = self.input_columbia.recv().fuse() => { 76 | if let Ok((Message::Data(inner_data), _ts)) = msg { 77 | self.state.lock().await.columbia_last_val = (*inner_data).clone(); 78 | } 79 | }, 80 | msg = self.input_godavari.recv().fuse() => { 81 | if let Ok((Message::Data(_inner_data), _ts)) = msg { 82 | let guard_state = self.state.lock().await; 83 | 84 | self.output_loire.send(guard_state.pointcloud2_data.clone(), None).await?; 85 | } 86 | } 87 | } 88 | Ok(()) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /montblanc/tripoli/tripoli.yml: -------------------------------------------------------------------------------- 1 | id : Tripoli 2 | uri: file:///home/ato/Workspace/zenoh-flow-examples/montblanc/target/debug/libtripoli.so 3 | inputs: 4 | - Columbia 5 | - Godavari 6 | outputs: [Loire] 7 | -------------------------------------------------------------------------------- /period-miss-detector/README.md: -------------------------------------------------------------------------------- 1 | # Period miss detector 2 | 3 | The purpose of this example is to showcase how one can implement a node that (i) 4 | expects data at regular intervals and (ii) sends a default value if no data was 5 | received within an interval. 6 | 7 | ## How to run 8 | 9 | ### Build 10 | 11 | We first build the shared libraries of the different nodes.
12 | 13 | ```shell 14 | cd ~/dev/zenoh-flow-examples/period-miss-detector/nodes/rust/ && cargo build --workspace 15 | ``` 16 | 17 | ### Update the paths 18 | 19 | For each YAML file in the list below, check that the paths and filenames are 20 | correct: 21 | - data-flow.yaml 22 | - nodes/period-miss-detector.yaml 23 | - nodes/file-writer.yaml 24 | 25 | ### Launch 26 | 27 | #### 1st terminal: Zenoh 28 | 29 | ```shell 30 | cd ~/dev/zenoh && ./target/debug/zenohd -c ~/.config/zenoh-flow/zenoh.json 31 | ``` 32 | 33 | #### 2nd terminal: Zenoh-Flow daemon 34 | 35 | ```shell 36 | cd ~/dev/zenoh-flow/ && ./target/debug/zenoh-flow-daemon -c ~/.config/zenoh-flow/runtime.yaml 37 | ``` 38 | 39 | #### 3rd terminal: launch the flow 40 | 41 | ```shell 42 | cd ~/dev/zenoh-flow && ./target/debug/zfctl launch ~/dev/zenoh-flow-examples/period-miss-detector/data-flow.yaml 43 | ``` 44 | 45 | Then, if the flow was successfully launched, put values at regular intervals: 46 | 47 | ```shell 48 | cd ~/dev/zenoh && ./target/debug/examples/z_put -k "zf/period-miss-detector" -v "3.1416" 49 | ``` 50 | -------------------------------------------------------------------------------- /period-miss-detector/data-flow.yaml: -------------------------------------------------------------------------------- 1 | flow: period-miss-detector 2 | 3 | 4 | vars: 5 | BASE_DIR: "/path/to/zenoh-flow-examples/period-miss-detector/nodes" 6 | 7 | 8 | sources: 9 | - id: zenoh-sub 10 | configuration: 11 | key-expressions: 12 | out: zf/period-miss-detector 13 | descriptor: "builtin://zenoh" 14 | 15 | 16 | operators: 17 | - id: period-miss-detector 18 | descriptor: "file://{{ BASE_DIR }}/python/period-miss-detector/period-miss-detector.yaml" 19 | 20 | 21 | sinks: 22 | - id: file-writer 23 | descriptor: "file://{{ BASE_DIR }}/python/file-writer/file-writer.yaml" 24 | 25 | 26 | links: 27 | - from: 28 | node: zenoh-sub 29 | output: out 30 | to: 31 | node: period-miss-detector 32 | input: in 33 | 34 | - from: 35 | node: period-miss-detector 36 | output: out 37 | to: 38 | node: file-writer 39 | input: in 40 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/python/file-writer/file-writer.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | from zenoh_flow.interfaces import Sink 16 | from zenoh_flow import Inputs 17 | from zenoh_flow.types import Context 18 | from typing import Dict, Any 19 | 20 | 21 | class FileWriter(Sink): 22 | def __init__( 23 | self, 24 | context: Context, 25 | configuration: Dict[str, Any], 26 | inputs: Inputs, 27 | ): 28 | self.input = inputs.take("in", str, lambda buf: buf.decode("utf-8")) 29 | if self.input is None: 30 | raise ValueError("Unable to find input 'in'") 31 | self.out_file = open("/tmp/period-log.txt", "w+") 32 | 33 | def finalize(self) -> None: 34 | self.out_file.close() 35 | 36 | async def iteration(self) -> None: 37 | data_msg = await self.input.recv() 38 | self.out_file.write(data_msg.get_data()) 39 | self.out_file.flush() 40 | 41 | 42 | def register(): 43 | return FileWriter 44 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/python/file-writer/file-writer.yaml: -------------------------------------------------------------------------------- 1 | id: file-writer 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/period-miss-detector" 5 | 6 | uri: "file://{{ BASE_DIR }}/nodes/python/file-writer/file-writer.py" 7 | 8 | inputs: [in] 9 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/python/period-miss-detector/period-miss-detector.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | from zenoh_flow.interfaces import Operator 16 | from zenoh_flow import Inputs, Outputs 17 | from zenoh_flow.types import Context 18 | from typing import Dict, Any 19 | import datetime 20 | import asyncio 21 | 22 | 23 | class PeriodMissDetector(Operator): 24 | def __init__( 25 | self, 26 | context: Context, 27 | configuration: Dict[str, Any], 28 | inputs: Inputs, 29 | outputs: Outputs, 30 | ): 31 | print(f"Context: {context}") 32 | self.output = outputs.take("out", str, lambda s: bytes(s, "utf-8")) 33 | self.in_stream = inputs.take("in", str, lambda buf: buf.decode("utf-8")) 34 | 35 | if self.in_stream is None: 36 | raise ValueError("No input 'in' found") 37 | if self.output is None: 38 | raise ValueError("No output 'out' found") 39 | 40 | self.period = 5 41 | self.next_period = datetime.datetime.now() + datetime.timedelta(seconds=self.period) 42 | 43 | def finalize(self) -> None: 44 | return None 45 | 46 | async def iteration(self) -> None: 47 | 48 | now = datetime.datetime.now() 49 | sleep_duration = self.next_period.timestamp() - now.timestamp() 50 | if sleep_duration < 0: 51 | self.next_period = datetime.datetime.now() + datetime.timedelta(seconds=self.period) 52 | sleep_duration = self.period 53 | 54 | (done, pending) = await asyncio.wait( 55 | self.create_task_list(sleep_duration), 56 | return_when=asyncio.FIRST_COMPLETED, 57 | ) 58 | 59 | for task in list(pending): 60 | task.cancel() 61 | 62 | return None 63 | 64 | async def default(self, sleep_duration): 65 | await asyncio.sleep(sleep_duration) 66 | await self.output.send("(default) 0\n") 67 | self.next_period = \ 68 | self.next_period + datetime.timedelta(seconds=self.period) 69 | return "tick" 70 | 71 | async def wait_input(self): 72 | data_msg = await self.in_stream.recv() 73 | value = data_msg.get_data() 74 | await self.output.send(f"Received: {value}\n") 75 | 76 | now = datetime.datetime.now() 77 | interval = self.next_period.timestamp() - now.timestamp() 78 | if interval > 0 and interval < self.period: 79 | self.next_period = datetime.datetime.now() + datetime.timedelta(seconds=interval) 80 | else: 81 | self.next_period = datetime.datetime.now() + datetime.timedelta(seconds=self.period) 82 | 83 | return "in" 84 | 85 | def create_task_list(self, sleep_duration): 86 | task_list = [] 87 | 88 | if not any(t.get_name() == "in" for t in task_list): 89 | task_list.append( 90 | asyncio.create_task(self.wait_input(), name="in") 91 | ) 92 | 93 | if not any(t.get_name() == "tick" for t in task_list): 94 | task_list.append( 95 | asyncio.create_task(self.default(sleep_duration), name="tick") 96 | ) 97 | return task_list 98 | 99 | 100 | def register(): 101 | return PeriodMissDetector 102 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/python/period-miss-detector/period-miss-detector.yaml: -------------------------------------------------------------------------------- 1 | id: period-miss-detector 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/period-miss-detector" 5 | 6 | uri: "file://{{ BASE_DIR }}/nodes/python/period-miss-detector/period-miss-detector.py" 7 | 8 | inputs: [in] 9 | outputs: [out] 10 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 
ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | [workspace] 16 | members = ["period-miss-detector", "file-writer"] 17 | 18 | [workspace.dependencies] 19 | async-std = "1.12" 20 | async-trait = "0.1" 21 | prost = "0.11" 22 | zenoh-flow = { version = "0.5.0-alpha.1" } 23 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/file-writer/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "file-writer" 3 | version = "0.1.0" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | async-std = { workspace = true } 8 | async-trait = { workspace = true } 9 | prost = { workspace = true } 10 | zenoh-flow = { workspace = true } 11 | 12 | [lib] 13 | crate-type=["cdylib"] 14 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/file-writer/file-writer.yaml: -------------------------------------------------------------------------------- 1 | id: file-writer 2 | 3 | vars: 4 | BASE_DIR: "/path/to/zenoh-flow-examples/period-miss-detector" 5 | 6 | # Do not forget to change the extension depending on your operating system! 7 | # Linux -> .so 8 | # Windows -> .dll (and remove the "lib" in front) 9 | # MacOS -> .dylib 10 | uri: "file://{{ BASE_DIR }}/nodes/rust/target/debug/libfile_writer.dylib" 11 | # If the compilation is in release: 12 | # uri: file:///absolute/path/to/target/release/libfile_writer.so 13 | 14 | inputs: [in] 15 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/file-writer/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0. 
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::{fs::File, io::WriteExt, sync::Mutex}; 16 | use prost::Message as pMessage; 17 | use zenoh_flow::{anyhow, prelude::*}; 18 | 19 | #[export_sink] 20 | pub struct FileWriter { 21 | input: Input<String>, 22 | file: Mutex<File>, 23 | } 24 | 25 | #[async_trait::async_trait] 26 | impl Node for FileWriter { 27 | async fn iteration(&self) -> Result<()> { 28 | let (message, _) = self.input.recv().await?; 29 | 30 | if let Message::Data(data) = message { 31 | let mut file = self.file.lock().await; 32 | file.write_all(data.as_bytes()) 33 | .await 34 | .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e))?; 35 | return file 36 | .flush() 37 | .await 38 | .map_err(|e| zferror!(ErrorKind::IOError, "{:?}", e).into()); 39 | } 40 | 41 | Ok(()) 42 | } 43 | } 44 | 45 | #[async_trait::async_trait] 46 | impl Sink for FileWriter { 47 | async fn new( 48 | _context: Context, 49 | _configuration: Option<Configuration>, 50 | mut inputs: Inputs, 51 | ) -> Result<Self> { 52 | Ok(FileWriter { 53 | file: Mutex::new( 54 | File::create("/tmp/period-log.txt") 55 | .await 56 | .expect("Could not create '/tmp/period-log.txt'"), 57 | ), 58 | input: inputs 59 | .take("in") 60 | .expect("No Input called 'in' found") 61 | .typed(|bytes| String::decode(bytes).map_err(|e| anyhow!(e))), 62 | }) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/period-miss-detector/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "period-miss-detector" 3 | version = "0.1.0" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | async-std = { workspace = true } 8 | async-trait = { workspace = true } 9 | prost = { workspace = true } 10 | zenoh-flow = { workspace = true } 11 | 12 | [lib] 13 | crate-type=["cdylib"] -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/period-miss-detector/period-miss-detector.yaml: -------------------------------------------------------------------------------- 1 | id: period-miss-detector 2 | 3 | vars: 4 | BASE_DIR: "/path/to/dev/zenoh-flow-examples/period-miss-detector" 5 | 6 | # Do not forget to change the extension depending on your operating system! 7 | # Linux -> .so 8 | # Windows -> .dll (and remove the "lib" in front) 9 | # MacOS -> .dylib 10 | uri: "file://{{ BASE_DIR }}/nodes/rust/target/debug/libperiod_miss_detector.dylib" 11 | # If the compilation is in release: 12 | # uri: file:///absolute/path/to/target/release/libperiod_miss_detector.dylib 13 | 14 | inputs: [in] 15 | outputs: [out] 16 | -------------------------------------------------------------------------------- /period-miss-detector/nodes/rust/period-miss-detector/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Copyright (c) 2022 ZettaScale Technology 3 | // 4 | // This program and the accompanying materials are made available under the 5 | // terms of the Eclipse Public License 2.0 which is available at 6 | // http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | // which is available at https://www.apache.org/licenses/LICENSE-2.0.
8 | // 9 | // SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | // 11 | // Contributors: 12 | // ZettaScale Zenoh Team, 13 | // 14 | 15 | use async_std::prelude::FutureExt; 16 | use async_std::sync::{Arc, Mutex}; 17 | use prost::Message as pMessage; 18 | use std::time::{Duration, Instant}; 19 | use zenoh_flow::{anyhow, prelude::*}; 20 | 21 | #[export_operator] 22 | pub struct PeriodMissDetector { 23 | input: Input<String>, 24 | output: Output<String>, 25 | period_duration: Duration, 26 | next_period: Arc<Mutex<Instant>>, 27 | } 28 | 29 | #[async_trait::async_trait] 30 | impl Operator for PeriodMissDetector { 31 | async fn new( 32 | _context: Context, 33 | _configuration: Option<Configuration>, 34 | mut inputs: Inputs, 35 | mut outputs: Outputs, 36 | ) -> Result<Self> { 37 | let period_duration = Duration::from_secs(5); 38 | Ok(PeriodMissDetector { 39 | input: inputs 40 | .take("in") 41 | .expect("No input 'in' found") 42 | .typed(|bytes| String::from_utf8(bytes.into()).map_err(|e| anyhow!(e))), 43 | output: outputs 44 | .take("out") 45 | .expect("No output 'out' found") 46 | .typed(|buffer, data: &String| data.encode(buffer).map_err(|e| anyhow!(e))), 47 | // CAVEAT: There can be a delay between the moment the node is created and the moment it 48 | // is actually run. 49 | next_period: Arc::new(Mutex::new( 50 | Instant::now().checked_add(period_duration).unwrap(), 51 | )), 52 | period_duration, 53 | }) 54 | } 55 | } 56 | 57 | #[async_trait::async_trait] 58 | impl Node for PeriodMissDetector { 59 | async fn iteration(&self) -> Result<()> { 60 | let mut next_period = self.next_period.lock().await; 61 | let now = Instant::now(); 62 | 63 | let sleep_duration = match next_period.checked_duration_since(now) { 64 | Some(sleep_duration) => sleep_duration, 65 | None => { 66 | *next_period = next_period 67 | .checked_add(self.period_duration) 68 | .expect("Could not add duration"); 69 | self.period_duration 70 | } 71 | }; 72 | 73 | drop(next_period); // explicitly release the lock 74 | 75 | let default = async { 76 | async_std::task::sleep(sleep_duration).await; 77 | self.output 78 | .send(String::from("(default) 0\n"), None) 79 | .await 80 | .expect("output channel disconnected"); 81 | 82 | let mut next_period = self.next_period.lock().await; 83 | *next_period = next_period 84 | .checked_add(self.period_duration) 85 | .expect("Could not add duration"); 86 | }; 87 | 88 | let run = async { 89 | let (message, _) = self.input.recv().await.expect("input channel disconnected"); 90 | if let Message::Data(data) = message { 91 | if let Ok(number) = data.trim_end().parse::<u64>() { 92 | self.output 93 | .send(format!("Received: {number}\n"), None) 94 | .await 95 | .expect("output channel disconnected"); 96 | 97 | // We just sent a value, if we are within a period (i.e. `next_period` is less 98 | // than `period_duration` away) we can safely increase the value of 99 | // `next_period` by a single period. 100 | let mut next_period = self.next_period.lock().await; 101 | let now = Instant::now(); 102 | if let Some(interval) = next_period.checked_duration_since(now) { 103 | if interval < self.period_duration { 104 | *next_period = next_period.checked_add(self.period_duration).unwrap(); 105 | } 106 | } else { 107 | // This else clause is an edge case: we sent the value riiiiiight before the 108 | // end. So by the time we are reaching this code, `now` is later than 109 | // `next_period`. Considering that we are executing this code, we still 110 | // received data before reaching the next period so we can also safely 111 | // increase by one period.
112 | *next_period = next_period.checked_add(self.period_duration).unwrap(); 113 | } 114 | } 115 | } 116 | }; 117 | 118 | run.race(default).await; 119 | 120 | Ok(()) 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | 1.69.0 2 | -------------------------------------------------------------------------------- /transcoding/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Zenoh-Flow for data transcoding. 3 | 4 | This document will guide you through building, installing and configuring Zenoh-Flow together with Zenoh for data transcoding. 5 | 6 | Note: this guide has been tested on Ubuntu 22.04 LTS. 7 | ## Prerequisites 8 | 9 | In order to build and run Zenoh-Flow, the following dependencies are needed: 10 | 11 | - build-essential 12 | - python3-dev 13 | - python3-pip 14 | - python3-venv 15 | - clang 16 | - libclang-dev 17 | - rust 18 | - pkg-config 19 | 20 | Please make sure those dependencies are installed before proceeding. 21 | 22 | ## Build Zenoh and Zenoh-Flow 23 | 24 | Clone the repositories and build: 25 | ``` 26 | cd ~ 27 | 28 | git clone https://github.com/eclipse-zenoh/zenoh -b 0.7.2-rc 29 | cd zenoh 30 | cargo build --release --all-targets --features shared-memory 31 | 32 | cd .. 33 | git clone https://github.com/eclipse-zenoh/zenoh-flow -b v0.5.0-alpha.1 34 | cd zenoh-flow 35 | cargo build --release --all-targets 36 | 37 | cd .. 38 | git clone https://github.com/eclipse-zenoh/zenoh-flow-python -b v0.5.0-alpha.1 39 | cd zenoh-flow-python 40 | cargo build --release --all-targets 41 | 42 | cd zenoh-flow-python 43 | 44 | python3 -m venv venv 45 | source venv/bin/activate 46 | pip3 install -r requirements-dev.txt 47 | maturin build --release 48 | deactivate 49 | ``` 50 | 51 | ## Install 52 | 53 | Install Zenoh and Zenoh-Flow: 54 | 55 | ``` 56 | cd ~ 57 | 58 | sudo mkdir -p /etc/zenoh/ 59 | sudo mkdir -p /var/zenoh-flow/python 60 | sudo mkdir -p /var/zenoh-flow/flows 61 | sudo mkdir -p /etc/zenoh-flow/extensions.d 62 | 63 | sudo cp zenoh/target/release/zenohd /usr/bin/ 64 | sudo cp zenoh/target/release/libzenoh_plugin_*.so /usr/lib/ 65 | 66 | sudo cp zenoh-flow/target/release/libzenoh_plugin_zenoh_flow.so /usr/lib/ 67 | sudo cp zenoh-flow/target/release/zfctl /usr/bin/ 68 | sudo cp zenoh-flow-python/target/release/libzenoh_flow_python_*_wrapper.so /var/zenoh-flow/python 69 | sudo cp zenoh-flow-python/01-python.zfext /etc/zenoh-flow/extensions.d/ 70 | sudo cp zenoh-flow/zfctl/.config/zfctl-zenoh.json /etc/zenoh-flow/ 71 | 72 | pip3 install ./zenoh-flow-python/target/wheels/eclipse_zenoh_flow-0.5.0a1-cp37-abi3-manylinux_2_34_x86_64.whl 73 | ``` 74 | 75 | ## Start Runtime 76 | 77 | Copy the `zenoh-config.json` from this folder to `/etc/zenoh/zenoh.json`. 78 | 79 | Now you can start the Zenoh router with the Zenoh-Flow plugin.
80 | Open a terminal and run: `RUST_LOG=debug zenohd -c /etc/zenoh/zenoh.json` 81 | 82 | Then on another terminal run: `zfctl list runtimes` 83 | 84 | You should get an output similar to this: 85 | ``` 86 | +----------------------------------+--------------------+--------+ 87 | | UUID | Name | Status | 88 | +----------------------------------+--------------------+--------+ 89 | | bb4a456d6c0948bfae21a6e8c9051d6b | protoc-client-test | Ready | 90 | +----------------------------------+--------------------+--------+ 91 | ``` 92 | 93 | This means that the Zenoh-Flow runtime was loaded and is ready. 94 | 95 | ## The transcoding application. 96 | 97 | Copy the content of this folder into `/var/zenoh-flow/flows` and run `pip3 install -r /var/zenoh-flow/flows/requirements.txt`. 98 | 99 | 100 | On a terminal, start the publisher side: `cd /var/zenoh-flow/flows && python3 pub-proto.py` 101 | On a new terminal, start the subscriber side: `cd /var/zenoh-flow/flows && python3 sub-cdr.py` 102 | 103 | The subscriber will not receive any data, as the transcoding flow is not yet deployed. 104 | 105 | On a third terminal, instruct Zenoh-Flow to launch the transcoding flow: `zfctl launch /var/zenoh-flow/flows/dataflow.yml`; it will return the instance id. 106 | 107 | Now you should see the data being transcoded and received by your subscriber. 108 | 109 | Once you are done, you can list the currently running flow instances with `zfctl list instances` and delete the running one with `zfctl destroy <instance id>`. 110 | 111 | Once the instance is deleted, you will see that the subscriber no longer receives any data. 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | -------------------------------------------------------------------------------- /transcoding/dataflow.yml: -------------------------------------------------------------------------------- 1 | flow: Transcoder 2 | vars: 3 | BASE_DIR: "/var/zenoh-flow/flows" 4 | 5 | operators: 6 | - id : Conversion 7 | descriptor: "file://{{ BASE_DIR }}/transcoder.yml" 8 | sources: 9 | - id : ZenohSrc 10 | configuration: 11 | key-expressions: 12 | proto: data/proto 13 | descriptor: "builtin://zenoh" 14 | 15 | sinks: 16 | - id : ZenohSink 17 | configuration: 18 | key-expressions: 19 | cdr: data/cdr 20 | descriptor: "builtin://zenoh" 21 | 22 | links: 23 | - from: 24 | node : ZenohSrc 25 | output : proto 26 | to: 27 | node : Conversion 28 | input : in 29 | 30 | - from: 31 | node : Conversion 32 | output : out 33 | to: 34 | node : ZenohSink 35 | input : cdr -------------------------------------------------------------------------------- /transcoding/message.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | message MyMsg { 4 | uint64 u_value = 1; 5 | string s_value = 2; 6 | } -------------------------------------------------------------------------------- /transcoding/message_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT!
3 | # source: message.proto 4 | """Generated protocol buffer code.""" 5 | from google.protobuf import descriptor as _descriptor 6 | from google.protobuf import descriptor_pool as _descriptor_pool 7 | from google.protobuf import message as _message 8 | from google.protobuf import reflection as _reflection 9 | from google.protobuf import symbol_database as _symbol_database 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rmessage.proto\")\n\x05MyMsg\x12\x0f\n\x07u_value\x18\x01 \x01(\x04\x12\x0f\n\x07s_value\x18\x02 \x01(\tb\x06proto3') 18 | 19 | 20 | 21 | _MYMSG = DESCRIPTOR.message_types_by_name['MyMsg'] 22 | MyMsg = _reflection.GeneratedProtocolMessageType('MyMsg', (_message.Message,), { 23 | 'DESCRIPTOR' : _MYMSG, 24 | '__module__' : 'message_pb2' 25 | # @@protoc_insertion_point(class_scope:MyMsg) 26 | }) 27 | _sym_db.RegisterMessage(MyMsg) 28 | 29 | if _descriptor._USE_C_DESCRIPTORS == False: 30 | 31 | DESCRIPTOR._options = None 32 | _MYMSG._serialized_start=17 33 | _MYMSG._serialized_end=58 34 | # @@protoc_insertion_point(module_scope) 35 | -------------------------------------------------------------------------------- /transcoding/pub-proto.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | import sys 16 | import time 17 | import argparse 18 | import itertools 19 | import json 20 | import zenoh 21 | from zenoh import config 22 | from message_pb2 import MyMsg 23 | 24 | # --- Command line argument parsing --- --- --- --- --- --- 25 | parser = argparse.ArgumentParser( 26 | prog='z_pub', 27 | description='zenoh pub example') 28 | parser.add_argument('--mode', '-m', dest='mode', 29 | choices=['peer', 'client'], 30 | type=str, 31 | help='The zenoh session mode.') 32 | parser.add_argument('--connect', '-e', dest='connect', 33 | metavar='ENDPOINT', 34 | action='append', 35 | type=str, 36 | help='Endpoints to connect to.') 37 | parser.add_argument('--listen', '-l', dest='listen', 38 | metavar='ENDPOINT', 39 | action='append', 40 | type=str, 41 | help='Endpoints to listen on.') 42 | parser.add_argument('--key', '-k', dest='key', 43 | default='data/proto', 44 | type=str, 45 | help='The key expression to publish onto.') 46 | parser.add_argument('--value', '-v', dest='value', 47 | default='Pub from Python!', 48 | type=str, 49 | help='The value to publish.') 50 | parser.add_argument("--iter", dest="iter", type=int, 51 | help="How many puts to perform") 52 | parser.add_argument('--config', '-c', dest='config', 53 | metavar='FILE', 54 | type=str, 55 | help='A configuration file.') 56 | 57 | args = parser.parse_args() 58 | conf = zenoh.Config.from_file(args.config) if args.config is not None else zenoh.Config() 59 | if args.mode is not None: 60 | conf.insert_json5(zenoh.config.MODE_KEY, json.dumps(args.mode)) 61 | if args.connect is not None: 62 | conf.insert_json5(zenoh.config.CONNECT_KEY, json.dumps(args.connect)) 63 | if args.listen is not None: 64 | 
conf.insert_json5(zenoh.config.LISTEN_KEY, json.dumps(args.listen)) 65 | key = args.key 66 | value = args.value 67 | 68 | # initiate logging 69 | zenoh.init_logger() 70 | 71 | print("Opening session...") 72 | session = zenoh.open(conf) 73 | 74 | print(f"Declaring Publisher on '{key}'...") 75 | pub = session.declare_publisher(key) 76 | 77 | for idx in itertools.count() if args.iter is None else range(args.iter): 78 | time.sleep(1) 79 | 80 | msg = MyMsg( 81 | u_value = idx, 82 | s_value = value 83 | ) 84 | 85 | print(f"Putting Data ('{key}': '{msg}')...") 86 | pub.put(msg.SerializeToString()) 87 | 88 | pub.undeclare() 89 | session.close() -------------------------------------------------------------------------------- /transcoding/requirements.txt: -------------------------------------------------------------------------------- 1 | grpcio-tools==1.54.2 2 | pycdr2==1.0.0 3 | eclipse-zenoh==0.7.2rc0 -------------------------------------------------------------------------------- /transcoding/sub-cdr.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | import sys 16 | import time 17 | from datetime import datetime 18 | import argparse 19 | import json 20 | import zenoh 21 | from zenoh import Reliability, Sample 22 | from dataclasses import dataclass 23 | from pycdr2 import IdlStruct 24 | from pycdr2.types import uint64 25 | 26 | 27 | # CDR 28 | @dataclass 29 | class MyStruct(IdlStruct, typename="MyStruct"): 30 | u_value: uint64 31 | s_value: str 32 | 33 | 34 | 35 | # --- Command line argument parsing --- --- --- --- --- --- 36 | parser = argparse.ArgumentParser( 37 | prog='z_sub', 38 | description='zenoh sub example') 39 | parser.add_argument('--mode', '-m', dest='mode', 40 | choices=['peer', 'client'], 41 | type=str, 42 | help='The zenoh session mode.') 43 | parser.add_argument('--connect', '-e', dest='connect', 44 | metavar='ENDPOINT', 45 | action='append', 46 | type=str, 47 | help='Endpoints to connect to.') 48 | parser.add_argument('--listen', '-l', dest='listen', 49 | metavar='ENDPOINT', 50 | action='append', 51 | type=str, 52 | help='Endpoints to listen on.') 53 | parser.add_argument('--key', '-k', dest='key', 54 | default='data/cdr', 55 | type=str, 56 | help='The key expression to subscribe to.') 57 | parser.add_argument('--config', '-c', dest='config', 58 | metavar='FILE', 59 | type=str, 60 | help='A configuration file.') 61 | 62 | args = parser.parse_args() 63 | conf = zenoh.Config.from_file( 64 | args.config) if args.config is not None else zenoh.Config() 65 | if args.mode is not None: 66 | conf.insert_json5(zenoh.config.MODE_KEY, json.dumps(args.mode)) 67 | if args.connect is not None: 68 | conf.insert_json5(zenoh.config.CONNECT_KEY, json.dumps(args.connect)) 69 | if args.listen is not None: 70 | conf.insert_json5(zenoh.config.LISTEN_KEY, json.dumps(args.listen)) 71 | key = args.key 72 | 73 | # Zenoh code --- --- --- --- --- --- --- --- --- --- --- 74 | 75 | 76 | 77 | # initiate logging 78 | zenoh.init_logger() 79 | 80 | print("Opening session...") 81 | session =
zenoh.open(conf) 82 | 83 | print("Declaring Subscriber on '{}'...".format(key)) 84 | 85 | 86 | def listener(sample: Sample): 87 | data = MyStruct.deserialize(sample.payload) 88 | print(f">> [Subscriber] Received {sample.kind} ('{sample.key_expr}': '{data}')") 89 | 90 | 91 | # WARNING, you MUST store the return value in order for the subscription to work!! 92 | # This is because if you don't, the reference counter will reach 0 and the subscription 93 | # will be immediately undeclared. 94 | sub = session.declare_subscriber(key, listener, reliability=Reliability.RELIABLE()) 95 | 96 | print("Enter 'q' to quit...") 97 | c = '\0' 98 | while c != 'q': 99 | c = sys.stdin.read(1) 100 | if c == '': 101 | time.sleep(1) 102 | 103 | # Cleanup: note that even if you forget it, cleanup will happen automatically when 104 | # the reference counter reaches 0 105 | sub.undeclare() 106 | session.close() -------------------------------------------------------------------------------- /transcoding/transcoder.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2022 ZettaScale Technology 3 | # 4 | # This program and the accompanying materials are made available under the 5 | # terms of the Eclipse Public License 2.0 which is available at 6 | # http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 7 | # which is available at https://www.apache.org/licenses/LICENSE-2.0. 8 | # 9 | # SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 10 | # 11 | # Contributors: 12 | # ZettaScale Zenoh Team, 13 | # 14 | 15 | 16 | from zenoh_flow.interfaces import Operator 17 | from zenoh_flow import Input, Output 18 | from zenoh_flow.types import Context 19 | import logging 20 | from typing import Dict, Any 21 | from dataclasses import dataclass 22 | from pycdr2 import IdlStruct 23 | from pycdr2.types import uint64 24 | 25 | 26 | # CDR 27 | @dataclass 28 | class MyStruct(IdlStruct, typename="MyStruct"): 29 | u_value: uint64 30 | s_value: str 31 | 32 | 33 | 34 | # Protobuf 35 | 36 | 37 | # -*- coding: utf-8 -*- 38 | # Generated by the protocol buffer compiler. DO NOT EDIT!
39 | # source: message.proto 40 | """Generated protocol buffer code.""" 41 | from google.protobuf import descriptor as _descriptor 42 | from google.protobuf import descriptor_pool as _descriptor_pool 43 | from google.protobuf import message as _message 44 | from google.protobuf import reflection as _reflection 45 | from google.protobuf import symbol_database as _symbol_database 46 | # @@protoc_insertion_point(imports) 47 | 48 | _sym_db = _symbol_database.Default() 49 | 50 | 51 | 52 | 53 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rmessage.proto\")\n\x05MyMsg\x12\x0f\n\x07u_value\x18\x01 \x01(\x04\x12\x0f\n\x07s_value\x18\x02 \x01(\tb\x06proto3') 54 | 55 | 56 | 57 | _MYMSG = DESCRIPTOR.message_types_by_name['MyMsg'] 58 | MyMsg = _reflection.GeneratedProtocolMessageType('MyMsg', (_message.Message,), { 59 | 'DESCRIPTOR' : _MYMSG, 60 | '__module__' : 'message_pb2' 61 | # @@protoc_insertion_point(class_scope:MyMsg) 62 | }) 63 | _sym_db.RegisterMessage(MyMsg) 64 | 65 | if _descriptor._USE_C_DESCRIPTORS == False: 66 | 67 | DESCRIPTOR._options = None 68 | _MYMSG._serialized_start=17 69 | _MYMSG._serialized_end=58 70 | # @@protoc_insertion_point(module_scope) 71 | 72 | 73 | ### 74 | 75 | def protobuf_deserialize(b): 76 | msg = MyMsg() 77 | msg.ParseFromString(b) 78 | return msg 79 | 80 | 81 | class Transcoder(Operator): 82 | def __init__( 83 | self, 84 | context: Context, 85 | configuration: Dict[str, Any], 86 | inputs: Dict[str, Input], 87 | outputs: Dict[str, Output], 88 | ): 89 | logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG) 90 | self.output = outputs.take("out", MyStruct, lambda x: x.serialize()) 91 | self.in_stream = inputs.take("in", MyMsg, protobuf_deserialize) 92 | 93 | if self.in_stream is None: 94 | raise ValueError("No input 'in' found") 95 | if self.output is None: 96 | raise ValueError("No output 'out' found") 97 | 98 | def finalize(self) -> None: 99 | return None 100 | 101 | async def iteration(self) -> None: 102 | data_msg = await self.in_stream.recv() 103 | payload = data_msg.get_data() 104 | print(f"Protobuf= (type: {type(payload)}) : {payload} ") 105 | # Transcode 106 | cdrMsg = MyStruct(payload.u_value, payload.s_value) 107 | 108 | print(f"CDR= (type: {type(cdrMsg)}) : {cdrMsg} ") 109 | await self.output.send(cdrMsg) 110 | 111 | return None 112 | 113 | 114 | def register(): 115 | return Transcoder -------------------------------------------------------------------------------- /transcoding/transcoder.yml: -------------------------------------------------------------------------------- 1 | id: compute-proximity 2 | vars: 3 | BASE_DIR: "/var/zenoh-flow/flows" 4 | uri: "file://{{ BASE_DIR }}/transcoder.py" 5 | 6 | 7 | inputs: [in] 8 | outputs: [out] -------------------------------------------------------------------------------- /transcoding/zenoh-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "listen":{ 3 | "endpoints":["tcp/0.0.0.0:7447"] 4 | }, 5 | "plugins_search_dirs":["/usr/lib/"], 6 | "plugins":{ 7 | "storage_manager":{ 8 | "required":true, 9 | "storages":{ 10 | "zfrpc":{ 11 | "key_expr":"zf/runtime/**", 12 | "volume": "memory" 13 | }, 14 | "zf":{ 15 | "key_expr":"zenoh-flow/**", 16 | "volume": "memory" 17 | } 18 | } 19 | }, 20 | "zenoh_flow":{ 21 | "required":true, 22 | "path":"/etc/zenoh-flow", 23 | "pid_file": "/var/zenoh-flow/runtime.pid", 24 | "extensions": "/etc/zenoh-flow/extensions.d", 25 | "worker_pool_size":4, 26 | "use_shm": false 27 | } 28 | } 29 | }
--------------------------------------------------------------------------------
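For reference, the protobuf-to-CDR conversion performed by the `Conversion` operator (`transcoder.py`) can be exercised on its own, without Zenoh or Zenoh-Flow. The following is a minimal sketch, assuming it is run from the `transcoding/` folder so that the generated `message_pb2.py` is importable and the packages from `requirements.txt` are installed:

```
# Minimal sketch of the protobuf -> CDR transcoding done by transcoder.py.
from dataclasses import dataclass

from pycdr2 import IdlStruct
from pycdr2.types import uint64

from message_pb2 import MyMsg


@dataclass
class MyStruct(IdlStruct, typename="MyStruct"):
    u_value: uint64
    s_value: str


# Serialize a protobuf message, as pub-proto.py does before publishing.
proto_bytes = MyMsg(u_value=1, s_value="Pub from Python!").SerializeToString()

# Decode the protobuf payload, as the operator's input callback does.
msg = MyMsg()
msg.ParseFromString(proto_bytes)

# Re-encode the same fields as CDR, as the operator does before sending on "out".
cdr_bytes = MyStruct(msg.u_value, msg.s_value).serialize()
print(f"protobuf payload: {len(proto_bytes)} bytes -> CDR payload: {len(cdr_bytes)} bytes")
```

The resulting CDR bytes are what `sub-cdr.py` deserializes with `MyStruct.deserialize()` when the flow is running.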