├── .github └── FUNDING.yml ├── .gitignore ├── .gitmodules ├── .vscode └── launch.json ├── Cargo.toml ├── LICENSE ├── README.md ├── duo-api ├── Cargo.toml ├── build.rs ├── proto │ ├── common.proto │ ├── google │ │ └── protobuf │ │ │ ├── duration.proto │ │ │ └── timestamp.proto │ ├── instrument.proto │ ├── log.proto │ ├── process.proto │ └── span.proto └── src │ ├── common.rs │ ├── instrument.rs │ ├── lib.rs │ ├── log.rs │ ├── process.rs │ └── span.rs ├── duo-subscriber ├── Cargo.toml ├── examples │ ├── log.rs │ └── main.rs └── src │ ├── client.rs │ ├── conn.rs │ ├── lib.rs │ ├── subscriber.rs │ └── visitor.rs ├── duo-ui-logging.png ├── duo-ui-tracing.png ├── duo-ui ├── .gitignore ├── .npmrc ├── .prettierignore ├── .prettierrc ├── README.md ├── build-jeager.sh ├── build-ui.sh ├── components.json ├── eslint.config.js ├── jsconfig.json ├── package.json ├── postcss.config.js ├── src │ ├── app.css │ ├── app.d.ts │ ├── app.html │ ├── lib │ │ ├── api.js │ │ ├── components │ │ │ ├── Datatype.svelte │ │ │ ├── LogItem.svelte │ │ │ └── ui │ │ │ │ ├── badge │ │ │ │ ├── badge.svelte │ │ │ │ └── index.js │ │ │ │ ├── button │ │ │ │ ├── button.svelte │ │ │ │ └── index.js │ │ │ │ ├── collapsible │ │ │ │ ├── collapsible-content.svelte │ │ │ │ └── index.js │ │ │ │ ├── input │ │ │ │ ├── index.js │ │ │ │ └── input.svelte │ │ │ │ ├── progress │ │ │ │ ├── index.js │ │ │ │ └── progress.svelte │ │ │ │ ├── resizable │ │ │ │ ├── index.js │ │ │ │ ├── resizable-handle.svelte │ │ │ │ └── resizable-pane-group.svelte │ │ │ │ ├── scroll-area │ │ │ │ ├── index.js │ │ │ │ ├── scroll-area-scrollbar.svelte │ │ │ │ └── scroll-area.svelte │ │ │ │ └── separator │ │ │ │ ├── index.js │ │ │ │ └── separator.svelte │ │ ├── index.js │ │ └── utils.js │ └── routes │ │ ├── +layout.js │ │ ├── +layout.svelte │ │ ├── +page.js │ │ ├── +page.svelte │ │ └── trace │ │ └── +page.svelte ├── svelte.config.js ├── tailwind.config.js └── vite.config.js ├── duo.toml └── duo ├── Cargo.toml ├── src ├── aggregator.rs ├── arrow.rs ├── config.rs ├── grpc │ ├── mod.rs │ └── server.rs ├── ipc.rs ├── main.rs ├── memory.rs ├── models.rs ├── partition │ ├── mod.rs │ ├── query.rs │ └── writer.rs ├── query.rs ├── schema.rs ├── utils.rs └── web │ ├── deser.rs │ ├── logs.rs │ ├── mod.rs │ ├── serialize.rs │ ├── services.rs │ └── trace.rs └── ui ├── _app ├── env.js ├── immutable │ ├── assets │ │ ├── 0.9240paAY.css │ │ ├── 2.DdNfNY-u.css │ │ ├── 3.Buu_H62d.css │ │ ├── _layout.9240paAY.css │ │ ├── _page.Buu_H62d.css │ │ └── _page.Do73vz5N.css │ ├── chunks │ │ ├── each.Chk0zBHY.js │ │ ├── entry.CY4Bz8dl.js │ │ ├── index.D6PN1hnv.js │ │ ├── index.LOVubqCp.js │ │ └── scheduler.Bi1YRy3L.js │ ├── entry │ │ ├── app.C9MSuNpP.js │ │ └── start.KXOXF_R5.js │ └── nodes │ │ ├── 0.BRH967PU.js │ │ ├── 1.CwZJuE_B.js │ │ ├── 2.Cq86B-2g.js │ │ └── 3.CvrP5Z8A.js └── version.json ├── index.html ├── static ├── css │ ├── 1.1d830a2b.chunk.css │ ├── 1.69c64675.chunk.css │ ├── main.024cd045.chunk.css │ └── main.dfcb7700.chunk.css ├── js │ ├── 1.365650eb.chunk.js │ ├── 1.a403f3de.chunk.js │ ├── main.ad2150d5.chunk.js │ └── main.f89fdb33.chunk.js └── media │ └── jaeger-logo.a7093b12.svg └── trace.html /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [Folyd] 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have 
compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | # Jaeger-ui static files, generated from build script 13 | # duo/ui 14 | 15 | *.parquet 16 | .history 17 | 18 | process.json 19 | .DS_Store 20 | /data -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "duo-ui/jaeger-ui"] 2 | path = duo-ui/jaeger-ui 3 | url = git@github.com:duo-rs/jaeger-ui.git 4 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "lldb", 9 | "request": "launch", 10 | "name": "Debug", 11 | "program": "${workspaceFolder}/target/debug/duo", 12 | "args": [], 13 | "cwd": "${workspaceFolder}" 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["duo", "duo-api", "duo-subscriber"] 3 | exclude = ["duo-ui"] 4 | resolver = "2" 5 | 6 | [workspace.package] 7 | edition = "2021" 8 | homepage = "https://github.com/duo-rs/duo" 9 | license = "MIT" 10 | 11 | [workspace.dependencies] 12 | duo-api = { version = "0.1", path = "./duo-api" } 13 | duo-subscriber = { version = "0.1", default-features = false, path = "./duo-subscriber" } 14 | anyhow = "1" 15 | tonic = "0.12" 16 | tonic-build = "0.12" 17 | tracing = "0.1" 18 | tracing-core = "0.1" 19 | tracing-subscriber = "0.3" 20 | rand = "0.8" 21 | serde = { version = "1.0", features = ["derive"] } 22 | serde_json = "1.0" 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Folyd 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Duo 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/duo.svg)](https://crates.io/crates/duo) 4 | ![Crates.io](https://img.shields.io/crates/d/duo) 5 | [![license-mit](https://img.shields.io/badge/license-MIT-yellow.svg)](./LICENSE) 6 | [![dependency status](https://deps.rs/repo/github/duo-rs/duo/status.svg)](https://deps.rs/repo/github/duo-rs/duo) 7 | 8 | **A lightweight Logging and Tracing observability solution for Rust, built with [Apache Arrow](https://arrow.apache.org), [Apache Parquet](https://parquet.apache.org) and [Apache DataFusion](https://datafusion.apache.org).** 9 | 10 | > **Notice: this project is in the experimental stage and not production-ready. Use at your own risk.** 11 | 12 | ## What is duo? 13 | 14 | Duo is an easy-to-use observability solution that provides both logging and tracing capabilities for Rust applications. While traditional observability solutions (such as [ELK](https://elastic.co), [jaegertracing](https://jaegertracing.io), etc.) are powerful, they are also complex to deploy and maintain. Duo aims to provide a smaller but complete set of observability features, with extremely simple deployment and maintenance. 15 | 16 | This project was inspired by [tracing](https://github.com/tokio-rs/tracing) and [console](https://github.com/tokio-rs/console). It consists of multiple components: 17 | 18 | - **duo-api** - a wire protocol for logging and tracing data. The wire format is defined using gRPC and protocol buffers. 19 | - **duo-subscriber** - instrumentation for collecting logging and tracing data from a process and exposing it over the wire format. The `duo-subscriber` crate in this repository contains an implementation of the instrumentation-side API as a [tracing-subscriber](https://crates.io/crates/tracing-subscriber) [Layer](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/layer/trait.Layer.html), for projects using Tokio and tracing. 20 | - **duo-ui** - the web UI for duo. 21 | - **duo-server** - the aggregating server that collects tracing and logging data and serves the duo web UI. 22 | 23 | ## Get started 24 | 25 | ### Installation 26 | 27 | ``` 28 | cargo install duo 29 | ``` 30 | 31 | Run `duo`. 32 | 33 | ``` 34 | $ duo 35 | 36 | gRPC server listening on http://127.0.0.1:6000 37 | 38 | Web server listening on http://127.0.0.1:3000 39 | ``` 40 | 41 | Open http://127.0.0.1:3000 in your local browser and wait for applications to report data. 42 | 43 | ### Application 44 | 45 | ```toml 46 | [dependencies] 47 | duo-subscriber = "0.1" 48 | ``` 49 | 50 | ```rs 51 | use duo_subscriber::DuoLayer; 52 | use tonic::transport::Uri; 53 | use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt}; 54 | 55 | #[tracing::instrument] 56 | fn foo() { 57 | tracing::info!("hello foo!"); 58 | } 59 | 60 | #[tokio::main] 61 | async fn main() { 62 | let fmt_layer = fmt::layer(); 63 | let uri = Uri::from_static("http://127.0.0.1:6000"); 64 | let (duo_layer, handle) = DuoLayer::with_handle("example", uri).await; 65 | tracing_subscriber::registry() 66 | .with(fmt_layer) 67 | .with(duo_layer) 68 | .init(); 69 | 70 | tracing::debug!("Bootstrap..."); 71 | foo(); 72 | 73 | handle.await.unwrap(); 74 | } 75 | ``` 76 | 77 | > For more examples, please see the [examples directory](./duo-subscriber/examples/).
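78 | 79 | Events can carry structured key-value fields alongside the message; these land in the `fields`/`tags` maps defined in `duo-api` and are searchable in the web UI. A minimal sketch (the function and field names are illustrative): 80 | 81 | ```rs 82 | use tracing::{error, info}; 83 | 84 | #[tracing::instrument] 85 | fn checkout(order_id: u64) { 86 | info!(order_id, retry = false, "processing order"); 87 | error!(code = 500, "payment failed!"); 88 | } 89 | ```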
90 | 91 | Run your application, then check http://127.0.0.1:3000 to see the tracing data. 92 | 93 | ### Logging UI 94 | 95 | ![](./duo-ui-logging.png) 96 | 97 | ### Tracing UI 98 | 99 | Currently the trace view is based on [Jaeger UI](https://www.jaegertracing.io); we'll rewrite it in Svelte in the future. 100 | 101 | ![](./duo-ui-tracing.png) 102 | 103 | ## Roadmap 104 | 105 | - [x] Support tracing diagnosis with Jaeger UI. 106 | 107 | - [x] Build duo web UI. 108 | 109 | - [x] Support logging diagnosis. 110 | 111 | - [x] Support arrow-ipc WAL. 112 | 113 | - [x] Batch sync WAL to parquet files. 114 | 115 | - [x] Support Object Store. 116 | 117 | - [ ] Integrate Apache Iceberg? 118 | 119 | - [ ] Support the OpenTelemetry specification, aiming to be a lightweight OpenTelemetry backend. 120 | 121 | ## Why called duo? 122 | 123 | Duo is primarily a musical term: a composition for two performers in which both have equal importance to the piece, often two singers or two pianists. 124 | 125 | A famous duo is [Brooklyn Duo](https://www.youtube.com/c/BrooklynDuo); you can watch this video ([Canon in D (Pachelbel's Canon) - Cello & Piano](https://www.youtube.com/watch?v=Ptk_1Dc2iPY)) to learn more about them. 126 | 127 | ![](https://i.ytimg.com/vi/Ptk_1Dc2iPY/maxresdefault.jpg) 128 | 129 | I personally think logging and tracing have equal importance in observability; like a duo, they work together to help you diagnose your application. 130 | 131 | ## License 132 | 133 | This project is licensed under the [MIT license](./LICENSE). 134 | -------------------------------------------------------------------------------- /duo-api/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "duo-api" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | 8 | [dependencies] 9 | tonic.workspace = true 10 | prost = "0.13" 11 | prost-types = "0.13" 12 | tracing-core.workspace = true 13 | serde_json.workspace = true 14 | 15 | [build-dependencies] 16 | tonic-build.workspace = true 17 | -------------------------------------------------------------------------------- /duo-api/build.rs: -------------------------------------------------------------------------------- 1 | use std::error::Error; 2 | 3 | fn main() -> Result<(), Box<dyn Error>> { 4 | let proto_files = &[ 5 | "proto/common.proto", 6 | "proto/instrument.proto", 7 | "proto/log.proto", 8 | "proto/span.proto", 9 | "proto/process.proto", 10 | ]; 11 | 12 | let dirs = &["proto"]; 13 | tonic_build::configure() 14 | .build_client(true) 15 | .build_server(true) 16 | .protoc_arg("--experimental_allow_proto3_optional") 17 | .compile(proto_files, dirs)?; 18 | 19 | // recompile protobufs only if any of the proto files changes. 20 | for file in proto_files { 21 | println!("cargo:rerun-if-changed={}", file); 22 | } 23 | Ok(()) 24 | } 25 | -------------------------------------------------------------------------------- /duo-api/proto/common.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package rs.duo.common; 4 | 5 | enum Level { 6 | // The "error" level. 7 | // 8 | // Designates very serious errors. 9 | ERROR = 0; 10 | // The "warn" level. 11 | // 12 | // Designates hazardous situations. 13 | WARN = 1; 14 | // The "info" level. 15 | // Designates useful information. 16 | INFO = 2; 17 | // The "debug" level.
18 | // 19 | // Designates lower priority information. 20 | DEBUG = 3; 21 | // The "trace" level. 22 | // 23 | // Designates very low priority, often extremely verbose, information. 24 | TRACE = 4; 25 | } 26 | 27 | // The value of the key-value pair. 28 | message Value { 29 | oneof inner { 30 | // A string value. 31 | string str_val = 2; 32 | // An unsigned integer value. 33 | uint64 u64_val = 3; 34 | // A signed integer value. 35 | sint64 i64_val = 4; 36 | // A boolean value. 37 | bool bool_val = 5; 38 | } 39 | } -------------------------------------------------------------------------------- /duo-api/proto/google/protobuf/duration.proto: -------------------------------------------------------------------------------- 1 | // Protocol Buffers - Google's data interchange format 2 | // Copyright 2008 Google Inc. All rights reserved. 3 | // https://developers.google.com/protocol-buffers/ 4 | // 5 | // Redistribution and use in source and binary forms, with or without 6 | // modification, are permitted provided that the following conditions are 7 | // met: 8 | // 9 | // * Redistributions of source code must retain the above copyright 10 | // notice, this list of conditions and the following disclaimer. 11 | // * Redistributions in binary form must reproduce the above 12 | // copyright notice, this list of conditions and the following disclaimer 13 | // in the documentation and/or other materials provided with the 14 | // distribution. 15 | // * Neither the name of Google Inc. nor the names of its 16 | // contributors may be used to endorse or promote products derived from 17 | // this software without specific prior written permission. 18 | // 19 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | syntax = "proto3"; 32 | 33 | package google.protobuf; 34 | 35 | option csharp_namespace = "Google.Protobuf.WellKnownTypes"; 36 | option cc_enable_arenas = true; 37 | option go_package = "github.com/golang/protobuf/ptypes/duration"; 38 | option java_package = "com.google.protobuf"; 39 | option java_outer_classname = "DurationProto"; 40 | option java_multiple_files = true; 41 | option objc_class_prefix = "GPB"; 42 | 43 | // A Duration represents a signed, fixed-length span of time represented 44 | // as a count of seconds and fractions of seconds at nanosecond 45 | // resolution. It is independent of any calendar and concepts like "day" 46 | // or "month". It is related to Timestamp in that the difference between 47 | // two Timestamp values is a Duration and it can be added or subtracted 48 | // from a Timestamp. Range is approximately +-10,000 years. 49 | // 50 | // # Examples 51 | // 52 | // Example 1: Compute Duration from two Timestamps in pseudo code. 
53 | // 54 | // Timestamp start = ...; 55 | // Timestamp end = ...; 56 | // Duration duration = ...; 57 | // 58 | // duration.seconds = end.seconds - start.seconds; 59 | // duration.nanos = end.nanos - start.nanos; 60 | // 61 | // if (duration.seconds < 0 && duration.nanos > 0) { 62 | // duration.seconds += 1; 63 | // duration.nanos -= 1000000000; 64 | // } else if (durations.seconds > 0 && duration.nanos < 0) { 65 | // duration.seconds -= 1; 66 | // duration.nanos += 1000000000; 67 | // } 68 | // 69 | // Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 70 | // 71 | // Timestamp start = ...; 72 | // Duration duration = ...; 73 | // Timestamp end = ...; 74 | // 75 | // end.seconds = start.seconds + duration.seconds; 76 | // end.nanos = start.nanos + duration.nanos; 77 | // 78 | // if (end.nanos < 0) { 79 | // end.seconds -= 1; 80 | // end.nanos += 1000000000; 81 | // } else if (end.nanos >= 1000000000) { 82 | // end.seconds += 1; 83 | // end.nanos -= 1000000000; 84 | // } 85 | // 86 | // Example 3: Compute Duration from datetime.timedelta in Python. 87 | // 88 | // td = datetime.timedelta(days=3, minutes=10) 89 | // duration = Duration() 90 | // duration.FromTimedelta(td) 91 | // 92 | // # JSON Mapping 93 | // 94 | // In JSON format, the Duration type is encoded as a string rather than an 95 | // object, where the string ends in the suffix "s" (indicating seconds) and 96 | // is preceded by the number of seconds, with nanoseconds expressed as 97 | // fractional seconds. For example, 3 seconds with 0 nanoseconds should be 98 | // encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should 99 | // be expressed in JSON format as "3.000000001s", and 3 seconds and 1 100 | // microsecond should be expressed in JSON format as "3.000001s". 101 | // 102 | // 103 | message Duration { 104 | 105 | // Signed seconds of the span of time. Must be from -315,576,000,000 106 | // to +315,576,000,000 inclusive. Note: these bounds are computed from: 107 | // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years 108 | int64 seconds = 1; 109 | 110 | // Signed fractions of a second at nanosecond resolution of the span 111 | // of time. Durations less than one second are represented with a 0 112 | // `seconds` field and a positive or negative `nanos` field. For durations 113 | // of one second or more, a non-zero value for the `nanos` field must be 114 | // of the same sign as the `seconds` field. Must be from -999,999,999 115 | // to +999,999,999 inclusive. 116 | int32 nanos = 2; 117 | } 118 | -------------------------------------------------------------------------------- /duo-api/proto/google/protobuf/timestamp.proto: -------------------------------------------------------------------------------- 1 | // Protocol Buffers - Google's data interchange format 2 | // Copyright 2008 Google Inc. All rights reserved. 3 | // https://developers.google.com/protocol-buffers/ 4 | // 5 | // Redistribution and use in source and binary forms, with or without 6 | // modification, are permitted provided that the following conditions are 7 | // met: 8 | // 9 | // * Redistributions of source code must retain the above copyright 10 | // notice, this list of conditions and the following disclaimer. 11 | // * Redistributions in binary form must reproduce the above 12 | // copyright notice, this list of conditions and the following disclaimer 13 | // in the documentation and/or other materials provided with the 14 | // distribution. 15 | // * Neither the name of Google Inc. 
nor the names of its 16 | // contributors may be used to endorse or promote products derived from 17 | // this software without specific prior written permission. 18 | // 19 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | syntax = "proto3"; 32 | 33 | package google.protobuf; 34 | 35 | option csharp_namespace = "Google.Protobuf.WellKnownTypes"; 36 | option cc_enable_arenas = true; 37 | option go_package = "github.com/golang/protobuf/ptypes/timestamp"; 38 | option java_package = "com.google.protobuf"; 39 | option java_outer_classname = "TimestampProto"; 40 | option java_multiple_files = true; 41 | option objc_class_prefix = "GPB"; 42 | 43 | // A Timestamp represents a point in time independent of any time zone 44 | // or calendar, represented as seconds and fractions of seconds at 45 | // nanosecond resolution in UTC Epoch time. It is encoded using the 46 | // Proleptic Gregorian Calendar which extends the Gregorian calendar 47 | // backwards to year one. It is encoded assuming all minutes are 60 48 | // seconds long, i.e. leap seconds are "smeared" so that no leap second 49 | // table is needed for interpretation. Range is from 50 | // 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. 51 | // By restricting to that range, we ensure that we can convert to 52 | // and from RFC 3339 date strings. 53 | // See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). 54 | // 55 | // # Examples 56 | // 57 | // Example 1: Compute Timestamp from POSIX `time()`. 58 | // 59 | // Timestamp timestamp; 60 | // timestamp.set_seconds(time(NULL)); 61 | // timestamp.set_nanos(0); 62 | // 63 | // Example 2: Compute Timestamp from POSIX `gettimeofday()`. 64 | // 65 | // struct timeval tv; 66 | // gettimeofday(&tv, NULL); 67 | // 68 | // Timestamp timestamp; 69 | // timestamp.set_seconds(tv.tv_sec); 70 | // timestamp.set_nanos(tv.tv_usec * 1000); 71 | // 72 | // Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. 73 | // 74 | // FILETIME ft; 75 | // GetSystemTimeAsFileTime(&ft); 76 | // UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; 77 | // 78 | // // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z 79 | // // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 80 | // Timestamp timestamp; 81 | // timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); 82 | // timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); 83 | // 84 | // Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
85 | // 86 | // long millis = System.currentTimeMillis(); 87 | // 88 | // Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) 89 | // .setNanos((int) ((millis % 1000) * 1000000)).build(); 90 | // 91 | // 92 | // Example 5: Compute Timestamp from current time in Python. 93 | // 94 | // timestamp = Timestamp() 95 | // timestamp.GetCurrentTime() 96 | // 97 | // # JSON Mapping 98 | // 99 | // In JSON format, the Timestamp type is encoded as a string in the 100 | // [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the 101 | // format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" 102 | // where {year} is always expressed using four digits while {month}, {day}, 103 | // {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional 104 | // seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), 105 | // are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone 106 | // is required, though only UTC (as indicated by "Z") is presently supported. 107 | // 108 | // For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 109 | // 01:30 UTC on January 15, 2017. 110 | // 111 | // In JavaScript, one can convert a Date object to this format using the 112 | // standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] 113 | // method. In Python, a standard `datetime.datetime` object can be converted 114 | // to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) 115 | // with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one 116 | // can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( 117 | // http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--) 118 | // to obtain a formatter capable of generating timestamps in this format. 119 | // 120 | // 121 | message Timestamp { 122 | 123 | // Represents seconds of UTC time since Unix epoch 124 | // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 125 | // 9999-12-31T23:59:59Z inclusive. 126 | int64 seconds = 1; 127 | 128 | // Non-negative fractions of a second at nanosecond resolution. Negative 129 | // second values with fractions must still have non-negative nanos values 130 | // that count forward in time. Must be from 0 to 999,999,999 131 | // inclusive. 132 | int32 nanos = 2; 133 | } 134 | -------------------------------------------------------------------------------- /duo-api/proto/instrument.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package rs.duo.instrument; 4 | 5 | import "span.proto"; 6 | import "log.proto"; 7 | import "process.proto"; 8 | 9 | service Instrument { 10 | // Register a new process and get the process id. 11 | // 12 | // The registration is name-agnostic: each new registration 13 | // with the same process name always gets a brand-new process id.
14 | rpc register_process(RegisterProcessRequest) returns (RegisterProcessResponse) {} 15 | 16 | rpc record_span(RecordSpanRequest) returns (RecordSpanResponse) {} 17 | 18 | rpc record_event(RecordEventRequest) returns (RecordEventResponse) {} 19 | } 20 | 21 | message RegisterProcessRequest { 22 | process.Process process = 1; 23 | } 24 | 25 | message RecordSpanRequest { 26 | span.Span span = 1; 27 | } 28 | 29 | message RecordEventRequest { 30 | log.Log log = 1; 31 | } 32 | 33 | message RegisterProcessResponse { 34 | string process_id = 1; 35 | } 36 | 37 | message RecordSpanResponse {} 38 | 39 | message RecordEventResponse {}
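The service above is plain gRPC, so any tonic client can drive it directly. `duo-subscriber` wraps the generated client for you, but a minimal hand-rolled registration might look like the following sketch (the endpoint and process name are illustrative):

```rust
use std::collections::HashMap;

use duo_api::instrument::{instrument_client::InstrumentClient, RegisterProcessRequest};
use duo_api::process::Process;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connect to duo's gRPC endpoint (the default port from the README).
    let mut client = InstrumentClient::connect("http://127.0.0.1:6000").await?;
    let response = client
        .register_process(RegisterProcessRequest {
            process: Some(Process {
                name: "my-service".into(),
                tags: HashMap::new(),
            }),
        })
        .await?;
    // Each registration yields a brand-new process id, even for a repeated name.
    println!("process_id = {}", response.into_inner().process_id);
    Ok(())
}
```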
-------------------------------------------------------------------------------- /duo-api/proto/log.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package rs.duo.log; 4 | 5 | import "google/protobuf/timestamp.proto"; 6 | import "common.proto"; 7 | 8 | message Log { 9 | // The process id 10 | string process_id = 1; 11 | // Span id. 12 | optional uint64 span_id = 2; 13 | // Trace id. 14 | optional uint64 trace_id = 3; 15 | // Describes the level of verbosity of a log. 16 | common.Level level = 4; 17 | // The part of the system that the log that this metadata describes 18 | // occurred in. 19 | string target = 5; 20 | // The name of the source code file where the log occurred. 21 | optional string file = 6; 22 | // The line number in the source code file where the log occurred. 23 | optional uint32 line = 7; 24 | // Timestamp. 25 | google.protobuf.Timestamp time = 8; 26 | // Key-value fields. 27 | map<string, common.Value> fields = 9; 28 | } -------------------------------------------------------------------------------- /duo-api/proto/process.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package rs.duo.process; 4 | 5 | import "common.proto"; 6 | 7 | message Process { 8 | // Process name. 9 | string name = 1; 10 | // Process tags. 11 | map<string, common.Value> tags = 2; 12 | } -------------------------------------------------------------------------------- /duo-api/proto/span.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package rs.duo.span; 4 | 5 | import "google/protobuf/timestamp.proto"; 6 | import "common.proto"; 7 | 8 | message Span { 9 | // Span id 10 | uint64 id = 1; 11 | // The process id 12 | string process_id = 2; 13 | // Trace id 14 | uint64 trace_id = 3; 15 | // Parent span id 16 | optional uint64 parent_id = 4; 17 | // Span name 18 | string name = 5; 19 | // Start timestamp 20 | google.protobuf.Timestamp start = 6; 21 | // End timestamp 22 | optional google.protobuf.Timestamp end = 7; 23 | // Span's tags 24 | map<string, common.Value> tags = 8; 25 | 26 | } -------------------------------------------------------------------------------- /duo-api/src/common.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Display; 2 | 3 | use serde_json::{Number, Value as JsonValue}; 4 | 5 | use crate::ValueEnum; 6 | 7 | tonic::include_proto!("rs.duo.common"); 8 | 9 | impl Value { 10 | pub fn type_name(&self) -> &str { 11 | if let Some(inner) = &self.inner { 12 | match inner { 13 | ValueEnum::StrVal(_) => "str", 14 | ValueEnum::U64Val(_) => "u64", 15 | ValueEnum::I64Val(_) => "i64", 16 | ValueEnum::BoolVal(_) => "bool", 17 | } 18 | } else { 19 | "" 20 | } 21 | } 22 | } 23 | 24 | impl Display for Value { 25 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 26 | if let Some(inner) = &self.inner { 27 | match inner { 28 | ValueEnum::StrVal(v) => write!(f, "{v}"), 29 | ValueEnum::U64Val(v) => write!(f, "{v}"), 30 | ValueEnum::I64Val(v) => write!(f, "{v}"), 31 | ValueEnum::BoolVal(v) => write!(f, "{v}"), 32 | } 33 | } else { 34 | write!(f, "") 35 | } 36 | } 37 | } 38 | 39 | impl From<tracing_core::Level> for Level { 40 | fn from(level: tracing_core::Level) -> Self { 41 | match level { 42 | tracing_core::Level::ERROR => Level::Error, 43 | tracing_core::Level::WARN => Level::Warn, 44 | tracing_core::Level::INFO => Level::Info, 45 | tracing_core::Level::DEBUG => Level::Debug, 46 | tracing_core::Level::TRACE => Level::Trace, 47 | } 48 | } 49 | } 50 | impl From<Level> for tracing_core::Level { 51 | fn from(level: Level) -> Self { 52 | match level { 53 | Level::Error => tracing_core::Level::ERROR, 54 | Level::Warn => tracing_core::Level::WARN, 55 | Level::Info => tracing_core::Level::INFO, 56 | Level::Debug => tracing_core::Level::DEBUG, 57 | Level::Trace => tracing_core::Level::TRACE, 58 | } 59 | } 60 | } 61 | 62 | impl From<value::Inner> for Value { 63 | fn from(inner: value::Inner) -> Self { 64 | Value { inner: Some(inner) } 65 | } 66 | } 67 | 68 | impl From<i32> for Value { 69 | fn from(val: i32) -> Self { 70 | value::Inner::I64Val(val as i64).into() 71 | } 72 | } 73 | 74 | impl From<u32> for Value { 75 | fn from(val: u32) -> Self { 76 | value::Inner::U64Val(val as u64).into() 77 | } 78 | } 79 | 80 | impl From<i64> for Value { 81 | fn from(val: i64) -> Self { 82 | value::Inner::I64Val(val).into() 83 | } 84 | } 85 | 86 | impl From<u64> for Value { 87 | fn from(val: u64) -> Self { 88 | value::Inner::U64Val(val).into() 89 | } 90 | } 91 | 92 | impl From<bool> for Value { 93 | fn from(val: bool) -> Self { 94 | value::Inner::BoolVal(val).into() 95 | } 96 | } 97 | 98 | impl From<&str> for Value { 99 | fn from(val: &str) -> Self { 100 | value::Inner::StrVal(val.into()).into() 101 | } 102 | } 103 | 104 | impl From<String> for Value { 105 | fn from(val: String) -> Self { 106 | value::Inner::StrVal(val).into() 107 | } 108 | } 109 | 110 | impl From<&dyn std::fmt::Debug> for Value { 111 | fn from(val: &dyn std::fmt::Debug) -> Self { 112 | value::Inner::StrVal(format!("{:?}", val)).into() 113 | } 114 | } 115 | 116 | impl From<Value> for JsonValue { 117 | fn from(val: Value) -> Self { 118 | if let Some(inner) = val.inner { 119 | match inner { 120 | ValueEnum::StrVal(v) => JsonValue::String(v), 121 | ValueEnum::U64Val(v) => JsonValue::Number(Number::from(v)), 122 | ValueEnum::I64Val(v) => JsonValue::Number(Number::from(v)), 123 | ValueEnum::BoolVal(v) => JsonValue::Bool(v), 124 | } 125 | } else { 126 | JsonValue::Null 127 | } 128 | } 129 | }
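Taken together, these `From` impls let instrumentation code build a `Value` from ordinary Rust types and let the web layer turn it back into JSON. A small standalone sketch of that round trip, assuming `duo-api` and `serde_json` as dependencies:

```rust
use duo_api::Value;
use serde_json::Value as JsonValue;

fn main() {
    // Each conversion picks the matching oneof variant (u64, i64, bool, str).
    let values: Vec<Value> = vec![42u64.into(), (-7i64).into(), true.into(), "ok".into()];
    for v in &values {
        println!("{} ({})", v, v.type_name());
    }
    // The web layer serializes values as plain JSON.
    let json: Vec<JsonValue> = values.into_iter().map(Into::into).collect();
    println!("{}", serde_json::to_string(&json).unwrap());
}
```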
130 | -------------------------------------------------------------------------------- /duo-api/src/instrument.rs: -------------------------------------------------------------------------------- 1 | tonic::include_proto!("rs.duo.instrument"); 2 | -------------------------------------------------------------------------------- /duo-api/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod common; 2 | pub mod instrument; 3 | pub mod log; 4 | pub mod process; 5 | pub mod span; 6 | 7 | pub use common::value::Inner as ValueEnum; 8 | pub use common::{Level, Value}; 9 | pub use log::Log; 10 | pub use process::Process; 11 | pub use span::Span; 12 | -------------------------------------------------------------------------------- /duo-api/src/log.rs: -------------------------------------------------------------------------------- 1 | tonic::include_proto!("rs.duo.log"); 2 | -------------------------------------------------------------------------------- /duo-api/src/process.rs: -------------------------------------------------------------------------------- 1 | tonic::include_proto!("rs.duo.process"); 2 | -------------------------------------------------------------------------------- /duo-api/src/span.rs: -------------------------------------------------------------------------------- 1 | tonic::include_proto!("rs.duo.span"); 2 | -------------------------------------------------------------------------------- /duo-subscriber/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "duo-subscriber" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | 8 | [dependencies] 9 | duo-api.workspace = true 10 | rand.workspace = true 11 | tokio = { version = "1", features = ["time", "sync"] } 12 | tonic.workspace = true 13 | tracing.workspace = true 14 | tracing-subscriber = { workspace = true, features = ["registry"] } 15 | 16 | [dev-dependencies] 17 | tokio = { version = "1", features = ["rt-multi-thread"] } 18 | tracing-subscriber = "0.3" 19 | log = "0.4" 20 | -------------------------------------------------------------------------------- /duo-subscriber/examples/log.rs: -------------------------------------------------------------------------------- 1 | // cargo run --example log --features=log-compat 2 | 3 | use std::time::Duration; 4 | 5 | use duo_subscriber::DuoLayer; 6 | use log::debug; 7 | use tonic::transport::Uri; 8 | use tracing::Level; 9 | use tracing_subscriber::{self, filter::Targets, layer::SubscriberExt, util::SubscriberInitExt}; 10 | 11 | #[tracing::instrument] 12 | fn foo() { 13 | debug!("hello foo!"); 14 | bar(); 15 | debug!("called bar!"); 16 | } 17 | 18 | #[tracing::instrument] 19 | fn bar() { 20 | debug!("hello bar!"); 21 | baz(); 22 | } 23 | 24 | #[tracing::instrument] 25 | fn baz() { 26 |
debug!("hello baz!"); 27 | } 28 | 29 | #[tokio::main] 30 | async fn main() { 31 | let name = "log"; 32 | let uri = Uri::from_static("http://127.0.0.1:6000"); 33 | let duo_layer = DuoLayer::new(name, uri).await; 34 | tracing_subscriber::registry() 35 | .with(duo_layer) 36 | .with( 37 | Targets::new() 38 | .with_target(name, Level::DEBUG) 39 | .with_target("tracing_subscriber", Level::DEBUG), 40 | ) 41 | .init(); 42 | 43 | debug!("Bootstrap..."); 44 | foo(); 45 | 46 | tokio::time::sleep(Duration::from_secs(1)).await; 47 | } 48 | -------------------------------------------------------------------------------- /duo-subscriber/examples/main.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use duo_subscriber::DuoLayer; 4 | use tonic::transport::Uri; 5 | use tracing::{debug, error, info, warn, Level}; 6 | use tracing_subscriber::{ 7 | self, filter::Targets, fmt, layer::SubscriberExt, util::SubscriberInitExt, 8 | }; 9 | 10 | #[tracing::instrument] 11 | fn foo() { 12 | info!(test = true, "hello foo!"); 13 | bar(); 14 | debug!("called bar!"); 15 | foz(); 16 | } 17 | 18 | #[tracing::instrument] 19 | fn bar() { 20 | baz(); 21 | } 22 | 23 | #[tracing::instrument] 24 | fn baz() { 25 | warn!("hello baz!"); 26 | } 27 | 28 | #[tracing::instrument] 29 | fn foz() { 30 | debug!("hello foz!"); 31 | error!(flag = 1, data = "data", "Oops!"); 32 | } 33 | 34 | #[tokio::main] 35 | async fn main() { 36 | let fmt_layer = fmt::layer(); 37 | let uri = Uri::from_static("http://127.0.0.1:6000"); 38 | let duo_layer = DuoLayer::new("example", uri).await; 39 | tracing_subscriber::registry() 40 | .with(fmt_layer) 41 | .with(duo_layer) 42 | .with( 43 | Targets::new() 44 | .with_target("main", Level::DEBUG) 45 | .with_target("tracing_subscriber", Level::DEBUG), 46 | ) 47 | .init(); 48 | 49 | tracing::info!("Bootstrap..."); 50 | foo(); 51 | tokio::time::sleep(Duration::from_secs(1)).await; 52 | }
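Both examples lean on `#[tracing::instrument]` to open a span per function call. The attribute also accepts the usual `tracing` options such as `skip` and computed `fields`; a short sketch (the function and field names are illustrative):

```rust
use tracing::instrument;

#[instrument(level = "debug", skip(payload), fields(payload_len = payload.len()))]
fn handle(request_id: u64, payload: Vec<u8>) {
    tracing::debug!("handling request");
}

fn main() {
    handle(7, vec![0u8; 16]);
}
```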
53 | -------------------------------------------------------------------------------- /duo-subscriber/src/client.rs: -------------------------------------------------------------------------------- 1 | use duo_api as proto; 2 | use proto::instrument::{ 3 | instrument_client::InstrumentClient, RecordEventRequest, RecordSpanRequest, 4 | RegisterProcessRequest, 5 | }; 6 | use proto::process::Process; 7 | use tonic::{transport::Channel, Request}; 8 | 9 | pub struct DuoClient { 10 | name: &'static str, 11 | process_id: String, 12 | inner: InstrumentClient<Channel>, 13 | } 14 | 15 | impl DuoClient { 16 | #[must_use] 17 | pub fn new(name: &'static str, client: InstrumentClient<Channel>) -> DuoClient { 18 | DuoClient { 19 | name, 20 | process_id: String::new(), 21 | inner: client, 22 | } 23 | } 24 | 25 | pub(crate) async fn registry_process(&mut self) { 26 | let response = self 27 | .inner 28 | .register_process(Request::new(RegisterProcessRequest { 29 | process: Some(Process { 30 | name: String::from(self.name), 31 | tags: super::grasp_process_info(), 32 | }), 33 | })) 34 | .await 35 | .unwrap(); 36 | self.process_id = response.into_inner().process_id; 37 | } 38 | 39 | pub async fn record_span(&mut self, mut span: proto::Span) { 40 | span.process_id = self.process_id.clone(); 41 | self.inner 42 | .record_span(Request::new(RecordSpanRequest { span: Some(span) })) 43 | .await 44 | .unwrap(); 45 | } 46 | 47 | pub async fn record_event(&mut self, mut log: proto::Log) { 48 | log.process_id = self.process_id.clone(); 49 | self.inner 50 | .record_event(Request::new(RecordEventRequest { log: Some(log) })) 51 | .await 52 | .unwrap(); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /duo-subscriber/src/conn.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use duo_api::instrument::instrument_client::InstrumentClient; 4 | use tonic::transport::{Channel, Uri}; 5 | 6 | use crate::client::DuoClient; 7 | 8 | pub struct Connection; 9 | 10 | impl Connection { 11 | const BACKOFF: Duration = Duration::from_millis(500); 12 | const MAX_BACKOFF: Duration = Duration::from_secs(5); 13 | 14 | pub async fn connect(name: &'static str, uri: Uri) -> DuoClient { 15 | let mut backoff = Duration::from_secs(0); 16 | loop { 17 | if backoff == Duration::from_secs(0) { 18 | tracing::debug!(to = %uri, "connecting"); 19 | } else { 20 | tracing::debug!(reconnect_in = ?backoff, "reconnecting"); 21 | tokio::time::sleep(backoff).await; 22 | } 23 | 24 | let try_connect = async { 25 | let client = InstrumentClient::connect(uri.clone()) 26 | .await 27 | .map_err(|err| format!("InstrumentClient connect error: {}", err))?; 28 | Ok::<InstrumentClient<Channel>, String>(client) 29 | }; 30 | 31 | match try_connect.await { 32 | Ok(connected_client) => { 33 | tracing::debug!("connected successfully!"); 34 | let mut client = DuoClient::new(name, connected_client); 35 | client.registry_process().await; 36 | return client; 37 | } 38 | Err(error) => { 39 | tracing::warn!(%error, "error connecting"); 40 | backoff = std::cmp::min(backoff + Self::BACKOFF, Self::MAX_BACKOFF); 41 | } 42 | }; 43 | } 44 | } 45 | }
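With the delay capped via `min`, each failed attempt adds `BACKOFF` to the wait until it saturates at `MAX_BACKOFF`. A tiny standalone sketch of the same policy, with the constants copied from above:

```rust
use std::time::Duration;

const BACKOFF: Duration = Duration::from_millis(500);
const MAX_BACKOFF: Duration = Duration::from_secs(5);

fn main() {
    let mut backoff = Duration::from_secs(0);
    for attempt in 1..=12 {
        backoff = std::cmp::min(backoff + BACKOFF, MAX_BACKOFF);
        // attempt 1 waits 500ms, attempt 2 waits 1s, ... attempt 10 onward stays at 5s
        println!("attempt {attempt}: wait {backoff:?}");
    }
}
```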
46 | -------------------------------------------------------------------------------- /duo-subscriber/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Duo subscriber for tracing. 2 | //! 3 | use std::collections::HashMap; 4 | 5 | use duo_api as proto; 6 | mod client; 7 | mod conn; 8 | mod subscriber; 9 | mod visitor; 10 | 11 | pub use subscriber::DuoLayer; 12 | 13 | // Grasp basic process info; this is sent to the server 14 | // when the process registers. 15 | fn grasp_process_info() -> HashMap<String, proto::Value> { 16 | let mut tags = HashMap::default(); 17 | tags.insert("duo-version".into(), env!("CARGO_PKG_VERSION").into()); 18 | tags 19 | } 20 | -------------------------------------------------------------------------------- /duo-subscriber/src/subscriber.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | time::{Instant, SystemTime}, 4 | }; 5 | 6 | use crate::{ 7 | conn::Connection, 8 | proto, 9 | visitor::{EventAttributeVisitor, SpanAttributeVisitor}, 10 | }; 11 | use rand::rngs::ThreadRng; 12 | use rand::Rng; 13 | use tokio::{ 14 | sync::mpsc::{self, error::TrySendError, Sender}, 15 | task::JoinHandle, 16 | }; 17 | use tonic::transport::Uri; 18 | use tracing::{ 19 | span::{self, Attributes}, 20 | Subscriber, 21 | }; 22 | use tracing_subscriber::{layer::Context, registry::LookupSpan, Layer}; 23 | 24 | pub struct DuoLayer { 25 | sender: Sender<Message>, 26 | } 27 | 28 | #[derive(Debug)] 29 | enum Message { 30 | NewSpan(proto::Span), 31 | CloseSpan(proto::Span), 32 | Event(proto::Log), 33 | } 34 | 35 | struct Timings { 36 | // unit: us 37 | idle: u32, 38 | // unit: us 39 | busy: u32, 40 | last: Instant, 41 | } 42 | 43 | impl Timings { 44 | fn new() -> Self { 45 | Self { 46 | idle: 0, 47 | busy: 0, 48 | last: Instant::now(), 49 | } 50 | } 51 | } 52 | 53 | impl DuoLayer { 54 | pub async fn new(name: &'static str, uri: Uri) -> Self { 55 | let (layer, _) = Self::with_handle(name, uri).await; 56 | layer 57 | } 58 | 59 | pub async fn with_handle(name: &'static str, uri: Uri) -> (Self, JoinHandle<()>) { 60 | let (sender, mut receiver) = mpsc::channel(2048); 61 | let handler = tokio::spawn(async move { 62 | let mut client = Connection::connect(name, uri).await; 63 | while let Some(message) = receiver.recv().await { 64 | match message { 65 | Message::NewSpan(span) | Message::CloseSpan(span) => { 66 | client.record_span(span).await 67 | } 68 | Message::Event(log) => { 69 | client.record_event(log).await; 70 | } 71 | } 72 | } 73 | }); 74 | (DuoLayer { sender }, handler) 75 | } 76 | 77 | #[inline] 78 | fn send_message(&self, message: Message) { 79 | match self.sender.try_send(message) { 80 | Ok(_) => {} 81 | Err(TrySendError::Closed(_)) => {} 82 | Err(TrySendError::Full(_)) => {} 83 | } 84 | } 85 | } 86 | 87 | impl<S> Layer<S> for DuoLayer 88 | where 89 | S: Subscriber + for<'span> LookupSpan<'span>, 90 | { 91 | fn on_new_span(&self, attrs: &Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { 92 | if let Some(span) = ctx.span(id) { 93 | let mut extension = span.extensions_mut(); 94 | 95 | let parent_span = if let Some(parent) = attrs.parent() { 96 | ctx.span(parent) 97 | } else if attrs.is_contextual() { 98 | ctx.lookup_current() 99 | } else { 100 | None 101 | }; 102 | 103 | let rand_id = ThreadRng::default().gen::<u32>() as u64; 104 | // Obtain parent_id and trace_id from parent span. 105 | let (parent_id, trace_id) = parent_span 106 | .and_then(|span_ref| { 107 | span_ref 108 | .extensions() 109 | .get::<proto::Span>() 110 | .map(|s| (Some(s.id), s.trace_id)) 111 | }) 112 | // If the parent has no trace_id, use the newly generated one.
113 | .unwrap_or((None, rand_id)); 114 | 115 | let metadata = attrs.metadata(); 116 | let mut tags = HashMap::with_capacity(3 + metadata.fields().len()); 117 | if let (Some(file), Some(line)) = (metadata.file(), metadata.line()) { 118 | tags.insert("line".into(), format!("{}:{}", file, line).into()); 119 | } 120 | let mut span = proto::Span { 121 | id: rand_id, 122 | trace_id, 123 | parent_id, 124 | name: metadata.name().into(), 125 | start: Some(SystemTime::now().into()), 126 | end: None, 127 | tags, 128 | // Set a temporary process id; the real value is set at send time. 129 | process_id: String::new(), 130 | }; 131 | attrs.record(&mut SpanAttributeVisitor(&mut span)); 132 | self.send_message(Message::NewSpan(span.clone())); 133 | extension.insert(span); 134 | extension.insert(Timings::new()); 135 | } 136 | } 137 | 138 | fn on_event(&self, event: &tracing::Event<'_>, ctx: Context<'_, S>) { 139 | let parent_span_ref = if let Some(parent) = event.parent() { 140 | ctx.span(parent) 141 | } else if event.is_contextual() { 142 | ctx.lookup_current() 143 | } else { 144 | None 145 | }; 146 | 147 | let (trace_id, span_id) = parent_span_ref 148 | .and_then(|span_ref| { 149 | span_ref 150 | .extensions() 151 | .get::<proto::Span>() 152 | .map(|span| (Some(span.trace_id), Some(span.id))) 153 | }) 154 | .unwrap_or_default(); 155 | 156 | let metadata = event.metadata(); 157 | let fields = HashMap::with_capacity(metadata.fields().len()); 158 | let mut log = proto::Log { 159 | // Set a temporary process id; the real value is set at send time. 160 | process_id: String::new(), 161 | span_id, 162 | trace_id, 163 | target: metadata.target().into(), 164 | file: metadata.file().map(Into::into), 165 | line: metadata.line(), 166 | level: proto::Level::from(*metadata.level()) as i32, 167 | time: Some(SystemTime::now().into()), 168 | fields, 169 | }; 170 | event.record(&mut EventAttributeVisitor(&mut log)); 171 | self.send_message(Message::Event(log)); 172 | } 173 | 174 | fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { 175 | if let Some(span) = ctx.span(id) { 176 | let mut extensions = span.extensions_mut(); 177 | if let Some(timings) = extensions.get_mut::<Timings>() { 178 | let now = Instant::now(); 179 | timings.idle += now.saturating_duration_since(timings.last).as_micros() as u32; 180 | timings.last = now; 181 | } 182 | } 183 | } 184 | 185 | fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) { 186 | if let Some(span_ref) = ctx.span(id) { 187 | let mut extension = span_ref.extensions_mut(); 188 | if let Some(span) = extension.get_mut::<proto::Span>() { 189 | values.record(&mut SpanAttributeVisitor(span)); 190 | } 191 | } 192 | } 193 | 194 | fn on_follows_from(&self, id: &span::Id, follows: &span::Id, ctx: Context<'_, S>) { 195 | if let (Some(current), Some(follows)) = (ctx.span(id), ctx.span(follows)) { 196 | if let (Some(child), Some(parent)) = ( 197 | current.extensions_mut().get_mut::<proto::Span>(), 198 | follows.extensions().get::<proto::Span>(), 199 | ) { 200 | child.parent_id = Some(parent.id); 201 | } 202 | } 203 | } 204 | 205 | fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { 206 | if let Some(span) = ctx.span(id) { 207 | let mut extensions = span.extensions_mut(); 208 | if let Some(timings) = extensions.get_mut::<Timings>() { 209 | let now = Instant::now(); 210 | timings.busy += now.saturating_duration_since(timings.last).as_micros() as u32; 211 | timings.last = now; 212 | } 213 | } 214 | } 215 | 216 | fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { 217 | if let Some(span_ref) = ctx.span(&id) { 218 | let mut extensions = span_ref.extensions_mut(); 219 | if let Some(mut span) = extensions.remove::<proto::Span>() { 220 | span.end = Some(SystemTime::now().into()); 221 | 222 | if let Some(timings) = extensions.remove::<Timings>() { 223 | span.tags.insert("idle".into(), timings.idle.into()); 224 | span.tags.insert("busy".into(), timings.busy.into()); 225 | } 226 | 227 | self.send_message(Message::CloseSpan(span)); 228 | } 229 | } 230 | } 231 | }
232 | -------------------------------------------------------------------------------- /duo-subscriber/src/visitor.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | 3 | use crate::proto; 4 | use tracing::field::{Field, Visit}; 5 | pub(crate) struct SpanAttributeVisitor<'a>(pub(crate) &'a mut proto::Span); 6 | 7 | pub(crate) struct EventAttributeVisitor<'a>(pub(crate) &'a mut proto::Log); 8 | 9 | impl<'a> Visit for SpanAttributeVisitor<'a> { 10 | fn record_i64(&mut self, field: &Field, value: i64) { 11 | self.0.tags.insert(field.name().into(), value.into()); 12 | } 13 | 14 | fn record_u64(&mut self, field: &Field, value: u64) { 15 | self.0.tags.insert(field.name().into(), value.into()); 16 | } 17 | 18 | fn record_bool(&mut self, field: &Field, value: bool) { 19 | self.0.tags.insert(field.name().into(), value.into()); 20 | } 21 | 22 | fn record_str(&mut self, field: &Field, value: &str) { 23 | self.0.tags.insert(field.name().into(), value.into()); 24 | } 25 | 26 | fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { 27 | self.0.tags.insert(field.name().into(), value.into()); 28 | } 29 | } 30 | 31 | impl<'a> Visit for EventAttributeVisitor<'a> { 32 | fn record_i64(&mut self, field: &Field, value: i64) { 33 | self.0.fields.insert(field.name().into(), value.into()); 34 | } 35 | 36 | fn record_u64(&mut self, field: &Field, value: u64) { 37 | self.0.fields.insert(field.name().into(), value.into()); 38 | } 39 | 40 | fn record_bool(&mut self, field: &Field, value: bool) { 41 | self.0.fields.insert(field.name().into(), value.into()); 42 | } 43 | 44 | fn record_str(&mut self, field: &Field, value: &str) { 45 | self.0.fields.insert(field.name().into(), value.into()); 46 | } 47 | 48 | fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { 49 | self.0.fields.insert(field.name().into(), value.into()); 50 | } 51 | }
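Note that `record_debug` is the fallback for any field type without a dedicated `record_*` method: the value is rendered with `{:?}` and stored as a string variant. A small sketch of that behavior using only the public `duo-api` types (the `Point` struct is illustrative):

```rust
use duo_api::{Value, ValueEnum};

#[derive(Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    // The Debug fallback renders the value to its `{:?}` string form.
    let v = Value::from(&p as &dyn std::fmt::Debug);
    assert!(matches!(v.inner, Some(ValueEnum::StrVal(_))));
    println!("{v} ({})", v.type_name()); // Point { x: 1, y: 2 } (str)
}
```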
52 | -------------------------------------------------------------------------------- /duo-ui-logging.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/duo-rs/duo/a74401ba4db5eeefb69b89d0d6b72e77518cf628/duo-ui-logging.png -------------------------------------------------------------------------------- /duo-ui-tracing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/duo-rs/duo/a74401ba4db5eeefb69b89d0d6b72e77518cf628/duo-ui-tracing.png -------------------------------------------------------------------------------- /duo-ui/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | package-lock.json 3 | 4 | # Output 5 | .output 6 | .vercel 7 | /.svelte-kit 8 | /build 9 | 10 | # OS 11 | .DS_Store 12 | Thumbs.db 13 | 14 | # Env 15 | .env 16 | .env.* 17 | !.env.example 18 | !.env.test 19 | 20 | # Vite 21 | vite.config.js.timestamp-* 22 | vite.config.ts.timestamp-* 23 | -------------------------------------------------------------------------------- /duo-ui/.npmrc: -------------------------------------------------------------------------------- 1 | engine-strict=true 2 | -------------------------------------------------------------------------------- /duo-ui/.prettierignore: -------------------------------------------------------------------------------- 1 | # Package Managers 2 | package-lock.json 3 | pnpm-lock.yaml 4 | yarn.lock 5 | -------------------------------------------------------------------------------- /duo-ui/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "singleQuote": true, 4 | "trailingComma": "all", 5 | "printWidth": 100, 6 | "plugins": ["prettier-plugin-svelte", "prettier-plugin-tailwindcss"], 7 | "overrides": [ 8 | { 9 | "files": "*.svelte", 10 | "options": { 11 | "parser": "svelte" 12 | } 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /duo-ui/README.md: -------------------------------------------------------------------------------- 1 | # duo-ui 2 | 3 | ## Developing 4 | 5 | Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: 6 | 7 | ```bash 8 | npm run dev 9 | 10 | # or start the server and open the app in a new browser tab 11 | npm run dev -- --open 12 | ``` 13 | 14 | ## Building 15 | 16 | To create a production version of your app: 17 | 18 | ```bash 19 | npm run build 20 | ``` 21 | 22 | You can preview the production build with `npm run preview`. 23 | 24 | > To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment. 25 | -------------------------------------------------------------------------------- /duo-ui/build-jeager.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # Copy modified files to the jaeger-ui repository 6 | # cp -r packages/* jaeger-ui/packages/ 7 | 8 | cd jaeger-ui 9 | 10 | yarn && yarn build 11 | 12 | BUILD_DIR=packages/jaeger-ui/build 13 | find ${BUILD_DIR} -type f \( -name "*runtime*.js" -o -name "*.map" \) | xargs rm 14 | 15 | TARGET_DIR=../../duo/ui/ 16 | 17 | if [ -d "${TARGET_DIR}" ] 18 | then 19 | rm -rf "${TARGET_DIR:?}"* 20 | else 21 | mkdir -p ${TARGET_DIR} 22 | fi 23 | 24 | # Copy index.html file 25 | cp ${BUILD_DIR}/index.html ${TARGET_DIR}/trace.html 26 | 27 | # Copy the rest of the static files 28 | cp -r ${BUILD_DIR}/static ${TARGET_DIR} -------------------------------------------------------------------------------- /duo-ui/build-ui.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | BUILD_DIR=build 6 | TARGET_DIR=../duo/ui/ 7 | # Remove legacy app dir 8 | rm -rf ${TARGET_DIR}/_app 9 | npm run build 10 | cp -r ${BUILD_DIR}/* ${TARGET_DIR} 11 | -------------------------------------------------------------------------------- /duo-ui/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://shadcn-svelte.com/schema.json", 3 | "style": "default", 4 | "tailwind": { 5 | "config": "tailwind.config.js", 6 | "css": "src/app.css", 7 | "baseColor": "slate" 8 | }, 9 | "aliases": { 10 | "components": "$lib/components", 11 | "utils": "$lib/utils" 12 | }, 13 | "typescript": false 14 | } 15 | -------------------------------------------------------------------------------- /duo-ui/eslint.config.js:
-------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import svelte from 'eslint-plugin-svelte'; 3 | import prettier from 'eslint-config-prettier'; 4 | import globals from 'globals'; 5 | 6 | /** @type {import('eslint').Linter.Config[]} */ 7 | export default [ 8 | js.configs.recommended, 9 | ...svelte.configs['flat/recommended'], 10 | prettier, 11 | ...svelte.configs['flat/prettier'], 12 | { 13 | languageOptions: { 14 | globals: { 15 | ...globals.browser, 16 | ...globals.node, 17 | }, 18 | }, 19 | }, 20 | { 21 | ignores: ['build/', '.svelte-kit/', 'dist/'], 22 | }, 23 | ]; 24 | -------------------------------------------------------------------------------- /duo-ui/jsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./.svelte-kit/tsconfig.json", 3 | "compilerOptions": { 4 | "allowJs": true, 5 | "checkJs": true, 6 | "esModuleInterop": true, 7 | "forceConsistentCasingInFileNames": true, 8 | "resolveJsonModule": true, 9 | "skipLibCheck": true, 10 | "sourceMap": true, 11 | "strict": true, 12 | "moduleResolution": "bundler" 13 | } 14 | // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias 15 | // except $lib which is handled by https://kit.svelte.dev/docs/configuration#files 16 | // 17 | // If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes 18 | // from the referenced tsconfig.json - TypeScript does not merge them in 19 | } 20 | -------------------------------------------------------------------------------- /duo-ui/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "duo-ui", 3 | "version": "0.0.1", 4 | "private": true, 5 | "scripts": { 6 | "dev": "vite dev", 7 | "build": "vite build", 8 | "preview": "vite preview", 9 | "check": "svelte-kit sync && svelte-check --tsconfig ./jsconfig.json", 10 | "check:watch": "svelte-kit sync && svelte-check --tsconfig ./jsconfig.json --watch", 11 | "lint": "prettier --check . && eslint .", 12 | "format": "prettier --write ." 
13 | }, 14 | "devDependencies": { 15 | "@sveltejs/adapter-static": "^3.0.4", 16 | "@sveltejs/kit": "^2.0.0", 17 | "@sveltejs/vite-plugin-svelte": "^3.0.0", 18 | "@types/eslint": "^9.6.0", 19 | "autoprefixer": "^10.4.20", 20 | "eslint": "^9.0.0", 21 | "eslint-config-prettier": "^9.1.0", 22 | "eslint-plugin-svelte": "^2.36.0", 23 | "globals": "^15.0.0", 24 | "prettier": "^3.1.1", 25 | "prettier-plugin-svelte": "^3.1.2", 26 | "prettier-plugin-tailwindcss": "^0.6.5", 27 | "svelte": "^4.2.18", 28 | "svelte-check": "^3.6.0", 29 | "tailwindcss": "^3.4.9", 30 | "typescript": "^5.0.0", 31 | "vite": "^5.0.3" 32 | }, 33 | "type": "module", 34 | "dependencies": { 35 | "@svelte-plugins/datepicker": "^1.0.9", 36 | "bits-ui": "^0.21.13", 37 | "clsx": "^2.1.1", 38 | "dayjs": "^1.11.12", 39 | "ky": "^1.6.0", 40 | "lucide-svelte": "^0.427.0", 41 | "paneforge": "^0.0.5", 42 | "svelecte": "^4.2.5", 43 | "svelte-infinite-loading": "^1.4.0", 44 | "tailwind-merge": "^2.5.2", 45 | "tailwind-variants": "^0.2.1" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /duo-ui/postcss.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /duo-ui/src/app.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @layer base { 6 | :root { 7 | --background: 0 0% 100%; 8 | --foreground: 222.2 84% 4.9%; 9 | 10 | --muted: 210 40% 96.1%; 11 | --muted-foreground: 215.4 16.3% 46.9%; 12 | 13 | --popover: 0 0% 100%; 14 | --popover-foreground: 222.2 84% 4.9%; 15 | 16 | --card: 0 0% 100%; 17 | --card-foreground: 222.2 84% 4.9%; 18 | 19 | --border: 214.3 31.8% 91.4%; 20 | --input: 214.3 31.8% 91.4%; 21 | 22 | --primary: 222.2 47.4% 11.2%; 23 | --primary-foreground: 210 40% 98%; 24 | 25 | --secondary: 210 40% 96.1%; 26 | --secondary-foreground: 222.2 47.4% 11.2%; 27 | 28 | --accent: 210 40% 96.1%; 29 | --accent-foreground: 222.2 47.4% 11.2%; 30 | 31 | --destructive: 0 72.2% 50.6%; 32 | --destructive-foreground: 210 40% 98%; 33 | 34 | --ring: 222.2 84% 4.9%; 35 | 36 | --radius: 0.5rem; 37 | } 38 | 39 | .dark { 40 | --background: 222.2 84% 4.9%; 41 | --foreground: 210 40% 98%; 42 | 43 | --muted: 217.2 32.6% 17.5%; 44 | --muted-foreground: 215 20.2% 65.1%; 45 | 46 | --popover: 222.2 84% 4.9%; 47 | --popover-foreground: 210 40% 98%; 48 | 49 | --card: 222.2 84% 4.9%; 50 | --card-foreground: 210 40% 98%; 51 | 52 | --border: 217.2 32.6% 17.5%; 53 | --input: 217.2 32.6% 17.5%; 54 | 55 | --primary: 210 40% 98%; 56 | --primary-foreground: 222.2 47.4% 11.2%; 57 | 58 | --secondary: 217.2 32.6% 17.5%; 59 | --secondary-foreground: 210 40% 98%; 60 | 61 | --accent: 217.2 32.6% 17.5%; 62 | --accent-foreground: 210 40% 98%; 63 | 64 | --destructive: 0 62.8% 30.6%; 65 | --destructive-foreground: 210 40% 98%; 66 | 67 | --ring: hsl(212.7, 26.8%, 83.9); 68 | } 69 | } 70 | 71 | @layer base { 72 | * { 73 | @apply border-border; 74 | } 75 | body { 76 | @apply bg-background text-foreground; 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /duo-ui/src/app.d.ts: -------------------------------------------------------------------------------- 1 | // See https://kit.svelte.dev/docs/types#app 2 | // for information about these interfaces 3 | declare 
global { 4 | namespace App { 5 | // interface Error {} 6 | // interface Locals {} 7 | // interface PageData {} 8 | // interface PageState {} 9 | // interface Platform {} 10 | } 11 | } 12 | 13 | export {}; 14 | -------------------------------------------------------------------------------- /duo-ui/src/app.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %sveltekit.head% 8 | 9 | 10 |
%sveltekit.body%
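<!-- SvelteKit replaces %sveltekit.body% with the rendered application markup. -->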
11 | 12 | 13 | -------------------------------------------------------------------------------- /duo-ui/src/lib/api.js: -------------------------------------------------------------------------------- 1 | import ky from 'ky'; 2 | 3 | const API_URL = process.env.NODE_ENV === 'production' ? '/' : 'http://localhost:3000'; 4 | 5 | export const client = ky.extend({ 6 | prefixUrl: API_URL, 7 | }); 8 | 9 | export const api = { 10 | async getServices() { 11 | /** @type {{data: []}} */ 12 | let response = await client.get('api/services').json(); 13 | return response.data.sort(); 14 | }, 15 | /** 16 | * @returns {Promise<{fields: any}>} 17 | */ 18 | async getSchema() { 19 | return await client.get('api/logs/schema').json(); 20 | }, 21 | /** 22 | * @param {URLSearchParams} searchParams 23 | * @returns {Promise} 24 | */ 25 | async searchLogs(searchParams) { 26 | let response = await client.get('api/logs', { 27 | searchParams, 28 | }); 29 | if (response.ok) { 30 | return response.json(); 31 | } else { 32 | throw new Error(response.statusText); 33 | } 34 | }, 35 | /** 36 | * @param {string} field 37 | * @param {URLSearchParams} searchParams 38 | * @returns {Promise<{count:number, value: string}[]>} 39 | */ 40 | async getFieldStats(field, searchParams) { 41 | let response = await client.get(`api/logs/stats/${field}`, { searchParams }); 42 | if (response.ok) { 43 | return response.json(); 44 | } else { 45 | return []; 46 | } 47 | }, 48 | }; 49 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/Datatype.svelte: -------------------------------------------------------------------------------- 1 | 53 | 54 | {#if typeName} 55 |
56 | {typeName} 57 |
58 | {/if} 59 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/LogItem.svelte: -------------------------------------------------------------------------------- 1 | 91 | 92 | 121 | 122 | {#if expand} 123 |
124 | {#each Object.entries(allFields()) as [key, value]} 125 |
126 | {key} 127 |
{value || ''}
128 |
129 | 130 | {/each} 131 |
132 | {/if} 133 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/badge/badge.svelte: -------------------------------------------------------------------------------- 1 | 9 | 10 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/badge/index.js: -------------------------------------------------------------------------------- 1 | import { tv } from 'tailwind-variants'; 2 | export { default as Badge } from './badge.svelte'; 3 | export const badgeVariants = tv({ 4 | base: 'focus:ring-ring inline-flex select-none items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-offset-2', 5 | variants: { 6 | variant: { 7 | default: 'bg-primary text-primary-foreground hover:bg-primary/80 border-transparent', 8 | secondary: 'bg-secondary text-secondary-foreground hover:bg-secondary/80 border-transparent', 9 | destructive: 10 | 'bg-destructive text-destructive-foreground hover:bg-destructive/80 border-transparent', 11 | outline: 'text-foreground', 12 | }, 13 | }, 14 | defaultVariants: { 15 | variant: 'default', 16 | }, 17 | }); 18 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/button/button.svelte: -------------------------------------------------------------------------------- 1 | 11 | 12 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/button/index.js: -------------------------------------------------------------------------------- 1 | import { tv } from 'tailwind-variants'; 2 | import Root from './button.svelte'; 3 | const buttonVariants = tv({ 4 | base: 'ring-offset-background focus-visible:ring-ring inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50', 5 | variants: { 6 | variant: { 7 | default: 'bg-primary text-primary-foreground hover:bg-primary/90', 8 | destructive: 'bg-destructive text-destructive-foreground hover:bg-destructive/90', 9 | outline: 'border-input bg-background hover:bg-accent hover:text-accent-foreground border', 10 | secondary: 'bg-secondary text-secondary-foreground hover:bg-secondary/80', 11 | ghost: 'hover:bg-accent hover:text-accent-foreground', 12 | link: 'text-primary underline-offset-4 hover:underline', 13 | }, 14 | size: { 15 | default: 'h-10 px-4 py-2', 16 | sm: 'h-9 rounded-md px-3', 17 | lg: 'h-11 rounded-md px-8', 18 | icon: 'h-10 w-10', 19 | }, 20 | }, 21 | defaultVariants: { 22 | variant: 'default', 23 | size: 'default', 24 | }, 25 | }); 26 | export { 27 | Root, 28 | // 29 | Root as Button, 30 | buttonVariants, 31 | }; 32 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/collapsible/collapsible-content.svelte: -------------------------------------------------------------------------------- 1 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/collapsible/index.js: -------------------------------------------------------------------------------- 1 | import { Collapsible as CollapsiblePrimitive } from 'bits-ui'; 2 | import Content from './collapsible-content.svelte'; 3 | const Root = 
CollapsiblePrimitive.Root; 4 | const Trigger = CollapsiblePrimitive.Trigger; 5 | export { 6 | Root, 7 | Content, 8 | Trigger, 9 | // 10 | Root as Collapsible, 11 | Content as CollapsibleContent, 12 | Trigger as CollapsibleTrigger, 13 | }; 14 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/input/index.js: -------------------------------------------------------------------------------- 1 | import Root from './input.svelte'; 2 | export { 3 | Root, 4 | // 5 | Root as Input, 6 | }; 7 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/input/input.svelte: -------------------------------------------------------------------------------- 1 | 8 | 9 | 34 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/progress/index.js: -------------------------------------------------------------------------------- 1 | import Root from './progress.svelte'; 2 | export { 3 | Root, 4 | // 5 | Root as Progress, 6 | }; 7 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/progress/progress.svelte: -------------------------------------------------------------------------------- 1 | 9 | 10 | 14 |
18 |
19 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/resizable/index.js: -------------------------------------------------------------------------------- 1 | import { Pane } from 'paneforge'; 2 | import Handle from './resizable-handle.svelte'; 3 | import PaneGroup from './resizable-pane-group.svelte'; 4 | export { 5 | PaneGroup, 6 | Pane, 7 | Handle, 8 | // 9 | PaneGroup as ResizablePaneGroup, 10 | Pane as ResizablePane, 11 | Handle as ResizableHandle, 12 | }; 13 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/resizable/resizable-handle.svelte: -------------------------------------------------------------------------------- 1 | 10 | 11 | div]:rotate-90', 15 | className, 16 | )} 17 | > 18 | {#if withHandle} 19 |
20 | 21 |
22 | {/if} 23 |
24 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/resizable/resizable-pane-group.svelte: -------------------------------------------------------------------------------- 1 | 10 | 11 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/scroll-area/index.js: -------------------------------------------------------------------------------- 1 | import Scrollbar from './scroll-area-scrollbar.svelte'; 2 | import Root from './scroll-area.svelte'; 3 | export { 4 | Root, 5 | Scrollbar, 6 | //, 7 | Root as ScrollArea, 8 | Scrollbar as ScrollAreaScrollbar, 9 | }; 10 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/scroll-area/scroll-area-scrollbar.svelte: -------------------------------------------------------------------------------- 1 | 8 | 9 | 18 | 19 | 22 | 23 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/scroll-area/scroll-area.svelte: -------------------------------------------------------------------------------- 1 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | {#if orientation === 'vertical' || orientation === 'both'} 19 | 20 | {/if} 21 | {#if orientation === 'horizontal' || orientation === 'both'} 22 | 23 | {/if} 24 | 25 | 26 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/separator/index.js: -------------------------------------------------------------------------------- 1 | import Root from './separator.svelte'; 2 | export { 3 | Root, 4 | // 5 | Root as Separator, 6 | }; 7 | -------------------------------------------------------------------------------- /duo-ui/src/lib/components/ui/separator/separator.svelte: -------------------------------------------------------------------------------- 1 | 9 | 10 | 20 | -------------------------------------------------------------------------------- /duo-ui/src/lib/index.js: -------------------------------------------------------------------------------- 1 | // place files you want to import through the `$lib` alias in this folder. 2 | -------------------------------------------------------------------------------- /duo-ui/src/lib/utils.js: -------------------------------------------------------------------------------- 1 | import { clsx } from 'clsx'; 2 | import { twMerge } from 'tailwind-merge'; 3 | import { cubicOut } from 'svelte/easing'; 4 | 5 | export function cn(...inputs) { 6 | return twMerge(clsx(inputs)); 7 | } 8 | 9 | export const flyAndScale = (node, params = { y: -8, x: 0, start: 0.95, duration: 150 }) => { 10 | const style = getComputedStyle(node); 11 | const transform = style.transform === 'none' ? '' : style.transform; 12 | 13 | const scaleConversion = (valueA, scaleA, scaleB) => { 14 | const [minA, maxA] = scaleA; 15 | const [minB, maxB] = scaleB; 16 | 17 | const percentage = (valueA - minA) / (maxA - minA); 18 | const valueB = percentage * (maxB - minB) + minB; 19 | 20 | return valueB; 21 | }; 22 | 23 | const styleToString = (style) => { 24 | return Object.keys(style).reduce((str, key) => { 25 | if (style[key] === undefined) return str; 26 | return str + `${key}:${style[key]};`; 27 | }, ''); 28 | }; 29 | 30 | return { 31 | duration: params.duration ?? 200, 32 | delay: 0, 33 | css: (t) => { 34 | const y = scaleConversion(t, [0, 1], [params.y ?? 5, 0]); 35 | const x = scaleConversion(t, [0, 1], [params.x ?? 
0, 0]); 36 | const scale = scaleConversion(t, [0, 1], [params.start ?? 0.95, 1]); 37 | 38 | return styleToString({ 39 | transform: `${transform} translate3d(${x}px, ${y}px, 0) scale(${scale})`, 40 | opacity: t, 41 | }); 42 | }, 43 | easing: cubicOut, 44 | }; 45 | }; 46 | -------------------------------------------------------------------------------- /duo-ui/src/routes/+layout.js: -------------------------------------------------------------------------------- 1 | export const ssr = false; 2 | -------------------------------------------------------------------------------- /duo-ui/src/routes/+layout.svelte: -------------------------------------------------------------------------------- 1 | 14 | 15 |
16 |

DUO

17 |
    18 | {#each menus as menu} 19 |
  • 20 | {menu.name} 21 |
  • 22 | {/each} 23 |
24 |
25 |
26 | 27 |
28 | -------------------------------------------------------------------------------- /duo-ui/src/routes/+page.js: -------------------------------------------------------------------------------- 1 | import { api } from '$lib/api'; 2 | 3 | export const ssr = false; 4 | 5 | /** 6 | * @type {import('./$types').PageLoad} 7 | * @return {Promise<{services: Array<{}>, schema: {fields: any}}>} 8 | */ 9 | export async function load() { 10 | return { 11 | services: await api.getServices(), 12 | schema: await api.getSchema(), 13 | }; 14 | } 15 | -------------------------------------------------------------------------------- /duo-ui/src/routes/+page.svelte: -------------------------------------------------------------------------------- 1 | 164 | 165 |
166 |
167 | Service: 168 | 176 | 182 |
183 | 196 | 207 | 208 |
209 | 210 |
211 | 212 | 213 | 214 |

Fields

215 | {#each filterableFields() as field} 216 | 217 | 218 |
219 | 220 | {field.name} 221 | 222 | 223 |
224 |
225 | 226 | {#await getFieldStats(field.name)} 227 |

loading...

228 | {:then stats} 229 | {#each stats.items as { value, count }} 230 |
231 | {value} 232 | {count} 233 |
234 | 235 | {/each} 236 | {:catch error} 237 |

Error: {error.message}

238 | {/await} 239 |
240 |
241 | {/each} 242 |
243 | 244 | 245 |
248 |
Timestamp
249 |
Level
250 |
Message
251 |
Fields
252 |
Process
253 |
254 | {#if logs.length > 0} 255 | 256 | {#each logs as log} 257 | 258 | {/each} 259 | 260 | 261 | {:else} 262 |

No logs found.

263 | {/if} 264 |
265 |
266 |
269 | 270 | Loaded: {logs.length} 271 | 272 | 273 | Per page: 274 | 279 | 280 |
281 |
282 | -------------------------------------------------------------------------------- /duo-ui/src/routes/trace/+page.svelte: -------------------------------------------------------------------------------- 1 | 4 | 5 | 11 | 12 | 18 | -------------------------------------------------------------------------------- /duo-ui/svelte.config.js: -------------------------------------------------------------------------------- 1 | import adapter from '@sveltejs/adapter-static'; 2 | 3 | export default { 4 | kit: { 5 | adapter: adapter({ 6 | fallback: 'index.html', // may differ from host to host 7 | }), 8 | }, 9 | }; 10 | -------------------------------------------------------------------------------- /duo-ui/tailwind.config.js: -------------------------------------------------------------------------------- 1 | import { fontFamily } from 'tailwindcss/defaultTheme'; 2 | 3 | /** @type {import('tailwindcss').Config} */ 4 | const config = { 5 | darkMode: ['class'], 6 | content: ['./src/**/*.{html,js,svelte,ts}'], 7 | safelist: ['dark'], 8 | theme: { 9 | container: { 10 | center: true, 11 | padding: '2rem', 12 | screens: { 13 | '2xl': '1400px', 14 | }, 15 | }, 16 | extend: { 17 | colors: { 18 | border: 'hsl(var(--border) / )', 19 | input: 'hsl(var(--input) / )', 20 | ring: 'hsl(var(--ring) / )', 21 | background: 'hsl(var(--background) / )', 22 | foreground: 'hsl(var(--foreground) / )', 23 | primary: { 24 | DEFAULT: 'hsl(var(--primary) / )', 25 | foreground: 'hsl(var(--primary-foreground) / )', 26 | }, 27 | secondary: { 28 | DEFAULT: 'hsl(var(--secondary) / )', 29 | foreground: 'hsl(var(--secondary-foreground) / )', 30 | }, 31 | destructive: { 32 | DEFAULT: 'hsl(var(--destructive) / )', 33 | foreground: 'hsl(var(--destructive-foreground) / )', 34 | }, 35 | muted: { 36 | DEFAULT: 'hsl(var(--muted) / )', 37 | foreground: 'hsl(var(--muted-foreground) / )', 38 | }, 39 | accent: { 40 | DEFAULT: 'hsl(var(--accent) / )', 41 | foreground: 'hsl(var(--accent-foreground) / )', 42 | }, 43 | popover: { 44 | DEFAULT: 'hsl(var(--popover) / )', 45 | foreground: 'hsl(var(--popover-foreground) / )', 46 | }, 47 | card: { 48 | DEFAULT: 'hsl(var(--card) / )', 49 | foreground: 'hsl(var(--card-foreground) / )', 50 | }, 51 | }, 52 | borderRadius: { 53 | lg: 'var(--radius)', 54 | md: 'calc(var(--radius) - 2px)', 55 | sm: 'calc(var(--radius) - 4px)', 56 | }, 57 | fontFamily: { 58 | sans: [...fontFamily.sans], 59 | }, 60 | }, 61 | }, 62 | }; 63 | 64 | export default config; 65 | -------------------------------------------------------------------------------- /duo-ui/vite.config.js: -------------------------------------------------------------------------------- 1 | import { sveltekit } from '@sveltejs/kit/vite'; 2 | import { defineConfig } from 'vite'; 3 | 4 | export default defineConfig({ 5 | plugins: [sveltekit()], 6 | }); 7 | -------------------------------------------------------------------------------- /duo.toml: -------------------------------------------------------------------------------- 1 | data_dir = "./data" 2 | 3 | [storage.local] 4 | 5 | # [storage.s3] 6 | # bucket = "my-bucket" 7 | # region = "us-east-1" 8 | -------------------------------------------------------------------------------- /duo/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "duo" 3 | version = "0.1.0" 4 | rust-version = "1.80.0" 5 | description = "Observability duet: Logging and Tracing" 6 | edition.workspace = true 7 | homepage.workspace = true 8 | license.workspace = true 9 | 
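# Dependencies declared with `.workspace = true` inherit their versions from
# the [workspace.dependencies] table in the repository root Cargo.toml.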
10 | [dependencies]
11 | anyhow.workspace = true
12 | datafusion = "42"
13 | arrow-schema = { version = "53.0", features = ["serde"] }
14 | axum = "0.7"
15 | rand.workspace = true
16 | clap = { version = "4", default-features = false, features = ["std", "derive"] }
17 | duo-api.workspace = true
18 | duo-subscriber.workspace = true
19 | parking_lot = { version = "0.12", features = ["send_guard"] }
20 | serde.workspace = true
21 | serde_json.workspace = true
22 | time = { version = "0.3", features = ["parsing"] }
23 | tokio = { version = "1.39", features = ["rt-multi-thread", "fs"] }
24 | tonic.workspace = true
25 | tower = "0.4"
26 | tracing.workspace = true
27 | tracing-subscriber.workspace = true
28 | rust-embed = "8.5"
29 | mime_guess = "2"
30 | object_store = { version = "0.11", features = ["aws"] }
31 | url = "2.5.2"
32 | toml = "0.8.19"
33 | tower-http = { version = "0.5.2", features = ["cors"] }
34 | 
35 | [dev-dependencies]
36 | rstest = "0.22"
37 | 
--------------------------------------------------------------------------------
/duo/src/aggregator.rs:
--------------------------------------------------------------------------------
1 | use std::mem;
2 | 
3 | use duo_api as proto;
4 | 
5 | use crate::Span;
6 | 
7 | #[derive(Debug, Default)]
8 | pub struct SpanAggregator {
9 |     spans: Vec<proto::Span>,
10 | }
11 | 
12 | impl SpanAggregator {
13 |     pub fn new() -> Self {
14 |         SpanAggregator { spans: Vec::new() }
15 |     }
16 | 
17 |     pub fn record_span(&mut self, raw: proto::Span) {
18 |         if let Some(span) = self.spans.iter_mut().find(|s| s.id == raw.id) {
19 |             if raw.parent_id.is_some() {
20 |                 span.parent_id = raw.parent_id;
21 |             }
22 | 
23 |             if !raw.tags.is_empty() {
24 |                 span.tags.extend(raw.tags);
25 |             }
26 |             span.end = raw.end;
27 |         } else {
28 |             self.spans.push(raw);
29 |         }
30 |     }
31 | 
32 |     pub fn aggregate(&mut self) -> Vec<Span> {
33 |         // Remove all intact spans.
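// ("Intact" means the span has received its end timestamp; see
// `Span::is_intact` in models.rs.) `partition` splits the buffer in a
// single pass: finished spans are drained for conversion below, while
// still-open spans stay buffered for the next aggregation round.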
34 |         let (intact_spans, ongoing_spans): (Vec<_>, Vec<_>) = mem::take(&mut self.spans)
35 |             .into_iter()
36 |             .partition(|span| span.end.is_some());
37 |         self.spans = ongoing_spans;
38 |         intact_spans.into_iter().map(Span::from).collect()
39 |     }
40 | }
41 | 
--------------------------------------------------------------------------------
/duo/src/arrow.rs:
--------------------------------------------------------------------------------
1 | use datafusion::arrow::json::{
2 |     reader::infer_json_schema_from_iterator, ArrayWriter, ReaderBuilder,
3 | };
4 | use serde::de::DeserializeOwned;
5 | use serde_json::{Map, Value as JsonValue};
6 | use std::sync::Arc;
7 | 
8 | use crate::{schema, Log, Span};
9 | use anyhow::Result;
10 | use arrow_schema::Schema;
11 | use datafusion::arrow::array::{Int64Array, RecordBatch, StringArray, UInt64Array};
12 | 
13 | pub fn convert_span_to_record_batch(spans: Vec<Span>) -> Result<RecordBatch> {
14 |     let mut span_ids = Vec::<u64>::new();
15 |     let mut parent_ids = Vec::<Option<u64>>::new();
16 |     let mut trace_ids = Vec::<u64>::new();
17 |     let mut names = Vec::<String>::new();
18 |     let mut process_ids = Vec::<String>::new();
19 |     let mut start_times = Vec::<i64>::new();
20 |     let mut end_times = Vec::<Option<i64>>::new();
21 |     let mut tags_list = Vec::<String>::new();
22 | 
23 |     for span in spans {
24 |         let start_time = span.start_as_micros();
25 |         let end_time = span.end_as_micros();
26 |         span_ids.push(span.id);
27 |         parent_ids.push(span.parent_id);
28 |         trace_ids.push(span.trace_id);
29 |         names.push(span.name);
30 |         process_ids.push(span.process_id);
31 |         start_times.push(start_time);
32 |         end_times.push(end_time);
33 |         tags_list.push(serde_json::to_string(&span.tags).unwrap());
34 |     }
35 | 
36 |     if span_ids.is_empty() {
37 |         return Ok(RecordBatch::new_empty(schema::get_span_schema()));
38 |     }
39 | 
40 |     Ok(RecordBatch::try_new(
41 |         schema::get_span_schema(),
42 |         vec![
43 |             Arc::new(UInt64Array::from(span_ids)),
44 |             Arc::new(UInt64Array::from(parent_ids)),
45 |             Arc::new(UInt64Array::from(trace_ids)),
46 |             Arc::new(StringArray::from(names)),
47 |             Arc::new(StringArray::from(process_ids)),
48 |             Arc::new(Int64Array::from(start_times)),
49 |             Arc::new(Int64Array::from(end_times)),
50 |             Arc::new(StringArray::from(tags_list)),
51 |         ],
52 |     )?)
53 | }
54 | 
55 | pub fn convert_log_to_record_batch(logs: Vec<Log>) -> Result<RecordBatch> {
56 |     let mut data = vec![];
57 |     let mut fields = vec![];
58 |     for log in logs {
59 |         let mut map = Map::new();
60 |         let time = log.as_micros();
61 |         map.insert("process_id".into(), log.process_id.into());
62 |         map.insert("span_id".into(), log.span_id.into());
63 |         map.insert("trace_id".into(), log.trace_id.into());
64 |         map.insert("level".into(), log.level.as_str().into());
65 |         map.insert("target".into(), log.target.into());
66 |         map.insert("file".into(), log.file.into());
67 |         map.insert("line".into(), log.line.into());
68 |         map.insert("time".into(), time.into());
69 |         map.insert("message".into(), log.message.into());
70 |         let mut field_map = Map::new();
71 |         for (key, value) in log.fields {
72 |             field_map.insert(key, value);
73 |         }
74 | 
75 |         if !field_map.is_empty() {
76 |             fields.push(JsonValue::Object(field_map.clone()));
77 |             map.extend(field_map);
78 |         }
79 |         data.push(JsonValue::Object(map));
80 |     }
81 | 
82 |     let inferred_field_schema = infer_json_schema_from_iterator(fields.iter().map(Ok))?;
83 |     let schema = Schema::try_merge(vec![
84 |         (*schema::get_log_schema()).clone(),
85 |         inferred_field_schema,
86 |     ])
87 |     .unwrap();
88 |     let mut decoder = ReaderBuilder::new(Arc::new(schema)).build_decoder()?;
89 |     decoder.serialize(&data)?;
90 |     let batch = decoder.flush()?.expect("Empty record batch");
91 |     Ok(batch)
92 | }
93 | 
94 | pub fn serialize_record_batches<T: DeserializeOwned>(batch: &[RecordBatch]) -> Result<Vec<T>> {
95 |     if batch.is_empty() {
96 |         return Ok(vec![]);
97 |     }
98 | 
99 |     let buf = Vec::new();
100 |     let mut writer = ArrayWriter::new(buf);
101 |     writer.write_batches(&batch.iter().collect::<Vec<_>>())?;
102 |     writer.finish()?;
103 |     let json_values = writer.into_inner();
104 |     if json_values.is_empty() {
105 |         return Ok(vec![]);
106 |     }
107 |     let json_rows: Vec<_> = serde_json::from_reader(json_values.as_slice()).unwrap();
108 |     Ok(json_rows)
109 | }
110 | 
--------------------------------------------------------------------------------
/duo/src/config.rs:
--------------------------------------------------------------------------------
1 | use std::{
2 |     env, fs,
3 |     path::Path,
4 |     sync::{Arc, OnceLock},
5 | };
6 | 
7 | use anyhow::{Context, Result};
8 | use object_store::{aws::AmazonS3Builder, local::LocalFileSystem, ObjectStore};
9 | use serde::Deserialize;
10 | use url::Url;
11 | 
12 | static DUO_CONFIG: OnceLock<Arc<DuoConfig>> = OnceLock::new();
13 | 
14 | #[derive(Debug, Deserialize)]
15 | pub struct DuoConfig {
16 |     pub data_dir: String,
17 |     storage: StorageConfig,
18 | }
19 | 
20 | impl Default for DuoConfig {
21 |     fn default() -> Self {
22 |         Self {
23 |             data_dir: "data".to_string(),
24 |             storage: Default::default(),
25 |         }
26 |     }
27 | }
28 | 
29 | #[derive(Debug, Deserialize)]
30 | #[serde(rename_all = "snake_case")]
31 | enum StorageConfig {
32 |     Local {
33 |         dir: Option<String>,
34 |     },
35 |     S3 {
36 |         bucket: String,
37 |         region: String,
38 |         aws_access_key: Option<String>,
39 |         aws_access_secret: Option<String>,
40 |     },
41 | }
42 | 
43 | impl Default for StorageConfig {
44 |     fn default() -> Self {
45 |         Self::Local { dir: None }
46 |     }
47 | }
48 | 
49 | pub fn load() -> Arc<DuoConfig> {
50 |     Arc::clone(DUO_CONFIG.get().expect("DuoConfig not initialized"))
51 | }
52 | 
53 | pub fn set(config: DuoConfig) {
54 |     DUO_CONFIG
55 |         .set(Arc::new(config))
56 |         .expect("DuoConfig already initialized")
57 | }
58 | 
59 | impl DuoConfig {
60 |     pub fn parse_from_toml<P: AsRef<Path>>(source: P) -> Result<Self> {
61 |         let source = source.as_ref();
62 |         let content = fs::read_to_string(source)
63 | 
.with_context(|| format!("Read `{}` failed", source.display()))?; 64 | 65 | Ok(toml::from_str::(&content) 66 | .unwrap_or_else(|err| panic!("Parse `{}` failed: {}", source.display(), err))) 67 | } 68 | 69 | pub fn object_store_url(&self) -> Url { 70 | match &self.storage { 71 | StorageConfig::Local { dir } => { 72 | let dir = dir.as_ref().unwrap_or(&self.data_dir); 73 | let path = Path::new(&dir); 74 | if path.is_relative() { 75 | if let Ok(cwd) = env::current_dir() { 76 | let path = cwd.join(path); 77 | if path.is_relative() { 78 | panic!("Invalid path: {}", path.display()); 79 | } 80 | // A trailing slash is significant. Without it, the last path component 81 | // is considered to be a “file” name to be removed to get at the “directory” 82 | // that is used as the base. 83 | // https://docs.rs/url/latest/url/struct.Url.html#method.join 84 | return Url::parse(&format!("file://{}/", path.display())).unwrap(); 85 | } 86 | } 87 | Url::parse(&format!("file://{dir}/")).unwrap() 88 | } 89 | StorageConfig::S3 { bucket, .. } => Url::parse(&format!("s3://{bucket}/")).unwrap(), 90 | } 91 | } 92 | 93 | pub fn object_store(&self) -> Arc { 94 | match &self.storage { 95 | StorageConfig::Local { dir } => { 96 | let dir = dir.as_ref().unwrap_or(&self.data_dir); 97 | let path = Path::new(dir); 98 | if !path.exists() { 99 | std::fs::create_dir_all(path).unwrap(); 100 | } 101 | 102 | Arc::new(LocalFileSystem::new_with_prefix(dir).unwrap()) 103 | } 104 | StorageConfig::S3 { 105 | bucket, 106 | region, 107 | aws_access_key, 108 | aws_access_secret, 109 | } => { 110 | let s3 = AmazonS3Builder::new() 111 | .with_bucket_name(bucket) 112 | .with_region(region) 113 | .with_access_key_id( 114 | env::var("AWS_ACCESS_KEY_ID") 115 | .ok() 116 | .as_ref() 117 | .or(aws_access_key.as_ref()) 118 | .unwrap(), 119 | ) 120 | .with_secret_access_key( 121 | env::var("AWS_SECRET_ACCESS_KEY") 122 | .ok() 123 | .as_ref() 124 | .or(aws_access_secret.as_ref()) 125 | .unwrap(), 126 | ) 127 | .build() 128 | .unwrap(); 129 | Arc::new(s3) 130 | } 131 | } 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /duo/src/grpc/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{net::SocketAddr, sync::Arc}; 2 | 3 | use crate::MemoryStore; 4 | 5 | use self::server::DuoServer; 6 | 7 | use duo_api as proto; 8 | use parking_lot::RwLock; 9 | use proto::instrument::instrument_server::InstrumentServer; 10 | use tonic::transport::Server; 11 | 12 | mod server; 13 | 14 | pub fn spawn_server(memory_store: Arc>, port: u16) { 15 | tokio::spawn(async move { 16 | let addr = SocketAddr::from(([0, 0, 0, 0], port)); 17 | let mut service = DuoServer::new(memory_store); 18 | service.spawn(); 19 | 20 | println!("gRPC server listening on grpc://{}", addr); 21 | Server::builder() 22 | .add_service(InstrumentServer::new(service)) 23 | .serve(addr) 24 | .await 25 | .unwrap(); 26 | }); 27 | } 28 | -------------------------------------------------------------------------------- /duo/src/grpc/server.rs: -------------------------------------------------------------------------------- 1 | use std::{mem, sync::Arc, time::Duration}; 2 | 3 | use crate::{ipc::IpcFile, partition::PartitionWriter, schema, Log, MemoryStore, SpanAggregator}; 4 | use duo_api::instrument::{ 5 | instrument_server::Instrument, RecordEventRequest, RecordEventResponse, RecordSpanRequest, 6 | RecordSpanResponse, RegisterProcessRequest, RegisterProcessResponse, 7 | }; 8 | use parking_lot::RwLock; 9 | use 
tonic::{Request, Response, Status};
10 | use tracing::{debug, info};
11 | 
12 | pub struct DuoServer {
13 |     memory_store: Arc<RwLock<MemoryStore>>,
14 |     aggregator: Arc<RwLock<SpanAggregator>>,
15 |     logs: Arc<RwLock<Vec<Log>>>,
16 | }
17 | 
18 | impl DuoServer {
19 |     pub fn new(memory_store: Arc<RwLock<MemoryStore>>) -> Self {
20 |         Self {
21 |             memory_store,
22 |             aggregator: Arc::new(RwLock::new(SpanAggregator::new())),
23 |             logs: Arc::new(RwLock::new(vec![])),
24 |         }
25 |     }
26 | 
27 |     pub fn spawn(&mut self) {
28 |         let aggregator = Arc::clone(&self.aggregator);
29 |         let memory_store = Arc::clone(&self.memory_store);
30 |         let logs = Arc::clone(&self.logs);
31 |         tokio::spawn(async move {
32 |             let mut interval = tokio::time::interval(Duration::from_secs(1));
33 |             loop {
34 |                 interval.tick().await;
35 | 
36 |                 let logs = mem::take(&mut *logs.write());
37 |                 let spans = aggregator.write().aggregate();
38 |                 if logs.is_empty() && spans.is_empty() {
39 |                     continue;
40 |                 }
41 | 
42 |                 let mut guard = memory_store.write();
43 |                 if !logs.is_empty() { guard.merge_logs(logs); }
44 |                 if !spans.is_empty() { guard.merge_spans(spans); }
45 |             }
46 |         });
47 | 
48 |         if crate::is_memory_mode() {
49 |             // Never persist data in memory mode.
50 |             return;
51 |         }
52 | 
53 |         let memory_store = Arc::clone(&self.memory_store);
54 |         tokio::spawn(async move {
55 |             let mut interval = tokio::time::interval(Duration::from_secs(10));
56 |             interval.tick().await;
57 |             loop {
58 |                 interval.tick().await;
59 | 
60 |                 println!(
61 |                     "ipc writing: is locked {}, is_locked_exclusive {}",
62 |                     memory_store.is_locked(),
63 |                     memory_store.is_locked_exclusive()
64 |                 );
65 |                 let guard = memory_store.read();
66 |                 if !guard.is_dirty {
67 |                     continue;
68 |                 }
69 | 
70 |                 let ipc_file = IpcFile::new();
71 |                 if !guard.span_batches.is_empty() {
72 |                     ipc_file.write_span_ipc(&guard.span_batches).unwrap();
73 |                 }
74 | 
75 |                 if !guard.log_batches.is_empty() {
76 |                     ipc_file
77 |                         .write_log_ipc(&guard.log_batches, &guard.log_schema)
78 |                         .unwrap();
79 |                 }
80 |                 drop(guard);
81 | 
82 |                 memory_store.write().is_dirty = false;
83 |                 tokio::spawn(async {
84 |                     schema::persit_log_schema().await;
85 |                 });
86 |             }
87 |         });
88 | 
89 |         let memory_store = Arc::clone(&self.memory_store);
90 |         tokio::spawn(async move {
91 |             // TODO: replace interval with job scheduler
92 |             let mut interval = tokio::time::interval(Duration::from_secs(60));
93 |             interval.tick().await;
94 |             loop {
95 |                 interval.tick().await;
96 | 
97 |                 let pw = PartitionWriter::with_minute();
98 |                 println!(
99 |                     "write partition: is locked {}, is_locked_exclusive {}",
100 |                     memory_store.is_locked(),
101 |                     memory_store.is_locked_exclusive()
102 |                 );
103 | 
104 |                 // clear the previous log schema
105 |                 let (span_batches, log_batches) = { memory_store.write().reset() };
106 | 
107 |                 if !span_batches.is_empty() {
108 |                     pw.write_partition("span", &span_batches).await.unwrap();
109 |                     println!("write partition done: span");
110 |                 }
111 | 
112 |                 if !log_batches.is_empty() {
113 |                     pw.write_partition("log", &log_batches).await.unwrap();
114 |                     println!("write partition done: log");
115 |                 }
116 | 
117 |                 let ipc_file = IpcFile::new();
118 |                 ipc_file.clear().unwrap();
119 |             }
120 |         });
121 |     }
122 | }
123 | 
124 | #[tonic::async_trait]
125 | impl Instrument for DuoServer {
126 |     async fn register_process(
127 |         &self,
128 |         request: Request<RegisterProcessRequest>,
129 |     ) -> Result<Response<RegisterProcessResponse>, Status> {
130 |         let process = request
131 |             .into_inner()
132 |             .process
133 |             .ok_or_else(|| tonic::Status::invalid_argument("missing process"))?;
134 |         info!("register process: {}", process.name);
135 |         let process_id = self
136 |             .memory_store
137 |             .write()
138 |             .register_process(process)
139 | 
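// `register_process` also persists the updated process list to process.json
// (see `MemoryStore::write_process`), so an Err here indicates a disk or
// serialization failure rather than a bad request.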
.expect("Register process failed."); 140 | Ok(Response::new(RegisterProcessResponse { process_id })) 141 | } 142 | 143 | async fn record_span( 144 | &self, 145 | request: Request, 146 | ) -> Result, Status> { 147 | let span = request 148 | .into_inner() 149 | .span 150 | .ok_or_else(|| tonic::Status::invalid_argument("missing span"))?; 151 | debug!(target: "duo_internal", "record span: {}", span.name); 152 | self.aggregator.write().record_span(span); 153 | Ok(Response::new(RecordSpanResponse {})) 154 | } 155 | 156 | async fn record_event( 157 | &self, 158 | request: Request, 159 | ) -> Result, Status> { 160 | debug!(target: "duo_internal", "record event, {:?}", request); 161 | 162 | let log = request 163 | .into_inner() 164 | .log 165 | .ok_or_else(|| tonic::Status::invalid_argument("missing event"))?; 166 | self.logs.write().push(log.into()); 167 | Ok(Response::new(RecordEventResponse {})) 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /duo/src/ipc.rs: -------------------------------------------------------------------------------- 1 | use std::fs::{self, File}; 2 | use std::path::{Path, PathBuf}; 3 | 4 | use anyhow::Result; 5 | use arrow_schema::Schema; 6 | use datafusion::arrow::ipc::writer::FileWriter; 7 | use datafusion::arrow::{array::RecordBatch, ipc::reader::FileReader}; 8 | 9 | use crate::{config, schema}; 10 | 11 | pub struct IpcFile { 12 | path: PathBuf, 13 | } 14 | 15 | impl IpcFile { 16 | pub fn new() -> Self { 17 | let config = config::load(); 18 | Self { 19 | path: Path::new(&config.data_dir).join("ipc"), 20 | } 21 | } 22 | 23 | pub fn read_log_ipc(&self) -> Result> { 24 | self.read_ipc("log") 25 | } 26 | 27 | pub fn read_span_ipc(&self) -> Result> { 28 | self.read_ipc("span") 29 | } 30 | 31 | fn read_ipc(&self, name: &'static str) -> Result> { 32 | let ipc_path = self.path.join(format!("{name}.arrow")); 33 | if !ipc_path.exists() { 34 | return Ok(vec![]); 35 | } 36 | let reader = FileReader::try_new(File::open(ipc_path)?, None)?; 37 | Ok(reader.filter_map(Result::ok).collect::>()) 38 | } 39 | 40 | fn write_ipc( 41 | &self, 42 | name: &'static str, 43 | batches: &[RecordBatch], 44 | schema: &Schema, 45 | ) -> Result<()> { 46 | if !self.path.exists() { 47 | fs::create_dir_all(&self.path)?; 48 | } 49 | let ipc_path = self.path.join(format!("{name}.arrow")); 50 | let mut writer = FileWriter::try_new(File::create(ipc_path)?, schema)?; 51 | for batch in batches { 52 | writer.write(batch)?; 53 | } 54 | writer.finish()?; 55 | Ok(()) 56 | } 57 | 58 | pub fn write_log_ipc(&self, batches: &[RecordBatch], schema: &Schema) -> Result<()> { 59 | self.write_ipc("log", batches, schema) 60 | } 61 | 62 | pub fn write_span_ipc(&self, batches: &[RecordBatch]) -> Result<()> { 63 | self.write_ipc("span", batches, &schema::get_span_schema()) 64 | } 65 | 66 | pub fn clear(&self) -> Result<()> { 67 | if self.path.exists() { 68 | fs::remove_dir_all(&self.path)?; 69 | } 70 | Ok(()) 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /duo/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{ 2 | atomic::{AtomicBool, Ordering}, 3 | Arc, 4 | }; 5 | 6 | use anyhow::Result; 7 | use clap::Parser; 8 | use config::DuoConfig; 9 | use duo_subscriber::DuoLayer; 10 | use parking_lot::RwLock; 11 | use tracing::Level; 12 | use tracing_subscriber::{ 13 | filter::{self, Targets}, 14 | fmt, 15 | layer::SubscriberExt, 16 | util::SubscriberInitExt, 17 | 
Layer, 18 | }; 19 | 20 | mod aggregator; 21 | mod arrow; 22 | mod config; 23 | mod grpc; 24 | mod ipc; 25 | mod memory; 26 | mod models; 27 | mod partition; 28 | mod query; 29 | mod schema; 30 | mod utils; 31 | mod web; 32 | 33 | pub use aggregator::SpanAggregator; 34 | pub use grpc::spawn_server as spawn_grpc_server; 35 | pub use memory::MemoryStore; 36 | pub use models::{Log, Process, Span, TraceExt}; 37 | pub use web::run_web_server; 38 | 39 | // ASCII Art generated from https://patorjk.com/software/taag/#p=display&h=0&v=0&f=ANSI%20Regular&t=Duo 40 | static DUO_BANNER: &str = r" 41 | 42 | ██████ ██ ██ ██████ 43 | ██ ██ ██ ██ ██ ██ 44 | ██ ██ ██ ██ ██ ██ 45 | ██ ██ ██ ██ ██ ██ 46 | ██████ ██████ ██████ 47 | 48 | "; 49 | static MEMORY_MODE: AtomicBool = AtomicBool::new(false); 50 | 51 | #[derive(Parser, Debug)] 52 | #[command(author, version, about, long_about = None)] 53 | struct Args { 54 | /// The web server listening port. 55 | #[arg(short, default_value_t = 3000)] 56 | web_port: u16, 57 | /// The gRPC server listening port. 58 | #[arg(short, default_value_t = 6000)] 59 | grpc_port: u16, 60 | #[arg(short, long)] 61 | /// Enable the memory mode, which never persist collected data. 62 | /// This mode suit for local development. 63 | memory_mode: bool, 64 | /// Collect log and span of duo itself. 65 | #[arg(short = 's', long)] 66 | collect_self: bool, 67 | /// Configuration file path. 68 | #[arg(short, long)] 69 | config_file: Option, 70 | } 71 | 72 | #[tokio::main] 73 | async fn main() -> Result<()> { 74 | println!("{}", DUO_BANNER); 75 | let Args { 76 | web_port, 77 | grpc_port, 78 | memory_mode, 79 | collect_self, 80 | config_file, 81 | } = Args::parse(); 82 | if memory_mode { 83 | MEMORY_MODE.store(memory_mode, Ordering::Relaxed); 84 | println!("Running Duo in memory mode, all data will be lost after the process exits"); 85 | } 86 | 87 | let config = match config_file { 88 | Some(config_file) => DuoConfig::parse_from_toml(config_file)?, 89 | None => DuoConfig::default(), 90 | }; 91 | config::set(config); 92 | schema::load().await?; 93 | 94 | let memory_store = Arc::new(RwLock::new(MemoryStore::load()?)); 95 | spawn_grpc_server(Arc::clone(&memory_store), grpc_port); 96 | 97 | let duo_layer = if collect_self { 98 | let layer = DuoLayer::new( 99 | "duo", 100 | format!("grpc://127.0.0.1:{}", grpc_port).parse().unwrap(), 101 | ) 102 | .await 103 | .with_filter(filter::filter_fn(|metadata| { 104 | // Ignore "duo_internal" event to avoid recursively report event to duo-server 105 | metadata.target() != "duo_internal" 106 | })); 107 | tracing::debug!("Collect self spans and logs..."); 108 | Some(layer) 109 | } else { 110 | None 111 | }; 112 | tracing_subscriber::registry() 113 | .with(fmt::layer()) 114 | .with(duo_layer) 115 | .with(Targets::new().with_target("duo", Level::DEBUG)) 116 | // .with(Targets::new().with_default(Level::DEBUG)) 117 | .init(); 118 | 119 | run_web_server(memory_store, web_port).await?; 120 | Ok(()) 121 | } 122 | 123 | pub fn is_memory_mode() -> bool { 124 | MEMORY_MODE.load(Ordering::Relaxed) 125 | } 126 | -------------------------------------------------------------------------------- /duo/src/memory.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use std::{collections::HashMap, fmt::Debug, fs::File, io::Write, mem, path::Path}; 3 | 4 | use crate::arrow::{convert_log_to_record_batch, convert_span_to_record_batch}; 5 | use crate::ipc::IpcFile; 6 | use crate::{config, schema, Log, Process, Span}; 7 | use 
anyhow::Result;
8 | use arrow_schema::Schema;
9 | use datafusion::arrow::array::RecordBatch;
10 | 
11 | use duo_api as proto;
12 | 
13 | pub struct MemoryStore {
14 |     // Collection of services.
15 |     services: HashMap<String, Vec<Process>>,
16 |     pub log_schema: Arc<Schema>,
17 |     pub span_batches: Vec<RecordBatch>,
18 |     pub log_batches: Vec<RecordBatch>,
19 |     pub is_dirty: bool,
20 | }
21 | 
22 | impl Debug for MemoryStore {
23 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
24 |         f.debug_struct("MemoryStore")
25 |             .field("services", &self.services.len())
26 |             .finish()
27 |     }
28 | }
29 | 
30 | impl Default for MemoryStore {
31 |     fn default() -> Self {
32 |         Self::new()
33 |     }
34 | }
35 | 
36 | impl MemoryStore {
37 |     pub fn new() -> Self {
38 |         MemoryStore {
39 |             services: HashMap::new(),
40 |             log_schema: schema::get_log_schema(),
41 |             span_batches: vec![],
42 |             log_batches: vec![],
43 |             is_dirty: false,
44 |         }
45 |     }
46 | 
47 |     pub fn load() -> Result<Self> {
48 |         let config = config::load();
49 |         let path = Path::new(&config.data_dir);
50 |         let ipc_file = IpcFile::new();
51 |         let span_batches = ipc_file.read_span_ipc()?;
52 |         let log_batches = ipc_file.read_log_ipc()?;
53 |         let mut store = Self {
54 |             span_batches,
55 |             log_batches,
56 |             services: HashMap::new(),
57 |             log_schema: schema::get_log_schema(),
58 |             is_dirty: false,
59 |         };
60 |         let path = path.join("process.json");
61 |         if !path.exists() {
62 |             return Ok(store);
63 |         }
64 |         let data: Vec<Process> = match serde_json::from_reader(File::open(path)?) {
65 |             Ok(data) => data,
66 |             Err(err) => {
67 |                 println!("Warning: read process.json failed: {err}");
68 |                 return Ok(store);
69 |             }
70 |         };
71 |         let mut services = HashMap::<String, Vec<Process>>::new();
72 |         data.into_iter().for_each(|process| {
73 |             services
74 |                 .entry(process.service_name.clone())
75 |                 .or_insert_with(Vec::new)
76 |                 .push(process);
77 |         });
78 | 
79 |         store.services = services;
80 |         Ok(store)
81 |     }
82 | 
83 |     pub(super) fn reset(&mut self) -> (Vec<RecordBatch>, Vec<RecordBatch>) {
84 |         (
85 |             mem::take(&mut self.span_batches),
86 |             mem::take(&mut self.log_batches),
87 |         )
88 |     }
89 | 
90 |     pub(super) fn processes(&self) -> HashMap<String, Process> {
91 |         self.services
92 |             .values()
93 |             .flat_map(|processes| {
94 |                 processes
95 |                     .iter()
96 |                     .map(|process| (process.id.clone(), process.clone()))
97 |                     .collect::<Vec<_>>()
98 |             })
99 |             .collect()
100 |     }
101 | 
102 |     pub(super) fn service_names(&self) -> Vec<String> {
103 |         self.services.keys().cloned().collect()
104 |     }
105 | 
106 |     /// Register a new process and return the process id.
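    /// The id is the service name suffixed with the number of processes
    /// already registered for that service, so the first "web" process gets
    /// id "web-0". A minimal sketch of the expected behavior (hypothetical
    /// field values, not a compiled doctest from this crate):
    ///
    /// ```ignore
    /// let mut store = MemoryStore::new();
    /// let id = store.register_process(proto::Process {
    ///     name: "web".into(),
    ///     tags: Default::default(),
    /// })?;
    /// assert_eq!(id, "web-0");
    /// ```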
107 |     pub(crate) fn register_process(&mut self, process: proto::Process) -> Result<String> {
108 |         let service_name = process.name;
109 |         let service_processes = self.services.entry(service_name.clone()).or_default();
110 | 
111 |         // TODO: generate new process id
112 |         let process_id = format!("{}-{}", &service_name, service_processes.len());
113 |         service_processes.push(Process {
114 |             id: process_id.clone(),
115 |             service_name,
116 |             tags: process
117 |                 .tags
118 |                 .into_iter()
119 |                 .map(|(key, value)| (key, value.into()))
120 |                 .collect(),
121 |         });
122 |         self.write_process()?;
123 |         Ok(process_id)
124 |     }
125 | 
126 |     pub fn merge_logs(&mut self, logs: Vec<Log>) {
127 |         let batches = convert_log_to_record_batch(logs).unwrap();
128 | 
129 |         let schema = batches.schema();
130 |         self.log_schema = schema::merge_log_schema(schema);
131 |         self.log_batches.push(batches);
132 |         self.is_dirty = true;
133 |     }
134 | 
135 |     pub fn merge_spans(&mut self, spans: Vec<Span>) {
136 |         self.span_batches
137 |             .push(convert_span_to_record_batch(spans).unwrap());
138 |         self.is_dirty = true;
139 |     }
140 | 
141 |     fn write_process(&self) -> Result<()> {
142 |         let config = config::load();
143 |         let mut file = File::create(Path::new(&config.data_dir).join("process.json"))?;
144 |         file.write_all(
145 |             serde_json::to_string(&self.processes().values().collect::<Vec<_>>())?.as_bytes(),
146 |         )?;
147 |         Ok(())
148 |     }
149 | }
150 | 
--------------------------------------------------------------------------------
/duo/src/models.rs:
--------------------------------------------------------------------------------
1 | use crate::web::deser;
2 | use duo_api as proto;
3 | use serde::{Deserialize, Serialize};
4 | use serde_json::Value as JsonValue;
5 | use std::{collections::HashMap, time::SystemTime};
6 | use time::{Duration, OffsetDateTime};
7 | use tracing::Level;
8 | 
9 | #[derive(Debug, Clone, Serialize, Deserialize)]
10 | pub struct Process {
11 |     pub id: String,
12 |     #[serde(rename = "serviceName")]
13 |     pub service_name: String,
14 |     pub tags: HashMap<String, JsonValue>,
15 | }
16 | 
17 | #[derive(Clone, Deserialize)]
18 | pub struct Span {
19 |     pub id: u64,
20 |     pub trace_id: u64,
21 |     pub parent_id: Option<u64>,
22 |     pub process_id: String,
23 |     pub name: String,
24 |     #[serde(deserialize_with = "deser::miscrosecond::deserialize")]
25 |     pub start: OffsetDateTime,
26 |     #[serde(default, deserialize_with = "deser::option_miscrosecond")]
27 |     pub end: Option<OffsetDateTime>,
28 |     #[serde(default, deserialize_with = "deser::map_list")]
29 |     pub tags: HashMap<String, JsonValue>,
30 |     #[serde(skip_deserializing)]
31 |     pub logs: Vec<Log>,
32 | }
33 | 
34 | #[derive(Clone, Serialize, Deserialize)]
35 | pub struct Log {
36 |     pub process_id: String,
37 |     /// The id of the span this log belongs to.
38 |     /// A log has no span id if it was emitted outside of a tracing context.
39 |     pub span_id: Option<u64>,
40 |     pub trace_id: Option<u64>,
41 |     // TODO: change level to i32
42 |     #[serde(with = "deser::level")]
43 |     pub level: Level,
44 |     pub target: String,
45 |     pub file: Option<String>,
46 |     pub line: Option<u32>,
47 |     #[serde(with = "deser::miscrosecond")]
48 |     pub time: OffsetDateTime,
49 |     pub message: String,
50 |     #[serde(flatten)]
51 |     pub fields: HashMap<String, JsonValue>,
52 | }
53 | 
54 | pub struct TraceExt {
55 |     pub trace_id: u64,
56 |     pub spans: Vec<Span>,
57 |     pub processes: HashMap<String, Process>,
58 | }
59 | 
60 | impl Span {
61 |     pub fn start_as_micros(&self) -> i64 {
62 |         (self.start.unix_timestamp_nanos() / 1000) as i64
63 |     }
64 | 
65 |     pub fn end_as_micros(&self) -> Option<i64> {
66 |         self.end.map(|t| (t.unix_timestamp_nanos() / 1000) as i64)
67 |     }
68 | 
69 |     pub fn duration(&self) -> Duration {
70 |         self.end.map(|end| end - self.start).unwrap_or_default()
71 |     }
72 | 
73 |     /// Whether the span is intact.
74 |     /// Intact means the span has both time values: start and end.
75 |     #[inline]
76 |     pub fn is_intact(&self) -> bool {
77 |         self.end.is_some()
78 |     }
79 | 
80 |     pub fn correlate_span_logs(&mut self, logs: &[Log]) {
81 |         let mut errors = 0;
82 |         self.logs = logs
83 |             .iter()
84 |             .filter(|log| log.span_id == Some(self.id))
85 |             .inspect(|log| errors += (log.level == Level::ERROR) as i32)
86 |             .cloned()
87 |             .collect();
88 | 
89 |         // Automatically insert an 'error = true' tag; this helps Jaeger UI show the error icon.
90 |         if errors > 0 {
91 |             self.tags
92 |                 .insert(String::from("error"), serde_json::Value::Bool(true));
93 |         }
94 |     }
95 | }
96 | 
97 | impl Log {
98 |     pub fn as_micros(&self) -> i64 {
99 |         (self.time.unix_timestamp_nanos() / 1000) as i64
100 |     }
101 | }
102 | 
103 | impl From<proto::Span> for Span {
104 |     fn from(span: proto::Span) -> Self {
105 |         let mut raw_tags = span.tags;
106 |         for key in ["busy", "idle"] {
107 |             if let Some(proto::Value {
108 |                 inner: Some(proto::ValueEnum::U64Val(value)),
109 |             }) = raw_tags.remove(key)
110 |             {
111 |                 raw_tags.insert(key.into(), format_timing_value(value).into());
112 |             }
113 |         }
114 | 
115 |         let tags = raw_tags
116 |             .into_iter()
117 |             .map(|(key, value)| (key, value.into()))
118 |             .collect();
119 | 
120 |         Span {
121 |             id: span.id,
122 |             trace_id: span.trace_id,
123 |             parent_id: span.parent_id,
124 |             process_id: span.process_id,
125 |             name: span.name,
126 |             start: span
127 |                 .start
128 |                 .and_then(|timestamp| {
129 |                     SystemTime::try_from(timestamp)
130 |                         .ok()
131 |                         .map(OffsetDateTime::from)
132 |                 })
133 |                 .unwrap_or_else(OffsetDateTime::now_utc),
134 |             end: span
135 |                 .end
136 |                 .and_then(|timestamp| {
137 |                     SystemTime::try_from(timestamp)
138 |                         .ok()
139 |                         .map(OffsetDateTime::from)
140 |                 })
141 |                 .or_else(|| Some(OffsetDateTime::now_utc())),
142 |             tags,
143 |             logs: Vec::new(),
144 |         }
145 |     }
146 | }
147 | 
148 | impl From<proto::Log> for Log {
149 |     fn from(mut log: proto::Log) -> Self {
150 |         let level = proto::Level::try_from(log.level)
151 |             .map(tracing::Level::from)
152 |             .unwrap_or(tracing::Level::DEBUG);
153 | 
154 |         let message = log
155 |             .fields
156 |             .remove("message")
157 |             .map(|v| v.to_string())
158 |             .unwrap_or_default();
159 |         let fields = log
160 |             .fields
161 |             .into_iter()
162 |             .map(|(key, value)| (key, value.into()))
163 |             .collect::<HashMap<String, JsonValue>>();
164 |         Log {
165 |             process_id: log.process_id,
166 |             span_id: log.span_id,
167 |             trace_id: log.trace_id,
168 |             level,
169 |             target: log.target,
170 |             file: log.file,
171 |             line: log.line,
172 |             time: log
173 |                 .time
174 |                 .and_then(|timestamp| {
175 |                     SystemTime::try_from(timestamp)
176 |                         .ok()
177 |                         .map(OffsetDateTime::from)
178 |                 })
179 | 
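// Fall back to the receive time when the client omitted the timestamp or it
// could not be converted to a SystemTime.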
.unwrap_or_else(OffsetDateTime::now_utc), 180 | message, 181 | fields, 182 | } 183 | } 184 | } 185 | 186 | fn format_timing_value(value: u64) -> String { 187 | let value = value as f64; 188 | if value < 1000.0 { 189 | format!("{}us", value) 190 | } else if value < 1_000_000.0 { 191 | format!("{:.2}ms", value / 1000.0) 192 | } else { 193 | format!("{:.2}s", value / 1_000_000.0) 194 | } 195 | } 196 | 197 | #[cfg(test)] 198 | mod tests { 199 | use super::format_timing_value; 200 | 201 | #[test] 202 | fn test_timings_format() { 203 | assert_eq!(format_timing_value(3), "3us".to_string()); 204 | assert_eq!(format_timing_value(303), "303us".to_string()); 205 | assert_eq!(format_timing_value(3003), "3.00ms".to_string()); 206 | assert_eq!(format_timing_value(3013), "3.01ms".to_string()); 207 | assert_eq!(format_timing_value(300030), "300.03ms".to_string()); 208 | assert_eq!(format_timing_value(3003300), "3.00s".to_string()); 209 | assert_eq!(format_timing_value(3033300), "3.03s".to_string()); 210 | assert_eq!(format_timing_value(3333300), "3.33s".to_string()); 211 | assert_eq!(format_timing_value(33000330), "33.00s".to_string()); 212 | assert_eq!(format_timing_value(33300330), "33.30s".to_string()); 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /duo/src/partition/mod.rs: -------------------------------------------------------------------------------- 1 | mod query; 2 | mod writer; 3 | 4 | pub use query::PartitionQuery; 5 | pub use writer::PartitionWriter; 6 | -------------------------------------------------------------------------------- /duo/src/partition/query.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use anyhow::Result; 4 | use datafusion::{ 5 | arrow::array::RecordBatch, 6 | datasource::{ 7 | file_format::parquet::ParquetFormat, 8 | listing::{ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl}, 9 | TableProvider, 10 | }, 11 | prelude::{DataFrame, Expr, SessionConfig, SessionContext}, 12 | }; 13 | use time::{Duration, OffsetDateTime}; 14 | use url::Url; 15 | 16 | use crate::{config, schema, utils::TimePeriod}; 17 | 18 | static TABLE_SPAN: &str = "span"; 19 | 20 | pub struct PartitionQuery { 21 | ctx: SessionContext, 22 | object_store_url: Url, 23 | prefixes: Vec, 24 | } 25 | 26 | impl PartitionQuery { 27 | pub fn new(start: OffsetDateTime, end: OffsetDateTime) -> Self { 28 | let ctx = SessionContext::new_with_config( 29 | // Enable bloom filter pruning for parquet readers 30 | SessionConfig::new().with_parquet_bloom_filter_pruning(true), 31 | ); 32 | let config = config::load(); 33 | let object_store_url = config.object_store_url(); 34 | ctx.register_object_store(&object_store_url, config.object_store()); 35 | PartitionQuery { 36 | ctx, 37 | object_store_url, 38 | prefixes: TimePeriod::new(start, end, 1).generate_prefixes(), 39 | } 40 | } 41 | 42 | pub fn recent_hours(hours: i64) -> Self { 43 | let now = OffsetDateTime::now_utc(); 44 | let hours_ago = now - Duration::hours(hours); 45 | Self::new(hours_ago, now) 46 | } 47 | 48 | fn table_paths(&self, table_name: &str) -> Vec { 49 | self.prefixes 50 | .iter() 51 | .filter_map(|prefix| { 52 | ListingTableUrl::parse( 53 | self.object_store_url 54 | .join(&format!("{table_name}/{prefix}")) 55 | .unwrap(), 56 | ) 57 | .ok() 58 | }) 59 | .collect() 60 | } 61 | 62 | async fn get_table(&self, table_name: &str) -> Result> { 63 | let listing_options = 64 | 
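// With pruning enabled, DataFusion can skip entire parquet row groups using
// column statistics (and the trace_id bloom filters written by
// PartitionWriter) instead of scanning every file under the partition prefixes.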
ListingOptions::new(Arc::new(ParquetFormat::default().with_enable_pruning(true))) 65 | .with_file_extension(".parquet"); 66 | let mut listing_table_config = 67 | ListingTableConfig::new_with_multi_paths(self.table_paths(table_name)) 68 | .with_listing_options(listing_options); 69 | if table_name == TABLE_SPAN { 70 | listing_table_config = listing_table_config.with_schema(schema::get_span_schema()); 71 | } else { 72 | // FIXME: log dynamic fields schema 73 | listing_table_config = listing_table_config.with_schema(schema::get_log_schema()); 74 | // listing_table_config = listing_table_config.infer_schema(&self.ctx.state()).await?; 75 | // println!("listing schema: {:?}", listing_table_config.file_schema); 76 | } 77 | Ok(Arc::new(ListingTable::try_new(listing_table_config)?)) 78 | } 79 | 80 | pub async fn df(&self, table_name: &str) -> Result { 81 | Ok(self.ctx.read_table(self.get_table(table_name).await?)?) 82 | } 83 | 84 | pub async fn query_table(&self, table_name: &str, expr: Expr) -> Result> { 85 | let df = self.df(table_name).await?; 86 | Ok(df.filter(expr)?.collect().await.unwrap_or_default()) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /duo/src/partition/writer.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use anyhow::Result; 4 | use datafusion::parquet::arrow::AsyncArrowWriter; 5 | use datafusion::parquet::schema::types::ColumnPath; 6 | use datafusion::{arrow::array::RecordBatch, parquet::file::properties::WriterProperties}; 7 | use object_store::{path::Path, ObjectStore}; 8 | use rand::{rngs::ThreadRng, Rng}; 9 | use time::OffsetDateTime; 10 | 11 | use crate::config; 12 | 13 | pub struct PartitionWriter { 14 | object_store: Arc, 15 | partition_path: String, 16 | } 17 | 18 | impl PartitionWriter { 19 | pub fn with_minute() -> Self { 20 | let now = OffsetDateTime::now_utc(); 21 | let config = config::load(); 22 | PartitionWriter { 23 | object_store: config.object_store(), 24 | partition_path: format!( 25 | "date={}/hour={:02}/minute={:02}", 26 | now.date(), 27 | now.hour(), 28 | now.minute() 29 | ), 30 | } 31 | } 32 | 33 | pub async fn write_partition( 34 | &self, 35 | table_name: &str, 36 | record_batchs: &[RecordBatch], 37 | ) -> Result<()> { 38 | let schema = if let Some(rb) = record_batchs.first() { 39 | rb.schema() 40 | } else { 41 | return Ok(()); 42 | }; 43 | 44 | let mut buffer = vec![]; 45 | // Enable bloom filter for trace_id column, 46 | // both span and log have trace_id column 47 | let properties = WriterProperties::builder() 48 | .set_column_bloom_filter_enabled(ColumnPath::from("trace_id"), true) 49 | .build(); 50 | let mut writer = AsyncArrowWriter::try_new(&mut buffer, schema, Some(properties))?; 51 | for rb in record_batchs { 52 | writer.write(rb).await?; 53 | } 54 | writer.close().await?; 55 | let path = Path::from(format!( 56 | "{table_name}/{}/{}.parquet", 57 | self.partition_path, 58 | ThreadRng::default().gen::() 59 | )); 60 | self.object_store.put(&path, buffer.into()).await?; 61 | 62 | Ok(()) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /duo/src/query.rs: -------------------------------------------------------------------------------- 1 | use std::mem; 2 | use std::sync::Arc; 3 | 4 | use crate::arrow::serialize_record_batches; 5 | use crate::partition::PartitionQuery; 6 | use crate::schema; 7 | use crate::MemoryStore; 8 | 9 | use anyhow::{Ok, Result}; 10 | use 
11 | use datafusion::logical_expr::SortExpr; 12 | use datafusion::prelude::DataFrame; 13 | use datafusion::prelude::SessionContext; 14 | use datafusion::prelude::{col, Expr}; 15 | use parking_lot::RwLock; 16 | use serde::de::DeserializeOwned; 17 | use time::{Duration, OffsetDateTime}; 18 | 19 | pub struct QueryEngine { 20 | memory_store: Arc<RwLock<MemoryStore>>, 21 | } 22 | 23 | impl QueryEngine { 24 | pub fn new(memory_store: Arc<RwLock<MemoryStore>>) -> Self { 25 | Self { memory_store } 26 | } 27 | 28 | pub fn aggregate_span_names(self, expr: Expr) -> AggregateQuery { 29 | self.query_span(expr).aggregate(vec![col("name")], vec![]) 30 | } 31 | 32 | pub fn query_span(&self, expr: Expr) -> Query { 33 | let guard = self.memory_store.read(); 34 | Query::new( 35 | "span", 36 | expr, 37 | MemTable::try_new(schema::get_span_schema(), vec![guard.span_batches.clone()]) 38 | .expect("Create Memtable failed"), 39 | ) 40 | } 41 | 42 | pub fn query_log(&self, expr: Expr) -> Query { 43 | let guard = self.memory_store.read(); 44 | Query::new( 45 | "log", 46 | expr, 47 | MemTable::try_new( 48 | Arc::clone(&guard.log_schema), 49 | vec![guard.log_batches.clone()], 50 | ) 51 | .expect("Create Memtable failed"), 52 | ) 53 | } 54 | } 55 | 56 | pub struct Query { 57 | table_name: &'static str, 58 | expr: Expr, 59 | memtable: MemTable, 60 | start: Option<OffsetDateTime>, 61 | end: Option<OffsetDateTime>, 62 | sort_expr: Vec<SortExpr>, 63 | limit: Option<usize>, 64 | skip: usize, 65 | } 66 | 67 | pub struct AggregateQuery { 68 | raw_query: Query, 69 | group_expr: Vec<Expr>, 70 | aggr_expr: Vec<Expr>, 71 | } 72 | 73 | impl Query { 74 | fn new(table_name: &'static str, expr: Expr, memtable: MemTable) -> Self { 75 | Self { 76 | table_name, 77 | expr, 78 | memtable, 79 | start: None, 80 | end: None, 81 | sort_expr: Vec::new(), 82 | limit: None, 83 | skip: 0, 84 | } 85 | } 86 | 87 | pub fn range(self, start: Option<OffsetDateTime>, end: Option<OffsetDateTime>) -> Self { 88 | Self { start, end, ..self } 89 | } 90 | 91 | pub fn limit(self, skip: usize, limit: Option<usize>) -> Self { 92 | Self { 93 | skip, 94 | limit, 95 | ..self 96 | } 97 | } 98 | 99 | async fn df(self) -> Result<DataFrame> { 100 | let ctx = SessionContext::new(); 101 | let mut df = ctx.read_table(Arc::new(self.memtable))?; 102 | 103 | // Don't query data from storage in memory mode 104 | if !crate::is_memory_mode() { 105 | let pq = PartitionQuery::new( 106 | self.start 107 | .unwrap_or_else(|| OffsetDateTime::now_utc() - Duration::minutes(15)), 108 | self.end.unwrap_or(OffsetDateTime::now_utc()), 109 | ); 110 | df = df.union(pq.df(self.table_name).await?)?; 111 | } 112 | Ok(df.filter(self.expr)?.limit(self.skip, self.limit)?) 113 | } 114 | 115 | pub fn sort(self, sort_expr: Vec<SortExpr>) -> Self { 116 | Self { sort_expr, ..self } 117 | } 118 | 119 | pub fn aggregate(self, group_expr: Vec<Expr>, aggr_expr: Vec<Expr>) -> AggregateQuery { 120 | AggregateQuery { 121 | raw_query: self, 122 | group_expr, 123 | aggr_expr, 124 | } 125 | } 126 | 127 | pub async fn collect<T: DeserializeOwned>(mut self) -> Result<Vec<T>> { 128 | let sort_expr = mem::take(&mut self.sort_expr); 129 | let mut df = self.df().await?; 130 | if !sort_expr.is_empty() { 131 | df = df.sort(sort_expr)?; 132 | } 133 | let batches = df.collect().await?; 134 | Ok(serialize_record_batches::<T>(&batches)?) 135 | } 136 | } 137 | 138 | impl AggregateQuery { 139 | pub async fn collect<T: DeserializeOwned>(mut self) -> Result<Vec<T>> { 140 | let sort_expr = mem::take(&mut self.raw_query.sort_expr); 141 | let mut df = self 142 | .raw_query 143 | .df() 144 | .await?
145 | .aggregate(self.group_expr, self.aggr_expr)?; 146 | if !sort_expr.is_empty() { 147 | df = df.sort(sort_expr)?; 148 | } 149 | let batches = df.collect().await?; 150 | Ok(serialize_record_batches::<T>(&batches)?) 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /duo/src/schema.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{ 2 | atomic::{AtomicBool, Ordering}, 3 | Arc, LazyLock, OnceLock, 4 | }; 5 | 6 | use anyhow::Result; 7 | use arrow_schema::{DataType, Field, Schema}; 8 | use object_store::path::Path; 9 | use parking_lot::RwLock; 10 | 11 | use crate::config; 12 | 13 | static LOG_SCHEMA: OnceLock<RwLock<Arc<Schema>>> = OnceLock::new(); 14 | static LOG_SCHEMA_DIRTY: AtomicBool = AtomicBool::new(false); 15 | 16 | static SPAN_SCHEMA: LazyLock<Arc<Schema>> = LazyLock::new(|| { 17 | Arc::new(Schema::new(vec![ 18 | Field::new("id", DataType::UInt64, false), 19 | Field::new("parent_id", DataType::UInt64, true), 20 | Field::new("trace_id", DataType::UInt64, false), 21 | Field::new("name", DataType::Utf8, false), 22 | Field::new("process_id", DataType::Utf8, false), 23 | Field::new("start", DataType::Int64, false), 24 | Field::new("end", DataType::Int64, true), 25 | Field::new("tags", DataType::Utf8, true), 26 | ])) 27 | }); 28 | 29 | #[inline] 30 | fn default_log_schema() -> Arc<Schema> { 31 | Arc::new(Schema::new(vec![ 32 | Field::new("process_id", DataType::Utf8, false), 33 | Field::new("time", DataType::Int64, false), 34 | Field::new("trace_id", DataType::UInt64, true), 35 | Field::new("span_id", DataType::UInt64, true), 36 | Field::new("level", DataType::Utf8, false), 37 | Field::new("target", DataType::Utf8, true), 38 | Field::new("file", DataType::Utf8, true), 39 | Field::new("line", DataType::UInt32, true), 40 | Field::new("message", DataType::Utf8, true), 41 | ])) 42 | } 43 | 44 | pub fn get_span_schema() -> Arc<Schema> { 45 | Arc::clone(&SPAN_SCHEMA) 46 | } 47 | 48 | pub async fn load() -> Result<()> { 49 | let config = config::load(); 50 | let object_store = config.object_store(); 51 | match object_store 52 | .get(&Path::from("schema/log_schema.json")) 53 | .await 54 | { 55 | Ok(data) => { 56 | let schema = serde_json::from_slice::<Schema>(&data.bytes().await?)?; 57 | let latest_schema = 58 | Schema::try_merge(vec![(*default_log_schema()).clone(), schema]).unwrap(); 59 | LOG_SCHEMA_DIRTY.store(true, Ordering::Relaxed); 60 | LOG_SCHEMA 61 | .set(RwLock::new(Arc::new(latest_schema))) 62 | .expect("LogSchema already initialized"); 63 | } 64 | Err(_err) => { 65 | LOG_SCHEMA 66 | .set(RwLock::new(default_log_schema())) 67 | .expect("LogSchema already initialized"); 68 | } 69 | } 70 | 71 | Ok(()) 72 | } 73 | 74 | pub fn get_log_schema() -> Arc<Schema> { 75 | Arc::clone(&LOG_SCHEMA.get().expect("LogSchema not initialized").read()) 76 | } 77 | 78 | pub fn merge_log_schema(schema: Arc<Schema>) -> Arc<Schema> { 79 | let mut guard = LOG_SCHEMA.get().expect("LogSchema not initialized").write(); 80 | if guard.contains(&schema) { 81 | return Arc::clone(&*guard); 82 | } 83 | 84 | let new_schema = 85 | Arc::new(Schema::try_merge(vec![(**guard).clone(), (*schema).clone()]).unwrap()); 86 | *guard = Arc::clone(&new_schema); 87 | LOG_SCHEMA_DIRTY.store(true, Ordering::Relaxed); 88 | new_schema 89 | } 90 | 91 | pub async fn persit_log_schema() { 92 | if LOG_SCHEMA_DIRTY.load(Ordering::Relaxed) { 93 | let object_store = config::load().object_store(); 94 | let payload = serde_json::to_vec(&get_log_schema()).unwrap(); 95 | object_store
.put(&Path::from("schema/log_schema.json"), payload.into()) 97 | .await 98 | .unwrap(); 99 | LOG_SCHEMA_DIRTY.store(false, Ordering::Relaxed); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /duo/src/utils.rs: -------------------------------------------------------------------------------- 1 | use time::{Date, OffsetDateTime}; 2 | 3 | /// Convert minutes to a slot range 4 | /// e.g. given minute = 15 and OBJECT_STORE_DATA_GRANULARITY = 10 returns "10-19" 5 | fn minute_to_slot(minute: u8, data_granularity: u8) -> Option<String> { 6 | if minute >= 60 { 7 | return None; 8 | } 9 | 10 | let block_n = minute / data_granularity; 11 | let block_start = block_n * data_granularity; 12 | if data_granularity == 1 { 13 | return Some(format!("{block_start:02}")); 14 | } 15 | 16 | let block_end = (block_n + 1) * data_granularity - 1; 17 | Some(format!("{block_start:02}-{block_end:02}")) 18 | } 19 | 20 | fn date_to_prefix(date: Date) -> String { 21 | let date = format!("date={date}/"); 22 | date.replace("UTC", "") 23 | } 24 | 25 | fn hour_to_prefix(hour: u8) -> String { 26 | format!("hour={hour:02}/") 27 | } 28 | 29 | fn minute_to_prefix(minute: u8, data_granularity: u8) -> Option<String> { 30 | Some(format!( 31 | "minute={}/", 32 | minute_to_slot(minute, data_granularity)? 33 | )) 34 | } 35 | 36 | pub struct TimePeriod { 37 | start: OffsetDateTime, 38 | end: OffsetDateTime, 39 | data_granularity: u8, 40 | } 41 | 42 | impl TimePeriod { 43 | pub fn new(start: OffsetDateTime, end: OffsetDateTime, data_granularity: u8) -> Self { 44 | debug_assert!(end > start); 45 | Self { 46 | data_granularity, 47 | start, 48 | end, 49 | } 50 | } 51 | 52 | pub fn generate_prefixes(&self) -> Vec<String> { 53 | let end_minute = self.end.minute() + u8::from(self.end.second() > 0); 54 | self.generate_date_prefixes( 55 | self.start.date(), 56 | self.end.date(), 57 | (self.start.hour(), self.start.minute()), 58 | (self.end.hour(), end_minute), 59 | ) 60 | } 61 | 62 | fn generate_minute_prefixes( 63 | &self, 64 | prefix: &str, 65 | start_minute: u8, 66 | end_minute: u8, 67 | ) -> Vec<String> { 68 | if start_minute == end_minute { 69 | return vec![]; 70 | } 71 | 72 | let (start_block, end_block) = ( 73 | start_minute / self.data_granularity, 74 | end_minute / self.data_granularity, 75 | ); 76 | 77 | let forbidden_block = 60 / self.data_granularity; 78 | 79 | // ensure both start and end are within the same hour, else return prefix as is 80 | if end_block - start_block >= forbidden_block { 81 | return vec![prefix.to_owned()]; 82 | } 83 | 84 | let mut prefixes = vec![]; 85 | 86 | let push_prefix = |block: u8, prefixes: &mut Vec<_>| { 87 | if let Some(minute_prefix) = 88 | minute_to_prefix(block * self.data_granularity, self.data_granularity) 89 | { 90 | let prefix = prefix.to_owned() + &minute_prefix; 91 | prefixes.push(prefix); 92 | } 93 | }; 94 | 95 | for block in start_block..end_block { 96 | push_prefix(block, &mut prefixes); 97 | } 98 | 99 | // NOTE: for block sizes larger than a minute, 100 | // ensure the last block is considered 101 | if self.data_granularity > 1 { 102 | push_prefix(end_block, &mut prefixes); 103 | } 104 | 105 | prefixes 106 | } 107 | 108 | fn generate_hour_prefixes( 109 | &self, 110 | prefix: &str, 111 | start_hour: u8, 112 | start_minute: u8, 113 | end_hour: u8, 114 | end_minute: u8, 115 | ) -> Vec<String> { 116 | // ensure both start and end are within the same day 117 | if end_hour.saturating_sub(start_hour) >= 24 { 118 | return vec![prefix.to_owned()]; 119 | } 120 |
121 | let mut prefixes = vec![]; 122 | 123 | for hour in start_hour..=end_hour { 124 | if hour == 24 { 125 | break; 126 | } 127 | let prefix = prefix.to_owned() + &hour_to_prefix(hour); 128 | let is_start = hour == start_hour; 129 | let is_end = hour == end_hour; 130 | 131 | if is_start || is_end { 132 | let minute_prefixes = self.generate_minute_prefixes( 133 | &prefix, 134 | if is_start { start_minute } else { 0 }, 135 | if is_end { end_minute } else { 60 }, 136 | ); 137 | prefixes.extend(minute_prefixes); 138 | } else { 139 | prefixes.push(prefix); 140 | } 141 | } 142 | 143 | prefixes 144 | } 145 | 146 | fn generate_date_prefixes( 147 | &self, 148 | start_date: Date, 149 | end_date: Date, 150 | start_time: (u8, u8), 151 | end_time: (u8, u8), 152 | ) -> Vec<String> { 153 | let mut prefixes = vec![]; 154 | let mut date = start_date; 155 | 156 | while date <= end_date { 157 | let prefix = date_to_prefix(date); 158 | let is_start = date == start_date; 159 | let is_end = date == end_date; 160 | 161 | if is_start || is_end { 162 | let ((start_hour, start_minute), (end_hour, end_minute)) = ( 163 | if is_start { start_time } else { (0, 0) }, 164 | if is_end { end_time } else { (24, 60) }, 165 | ); 166 | let hour_prefixes = self.generate_hour_prefixes( 167 | &prefix, 168 | start_hour, 169 | start_minute, 170 | end_hour, 171 | end_minute, 172 | ); 173 | prefixes.extend(hour_prefixes); 174 | } else { 175 | prefixes.push(prefix); 176 | } 177 | date = date.next_day().unwrap(); 178 | } 179 | 180 | prefixes 181 | } 182 | } 183 | 184 | #[cfg(test)] 185 | mod tests { 186 | use rstest::*; 187 | use time::format_description::well_known::Rfc3339; 188 | use time::OffsetDateTime; 189 | 190 | use super::TimePeriod; 191 | 192 | fn time_period_from_str(start: &str, end: &str) -> TimePeriod { 193 | TimePeriod::new( 194 | OffsetDateTime::parse(start, &Rfc3339).unwrap(), 195 | OffsetDateTime::parse(end, &Rfc3339).unwrap(), 196 | 1, 197 | ) 198 | } 199 | 200 | #[rstest] 201 | #[case::same_minute( 202 | "2022-06-11T16:30:00+00:00", "2022-06-11T16:30:59+00:00", 203 | &["date=2022-06-11/hour=16/minute=30/"] 204 | )] 205 | #[case::same_hour_different_minute( 206 | "2022-06-11T16:57:00+00:00", "2022-06-11T16:59:00+00:00", 207 | &[ 208 | "date=2022-06-11/hour=16/minute=57/", 209 | "date=2022-06-11/hour=16/minute=58/" 210 | ] 211 | )] 212 | #[case::same_hour_with_00_to_59_minute_block( 213 | "2022-06-11T16:00:00+00:00", "2022-06-11T16:59:59+00:00", 214 | &["date=2022-06-11/hour=16/"] 215 | )] 216 | #[case::same_date_different_hours_coherent_minute( 217 | "2022-06-11T15:00:00+00:00", "2022-06-11T17:00:00+00:00", 218 | &[ 219 | "date=2022-06-11/hour=15/", 220 | "date=2022-06-11/hour=16/" 221 | ] 222 | )] 223 | #[case::same_date_different_hours_incoherent_minutes( 224 | "2022-06-11T15:59:00+00:00", "2022-06-11T16:01:00+00:00", 225 | &[ 226 | "date=2022-06-11/hour=15/minute=59/", 227 | "date=2022-06-11/hour=16/minute=00/" 228 | ] 229 | )] 230 | #[case::same_date_different_hours_whole_hours_between_incoherent_minutes( 231 | "2022-06-11T15:59:00+00:00", "2022-06-11T17:01:00+00:00", 232 | &[ 233 | "date=2022-06-11/hour=15/minute=59/", 234 | "date=2022-06-11/hour=16/", 235 | "date=2022-06-11/hour=17/minute=00/" 236 | ] 237 | )] 238 | #[case::different_date_coherent_hours_and_minutes( 239 | "2022-06-11T00:00:00+00:00", "2022-06-13T00:00:00+00:00", 240 | &[ 241 | "date=2022-06-11/", 242 | "date=2022-06-12/" 243 | ] 244 | )] 245 | #[case::different_date_incoherent_hours_coherent_minutes( 246 | "2022-06-11T23:00:01+00:00", "2022-06-12T01:59:59+00:00",
247 | &[ 248 | "date=2022-06-11/hour=23/", 249 | "date=2022-06-12/hour=00/", 250 | "date=2022-06-12/hour=01/" 251 | ] 252 | )] 253 | #[case::different_date_incoherent_hours_incoherent_minutes( 254 | "2022-06-11T23:59:59+00:00", "2022-06-12T00:01:00+00:00", 255 | &[ 256 | "date=2022-06-11/hour=23/minute=59/", 257 | "date=2022-06-12/hour=00/minute=00/" 258 | ] 259 | )] 260 | fn prefix_generation(#[case] start: &str, #[case] end: &str, #[case] right: &[&str]) { 261 | let time_period = time_period_from_str(start, end); 262 | let prefixes = time_period.generate_prefixes(); 263 | let left = prefixes.iter().map(String::as_str).collect::<Vec<_>>(); 264 | assert_eq!(left.as_slice(), right); 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /duo/src/web/deser.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, marker::PhantomData, str::FromStr}; 2 | 3 | use serde::de; 4 | use serde_json::Value; 5 | use time::{Duration, OffsetDateTime}; 6 | 7 | pub(super) fn option_ignore_error<'de, T, D>(d: D) -> Result<Option<T>, D::Error> 8 | where 9 | T: de::Deserialize<'de>, 10 | D: de::Deserializer<'de>, 11 | { 12 | Ok(T::deserialize(d).ok()) 13 | } 14 | 15 | pub fn option_microsecond<'de, D>(d: D) -> Result<Option<OffsetDateTime>, D::Error> 16 | where 17 | D: de::Deserializer<'de>, 18 | { 19 | d.deserialize_option(OptionMicroSecondsTimestampVisitor) 20 | } 21 | 22 | pub fn map_list<'de, D>(d: D) -> Result<HashMap<String, Value>, D::Error> 23 | where 24 | D: de::Deserializer<'de>, 25 | { 26 | d.deserialize_any(ListValueVisitor) 27 | } 28 | 29 | #[allow(unused)] 30 | pub fn str_sequence<'de, T: FromStr, D>(d: D) -> Result<Vec<T>, D::Error> 31 | where 32 | D: de::Deserializer<'de>, 33 | { 34 | d.deserialize_any(SeparatedSequenceVisitor(PhantomData)) 35 | } 36 | 37 | pub(super) fn option_duration<'de, D>(d: D) -> Result<Option<Duration>, D::Error> 38 | where 39 | D: de::Deserializer<'de>, 40 | { 41 | d.deserialize_option(OptionDurationVisitor) 42 | } 43 | 44 | pub mod miscrosecond { 45 | use serde::{Deserializer, Serializer}; 46 | use time::OffsetDateTime; 47 | 48 | use super::MicroSecondsTimestampVisitor; 49 | 50 | pub fn serialize<S>(time: &OffsetDateTime, serializer: S) -> Result<S::Ok, S::Error> 51 | where 52 | S: Serializer, 53 | { 54 | serializer.serialize_i64((time.unix_timestamp_nanos() / 1000) as i64) 55 | } 56 | 57 | pub fn deserialize<'de, D>(deserializer: D) -> Result<OffsetDateTime, D::Error> 58 | where 59 | D: Deserializer<'de>, 60 | { 61 | deserializer.deserialize_any(MicroSecondsTimestampVisitor) 62 | } 63 | } 64 | 65 | pub mod level { 66 | use std::str::FromStr; 67 | 68 | use serde::{Deserialize, Deserializer, Serializer}; 69 | use tracing::Level; 70 | 71 | #[allow(unused)] 72 | pub fn serialize<S>(level: &Level, serializer: S) -> Result<S::Ok, S::Error> 73 | where 74 | S: Serializer, 75 | { 76 | serializer.serialize_str(level.as_str()) 77 | } 78 | 79 | pub fn deserialize<'de, D>(deserializer: D) -> Result<Level, D::Error> 80 | where 81 | D: Deserializer<'de>, 82 | { 83 | let level = String::deserialize(deserializer)?; 84 | Ok(Level::from_str(&level).expect("invalid level")) 85 | } 86 | } 87 | 88 | #[allow(unused)] 89 | struct SeparatedSequenceVisitor<T>(PhantomData<T>); 90 | 91 | impl<'de, T: FromStr> de::Visitor<'de> for SeparatedSequenceVisitor<T> { 92 | type Value = Vec<T>; 93 | 94 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 95 | write!(formatter, "an array string") 96 | } 97 | 98 | fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> 99 | where 100 | E: de::Error, 101 | { 102 | let list = v 103 | .split(",") 104 | .filter(|s| !str::is_empty(s))
105 | .filter_map(|s| FromStr::from_str(s).ok()) 106 | .collect(); 107 | Ok(list) 108 | } 109 | } 110 | 111 | struct ListValueVisitor; 112 | 113 | impl<'de> de::Visitor<'de> for ListValueVisitor { 114 | type Value = HashMap<String, Value>; 115 | 116 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 117 | write!(formatter, "an array string") 118 | } 119 | 120 | fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> 121 | where 122 | E: de::Error, 123 | { 124 | let list = serde_json::from_str(v).unwrap(); 125 | Ok(list) 126 | } 127 | } 128 | 129 | struct OptionMicroSecondsTimestampVisitor; 130 | 131 | impl<'de> de::Visitor<'de> for OptionMicroSecondsTimestampVisitor { 132 | type Value = Option<OffsetDateTime>; 133 | 134 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 135 | formatter.write_str("a unix timestamp in microseconds or none") 136 | } 137 | 138 | fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error> 139 | where 140 | D: de::Deserializer<'de>, 141 | { 142 | d.deserialize_i64(MicroSecondsTimestampVisitor).map(Some) 143 | } 144 | 145 | fn visit_none<E>(self) -> Result<Self::Value, E> 146 | where 147 | E: de::Error, 148 | { 149 | Ok(None) 150 | } 151 | 152 | fn visit_unit<E>(self) -> Result<Self::Value, E> 153 | where 154 | E: de::Error, 155 | { 156 | Ok(None) 157 | } 158 | } 159 | 160 | struct MicroSecondsTimestampVisitor; 161 | 162 | impl<'de> de::Visitor<'de> for MicroSecondsTimestampVisitor { 163 | type Value = OffsetDateTime; 164 | 165 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 166 | write!(formatter, "a unix timestamp in microseconds") 167 | } 168 | 169 | fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> 170 | where 171 | E: de::Error, 172 | { 173 | Ok( 174 | OffsetDateTime::from_unix_timestamp_nanos((v * 1000) as i128) 175 | .expect("invalid timestamp format"), 176 | ) 177 | } 178 | 179 | fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> 180 | where 181 | E: de::Error, 182 | { 183 | Ok( 184 | OffsetDateTime::from_unix_timestamp_nanos((v * 1000) as i128) 185 | .expect("invalid timestamp format"), 186 | ) 187 | } 188 | 189 | fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> 190 | where 191 | E: de::Error, 192 | { 193 | let timestamp = v.parse::<i64>().expect("invalid timestamp format"); 194 | self.visit_i64(timestamp) 195 | } 196 | } 197 | 198 | struct OptionDurationVisitor; 199 | 200 | impl<'de> de::Visitor<'de> for OptionDurationVisitor { 201 | type Value = Option<Duration>; 202 | 203 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 204 | formatter.write_str("a duration or none") 205 | } 206 | 207 | fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error> 208 | where 209 | D: de::Deserializer<'de>, 210 | { 211 | Ok(d.deserialize_str(DurationVisitor).ok()) 212 | } 213 | 214 | fn visit_none<E>(self) -> Result<Self::Value, E> 215 | where 216 | E: de::Error, 217 | { 218 | Ok(None) 219 | } 220 | } 221 | 222 | struct DurationVisitor; 223 | 224 | impl<'de> de::Visitor<'de> for DurationVisitor { 225 | type Value = Duration; 226 | 227 | fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { 228 | formatter.write_str("a duration") 229 | } 230 | 231 | fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> 232 | where 233 | E: de::Error, 234 | { 235 | parse_duration(v) 236 | .map_err(de::Error::custom) 237 | .map(Duration::microseconds) 238 | } 239 | } 240 | 241 | fn parse_duration(duration: &str) -> anyhow::Result<i64> { 242 | let duration = duration.to_lowercase(); 243 | if let Some(d) = duration.strip_suffix("us") { 244 | Ok(d.parse()?) 245 | } else if let Some(d) = duration.strip_suffix("ms") { 246 | Ok(d.parse::<i64>()?
* 1000) 247 | } else if let Some(d) = duration.strip_suffix('s') { 248 | Ok(d.parse::<i64>()? * 1_000_000) 249 | } else { 250 | anyhow::bail!("Invalid duration {}", duration) 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /duo/src/web/logs.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use axum::extract::{Extension, Path, Query}; 4 | use axum::http::StatusCode; 5 | use axum::response::{IntoResponse, Response}; 6 | use axum::Json; 7 | use datafusion::common::DFSchema; 8 | use datafusion::functions_aggregate::count::count; 9 | use datafusion::prelude::*; 10 | use parking_lot::RwLock; 11 | use serde::{Deserialize, Serialize}; 12 | use time::OffsetDateTime; 13 | use tracing::{debug, info, warn}; 14 | 15 | use crate::query::QueryEngine; 16 | use crate::{schema, Log, MemoryStore}; 17 | 18 | use super::deser; 19 | 20 | const DEFAULT_LOG_LIMIT: usize = 50; 21 | 22 | #[derive(Debug, Deserialize)] 23 | pub(super) struct QueryParameters { 24 | service: String, 25 | #[serde(default, deserialize_with = "deser::option_ignore_error")] 26 | limit: Option<usize>, 27 | #[serde(default, deserialize_with = "deser::option_ignore_error")] 28 | skip: Option<usize>, 29 | #[serde(default, deserialize_with = "deser::option_microsecond")] 30 | start: Option<OffsetDateTime>, 31 | #[serde(default, deserialize_with = "deser::option_microsecond")] 32 | end: Option<OffsetDateTime>, 33 | expr: Option<String>, 34 | } 35 | 36 | #[tracing::instrument] 37 | pub(super) async fn schema() -> impl IntoResponse { 38 | Json(schema::get_log_schema()) 39 | } 40 | 41 | impl QueryParameters { 42 | fn expr(&self) -> Expr { 43 | let process_prefix = &self.service; 44 | let mut expr = col("process_id").like(lit(format!("{process_prefix}%"))); 45 | if let Some(sql_expr) = &self.expr { 46 | let df_schema = DFSchema::try_from(schema::get_log_schema()).unwrap(); 47 | match SessionContext::new().parse_sql_expr(sql_expr, &df_schema) { 48 | Ok(sql_expr) => { 49 | debug!("Parsed expr: {sql_expr}"); 50 | expr = expr.and(sql_expr); 51 | } 52 | Err(err) => { 53 | warn!("Parse expr failed: {err}"); 54 | expr = expr.and(col("message").ilike(lit(format!("%{sql_expr}%")))); 55 | } 56 | } 57 | } 58 | info!(expr = ?expr, "Query expr: "); 59 | expr 60 | } 61 | } 62 | 63 | #[tracing::instrument] 64 | pub(super) async fn field_stats( 65 | Path(field): Path<String>, 66 | Query(p): Query<QueryParameters>, 67 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 68 | ) -> Response { 69 | if schema::get_log_schema().index_of(&field).is_err() { 70 | return (StatusCode::NOT_FOUND, format!("Field {field} not exists")).into_response(); 71 | } 72 | 73 | #[derive(Serialize, Deserialize)] 74 | struct FieldStats { 75 | value: Option<serde_json::Value>, 76 | count: i64, 77 | } 78 | let query_engine = QueryEngine::new(memory_store); 79 | let c = col(field); 80 | let stats = query_engine 81 | .query_log(p.expr()) 82 | .range(p.start, p.end) 83 | // sort by count desc 84 | .sort(vec![col("count").sort(false, false)]) 85 | .limit(p.skip.unwrap_or(0), p.limit.or(Some(20))) 86 | .aggregate( 87 | vec![c.clone().alias("value")], 88 | vec![count(c).alias("count")], 89 | ) 90 | .collect::<FieldStats>() 91 | .await 92 | .unwrap() 93 | .into_iter() 94 | // Filter out null value 95 | .filter(|s| s.value.is_some()) 96 | .collect::<Vec<_>>(); 97 | Json(stats).into_response() 98 | } 99 | 100 | #[tracing::instrument] 101 | pub(super) async fn list( 102 | Query(p): Query<QueryParameters>, 103 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 104 | ) -> impl IntoResponse { 105 | let query_engine =
QueryEngine::new(memory_store); 106 | let total_logs = query_engine 107 | .query_log(p.expr()) 108 | .range(p.start, p.end) 109 | .sort(vec![col("time").sort(false, false)]) 110 | .limit(p.skip.unwrap_or(0), p.limit.or(Some(DEFAULT_LOG_LIMIT))) 111 | .collect::<Log>() 112 | .await 113 | .unwrap_or_default(); 114 | Json(total_logs.into_iter().collect::<Vec<_>>()) 115 | } 116 | -------------------------------------------------------------------------------- /duo/src/web/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{net::SocketAddr, sync::Arc}; 2 | 3 | use axum::{ 4 | body::Body, 5 | extract::Extension, 6 | http::{header, Method, StatusCode, Uri}, 7 | response::{IntoResponse, Response}, 8 | routing::get, 9 | Router, 10 | }; 11 | use parking_lot::RwLock; 12 | use rust_embed::RustEmbed; 13 | use tower::ServiceBuilder; 14 | use tower_http::cors::{Any, CorsLayer}; 15 | 16 | use crate::MemoryStore; 17 | 18 | pub mod deser; 19 | mod logs; 20 | pub mod serialize; 21 | mod services; 22 | mod trace; 23 | 24 | pub struct JaegerData<I>(pub I); 25 | 26 | #[derive(RustEmbed)] 27 | #[folder = "ui"] 28 | struct UiAssets; 29 | 30 | pub struct StaticFile(Uri); 31 | 32 | impl IntoResponse for StaticFile { 33 | fn into_response(self) -> Response { 34 | let new_path = match self.0.path().trim_start_matches('/') { 35 | "" => "index.html", 36 | p if p.starts_with("trace") || p.starts_with("search") => "trace.html", 37 | p => p, 38 | }; 39 | // println!("path: {}, new_path: {}", path, new_path); 40 | match UiAssets::get(new_path) { 41 | Some(content) => { 42 | let body = Body::from(content.data); 43 | let mime = mime_guess::from_path(new_path).first_or_octet_stream(); 44 | Response::builder() 45 | .header(header::CONTENT_TYPE, mime.as_ref()) 46 | .body(body) 47 | .unwrap() 48 | } 49 | None => Response::builder() 50 | .status(StatusCode::NOT_FOUND) 51 | .body(Body::from("404")) 52 | .unwrap(), 53 | } 54 | } 55 | } 56 | 57 | pub async fn run_web_server( 58 | memory_store: Arc<RwLock<MemoryStore>>, 59 | port: u16, 60 | ) -> anyhow::Result<()> { 61 | let addr = SocketAddr::from(([0, 0, 0, 0], port)); 62 | let listener = tokio::net::TcpListener::bind(addr).await?; 63 | let cors = CorsLayer::new() 64 | // allow `GET` and `POST` when accessing the resource 65 | .allow_methods([Method::GET, Method::POST]) 66 | // allow requests from any origin 67 | .allow_origin(Any); 68 | let layer = ServiceBuilder::new() 69 | .layer(Extension(memory_store)) 70 | .layer(cors); 71 | 72 | let app = Router::new() 73 | .nest_service("/", get(static_handler)) 74 | .route("/api/traces", get(trace::list)) 75 | .route("/api/traces/:id", get(trace::get_by_id)) 76 | .route("/api/services", get(trace::services)) 77 | .route("/api/services/:service/operations", get(trace::operations)) 78 | .route("/api/logs", get(logs::list)) 79 | .route("/api/logs/schema", get(logs::schema)) 80 | .route("/api/logs/stats/:field", get(logs::field_stats)) 81 | .route("/stats", get(self::stats)) 82 | .layer(layer); 83 | 84 | println!("Web server listening on http://{}", addr); 85 | axum::serve(listener, app.into_make_service()).await?; 86 | Ok(()) 87 | } 88 | 89 | async fn static_handler(uri: Uri) -> impl IntoResponse { 90 | StaticFile(uri) 91 | } 92 | 93 | #[tracing::instrument] 94 | async fn stats(Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>) -> impl IntoResponse { 95 | let memory_store = memory_store.read(); 96 | serde_json::json!({ 97 | "process": memory_store.processes(), 98 | "logs": 0, 99 | "spans": 0, 100 | }) 101 | .to_string() 102 | } 103 | --------------------------------------------------------------------------------
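A note on the filter construction in `logs.rs` above: the handler always scopes the DataFusion plan to a `process_id` prefix, then tries the user-supplied string as a SQL expression against the log schema, and falls back to a case-insensitive `ILIKE` on `message` when parsing fails. A minimal standalone sketch of that pattern (the two-column schema and the `my-service` prefix are illustrative stand-ins, not code from this repo):

```rust
use std::sync::Arc;

use arrow_schema::{DataType, Field, Schema};
use datafusion::common::DFSchema;
use datafusion::prelude::{col, lit, Expr, SessionContext};

fn build_log_filter(user_expr: &str) -> datafusion::error::Result<Expr> {
    // Stand-in for schema::get_log_schema(): only the columns the filter touches.
    let schema = Arc::new(Schema::new(vec![
        Field::new("process_id", DataType::Utf8, false),
        Field::new("message", DataType::Utf8, true),
    ]));
    let df_schema = DFSchema::try_from(schema)?;
    // Always scope the query to the service's process-id prefix.
    let base = col("process_id").like(lit("my-service%"));
    // Use the input as a SQL expression when it parses; otherwise fall back
    // to a case-insensitive substring match on the message column.
    Ok(match SessionContext::new().parse_sql_expr(user_expr, &df_schema) {
        Ok(parsed) => base.and(parsed),
        Err(_) => base.and(col("message").ilike(lit(format!("%{user_expr}%")))),
    })
}
```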
/duo/src/web/serialize.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use serde::{ser::SerializeMap, Serialize, Serializer}; 4 | use serde_json::Value; 5 | 6 | use crate::{Log, Process, Span, TraceExt}; 7 | 8 | use super::JaegerData; 9 | 10 | struct SpanExt<'a> { 11 | inner: &'a Span, 12 | trace_id: u64, 13 | process_id: &'a String, 14 | } 15 | 16 | // Because Jaeger uses a different format, we 17 | // use newtypes to reimplement the serialization. 18 | struct JaegerField<'a>((&'a String, &'a Value)); 19 | struct JaegerLog<'a>(&'a Log); 20 | struct JaegerProcess<'a>(&'a Process); 21 | 22 | struct ReferenceType { 23 | trace_id: u64, 24 | span_id: u64, 25 | } 26 | 27 | impl Serialize for ReferenceType { 28 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 29 | where 30 | S: Serializer, 31 | { 32 | let mut map = serializer.serialize_map(Some(3))?; 33 | map.serialize_entry("refType", "CHILD_OF")?; 34 | map.serialize_entry("traceID", &self.trace_id.to_string())?; 35 | map.serialize_entry("spanID", &self.span_id.to_string())?; 36 | map.end() 37 | } 38 | } 39 | 40 | impl<'a> Serialize for JaegerField<'a> { 41 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 42 | where 43 | S: Serializer, 44 | { 45 | let mut map: <S as Serializer>::SerializeMap = serializer.serialize_map(Some(3))?; 46 | let (key, value) = self.0; 47 | map.serialize_entry("key", key)?; 48 | match value { 49 | Value::Bool(v) => { 50 | map.serialize_entry("type", "bool")?; 51 | map.serialize_entry("value", v)? 52 | } 53 | Value::Number(v) => { 54 | map.serialize_entry("type", "int64")?; 55 | map.serialize_entry("value", v)? 56 | } 57 | Value::String(v) => { 58 | map.serialize_entry("type", "string")?; 59 | map.serialize_entry("value", v)? 60 | } 61 | _ => { 62 | // TODO: more types? 63 | } 64 | } 65 | 66 | map.end() 67 | } 68 | } 69 | 70 | impl<'a> Serialize for SpanExt<'a> { 71 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 72 | where 73 | S: Serializer, 74 | { 75 | let trace_id = self.trace_id; 76 | let span = self.inner; 77 | 78 | let mut map = serializer.serialize_map(Some(11))?; 79 | map.serialize_entry("traceID", &trace_id.to_string())?; 80 | let references = if let Some(parent_span_id) = span.parent_id { 81 | vec![ReferenceType { 82 | span_id: parent_span_id, 83 | trace_id, 84 | }] 85 | } else { 86 | vec![] 87 | }; 88 | map.serialize_entry("references", &references)?; 89 | 90 | map.serialize_entry("spanID", &span.id.to_string())?; 91 | if span.is_intact() { 92 | map.serialize_entry("operationName", &span.name)?; 93 | } else { 94 | // The span isn't intact, add * to the operationName for indication.
95 | map.serialize_entry("operationName", &format!("{}*", span.name))?; 96 | } 97 | map.serialize_entry("startTime", &span.start_as_micros())?; 98 | map.serialize_entry("duration", &span.duration().whole_microseconds())?; 99 | let tags: Vec<_> = span.tags.iter().map(JaegerField).collect(); 100 | map.serialize_entry("tags", &tags)?; 101 | map.serialize_entry("logs", &span.logs.iter().map(JaegerLog).collect::<Vec<_>>())?; 102 | map.serialize_entry("processID", &self.process_id)?; 103 | map.serialize_entry("warnings", &Value::Null)?; 104 | map.serialize_entry("flags", &1)?; 105 | 106 | map.end() 107 | } 108 | } 109 | 110 | impl Serialize for TraceExt { 111 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 112 | where 113 | S: Serializer, 114 | { 115 | let mut map = serializer.serialize_map(Some(4))?; 116 | map.serialize_entry("traceID", &self.trace_id.to_string())?; 117 | map.serialize_entry( 118 | "spans", 119 | &self 120 | .spans 121 | .iter() 122 | .map(|span| SpanExt { 123 | trace_id: span.trace_id, 124 | inner: span, 125 | process_id: &span.process_id, 126 | }) 127 | .collect::<Vec<_>>(), 128 | )?; 129 | 130 | let processes = self 131 | .processes 132 | .iter() 133 | .map(|(key, value)| (key, JaegerProcess(value))) 134 | .collect::<HashMap<_, _>>(); 135 | map.serialize_entry("processes", &processes)?; 136 | map.serialize_entry("warnings", &Value::Null)?; 137 | map.end() 138 | } 139 | } 140 | 141 | impl<I: Serialize> Serialize for JaegerData<I> { 142 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 143 | where 144 | S: Serializer, 145 | { 146 | let mut map = serializer.serialize_map(Some(5))?; 147 | map.serialize_entry("data", &self.0)?; 148 | map.serialize_entry("total", &0)?; 149 | map.serialize_entry("limit", &0)?; 150 | map.serialize_entry("offset", &0)?; 151 | map.serialize_entry("errors", &Value::Null)?; 152 | map.end() 153 | } 154 | } 155 | 156 | impl<'a> Serialize for JaegerLog<'a> { 157 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 158 | where 159 | S: serde::Serializer, 160 | { 161 | let mut map = serializer.serialize_map(Some(2))?; 162 | map.serialize_entry("timestamp", &self.0.as_micros())?; 163 | let mut fields = HashMap::new(); 164 | fields.insert("message".into(), self.0.message.clone().into()); 165 | fields.insert("level".into(), self.0.level.as_str().into()); 166 | fields.insert("target".into(), self.0.target.as_str().into()); 167 | if let Some(file) = &self.0.file { 168 | fields.insert( 169 | "file".into(), 170 | format!("{}:{}", &file, self.0.line.unwrap_or_default()).into(), 171 | ); 172 | } 173 | fields.extend(self.0.fields.clone()); 174 | map.serialize_entry( 175 | "fields", 176 | &fields.iter().map(JaegerField).collect::<Vec<_>>(), 177 | )?; 178 | map.end() 179 | } 180 | } 181 | 182 | impl<'a> Serialize for JaegerProcess<'a> { 183 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 184 | where 185 | S: serde::Serializer, 186 | { 187 | let inner = self.0; 188 | let mut map = serializer.serialize_map(Some(3))?; 189 | map.serialize_entry("id", &inner.id)?; 190 | map.serialize_entry("serviceName", &inner.service_name)?; 191 | let tags: Vec<_> = inner.tags.iter().map(JaegerField).collect(); 192 | map.serialize_entry("tags", &tags)?; 193 | map.end() 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /duo/src/web/services.rs: -------------------------------------------------------------------------------- 1 | use crate::query::QueryEngine; 2 | use crate::{Log, MemoryStore, Span, TraceExt}; 3 | use datafusion::prelude::*; 4 | use parking_lot::RwLock; 5 | use serde::Deserialize; 6 | use std::collections::{HashMap, HashSet};
7 | use std::sync::Arc; 8 | 9 | use super::trace::QueryParameters; 10 | 11 | const DEFAULT_TRACE_LIMIT: usize = 20; 12 | 13 | pub(super) async fn filter_traces( 14 | memory_store: Arc<RwLock<MemoryStore>>, 15 | p: QueryParameters, 16 | ) -> Vec<TraceExt> { 17 | let process_prefix = p.service; 18 | let limit = p.limit.unwrap_or(DEFAULT_TRACE_LIMIT); 19 | // 20 | let mut traces = HashMap::<u64, Vec<Span>>::new(); 21 | 22 | let expr = col("process_id").like(lit(format!("{process_prefix}%"))); 23 | 24 | let processes = { memory_store.read().processes() }; 25 | let query_engine = QueryEngine::new(memory_store); 26 | let total_spans = query_engine 27 | .query_span(expr.clone()) 28 | .range(p.start, p.end) 29 | .collect::<Span>() 30 | .await 31 | .unwrap_or_default(); 32 | 33 | for span in total_spans { 34 | if traces.contains_key(&span.trace_id) { 35 | traces.entry(span.trace_id).or_default().push(span); 36 | continue; 37 | } 38 | 39 | // Filter the root span, the child spans will be added in above. 40 | if let Some(span_name) = p.operation.as_ref() { 41 | if &span.name != span_name { 42 | continue; 43 | } 44 | } 45 | 46 | if span.parent_id.is_some() { 47 | continue; 48 | } 49 | 50 | match (p.start, p.end) { 51 | (Some(start), None) if span.start < start => continue, 52 | (None, Some(end)) if span.start > end => continue, 53 | (Some(start), Some(end)) if span.start < start || span.start > end => continue, 54 | _ => {} 55 | } 56 | 57 | let duration = span.duration(); 58 | match (p.min_duration, p.max_duration) { 59 | (Some(min), None) if duration < min => continue, 60 | (None, Some(max)) if duration > max => continue, 61 | (Some(min), Some(max)) if duration < min || duration > max => continue, 62 | _ => {} 63 | } 64 | 65 | traces.entry(span.trace_id).or_default().push(span); 66 | } 67 | 68 | let trace_ids = traces.keys().collect::<Vec<_>>(); 69 | 70 | let expr = col("trace_id").in_list(trace_ids.into_iter().map(|id| lit(*id)).collect(), false); 71 | let trace_logs = query_engine 72 | .query_log(expr.clone()) 73 | .range(p.start, p.end) 74 | .collect::<Log>() 75 | .await 76 | .unwrap_or_default(); 77 | 78 | traces 79 | .into_iter() 80 | .take(limit) 81 | .map(|(trace_id, spans)| TraceExt { 82 | trace_id, 83 | processes: processes.clone(), 84 | spans: spans 85 | .iter() 86 | .map(|span| { 87 | let mut span = span.clone(); 88 | span.correlate_span_logs(&trace_logs); 89 | span 90 | }) 91 | .collect(), 92 | }) 93 | .collect() 94 | } 95 | 96 | pub(super) async fn get_trace_by_id( 97 | memory_store: Arc<RwLock<MemoryStore>>, 98 | trace_id: u64, 99 | ) -> Option<TraceExt> { 100 | let expr = col("trace_id").eq(lit(trace_id)); 101 | let processes = { memory_store.read().processes() }; 102 | let query_engine = QueryEngine::new(memory_store); 103 | let trace_spans = query_engine 104 | .query_span(expr.clone()) 105 | .collect::<Span>() 106 | .await 107 | .unwrap_or_default(); 108 | 109 | if trace_spans.is_empty() { 110 | None 111 | } else { 112 | let trace_logs = query_engine 113 | .query_log(expr) 114 | .collect::<Log>() 115 | .await 116 | .unwrap_or_default(); 117 | Some(TraceExt { 118 | trace_id, 119 | spans: trace_spans 120 | .into_iter() 121 | .map(|span| { 122 | let mut span = span.clone(); 123 | span.correlate_span_logs(&trace_logs); 124 | span 125 | }) 126 | .collect(), 127 | processes, 128 | }) 129 | } 130 | } 131 | 132 | pub(super) async fn aggregate_span_names( 133 | memory_store: Arc<RwLock<MemoryStore>>, 134 | service: &str, 135 | ) -> HashSet<String> { 136 | #[derive(Deserialize)] 137 | struct SpanName { 138 | name: String, 139 | } 140 | 141 | let query_engine =
QueryEngine::new(memory_store); 142 | let expr = col("process_id").like(lit(format!("{service}%"))); 143 | let batches = query_engine 144 | .aggregate_span_names(expr) 145 | .collect::<SpanName>() 146 | .await 147 | .unwrap_or_default(); 148 | 149 | batches 150 | .into_iter() 151 | .map(|item| item.name) 152 | .collect::<HashSet<_>>() 153 | } 154 | -------------------------------------------------------------------------------- /duo/src/web/trace.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use axum::extract::{Extension, Path, Query}; 4 | use axum::http::StatusCode; 5 | use axum::response::IntoResponse; 6 | use axum::Json; 7 | use parking_lot::RwLock; 8 | use serde::Deserialize; 9 | use time::{Duration, OffsetDateTime}; 10 | 11 | use crate::{MemoryStore, TraceExt}; 12 | 13 | use super::deser; 14 | use super::services::{aggregate_span_names, filter_traces, get_trace_by_id}; 15 | use super::JaegerData; 16 | 17 | #[derive(Debug, Deserialize)] 18 | pub(super) struct QueryParameters { 19 | pub service: String, 20 | pub operation: Option<String>, 21 | #[serde(default, deserialize_with = "deser::option_ignore_error")] 22 | pub limit: Option<usize>, 23 | #[serde(default, deserialize_with = "deser::option_microsecond")] 24 | pub start: Option<OffsetDateTime>, 25 | #[serde(default, deserialize_with = "deser::option_microsecond")] 26 | pub end: Option<OffsetDateTime>, 27 | #[serde(rename = "maxDuration")] 28 | #[serde(default, deserialize_with = "deser::option_duration")] 29 | pub max_duration: Option<Duration>, 30 | #[serde(rename = "minDuration")] 31 | #[serde(default, deserialize_with = "deser::option_duration")] 32 | pub min_duration: Option<Duration>, 33 | } 34 | 35 | #[tracing::instrument] 36 | pub(super) async fn list( 37 | Query(parameters): Query<QueryParameters>, 38 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 39 | ) -> impl IntoResponse { 40 | Json(JaegerData(filter_traces(memory_store, parameters).await)) 41 | } 42 | 43 | #[tracing::instrument] 44 | pub(super) async fn services( 45 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 46 | ) -> impl IntoResponse { 47 | let memory_store = memory_store.read(); 48 | Json(JaegerData(memory_store.service_names())) 49 | } 50 | 51 | #[tracing::instrument] 52 | pub(super) async fn operations( 53 | Path(service): Path<String>, 54 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 55 | ) -> impl IntoResponse { 56 | Json(JaegerData( 57 | aggregate_span_names(memory_store, &service).await, 58 | )) 59 | } 60 | 61 | #[tracing::instrument] 62 | pub(super) async fn get_by_id( 63 | Path(id): Path<String>, 64 | Extension(memory_store): Extension<Arc<RwLock<MemoryStore>>>, 65 | ) -> impl IntoResponse { 66 | let trace_id = id.parse::<u64>().ok(); 67 | match trace_id { 68 | Some(trace_id) => { 69 | if let Some(trace) = get_trace_by_id(memory_store, trace_id).await { 70 | Json(JaegerData(vec![trace])).into_response() 71 | } else { 72 | Json(JaegerData(Vec::<TraceExt>::new())).into_response() 73 | } 74 | } 75 | None => (StatusCode::NOT_FOUND, format!("trace {} not found", id)).into_response(), 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /duo/ui/_app/env.js: -------------------------------------------------------------------------------- 1 | export const env={} -------------------------------------------------------------------------------- /duo/ui/_app/immutable/assets/3.Buu_H62d.css: -------------------------------------------------------------------------------- 1 | iframe.svelte-1ehqna5{width:100%;height:100vh} 2 | --------------------------------------------------------------------------------
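With the routes registered in `web/mod.rs` and the parameter deserializers above, a trace query is plain HTTP. A hypothetical client call follows; the port, service name, and operation are placeholders, and `reqwest`/`tokio`/`anyhow` are assumptions of this sketch, not dependencies declared by the repo. Note that `minDuration` uses the `us`/`ms`/`s` suffixes accepted by `parse_duration`:

```rust
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Ask for up to 10 traces of `my-service` whose root span took at least 500ms.
    let url = "http://localhost:3000/api/traces?service=my-service&minDuration=500ms&limit=10";
    let body = reqwest::get(url).await?.text().await?;
    println!("{body}");
    Ok(())
}
```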
/duo/ui/_app/immutable/assets/_page.Buu_H62d.css: -------------------------------------------------------------------------------- 1 | iframe.svelte-1ehqna5{width:100%;height:100vh} 2 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/chunks/each.Chk0zBHY.js: -------------------------------------------------------------------------------- 1 | import{t as q,a as z}from"./index.LOVubqCp.js";import{r as B}from"./scheduler.Bi1YRy3L.js";function G(e){return(e==null?void 0:e.length)!==void 0?e:Array.from(e)}function C(e,c){e.d(1),c.delete(e.key)}function H(e,c){z(e,1,1,()=>{c.delete(e.key)})}function I(e,c){e.f(),C(e,c)}function J(e,c,x,D,S,g,o,k,p,A,a,j){let i=e.length,f=g.length,d=i;const u={};for(;d--;)u[e[d].key]=d;const h=[],w=new Map,y=new Map,_=[];for(d=f;d--;){const n=j(S,g,d),s=x(n);let t=o.get(s);t?_.push(()=>t.p(n,c)):(t=A(s,n),t.c()),w.set(s,h[d]=t),s in u&&y.set(s,Math.abs(d-u[s]))}const M=new Set,v=new Set;function m(n){q(n,1),n.m(k,a),o.set(n.key,n),a=n.first,f--}for(;i&&f;){const n=h[f-1],s=e[i-1],t=n.key,l=s.key;n===s?(a=n.first,i--,f--):w.has(l)?!o.has(t)||M.has(t)?m(n):v.has(l)?i--:y.get(t)>y.get(l)?(v.add(t),m(n)):(M.add(l),i--):(p(s,o),i--)}for(;i--;){const n=e[i];w.has(n.key)||p(n,o)}for(;f;)m(h[f-1]);return B(_),h}export{C as d,G as e,I as f,H as o,J as u}; 2 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/chunks/index.D6PN1hnv.js: -------------------------------------------------------------------------------- 1 | import{n as f,s as w,r as m,a as q,i as x}from"./scheduler.Bi1YRy3L.js";const a=[];function z(e,o){return{subscribe:A(e,o).subscribe}}function A(e,o=f){let n;const r=new Set;function u(t){if(q(e,t)&&(e=t,n)){const i=!a.length;for(const s of r)s[1](),a.push(s,e);if(i){for(let s=0;s{r.delete(s),r.size===0&&n&&(n(),n=null)}}return{set:u,update:l,subscribe:b}}function E(e,o,n){const r=!Array.isArray(e),u=r?[e]:e;if(!u.every(Boolean))throw new Error("derived() expects stores as input, got a falsy value");const l=o.length<2;return z(n,(b,t)=>{let i=!1;const s=[];let d=0,p=f;const h=()=>{if(d)return;p();const c=o(r?s[0]:s,b,t);l?b(c):p=x(c)?c:f},y=u.map((c,g)=>w(c,_=>{s[g]=_,d&=~(1<{d|=1<e in t?T(t,e,{enumerable:!0,configurable:!0,writable:!0,value:n}):t[e]=n;var z=(t,e,n)=>q(t,typeof e!="symbol"?e+"":e,n);import{n as w,G as W,g as H,H as X,r as k,i as M,I as O,J as Y,K as A,L as F,d as Z,M as tt,N as et,O as nt,P as it,Q as G,R as st,S as rt,T as at,U as ot,V as ft}from"./scheduler.Bi1YRy3L.js";const J=typeof window<"u";let L=J?()=>window.performance.now():()=>Date.now(),U=J?t=>requestAnimationFrame(t):w;const S=new Set;function K(t){S.forEach(e=>{e.c(t)||(S.delete(e),e.f())}),S.size!==0&&U(K)}function V(t){let e;return S.size===0&&U(K),{promise:new Promise(n=>{S.add(e={c:t,f:n})}),abort(){S.delete(e)}}}const j=new Map;let C=0;function ut(t){let e=5381,n=t.length;for(;n--;)e=(e<<5)-e^t.charCodeAt(n);return e>>>0}function lt(t,e){const n={stylesheet:X(e),rules:{}};return j.set(t,n),n}function I(t,e,n,s,u,a,l,i=0){const c=16.666/s;let r=`{ 2 | `;for(let $=0;$<=1;$+=c){const m=e+(n-e)*a($);r+=$*100+`%{${l(m,1-m)}} 3 | `}const d=r+`100% {${l(n,1-n)}} 4 | }`,f=`__svelte_${ut(d)}_${i}`,g=W(t),{stylesheet:h,rules:o}=j.get(g)||lt(g,t);o[f]||(o[f]=!0,h.insertRule(`@keyframes ${f} ${d}`,h.cssRules.length));const _=t.style.animation||"";return t.style.animation=`${_?`${_}, `:""}${f} ${s}ms linear ${u}ms 1 both`,C+=1,f}function N(t,e){const 
n=(t.style.animation||"").split(", "),s=n.filter(e?a=>a.indexOf(e)<0:a=>a.indexOf("__svelte")===-1),u=n.length-s.length;u&&(t.style.animation=s.join(", "),C-=u,C||ct())}function ct(){U(()=>{C||(j.forEach(t=>{const{ownerNode:e}=t.stylesheet;e&&H(e)}),j.clear())})}let E;function B(){return E||(E=Promise.resolve(),E.then(()=>{E=null})),E}function v(t,e,n){t.dispatchEvent(Y(`${e?"intro":"outro"}${n}`))}const R=new Set;let p;function yt(){p={r:0,c:[],p}}function xt(){p.r||k(p.c),p=p.p}function dt(t,e){t&&t.i&&(R.delete(t),t.i(e))}function vt(t,e,n,s){if(t&&t.o){if(R.has(t))return;R.add(t),p.c.push(()=>{R.delete(t),s&&(n&&t.d(1),s())}),t.o(e)}else s&&s()}const D={duration:0};function wt(t,e,n){const s={direction:"in"};let u=e(t,n,s),a=!1,l,i,c=0;function r(){l&&N(t,l)}function d(){const{delay:g=0,duration:h=300,easing:o=A,tick:_=w,css:$}=u||D;$&&(l=I(t,0,1,h,g,o,$,c++)),_(0,1);const m=L()+g,y=m+h;i&&i.abort(),a=!0,O(()=>v(t,!0,"start")),i=V(x=>{if(a){if(x>=y)return _(1,0),v(t,!0,"end"),r(),a=!1;if(x>=m){const b=o((x-m)/h);_(b,1-b)}}return a})}let f=!1;return{start(){f||(f=!0,N(t),M(u)?(u=u(s),B().then(d)):d())},invalidate(){f=!1},end(){a&&(r(),a=!1)}}}function bt(t,e,n){const s={direction:"out"};let u=e(t,n,s),a=!0,l;const i=p;i.r+=1;let c;function r(){const{delay:d=0,duration:f=300,easing:g=A,tick:h=w,css:o}=u||D;o&&(l=I(t,1,0,f,d,g,o));const _=L()+d,$=_+f;O(()=>v(t,!1,"start")),"inert"in t&&(c=t.inert,t.inert=!0),V(m=>{if(a){if(m>=$)return h(0,1),v(t,!1,"end"),--i.r||k(i.c),!1;if(m>=_){const y=g((m-_)/f);h(1-y,y)}}return a})}return M(u)?B().then(()=>{u=u(s),r()}):r(),{end(d){d&&"inert"in t&&(t.inert=c),d&&u.tick&&u.tick(1,0),a&&(l&&N(t,l),a=!1)}}}function St(t,e,n,s){let a=e(t,n,{direction:"both"}),l=s?0:1,i=null,c=null,r=null,d;function f(){r&&N(t,r)}function g(o,_){const $=o.b-l;return _*=Math.abs($),{a:l,b:o.b,d:$,duration:_,start:o.start,end:o.start+_,group:o.group}}function h(o){const{delay:_=0,duration:$=300,easing:m=A,tick:y=w,css:x}=a||D,b={start:L()+_,b:o};o||(b.group=p,p.r+=1),"inert"in t&&(o?d!==void 0&&(t.inert=d):(d=t.inert,t.inert=!0)),i||c?c=b:(x&&(f(),r=I(t,l,o,$,_,m,x)),o&&y(0,1),i=g(b,$),O(()=>v(t,o,"start")),V(P=>{if(c&&P>c.start&&(i=g(c,$),c=null,v(t,i.b,"start"),x&&(f(),r=I(t,l,i.b,i.duration,0,m,a.css))),i){if(P>=i.end)y(l=i.b,1-l),v(t,i.b,"end"),c||(i.b?f():--i.group.r||k(i.group.c)),i=null;else if(P>=i.start){const Q=P-i.start;l=i.a+i.d*m(Q/i.duration),y(l,1-l)}}return!!(i||c)}))}return{run(o){M(a)?B().then(()=>{a=a({direction:o?"in":"out"}),h(o)}):h(o)},end(){f(),i=c=null}}}function kt(t,e,n){const s=t.$$.props[e];s!==void 0&&(t.$$.bound[s]=n,n(t.$$.ctx[s]))}function Et(t){t&&t.c()}function Ot(t,e){t&&t.l(e)}function _t(t,e,n){const{fragment:s,after_update:u}=t.$$;s&&s.m(e,n),O(()=>{const a=t.$$.on_mount.map(st).filter(M);t.$$.on_destroy?t.$$.on_destroy.push(...a):k(a),t.$$.on_mount=[]}),u.forEach(O)}function $t(t,e){const n=t.$$;n.fragment!==null&&(nt(n.after_update),k(n.on_destroy),n.fragment&&n.fragment.d(e),n.on_destroy=n.fragment=null,n.ctx=[])}function ht(t,e){t.$$.dirty[0]===-1&&(rt.push(t),at(),t.$$.dirty.fill(0)),t.$$.dirty[e/31|0]|=1<{const o=h.length?h[0]:g;return r.ctx&&u(r.ctx[f],r.ctx[f]=o)&&(!r.skip_bound&&r.bound[f]&&r.bound[f](o),d&&ht(t,f)),g}):[],r.update(),d=!0,k(r.before_update),r.fragment=s?s(r.ctx):!1,e.target){if(e.hydrate){ot();const f=Z(e.target);r.fragment&&r.fragment.l(f),f.forEach(H)}else r.fragment&&r.fragment.c();e.intro&&dt(t.$$.fragment),_t(t,e.target,e.anchor),ft(),tt()}G(c)}class 
Pt{constructor(){z(this,"$$");z(this,"$$set")}$destroy(){$t(this,1),this.$destroy=w}$on(e,n){if(!M(n))return w;const s=this.$$.callbacks[e]||(this.$$.callbacks[e]=[]);return s.push(n),()=>{const u=s.indexOf(n);u!==-1&&s.splice(u,1)}}$set(e){this.$$set&&!et(e)&&(this.$$.skip_bound=!0,this.$$set(e),this.$$.skip_bound=!1)}}const gt="4";typeof window<"u"&&(window.__svelte||(window.__svelte={v:new Set})).v.add(gt);export{Pt as S,vt as a,Et as b,xt as c,Ot as d,$t as e,I as f,yt as g,N as h,Mt as i,bt as j,wt as k,V as l,_t as m,L as n,St as o,kt as p,dt as t}; 5 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/chunks/scheduler.Bi1YRy3L.js: -------------------------------------------------------------------------------- 1 | var G=Object.defineProperty;var I=(t,e,n)=>e in t?G(t,e,{enumerable:!0,configurable:!0,writable:!0,value:n}):t[e]=n;var f=(t,e,n)=>I(t,typeof e!="symbol"?e+"":e,n);function H(){}const _t=t=>t;function z(t,e){for(const n in e)t[n]=e[n];return t}function ht(t){return!!t&&(typeof t=="object"||typeof t=="function")&&typeof t.then=="function"}function F(t){return t()}function dt(){return Object.create(null)}function U(t){t.forEach(F)}function W(t){return typeof t=="function"}function mt(t,e){return t!=t?e==e:t!==e||t&&typeof t=="object"||typeof t=="function"}let p;function pt(t,e){return t===e?!0:(p||(p=document.createElement("a")),p.href=e,t===p.href)}function yt(t){return Object.keys(t).length===0}function L(t,...e){if(t==null){for(const i of e)i(void 0);return H}const n=t.subscribe(...e);return n.unsubscribe?()=>n.unsubscribe():n}function gt(t){let e;return L(t,n=>e=n)(),e}function bt(t,e,n){t.$$.on_destroy.push(L(e,n))}function xt(t,e,n,i){if(t){const s=S(t,e,n,i);return t[0](s)}}function S(t,e,n,i){return t[1]&&i?z(n.ctx.slice(),t[1](i(e))):n.ctx}function Et(t,e,n,i){if(t[2]&&i){const s=t[2](i(n));if(e.dirty===void 0)return s;if(typeof s=="object"){const l=[],c=Math.max(e.dirty.length,s.length);for(let o=0;o32){const e=[],n=t.ctx.length/32;for(let i=0;i>1);n(s)<=i?t=s+1:e=s}return t}function K(t){if(t.hydrate_init)return;t.hydrate_init=!0;let e=t.childNodes;if(t.nodeName==="HEAD"){const r=[];for(let a=0;a0&&e[n[s]].claim_order<=a?s+1:J(1,s,R=>e[n[R]].claim_order,a))-1;i[r]=n[u]+1;const C=u+1;n[C]=r,s=Math.max(C,s)}const l=[],c=[];let o=e.length-1;for(let r=n[s]+1;r!=0;r=i[r-1]){for(l.push(e[r-1]);o>=r;o--)c.push(e[o]);o--}for(;o>=0;o--)c.push(e[o]);l.reverse(),c.sort((r,a)=>r.claim_order-a.claim_order);for(let r=0,a=0;r=l[a].claim_order;)a++;const u=at.removeEventListener(e,n,i)}function Ot(t){return function(e){return e.preventDefault(),t.call(this,e)}}function A(t,e,n){n==null?t.removeAttribute(e):t.getAttribute(e)!==n&&t.setAttribute(e,n)}const tt=["width","height"];function et(t,e){const n=Object.getOwnPropertyDescriptors(t.__proto__);for(const i in e)e[i]==null?t.removeAttribute(i):i==="style"?t.style.cssText=e[i]:i==="__value"?t.value=t[i]=e[i]:n[i]&&n[i].set&&tt.indexOf(i)===-1?t[i]=e[i]:A(t,i,e[i])}function qt(t,e){for(const n in e)A(t,n,e[n])}function nt(t,e){Object.keys(e).forEach(n=>{it(t,n,e[n])})}function it(t,e,n){const i=e.toLowerCase();i in t?t[i]=typeof t[i]=="boolean"&&n===""?!0:n:e in t?t[e]=typeof t[e]=="boolean"&&n===""?!0:n:A(t,e,n)}function Bt(t){return/-/.test(t)?nt:et}function Rt(t){return t.dataset.svelteH}function Gt(t){return Array.from(t.childNodes)}function P(t){t.claim_info===void 0&&(t.claim_info={last_index:0,total_claimed:0})}function O(t,e,n,i,s=!1){P(t);const l=(()=>{for(let 
c=t.claim_info.last_index;c=0;c--){const o=t[c];if(e(o)){const r=n(o);return r===void 0?t.splice(c,1):t[c]=r,s?r===void 0&&t.claim_info.last_index--:t.claim_info.last_index=c,o}}return i()})();return l.claim_order=t.claim_info.total_claimed,t.claim_info.total_claimed+=1,l}function q(t,e,n,i){return O(t,s=>s.nodeName===e,s=>{const l=[];for(let c=0;cs.removeAttribute(c))},()=>i(e))}function It(t,e,n){return q(t,e,n,N)}function zt(t,e,n){return q(t,e,n,M)}function st(t,e){return O(t,n=>n.nodeType===3,n=>{const i=""+e;if(n.data.startsWith(i)){if(n.data.length!==i.length)return n.splitText(i.length)}else n.data=i},()=>k(e),!0)}function Ft(t){return st(t," ")}function j(t,e,n){for(let i=n;i0&&n.push(s);return n}class rt{constructor(e=!1){f(this,"is_svg",!1);f(this,"e");f(this,"n");f(this,"t");f(this,"a");this.is_svg=e,this.e=this.n=null}c(e){this.h(e)}m(e,n,i=null){this.e||(this.is_svg?this.e=M(n.nodeName):this.e=N(n.nodeType===11?"TEMPLATE":n.nodeName),this.t=n.tagName!=="TEMPLATE"?n:n.content,this.c(e)),this.i(i)}h(e){this.e.innerHTML=e,this.n=Array.from(this.e.nodeName==="TEMPLATE"?this.e.content.childNodes:this.e.childNodes)}i(e){for(let n=0;n{const s=t.$$.callbacks[e];if(s){const l=ct(e,n,{cancelable:i});return s.slice().forEach(c=>{c.call(t,l)}),!l.defaultPrevented}return!0}}function ie(t,e){return _().$$.context.set(t,e),e}function se(t){return _().$$.context.get(t)}function ce(t){return _().$$.context.has(t)}function re(t,e){const n=t.$$.callbacks[e.type];n&&n.slice().forEach(i=>i.call(this,e))}const m=[],D=[];let d=[];const T=[],B=Promise.resolve();let v=!1;function lt(){v||(v=!0,B.then(at))}function le(){return lt(),B}function ot(t){d.push(t)}function oe(t){T.push(t)}const E=new Set;let h=0;function at(){if(h!==0)return;const t=y;do{try{for(;ht.indexOf(i)===-1?e.push(i):n.push(i)),n.forEach(i=>i()),d=e}export{kt as $,te as A,$t as B,Kt as C,D,Zt as E,le as F,V as G,Ht as H,ot as I,ct as J,_t as K,dt as L,at as M,yt as N,ae as O,y as P,x as Q,F as R,m as S,lt as T,jt as U,Dt as V,ht as W,_ as X,ne as Y,ee as Z,Xt as _,mt as a,Jt as a0,Pt as a1,Ct as a2,Ot as a3,re as a4,M as a5,zt as a6,b as a7,Ut as a8,z as a9,qt as aa,Nt as ab,vt as ac,gt as ad,Bt as ae,ie as af,se as ag,et as ah,ce as ai,oe as aj,Yt as ak,Qt as al,At as am,Vt as an,St as b,It as c,Gt as d,N as e,st as f,w as g,Ft as h,W as i,$ as j,Y as k,Wt as l,bt as m,H as n,pt as o,A as p,xt as q,U as r,L as s,k as t,Rt as u,wt as v,Tt as w,Et as x,Lt as y,Mt as z}; 2 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/entry/app.C9MSuNpP.js: -------------------------------------------------------------------------------- 1 | const __vite__mapDeps=(i,m=__vite__mapDeps,d=(m.f||(m.f=["../nodes/0.BRH967PU.js","../chunks/scheduler.Bi1YRy3L.js","../chunks/index.LOVubqCp.js","../chunks/each.Chk0zBHY.js","../assets/0.9240paAY.css","../nodes/1.CwZJuE_B.js","../chunks/entry.CY4Bz8dl.js","../chunks/index.D6PN1hnv.js","../nodes/2.Cq86B-2g.js","../assets/2.DdNfNY-u.css","../nodes/3.CvrP5Z8A.js","../assets/3.Buu_H62d.css"])))=>i.map(i=>d[i]); 2 | import{a as N,b as q,z as h,h as U,j as k,g as p,A as j,B as z,e as F,c as W,d as G,p as D,C as d,t as H,f as J,l as K,D as C,E,F as Q}from"../chunks/scheduler.Bi1YRy3L.js";import{S as X,i as Y,a as g,c as A,t as w,g as S,b as y,d as I,m as R,e as P}from"../chunks/index.LOVubqCp.js";const Z="modulepreload",M=function(a,e){return new URL(a,e).href},O={},L=function(e,n,i){let s=Promise.resolve();if(n&&n.length>0){const 
u=document.getElementsByTagName("link"),t=document.querySelector("meta[property=csp-nonce]"),r=(t==null?void 0:t.nonce)||(t==null?void 0:t.getAttribute("nonce"));s=Promise.all(n.map(o=>{if(o=M(o,i),o in O)return;O[o]=!0;const f=o.endsWith(".css"),l=f?'[rel="stylesheet"]':"";if(!!i)for(let b=u.length-1;b>=0;b--){const v=u[b];if(v.href===o&&(!f||v.rel==="stylesheet"))return}else if(document.querySelector(`link[href="${o}"]${l}`))return;const _=document.createElement("link");if(_.rel=f?"stylesheet":Z,f||(_.as="script",_.crossOrigin=""),_.href=o,r&&_.setAttribute("nonce",r),document.head.appendChild(_),f)return new Promise((b,v)=>{_.addEventListener("load",b),_.addEventListener("error",()=>v(new Error(`Unable to preload CSS for ${o}`)))})}))}return s.then(()=>e()).catch(u=>{const t=new Event("vite:preloadError",{cancelable:!0});if(t.payload=u,window.dispatchEvent(t),!t.defaultPrevented)throw u})},re={};function $(a){let e,n,i;var s=a[1][0];function u(t,r){return{props:{data:t[3],form:t[2]}}}return s&&(e=E(s,u(a)),a[12](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&I(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][0])){if(e){S();const o=e;g(o.$$.fragment,1,0,()=>{P(o,1)}),A()}s?(e=E(s,u(t)),t[12](e),y(e.$$.fragment),w(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&8&&(o.data=t[3]),r&4&&(o.form=t[2]),e.$set(o)}},i(t){i||(e&&w(e.$$.fragment,t),i=!0)},o(t){e&&g(e.$$.fragment,t),i=!1},d(t){t&&p(n),a[12](null),e&&P(e,t)}}}function x(a){let e,n,i;var s=a[1][0];function u(t,r){return{props:{data:t[3],$$slots:{default:[ee]},$$scope:{ctx:t}}}}return s&&(e=E(s,u(a)),a[11](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&I(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][0])){if(e){S();const o=e;g(o.$$.fragment,1,0,()=>{P(o,1)}),A()}s?(e=E(s,u(t)),t[11](e),y(e.$$.fragment),w(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&8&&(o.data=t[3]),r&8215&&(o.$$scope={dirty:r,ctx:t}),e.$set(o)}},i(t){i||(e&&w(e.$$.fragment,t),i=!0)},o(t){e&&g(e.$$.fragment,t),i=!1},d(t){t&&p(n),a[11](null),e&&P(e,t)}}}function ee(a){let e,n,i;var s=a[1][1];function u(t,r){return{props:{data:t[4],form:t[2]}}}return s&&(e=E(s,u(a)),a[10](e)),{c(){e&&y(e.$$.fragment),n=h()},l(t){e&&I(e.$$.fragment,t),n=h()},m(t,r){e&&R(e,t,r),k(t,n,r),i=!0},p(t,r){if(r&2&&s!==(s=t[1][1])){if(e){S();const o=e;g(o.$$.fragment,1,0,()=>{P(o,1)}),A()}s?(e=E(s,u(t)),t[10](e),y(e.$$.fragment),w(e.$$.fragment,1),R(e,n.parentNode,n)):e=null}else if(s){const o={};r&16&&(o.data=t[4]),r&4&&(o.form=t[2]),e.$set(o)}},i(t){i||(e&&w(e.$$.fragment,t),i=!0)},o(t){e&&g(e.$$.fragment,t),i=!1},d(t){t&&p(n),a[10](null),e&&P(e,t)}}}function T(a){let e,n=a[6]&&V(a);return{c(){e=F("div"),n&&n.c(),this.h()},l(i){e=W(i,"DIV",{id:!0,"aria-live":!0,"aria-atomic":!0,style:!0});var s=G(e);n&&n.l(s),s.forEach(p),this.h()},h(){D(e,"id","svelte-announcer"),D(e,"aria-live","assertive"),D(e,"aria-atomic","true"),d(e,"position","absolute"),d(e,"left","0"),d(e,"top","0"),d(e,"clip","rect(0 0 0 0)"),d(e,"clip-path","inset(50%)"),d(e,"overflow","hidden"),d(e,"white-space","nowrap"),d(e,"width","1px"),d(e,"height","1px")},m(i,s){k(i,e,s),n&&n.m(e,null)},p(i,s){i[6]?n?n.p(i,s):(n=V(i),n.c(),n.m(e,null)):n&&(n.d(1),n=null)},d(i){i&&p(e),n&&n.d()}}}function V(a){let e;return{c(){e=H(a[7])},l(n){e=J(n,a[7])},m(n,i){k(n,e,i)},p(n,i){i&128&&K(e,n[7])},d(n){n&&p(e)}}}function te(a){let e,n,i,s,u;const t=[x,$],r=[];function o(l,m){return l[1][1]?0:1}e=o(a),n=r[e]=t[e](a);let 
f=a[5]&&T(a);return{c(){n.c(),i=q(),f&&f.c(),s=h()},l(l){n.l(l),i=U(l),f&&f.l(l),s=h()},m(l,m){r[e].m(l,m),k(l,i,m),f&&f.m(l,m),k(l,s,m),u=!0},p(l,[m]){let _=e;e=o(l),e===_?r[e].p(l,m):(S(),g(r[_],1,1,()=>{r[_]=null}),A(),n=r[e],n?n.p(l,m):(n=r[e]=t[e](l),n.c()),w(n,1),n.m(i.parentNode,i)),l[5]?f?f.p(l,m):(f=T(l),f.c(),f.m(s.parentNode,s)):f&&(f.d(1),f=null)},i(l){u||(w(n),u=!0)},o(l){g(n),u=!1},d(l){l&&(p(i),p(s)),r[e].d(l),f&&f.d(l)}}}function ne(a,e,n){let{stores:i}=e,{page:s}=e,{constructors:u}=e,{components:t=[]}=e,{form:r}=e,{data_0:o=null}=e,{data_1:f=null}=e;j(i.page.notify);let l=!1,m=!1,_=null;z(()=>{const c=i.page.subscribe(()=>{l&&(n(6,m=!0),Q().then(()=>{n(7,_=document.title||"untitled page")}))});return n(5,l=!0),c});function b(c){C[c?"unshift":"push"](()=>{t[1]=c,n(0,t)})}function v(c){C[c?"unshift":"push"](()=>{t[0]=c,n(0,t)})}function B(c){C[c?"unshift":"push"](()=>{t[0]=c,n(0,t)})}return a.$$set=c=>{"stores"in c&&n(8,i=c.stores),"page"in c&&n(9,s=c.page),"constructors"in c&&n(1,u=c.constructors),"components"in c&&n(0,t=c.components),"form"in c&&n(2,r=c.form),"data_0"in c&&n(3,o=c.data_0),"data_1"in c&&n(4,f=c.data_1)},a.$$.update=()=>{a.$$.dirty&768&&i.page.set(s)},[t,u,r,o,f,l,m,_,i,s,b,v,B]}class oe extends X{constructor(e){super(),Y(this,e,ne,te,N,{stores:8,page:9,constructors:1,components:0,form:2,data_0:3,data_1:4})}}const ae=[()=>L(()=>import("../nodes/0.BRH967PU.js"),__vite__mapDeps([0,1,2,3,4]),import.meta.url),()=>L(()=>import("../nodes/1.CwZJuE_B.js"),__vite__mapDeps([5,1,2,6,7]),import.meta.url),()=>L(()=>import("../nodes/2.Cq86B-2g.js"),__vite__mapDeps([8,1,2,3,7,9]),import.meta.url),()=>L(()=>import("../nodes/3.CvrP5Z8A.js"),__vite__mapDeps([10,1,2,11]),import.meta.url)],le=[],fe={"/":[2],"/trace":[3]},ce={handleError:({error:a})=>{console.error(a)},reroute:()=>{}};export{fe as dictionary,ce as hooks,re as matchers,ae as nodes,oe as root,le as server_loads}; 3 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/entry/start.KXOXF_R5.js: -------------------------------------------------------------------------------- 1 | import{a as t}from"../chunks/entry.CY4Bz8dl.js";export{t as start}; 2 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/nodes/0.BRH967PU.js: -------------------------------------------------------------------------------- 1 | import{a as D,q as I,e as p,b as C,c as v,d as b,u as O,h as L,g as d,p as y,j as $,k as g,v as q,w as T,x as U,y as V,t as z,f as A,n as H}from"../chunks/scheduler.Bi1YRy3L.js";import{S as M,i as P,t as B,a as F}from"../chunks/index.LOVubqCp.js";import{e as S}from"../chunks/each.Chk0zBHY.js";const G=!1,W=Object.freeze(Object.defineProperty({__proto__:null,ssr:G},Symbol.toStringTag,{value:"Module"}));function j(n,t,s){const u=n.slice();return u[3]=t[s],u}function w(n){let t,s,u=n[3].name+"",_,o;return{c(){t=p("li"),s=p("a"),_=z(u),o=C(),this.h()},l(r){t=v(r,"LI",{class:!0});var f=b(t);s=v(f,"A",{href:!0});var h=b(s);_=A(h,u),h.forEach(d),o=L(f),f.forEach(d),this.h()},h(){y(s,"href",n[3].path),y(t,"class","rounded-md px-5 py-1 hover:bg-secondary")},m(r,f){$(r,t,f),g(t,s),g(s,_),g(t,o)},p:H,d(r){r&&d(t)}}}function J(n){let t,s,u="DUO",_,o,r,f,h,m=S(n[0]),a=[];for(let e=0;e{"$$scope"in r&&s(1,_=r.$$scope)},[o,_,u]}class X extends M{constructor(t){super(),P(this,t,K,J,D,{})}}export{X as component,W as universal}; 2 | -------------------------------------------------------------------------------- 
/duo/ui/_app/immutable/nodes/1.CwZJuE_B.js: -------------------------------------------------------------------------------- 1 | import{a as S,e as _,t as f,b as x,c as d,d as g,f as h,g as l,h as j,j as m,k as v,l as $,n as E,m as k}from"../chunks/scheduler.Bi1YRy3L.js";import{S as q,i as y}from"../chunks/index.LOVubqCp.js";import{s as C}from"../chunks/entry.CY4Bz8dl.js";const H=()=>{const s=C;return{page:{subscribe:s.page.subscribe},navigating:{subscribe:s.navigating.subscribe},updated:s.updated}},P={subscribe(s){return H().page.subscribe(s)}};function w(s){var b;let t,r=s[0].status+"",o,n,i,c=((b=s[0].error)==null?void 0:b.message)+"",u;return{c(){t=_("h1"),o=f(r),n=x(),i=_("p"),u=f(c)},l(e){t=d(e,"H1",{});var a=g(t);o=h(a,r),a.forEach(l),n=j(e),i=d(e,"P",{});var p=g(i);u=h(p,c),p.forEach(l)},m(e,a){m(e,t,a),v(t,o),m(e,n,a),m(e,i,a),v(i,u)},p(e,[a]){var p;a&1&&r!==(r=e[0].status+"")&&$(o,r),a&1&&c!==(c=((p=e[0].error)==null?void 0:p.message)+"")&&$(u,c)},i:E,o:E,d(e){e&&(l(t),l(n),l(i))}}}function z(s,t,r){let o;return k(s,P,n=>r(0,o=n)),[o]}let F=class extends q{constructor(t){super(),y(this,t,z,w,S,{})}};export{F as component}; 2 | -------------------------------------------------------------------------------- /duo/ui/_app/immutable/nodes/3.CvrP5Z8A.js: -------------------------------------------------------------------------------- 1 | import{a as n,e as l,c as m,d as u,g as o,o as f,p as t,j as p,n as s}from"../chunks/scheduler.Bi1YRy3L.js";import{S as h,i as d}from"../chunks/index.LOVubqCp.js";function _(a){let e,c;return{c(){e=l("iframe"),this.h()},l(r){e=m(r,"IFRAME",{src:!0,frameborder:!0,title:!0,allow:!0,class:!0}),u(e).forEach(o),this.h()},h(){f(e.src,c=a[0])||t(e,"src",c),t(e,"frameborder","0"),t(e,"title","trace view"),t(e,"allow","accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture"),t(e,"class","svelte-1ehqna5")},m(r,i){p(r,e,i)},p:s,i:s,o:s,d(r){r&&o(e)}}}function g(a){return["/trace"]}class q extends h{constructor(e){super(),d(this,e,g,_,n,{})}}export{q as component}; 2 | -------------------------------------------------------------------------------- /duo/ui/_app/version.json: -------------------------------------------------------------------------------- 1 | {"version":"1726233354945"} -------------------------------------------------------------------------------- /duo/ui/index.html: -------------------------------------------------------------------------------- [36-line generated SvelteKit entry page; its HTML markup was stripped when this dump was produced and is not recoverable]
-------------------------------------------------------------------------------- /duo/ui/trace.html: -------------------------------------------------------------------------------- 1 | Jaeger UI [single-line generated Jaeger UI page; markup stripped, only the page title survives]
--------------------------------------------------------------------------------