├── images
│   ├── workload-phases.png
│   ├── up-transition_linear.png
│   ├── down-transition_linear.png
│   ├── up-transition_ease-in.png
│   ├── up-transition_ease-out.png
│   ├── up-transition_spike-in.png
│   ├── down-transition_ease-in.png
│   ├── down-transition_ease-out.png
│   ├── down-transition_spike-in.png
│   ├── down-transition_spike-out.png
│   ├── up-transition_ease-in-out.png
│   ├── up-transition_spike-out.png
│   ├── down-transition_ease-in-out.png
│   ├── up-transition_spike-in-out.png
│   └── down-transition_spike-in-out.png
├── .cargo
│   └── config.toml
├── .gitignore
├── .rustfmt.toml
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── hub.docker.com-manual_publish.yml
│       ├── ci.yml
│       ├── crates.io-publish.yml
│       ├── hub.docker.com-publish.yml
│       └── github-release.yml
├── Dockerfile
├── src
│   ├── logging.rs
│   ├── rdkafka.rs
│   ├── producer_sink.rs
│   ├── main.rs
│   ├── records_tap.rs
│   ├── cli.rs
│   ├── transition.rs
│   ├── workload.rs
│   └── generator.rs
├── Cross.toml
├── LICENSE-MIT
├── CHANGELOG_GUIDANCE.md
├── Cargo.toml
├── CHANGELOG.md
├── LICENSE-APACHE
├── README.md
└── Cargo.lock

/images/workload-phases.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/workload-phases.png
--------------------------------------------------------------------------------

/.cargo/config.toml:
--------------------------------------------------------------------------------
[target.x86_64-pc-windows-msvc]
rustflags = ["-C", "target-feature=+crt-static"]
--------------------------------------------------------------------------------

/images/up-transition_linear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_linear.png
--------------------------------------------------------------------------------

/images/down-transition_linear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_linear.png
--------------------------------------------------------------------------------

/images/up-transition_ease-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_ease-in.png
--------------------------------------------------------------------------------

/images/up-transition_ease-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_ease-out.png
--------------------------------------------------------------------------------

/images/up-transition_spike-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_spike-in.png
--------------------------------------------------------------------------------

/images/down-transition_ease-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_ease-in.png
--------------------------------------------------------------------------------

/images/down-transition_ease-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_ease-out.png
--------------------------------------------------------------------------------

/images/down-transition_spike-in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_spike-in.png
--------------------------------------------------------------------------------

/images/down-transition_spike-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_spike-out.png
--------------------------------------------------------------------------------

/images/up-transition_ease-in-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_ease-in-out.png
--------------------------------------------------------------------------------

/images/up-transition_spike-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_spike-out.png
--------------------------------------------------------------------------------

/images/down-transition_ease-in-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_ease-in-out.png
--------------------------------------------------------------------------------

/images/up-transition_spike-in-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/up-transition_spike-in-out.png
--------------------------------------------------------------------------------

/images/down-transition_spike-in-out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kafkesc/ksunami/HEAD/images/down-transition_spike-in-out.png
--------------------------------------------------------------------------------

/.gitignore:
--------------------------------------------------------------------------------
# Rustlang / Cargo
/target/

# rustfmt
**/*.rs.bk

# Jetbrains IDEs
.idea/
*.iws
.fleet/

# macOS
.DS_Store
--------------------------------------------------------------------------------

/.rustfmt.toml:
--------------------------------------------------------------------------------
use_small_heuristics = "Off"
reorder_imports = true
reorder_modules = true
edition = "2021"
match_block_trailing_comma = true
max_width = 120
use_field_init_shorthand = true
use_try_shorthand = true
--------------------------------------------------------------------------------

/.github/dependabot.yml:
--------------------------------------------------------------------------------
# Docs: https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:

  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  # Maintain dependencies for Rust crates
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
    allow:
      - dependency-type: "all"
--------------------------------------------------------------------------------

/.github/workflows/hub.docker.com-manual_publish.yml:
--------------------------------------------------------------------------------
name: Manually Publish Docker Image

on:

  # This is for when, occasionally, we need to manually publish a docker image
  workflow_dispatch:
    inputs:

      target-platforms:
        type: string
        default: linux/amd64,linux/arm64
        description: Platforms to target when publishing the docker image

      release-version:
        required: true
        type: string
        description: Version to publish as docker image (eg. 'vX.Y.Z')

jobs:
  manually_publish-hub_docker_com:
    name: Publish to hub.docker.com
    uses: ./.github/workflows/hub.docker.com-publish.yml
    with:
      release-version: ${{ inputs.release-version }}
      target-platforms: ${{ inputs.target-platforms }}
    secrets:
      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
      dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }}
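
# For illustration only (hypothetical invocation, not part of the workflow): with the
# GitHub CLI, this manual dispatch could be triggered with something like:
#   gh workflow run hub.docker.com-manual_publish.yml -f release-version=v0.1.10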
--------------------------------------------------------------------------------

/Dockerfile:
--------------------------------------------------------------------------------
ARG BASE_IMG=rust
ARG BASE_IMG_VERSION=1.77.2-slim-bookworm

FROM $BASE_IMG:$BASE_IMG_VERSION
ARG BASE_IMG
ARG BASE_IMG_VERSION
ARG KSUNAMI_VERSION

LABEL base.image=$BASE_IMG:$BASE_IMG_VERSION \
      name=ksunami \
      version=$KSUNAMI_VERSION \
      description="Produce constant, configurable, cyclical waves of Kafka Records" \
      repository="https://github.com/kafkesc/ksunami" \
      homepage="https://github.com/kafkesc/ksunami" \
      license="MIT OR Apache-2.0"

ENV BUILD_DEPS "tcl-dev libssl-dev libsasl2-dev"

# Setup
RUN \
    apt update && \
    apt install -y ${BUILD_DEPS}

# Build
RUN \
    cargo install --version "${KSUNAMI_VERSION#v}" ksunami

# Cleanup
RUN \
    rm -rf /usr/local/cargo/registry && \
    apt remove --purge -y ${BUILD_DEPS} && \
    apt autoremove -y && \
    apt autoclean -y && \
    rm -rf /var/lib/apt/lists/*

ENTRYPOINT ["ksunami"]
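
# A minimal local build sketch (the tag and version below are illustrative; v0.1.10
# matches Cargo.toml at this snapshot):
#   docker build --build-arg KSUNAMI_VERSION=v0.1.10 -t kafkesc/ksunami:local .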
--------------------------------------------------------------------------------

/src/logging.rs:
--------------------------------------------------------------------------------
pub const LOG_FILTER_ENV_VAR: &str = "KSUNAMI_LOG";

/// Log level will be configured based on the given `verbosity_level`.
///
/// If the env var `KSUNAMI_LOG` is set, that will take precedence and configuration
/// will be based on the rules described [here](https://docs.rs/env_logger/latest/env_logger/#enabling-logging).
pub fn init(verbosity_level: i8) {
    let default_log_level = match verbosity_level {
        i8::MIN..=-2 => "OFF",
        -1 => log::Level::Error.as_str(),
        0 => log::Level::Warn.as_str(),
        1 => log::Level::Info.as_str(),
        2 => log::Level::Debug.as_str(),
        3..=i8::MAX => log::Level::Trace.as_str(),
    };

    let logger_env = env_logger::Env::default().filter_or(LOG_FILTER_ENV_VAR, default_log_level);
    let mut logger_builder = env_logger::Builder::from_env(logger_env);
    logger_builder.init();

    info!("Configured log level: {}", log::max_level().as_str());
}
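
// For example: `-vv` on the command line yields `verbosity_level == 2`, hence "DEBUG",
// while setting `KSUNAMI_LOG=TRACE` in the environment overrides any `-v`/`-q` flag
// (see `Cli::verbosity_level` in cli.rs).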
--------------------------------------------------------------------------------

/Cross.toml:
--------------------------------------------------------------------------------
[target.aarch64-unknown-linux-gnu]
pre-build = ["dpkg --add-architecture arm64 && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.i686-unknown-linux-gnu]
pre-build = ["dpkg --add-architecture i386 && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.x86_64-unknown-linux-gnu]
pre-build = ["dpkg --add-architecture amd64 && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.arm-unknown-linux-gnueabihf]
pre-build = ["dpkg --add-architecture armhf && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.arm-unknown-linux-musleabihf]
pre-build = ["dpkg --add-architecture armhf && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.i686-unknown-linux-musl]
pre-build = ["dpkg --add-architecture i386 && apt update && apt install -y libssl-dev libsasl2-dev"]

[target.x86_64-unknown-linux-musl]
pre-build = ["dpkg --add-architecture amd64 && apt update && apt install -y libssl-dev libsasl2-dev"]
--------------------------------------------------------------------------------

/LICENSE-MIT:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2021 Sören Meier

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------

/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
name: CI

on:
  push:
    branches: [ "main" ]
    paths-ignore:
      - '*.md'
      - 'images/*'
  pull_request:
    branches: [ "main" ]
    paths-ignore:
      - '*.md'
      - 'images/*'

env:
  CARGO_TERM_COLOR: always

jobs:
  ci:
    name: Pipeline

    runs-on: ubuntu-latest

    steps:

      - name: Dependencies
        run: |
          sudo apt update
          sudo apt install -y libsasl2-dev

      - name: Check-out
        uses: actions/checkout@v4

      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
          components: rustfmt, clippy

      - name: Cargo check
        uses: actions-rs/cargo@v1
        with:
          command: check

      - name: Cargo fmt
        uses: actions-rs/cargo@v1
        with:
          command: fmt
          args: --all -- --check

      - name: Cargo clippy
        uses: actions-rs/cargo@v1
        with:
          command: clippy
          args: -- -D warnings

      - name: Cargo test
        uses: actions-rs/cargo@v1
        with:
          command: test
--------------------------------------------------------------------------------

/.github/workflows/crates.io-publish.yml:
--------------------------------------------------------------------------------
name: Publish Crate

on:
  # This workflow is designed to be triggered when a tag is pushed against the "main" branch.
  # Tags must be of the format "vX.Y.Z" or "vX.Y.Z-SOMETHING".
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+*"

env:
  CARGO_TERM_COLOR: always

jobs:
  publish-crates_io:
    name: Publish to crates.io (${{ github.ref_name }})

    runs-on: ubuntu-latest

    steps:

      - name: Dependencies
        run: |
          sudo apt update
          sudo apt install -y libsasl2-dev

      - name: Check-out
        uses: actions/checkout@v4

      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true

      - name: Cargo publish
        run: cargo publish --locked
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_PUBLISH_TOKEN }}

  publish-hub_docker_com:
    name: Publish to hub.docker.com
    needs:
      - publish-crates_io
    uses: ./.github/workflows/hub.docker.com-publish.yml
    with:
      release-version: ${{ github.ref_name }}
    secrets:
      dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
      dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }}
--------------------------------------------------------------------------------

/CHANGELOG_GUIDANCE.md:
--------------------------------------------------------------------------------
# `CHANGELOG.md` Guidance

At this stage, we manage the changelog manually. Nothing fancy.

Each entry has to match a release, and follow this format:

```markdown
# vMAJOR.MINOR.PATCH (20??-??-??)

## Breaking Changes

## Features

## Enhancements

## Bug Fixes

## Notes
```

The `# H1` should be `version (ISO DATE)`.

The `## H2` are instead categories of what we want to report about this version.
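
For example, a finished entry (purely hypothetical version, date and items, for illustration only) could look like:

```markdown
# v9.9.9 (2099-12-31)

## Enhancements

* Example enhancement entry, linking the relevant issue

## Notes

* Multiple dependencies updates
```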
**IMPORTANT:** Before cutting the release, remove any section that is empty for the given release: no point
in publishing empty sections.

## Categorization

Information in each entry should be structured as follows:

`## Breaking Changes`: This section documents in brief any incompatible changes and how to handle them.
**This should only be present in major (or, in some cases, minor) version upgrades**.

`## Features`: These are new improvements and features that deserve to be highlighted.
**This should be marked by a minor version upgrade**.

`## Enhancements`: Smaller features added to the project.

`## Bug Fixes`: Any bugs that were fixed.

`## Notes`: Additional information for potentially unexpected upgrade behavior, notice of upcoming deprecations,
or anything worth highlighting to the user that does not fit in the other categories.
**This should not be abused**: always consider if the information is of any material importance to the user.
--------------------------------------------------------------------------------

/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "ksunami"
version = "0.1.10"
edition = "2021"
authors = ["Ivan De Marino ", "Kafkesc "]
repository = "https://github.com/kafkesc/ksunami"
homepage = "https://github.com/kafkesc/ksunami"
description = "Produce constant, configurable, cyclical waves of Kafka Records"
license = "MIT OR Apache-2.0"
keywords = ["kafka", "workload", "testing", "performance", "developer-tools"]
categories = ["command-line-utilities", "development-tools"]
exclude = [
    ".github/",
    ".cargo/",
    ".gitignore",
    "images/*",
    "CHANGELOG_GUIDANCE.md"
]

[dependencies]
flo_curves = "0.7.2"
clap = { version = "4.5.4", features = ["derive", "deprecated", "env", "wrap_help"] }
log = "0.4.21"
env_logger = "0.11.3"
tokio = { version = "1.37.0", features = ["rt", "rt-multi-thread", "time", "sync", "macros"] }
rand = "0.8.5"
futures = "0.3.30"
ctrlc = { version = "3.4.4", features = ["termination"] }

# TODO https://github.com/kafkesc/ksunami/issues/39
#[target.'cfg(windows)'.dependencies]
#rdkafka = { version = "0.34.0", features = ["cmake-build", "ssl-vendored", "libz-static"] }

[target.'cfg(unix)'.dependencies]
rdkafka = { version = "0.36.2", features = ["ssl-vendored", "gssapi-vendored", "libz-static"] }

[profile.release]
strip = true # Automatically strip symbols from the binary.
lto = true   # Link time optimization - see https://llvm.org/docs/LinkTimeOptimization.html.
--------------------------------------------------------------------------------

/src/rdkafka.rs:
--------------------------------------------------------------------------------
use clap::ValueEnum;

/// The possible `partitioner` configuration values that the [`librdkafka`](https://github.com/edenhill/librdkafka) library can handle.
///
/// The documentation is lifted directly from the `librdkafka` configuration
/// [page](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md).
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum, Default)]
#[value(rename_all = "snake_case")]
pub enum PartitionerConfig {
    /// Random distribution.
    Random,

    /// CRC32 hash of key (Empty and NULL keys are mapped to single partition).
    Consistent,

    /// CRC32 hash of key (Empty and NULL keys are randomly partitioned).
    #[default]
    ConsistentRandom,

    /// Java Producer compatible Murmur2 hash of key (NULL keys are mapped to single partition).
    Murmur2,

    /// Java Producer compatible Murmur2 hash of key (NULL keys are randomly partitioned): equivalent to default partitioner in Java Producer
    Murmur2Random,

    /// FNV-1a hash of key (NULL keys are mapped to single partition).
    Fnv1a,

    /// FNV-1a hash of key (NULL keys are randomly partitioned).
    Fnv1aRandom,
}

impl PartitionerConfig {
    /// Returns the "name" for the enum value, as recognised by `librdkafka`.
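    ///
    /// For example, [`PartitionerConfig::Murmur2Random`] maps to `"murmur2_random"`,
    /// which is the exact string set as the producer's `partitioner` property
    /// (see `build_producer_config` in `main.rs`).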
    pub fn name(&self) -> String {
        match self {
            PartitionerConfig::Random => "random".to_string(),
            PartitionerConfig::Consistent => "consistent".to_string(),
            PartitionerConfig::ConsistentRandom => "consistent_random".to_string(),
            PartitionerConfig::Murmur2 => "murmur2".to_string(),
            PartitionerConfig::Murmur2Random => "murmur2_random".to_string(),
            PartitionerConfig::Fnv1a => "fnv1a".to_string(),
            PartitionerConfig::Fnv1aRandom => "fnv1a_random".to_string(),
        }
    }
}
--------------------------------------------------------------------------------

/.github/workflows/hub.docker.com-publish.yml:
--------------------------------------------------------------------------------
name: Publish Docker Image

on:

  # This workflow is designed to be invoked by others, not triggered by events.
  workflow_call:
    inputs:
      target-platforms:
        type: string
        default: linux/amd64,linux/arm64
        description: Platforms to target when publishing the docker image
      dockerhub-org:
        type: string
        default: kafkesc
        description: Docker Hub organization under which to publish the docker image
      dockerhub-imagename:
        type: string
        default: ksunami
        description: Name of the docker image to publish
      release-version:
        required: true
        type: string
        description: Version to publish as docker image (eg. 'vX.Y.Z')

    secrets:
      dockerhub-username:
        required: true
        description: Docker Hub username to use when publishing the image
      dockerhub-password:
        required: true
        description: Docker Hub password to use when publishing the image (e.g. the API token)

jobs:
  build-and-push:
    name: Build&Push ${{ inputs.dockerhub-org }}/${{ inputs.dockerhub-imagename }}:${{ inputs.release-version }} (${{ inputs.target-platforms }})

    runs-on: ubuntu-latest

    steps:

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set-up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.dockerhub-username }}
          password: ${{ secrets.dockerhub-password }}

      - name: Build + Push
        uses: docker/build-push-action@v5
        with:
          push: true
          platforms: ${{ inputs.target-platforms }}
          build-args: |
            KSUNAMI_VERSION=${{ inputs.release-version }}
          tags: |
            ${{ inputs.dockerhub-org }}/${{ inputs.dockerhub-imagename }}:latest
            ${{ inputs.dockerhub-org }}/${{ inputs.dockerhub-imagename }}:${{ inputs.release-version }}
--------------------------------------------------------------------------------

/CHANGELOG.md:
--------------------------------------------------------------------------------
# v0.1.10 (2024-05-20)

## Notes

* Deps upgrade

# v0.1.9 (2024-04-15)

## Features

Setup GitHub Actions Workflow to publish Docker image kafkesc/ksunami at every release ([i#206](https://github.com/kafkesc/ksunami/issues/206))

## Notes

Multiple dependencies upgrades since previous release

# v0.1.8 (2023-06-18)

## Notes

Multiple dependencies updates since previous release

# v0.1.7 (2023-03-13)

## Features

* Docker: Ksunami is now available as image on Docker-Hub: [kafkesc/ksunami](https://hub.docker.com/r/kafkesc/ksunami) ([i#15](https://github.com/kafkesc/ksunami/issues/15))

## Notes

Multiple dependencies updates since previous release

# v0.1.6 (2023-01-18)

## Enhancements

* Reworked warning message that informs user when the internal records channel capacity is less than 20% ([commit](https://github.com/kafkesc/ksunami/commit/a8f7bee444ae59f5509ad4170c4f10c76a1ceb13))

## Notes

* Multiple dependencies updates
* Fixed annoying CI build shield ([commit](https://github.com/kafkesc/ksunami/commit/d07c1124b4630d4e495f1dd0413ba69d95d8db9f))
* Updated section about "License" in README
* Added section about "Contribution" in README
* Updated licenses model: Ksunami is now offered in dual license - both [Apache 2.0](LICENSE-APACHE) and [MIT](LICENSE-MIT)

# v0.1.5 (2022-12-14)

## Enhancements

* Providing binary as part of the release process: `x86_64-apple-darwin` ([i#36](https://github.com/kafkesc/ksunami/issues/36))
* Providing binary as part of the release process: `x86_64-unknown-linux-gnu` ([i#35](https://github.com/kafkesc/ksunami/issues/35))
* Introducing (this) `CHANGELOG.md`, and adding it to the release packages ([i#19](https://github.com/kafkesc/ksunami/issues/19))
* Publishing to [crates.io](https://crates.io/crates/ksunami) when a tag is pushed ([i#20](https://github.com/kafkesc/ksunami/issues/20))
* Add examples to `README.md` ([i#40](https://github.com/kafkesc/ksunami/issues/40))

## Notes

* Added usage instructions (`-h` / `--help`) to `README.md`
* Multiple dependencies updates
* Published first blogpost announcing Ksunami ([i#11](https://github.com/kafkesc/ksunami/issues/11))

# v0.1.0 -> v0.1.4 (2022-11-01)

## Features

* Kafka records production described in 4 "phases" that repeat in circle: `min`, `up`, `max` and `down`
* All phases are configured in terms of _seconds_ (i.e. _duration of a phase_) and _records per second_ (i.e. _workload during a phase_)
* `up` and `down` can be one of many transitions, each with a specific "shape" (ex. `linear`, `ease-in`, `spike-out`, ...)
* Records `key` and `payload` are configurable with _fixed_, _from-file_ and _randomly-generated_ values
* Records `headers` can be added to each record
* Internal Kafka producer is fully configurable, including selecting a partitioner
* Complete control of verbosity via `-v` / `-q`
* Extensive usage instructions via `-h` (compact) / `--help` (extended)

## Notes

* First functional release of Ksunami
* This changelog is being written retroactively
--------------------------------------------------------------------------------

/src/producer_sink.rs:
--------------------------------------------------------------------------------
use std::sync::{Arc, Mutex};

use rdkafka::error::KafkaError;
use rdkafka::producer::FutureProducer;
use rdkafka::util::Timeout;
use rdkafka::ClientConfig;
use tokio::sync::mpsc;
use tokio::task::JoinHandle;

use crate::GeneratedRecord;

/// A "sink" that feeds all the records it receives to a [`FutureProducer`].
///
/// It's initialized by passing in the desired configuration of a Producer;
/// it then feeds to said producer every record received through the channel passed to [`spawn`].
pub struct ProducerSink {
    producer: FutureProducer,
}

impl ProducerSink {
    /// It takes a `producer_config` [`ClientConfig`], and will create an internal [`FutureProducer`] from it.
    /// If invalid/incomplete, it will return a [`KafkaError`].
    pub fn new(producer_config: ClientConfig) -> Result<ProducerSink, KafkaError> {
        Ok(ProducerSink {
            producer: producer_config.create()?,
        })
    }

    /// "Spawns" the task-loop of the sink, that feeds every record to the producer.
    ///
    /// Every [`GeneratedRecord`] from `records_rx` is converted to [`FutureRecord`] via [`GeneratedRecord::as_future_record`],
    /// and then sent via the [`FutureProducer`].
    ///
    /// The spawned [`tokio::task`] terminates once the sender side of the given `records_rx` is closed.
    /// On termination, it returns 2 numbers: the number of records successfully sent, and the number that failed to send.
    pub fn spawn(&mut self, mut records_rx: mpsc::Receiver<GeneratedRecord>) -> JoinHandle<(u64, u64)> {
        let producer = self.producer.clone();

        tokio::spawn(async move {
            let send_success = Arc::new(Mutex::new(0u64));
            let send_fail = Arc::new(Mutex::new(0u64));

            // Stops when `records_rx` receives `None` back:
            // this means that the `records_tx` has been closed (dropped).
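            //
            // Each record is sent from its own spawned task (see below), which is why the
            // success/failure counters are shared as `Arc<Mutex<u64>>` clones.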
            while let Some(gen_rec) = records_rx.recv().await {
                trace!("Generated Record received");

                let producer = producer.clone();
                let send_success = send_success.clone();
                let send_fail = send_fail.clone();

                tokio::spawn(async move {
                    let rec = gen_rec.as_future_record();

                    // Finally, send the record (or wait if the producer internal queue is full)
                    match producer.send(rec, Timeout::Never).await {
                        Ok((partition, offset)) => {
                            *(send_success.lock().unwrap()) += 1;
                            trace!("Delivered => partition: {partition}, offset: {offset}")
                        },
                        Err((e, _)) => {
                            *(send_fail.lock().unwrap()) += 1;
                            error!("Failed record delivery: {:?}", e)
                        },
                    }
                });
            }

            // Return some basic stats:
            // how many we sent, and how many we failed to send.
            let success = send_success.lock().unwrap();
            let fail = send_fail.lock().unwrap();
            (*success, *fail)
        })
    }
}
--------------------------------------------------------------------------------

/src/main.rs:
--------------------------------------------------------------------------------
#[macro_use]
extern crate log;

use std::error::Error;

use ::rdkafka::ClientConfig;
use tokio::sync::broadcast;
use tokio::sync::mpsc;

use cli::*;
use generator::*;
use workload::*;

use crate::producer_sink::ProducerSink;
use crate::records_tap::RecordsTap;

mod cli;
mod generator;
mod logging;
mod producer_sink;
mod rdkafka;
mod records_tap;
mod transition;
mod workload;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let cli = parse_cli_and_init_logging();

    let workload = build_workload(&cli);

    let generator = build_record_generator(&cli)?;

    let producer_config = build_producer_config(&cli);

    let (records_tx, records_rx) = build_records_channel(cli.max as usize);
    let shutdown_rx = build_shutdown_channel();

    // Create a "tap" of records, based on the workload and generator we just built
    let mut records_tap = RecordsTap::new(workload, generator);

    // Configure a "sink" around a Kafka Producer, based on the producer config we just built
    let mut producer_sink = ProducerSink::new(producer_config)?;

    // Setup channel between "tap" and "sink"
    let records_tap_handle = records_tap.spawn(records_tx, shutdown_rx);
    let producer_sink_handle = producer_sink.spawn(records_rx);

    // Await async tasks: when finished, print out some basic stats
    let sec = records_tap_handle.await?;
    let (success, fail) = producer_sink_handle.await?;
    info!("Records produced for {sec}s: {success} successfully, {fail} failed");

    Ok(())
}

fn parse_cli_and_init_logging() -> Cli {
    // Parse command line input and initialize logging
    let cli = Cli::parse_and_validate();
    logging::init(cli.verbosity_level());

    trace!("Created:\n{:#?}", cli);

    // Log the production that Ksunami intends to do
    info!("");
    info!("Records production will follow this schedule:");
    info!("  1. {} rec/sec for {} seconds", cli.min, cli.min_sec);
    info!("  2. increase in rec/sec along a '{:?}' curve for {} seconds", cli.up, cli.up_sec);
    info!("  3. {} rec/sec for {} seconds", cli.max, cli.max_sec);
    info!("  4. decrease in rec/sec along a '{:?}' curve for {} seconds", cli.down, cli.down_sec);
    info!("  5. repeat from 1.");
    info!("");

    cli
}

fn build_workload(cli: &Cli) -> Workload {
    let workload =
        Workload::new(cli.min, cli.min_sec, cli.max, cli.max_sec, cli.up, cli.up_sec, cli.down, cli.down_sec);

    trace!("Created:\n{:#?}", workload);
    workload
}

fn build_record_generator(cli: &Cli) -> Result<RecordGenerator, Box<dyn Error>> {
    let mut generator = RecordGenerator::new(cli.topic.clone());

    if let Some(k_gen) = &cli.key {
        generator.set_key_generator(k_gen.clone())?;
    }
    if let Some(p_gen) = &cli.payload {
        generator.set_payload_generator(p_gen.clone())?;
    }
    if let Some(part) = cli.partition {
        generator.set_destination_partition(part);
    }
    for kv_pair in &cli.headers {
        generator.add_record_header(kv_pair.0.clone(), kv_pair.1.clone());
    }

    trace!("Created:\n{:#?}", generator);
    Ok(generator)
}

fn build_producer_config(cli: &Cli) -> ClientConfig {
    let mut producer_config = ClientConfig::new();
    producer_config
        .set("bootstrap.servers", cli.bootstrap_brokers.clone())
        .set("client.id", cli.client_id.clone())
        .set("partitioner", cli.partitioner.name());
    for cfg in &cli.config {
        producer_config.set(cfg.0.clone(), cfg.1.clone());
    }

    trace!("Created:\n{:#?}", producer_config);
    producer_config
}
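
// NOTE: `main` sizes this channel to `cli.max`, i.e. it can buffer roughly one second
// worth of records at the configured peak production rate.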
fn build_records_channel(depth: usize) -> (mpsc::Sender<GeneratedRecord>, mpsc::Receiver<GeneratedRecord>) {
    mpsc::channel::<GeneratedRecord>(depth)
}

fn build_shutdown_channel() -> broadcast::Receiver<()> {
    let (sender, receiver) = broadcast::channel(1);

    // Setup shutdown signal handler:
    // when it's time to shutdown, broadcast a unit to all receivers.
    //
    // NOTE: This handler will be listening on its own dedicated thread.
    if let Err(e) = ctrlc::set_handler(move || {
        info!("Shutting down...");
        sender.send(()).unwrap();
    }) {
        error!("Failed to register signal handler: {e}");
    }

    // Return a receiver so we can notify other parts of the system.
    receiver
}
--------------------------------------------------------------------------------

/src/records_tap.rs:
--------------------------------------------------------------------------------
use std::time::Duration;

use log::Level::Warn;
use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::task::JoinHandle;
use tokio::time;

use crate::{GeneratedRecord, RecordGenerator, Workload};

/// Using a [`Workload`] and a [`RecordGenerator`], it generates records and sends them via a given channel.
///
/// This receives the [`mpsc::Sender`] part of the channel: the [`mpsc::Receiver`] part is
/// assigned to a [`ProducerSink`].
pub struct RecordsTap {
    workload: Workload,
    generator: RecordGenerator,
}

impl RecordsTap {
    pub fn new(workload: Workload, generator: RecordGenerator) -> RecordsTap {
        RecordsTap {
            workload,
            generator,
        }
    }

    /// Instantiates a record-producing loop as an async [`tokio::task`].
    ///
    /// Once per second, it queries the internal [`Workload`] for how many records are supposed
    /// to be produced in that instant, and then invokes the internal [`RecordGenerator`] an equal
    /// number of times. Each record is then sent to the "sink" via the given `records_tx` side of
    /// a channel.
    ///
    /// Additionally, when a `()` is received over the `shutdown_rx` [`broadcast::Receiver`], it
    /// initiates a shutdown: it stops producing records and causes the `records_tx` to be dropped.
    /// This in turn causes the receiver to stop expecting records and shut down as well.
    pub fn spawn(
        &mut self,
        records_tx: mpsc::Sender<GeneratedRecord>,
        mut shutdown_rx: broadcast::Receiver<()>,
    ) -> JoinHandle<u64> {
        let workload = self.workload.clone();
        let generator = self.generator.clone();

        tokio::spawn(async move {
            // Seconds since we started producing
            let mut sec = 0u64;

            // This is used to set the pace of the records production
            let mut interval = time::interval(time::Duration::from_secs(1));

            let mut shutdown_requested = false;
            while !shutdown_requested {
                // Figure out how many records we need to produce in this second
                let records_at = workload.records_per_sec_at(sec);
                info!("{sec} sec: sending {records_at} recs...");

                for _ in 0..records_at {
                    if log_enabled!(Warn) {
                        // Warn if we have less than 20% capacity on the internal records channel
                        let cap = records_tx.capacity() as f64;
                        let max_cap = records_tx.max_capacity() as f64;
                        let remaining_cap_perc = cap / max_cap;
                        if remaining_cap_perc < 0.2 {
                            warn!(
                                "Remaining capacity of (internal) Records Channel: {:.2}% ({}/{})",
                                remaining_cap_perc * 100f64,
                                cap,
                                max_cap
                            );
                        }
                    }

                    match generator.generate_record() {
                        Ok(gen_rec) => {
                            tokio::select! {
                                // Send record to the sink (producer)
                                send_res = records_tx.send_timeout(gen_rec, Duration::from_millis(10)) => {
                                    if let Err(e) = send_res {
                                        error!("Failed to send record to producer: {e}");
                                    }
                                },

                                // Initiate shutdown: by letting this task conclude,
                                // the "tap" `records_tx` will close, causing the "sink" `records_rx`
                                // to return `None` and conclude its own task.
                                _ = shutdown_rx.recv() => {
                                    info!("Received shutdown signal");
                                    shutdown_requested = true;
                                },
                            }
                        },
                        Err(e) => error!("Failed to generate record: {e}"),
                    }
                }
                info!("{sec} sec: sent {records_at} recs");

                // Await next cycle: we do the awaiting at this stage, so that we can start
                // producing for each second as soon as it begins, instead of spending part
                // of that second waiting.
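                // (The first tick of a `tokio::time::interval` completes immediately,
                // so production for second 0 starts without an initial 1s delay.)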
                interval.tick().await;
                sec += 1;
            }

            // Return for how many seconds has this been producing records
            sec
        })
    }
}
--------------------------------------------------------------------------------

/.github/workflows/github-release.yml:
--------------------------------------------------------------------------------
name: GitHub Release

env:
  ARTEFACTS_DIR: "_artefacts"
  CARGO_TERM_COLOR: always

permissions:
  contents: write

on:
  push:
    tags: [ "v[0-9]+.[0-9]+.[0-9]+*" ]

  # TODO Remove this trigger, once we have sorted out release for the various targets
  workflow_dispatch:

jobs:
  release:
    name: Release

    # Workflow strategy supports multiple targets on multiple runners
    strategy:
      fail-fast: false
      matrix:
        job:
          # Linux
          # - { target: aarch64-unknown-linux-gnu     , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # - { target: i686-unknown-linux-gnu        , runner: ubuntu-20.04, cross: true,  can_fail: false }
          - { target: x86_64-unknown-linux-gnu        , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # - { target: arm-unknown-linux-gnueabihf   , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # - { target: arm-unknown-linux-musleabihf  , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # - { target: i686-unknown-linux-musl       , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # - { target: x86_64-unknown-linux-musl     , runner: ubuntu-20.04, cross: true,  can_fail: false }
          # macOS
          - { target: x86_64-apple-darwin             , runner: macos-10.15,  cross: false, can_fail: false }
          # Windows
          # # TODO https://github.com/kafkesc/ksunami/issues/39
          # - { target: i686-pc-windows-msvc          , runner: windows-2019, cross: false, can_fail: true, arch: i686 }
          # - { target: x86_64-pc-windows-gnu         , runner: windows-2019, cross: false, can_fail: true, arch: x86_64 }
          # - { target: x86_64-pc-windows-msvc        , runner: windows-2019, cross: false, can_fail: true, arch: x86_64 }

    runs-on: ${{ matrix.job.runner }}
    continue-on-error: ${{ matrix.job.can_fail }}

    steps:
      - name: Check-out
        uses: actions/checkout@v4

      - name: Dependencies
        shell: bash
        run: |
          case ${{ matrix.job.target }} in
            arm-unknown-linux-*) sudo apt-get -y install gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf ;;
            aarch64-unknown-linux-gnu) sudo apt-get -y install gcc-aarch64-linux-gnu g++-arm-linux-gnueabihf ;;
          esac

      - name: Project info as env-var
        shell: bash
        run: |
          echo "PROJECT_NAME=$(sed -n 's/^name = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV
          echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV
          echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV
          echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV

      # # TODO https://github.com/kafkesc/ksunami/issues/39
      # - name: Deps (Windows)
      #   if: ${{ startsWith(matrix.job.runner, 'windows') }}
      #   run: >-
      #     choco install visualstudio2019enterprise --package-parameters "
      #     --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64
      #     --add Microsoft.VisualStudio.Component.Windows10SDK
      #     --add Microsoft.VisualStudio.Component.Windows10SDK.19041
      #     --add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core
      #     --add Microsoft.VisualStudio.Component.VC.CMake.Project
      #     --add Microsoft.VisualStudio.Component.VC.CoreBuildTools
      #     --add Microsoft.VisualStudio.Component.VC.ATLMFC
      #     --add Microsoft.VisualStudio.Component.Roslyn.Compiler"
      #     ; choco install cmake --installargs 'ADD_CMAKE_TO_PATH=System'

      # # TODO https://github.com/kafkesc/ksunami/issues/39
      # - name: Envs - MSVC (Windows)
      #   if: ${{ startsWith(matrix.job.runner, 'windows') }}
      #   uses: ilammy/msvc-dev-cmd@v1
      #   with:
      #     arch: ${{ matrix.job.arch }}
      #     vsversion: 2019

      # # TODO https://github.com/kafkesc/ksunami/issues/39
      # - name: Envs - WindowsSdkVerBinPath (Windows)
      #   if: ${{ startsWith(matrix.job.runner, 'windows') }}
      #   uses: myci-actions/export-env-var-powershell@1
      #   with:
      #     name: PATH
      #     value: $env:PATH;$env:WindowsSdkVerBinPath\x86;$env:WindowsSdkVerBinPath\x64

      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          target: ${{ matrix.job.target }}
          profile: minimal
          override: true

      - name: Build
        uses: actions-rs/cargo@v1
        with:
          use-cross: ${{ matrix.job.cross }}
          command: build
          args: --locked --release --target=${{ matrix.job.target }}

      - name: Artefact
        id: artefact
        shell: bash
        run: |
          # Figure out if the binary has a suffix (Windows!)
          EXE_SUFFIX=""
          case ${{ matrix.job.target }} in
            *-pc-windows-*) EXE_SUFFIX=".exe" ;;
          esac;

          # Setup paths
          BIN_NAME="${{ env.PROJECT_NAME }}${EXE_SUFFIX}"
          BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}"

          # Let subsequent steps know where to find the binary
          echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
          echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT

      - name: Package
        id: package
        shell: bash
        run: |
          # Figure out the tarball suffix (Windows!)
          PKG_SUFFIX=".tar.gz"
          case ${{ matrix.job.target }} in
            *-pc-windows-*) PKG_SUFFIX=".zip" ;;
          esac

          # Composing final PKG_NAME
          PKG_BASENAME=${PROJECT_NAME}-v${PROJECT_VERSION}-${{ matrix.job.target }}
          PKG_NAME=${PKG_BASENAME}${PKG_SUFFIX}
          echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT

          PKG_STAGING="${{ env.ARTEFACTS_DIR }}/package"
          ARCHIVE_DIR="${PKG_STAGING}/${PKG_BASENAME}/"
          mkdir -p "${ARCHIVE_DIR}"

          # Binary
          cp "${{ steps.artefact.outputs.BIN_PATH }}" "$ARCHIVE_DIR"

          # README, LICENSE-* and CHANGELOG files
          cp "README.md" "LICENSE-APACHE" "LICENSE-MIT" "CHANGELOG.md" "$ARCHIVE_DIR"

          # base compressed package
          pushd "${PKG_STAGING}/" >/dev/null
          case ${{ matrix.job.target }} in
            *-pc-windows-*) 7z -y a "${PKG_NAME}" "${PKG_BASENAME}"/* | tail -2 ;;
            *) tar czf "${PKG_NAME}" "${PKG_BASENAME}"/* ;;
          esac;
          popd >/dev/null

          # Let subsequent steps know where to find the compressed package
          echo "PKG_PATH=${PKG_STAGING}/${PKG_NAME}" >> $GITHUB_OUTPUT

      - name: Release
        id: release
        uses: softprops/action-gh-release@v1
        with:
          name: v${{ env.PROJECT_VERSION }}
          tag_name: v${{ env.PROJECT_VERSION }}
          files: |
            ${{ steps.package.outputs.PKG_PATH }}
--------------------------------------------------------------------------------

/src/cli.rs:
--------------------------------------------------------------------------------
use clap::error::ErrorKind;
pub use clap::{value_parser, ArgGroup, CommandFactory, Parser};

use crate::generator::ValueGenerator;
use crate::rdkafka::PartitionerConfig;
use crate::transition::Transition;

#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
#[command(group(
    ArgGroup::new("logging_flags")
        .required(false)
        .multiple(false)
        .args(["verbose", "quiet"]),
))]
pub struct Cli {
    // ---------------------------------------------------------------------- Producer configuration
    /// Initial Kafka Brokers to connect to (format: 'HOST:PORT,...').
    ///
    /// Equivalent to '--config=bootstrap.servers:host:port,...'.
    #[arg(short, long = "brokers", value_name = "BOOTSTRAP_BROKERS")]
    pub bootstrap_brokers: String,

    /// Client identifier used by the internal Kafka Producer.
    ///
    /// Equivalent to '--config=client.id:my-client-id'.
    #[arg(long = "client-id", value_name = "CLIENT_ID", default_value = env!("CARGO_PKG_NAME"))]
    pub client_id: String,

    /// Partitioner used by the internal Kafka Producer.
    ///
    /// Equivalent to '--config=partitioner:random'.
    #[arg(long, value_name = "PARTITIONER", value_enum, default_value_t = PartitionerConfig::default())]
    pub partitioner: PartitionerConfig,

    /// Additional configuration used by the internal Kafka Producer (format: 'CONF_KEY:CONF_VAL').
    ///
    /// To set multiple configuration keys, use this argument multiple times.
    /// See: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
    #[arg(short, long, value_name = "CONF_KEY:CONF_VAL", value_parser = kv_clap_value_parser)]
    pub config: Vec<KVPair>,

    // ------------------------------------------------------------- Record generation configuration
    /// Destination Topic.
    ///
    /// Topic must already exist.
    #[arg(short = 't', long = "topic", value_name = "TOPIC")]
    pub topic: String,

    /// Records Key (format: 'KEY_TYPE:INPUT').
    ///
    /// The supported key types are:
    ///
    /// * 'string:STR': STR is a plain string
    /// * 'file:PATH': PATH is a path to an existing file
    /// * 'alpha:LENGTH': LENGTH is the length of a random alphanumeric string
    /// * 'bytes:LENGTH': LENGTH is the length of a random bytes array
    /// * 'int:MIN-MAX': MIN and MAX are limits of an inclusive range from which an integer number is picked
    /// * 'float:MIN-MAX': MIN and MAX are limits of an inclusive range from which a float number is picked
    #[arg(short, long, value_name = "KEY_TYPE:INPUT", value_parser = ValueGenerator::clap_parser, verbatim_doc_comment)]
    pub key: Option<ValueGenerator>,

    /// Records Payload (format: 'PAYLOAD_TYPE:INPUT').
    ///
    /// The supported payload types are:
    ///
    /// * 'string:STR': STR is a plain string
    /// * 'file:PATH': PATH is a path to an existing file
    /// * 'alpha:LENGTH': LENGTH is the length of a random alphanumeric string
    /// * 'bytes:LENGTH': LENGTH is the length of a random bytes array
    /// * 'int:MIN-MAX': MIN and MAX are limits of an inclusive range from which an integer number is picked
    /// * 'float:MIN-MAX': MIN and MAX are limits of an inclusive range from which a float number is picked
    #[arg(short, long, value_name = "PAYLOAD_TYPE:INPUT", value_parser = ValueGenerator::clap_parser, verbatim_doc_comment)]
    pub payload: Option<ValueGenerator>,
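
    // For example (illustrative values): `--key int:0-1000 --payload alpha:512` picks a
    // random integer key in [0, 1000] and a random 512-char alphanumeric payload per record.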

    /// Destination Topic Partition.
    ///
    /// If not specified (or '-1'), Producer will rely on the Partitioner.
    /// See the '--partitioner' argument.
    #[arg(long, value_name = "PARTITION", value_parser = value_parser!(i32).range(-1..))]
    pub partition: Option<i32>,

    /// Records Header(s) (format: 'HEAD_KEY:HEAD_VAL').
    ///
    /// To set multiple headers, use this argument multiple times.
    #[arg(long = "head", value_name = "HEAD_KEY:HEAD_VAL", value_parser = kv_clap_value_parser)]
    pub headers: Vec<KVPair>,

    // ---------------------------------------------------------------------- Workload configuration
    /// Minimum amount of records/sec.
    #[arg(long = "min", value_name = "REC/SEC")]
    pub min: u32,

    /// How long to produce at minimum records/sec, before ramp-up.
    #[arg(long = "min-sec", default_value_t = 60, value_name = "SEC")]
    pub min_sec: u32,

    /// Maximum amount of records/sec.
    #[arg(long = "max", value_name = "REC/SEC")]
    pub max: u32,

    /// How long to produce at maximum records/sec, before ramp-down.
    #[arg(long = "max-sec", default_value_t = 60, value_name = "SEC")]
    pub max_sec: u32,

    /// Ramp-up transition from minimum to maximum records/sec.
    #[arg(long = "up", value_enum, default_value_t = Transition::Linear, value_name = "TRANSITION_TYPE")]
    pub up: Transition,

    /// How long the ramp-up transition should last.
    #[arg(long = "up-sec", default_value_t = 10, value_name = "SEC")]
    pub up_sec: u32,

    /// Ramp-down transition from maximum to minimum records/sec.
    #[arg(long = "down", value_enum, default_value_t = Transition::None, value_name = "TRANSITION_TYPE")]
    pub down: Transition,

    /// How long the ramp-down transition should last.
    #[arg(long = "down-sec", default_value_t = 10, value_name = "SEC")]
    pub down_sec: u32,

    /// Verbose logging.
    ///
    /// * none = 'WARN'
    /// * '-v' = 'INFO'
    /// * '-vv' = 'DEBUG'
    /// * '-vvv' = 'TRACE'
    ///
    /// Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'.
    #[arg(short, long, action = clap::ArgAction::Count, verbatim_doc_comment)]
    pub verbose: u8,

    /// Quiet logging.
    ///
    /// * none = 'WARN'
    /// * '-q' = 'ERROR'
    /// * '-qq' = 'OFF'
    ///
    /// Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'.
    #[arg(short, long, action = clap::ArgAction::Count, verbatim_doc_comment)]
    pub quiet: u8,
}

impl Cli {
    pub fn parse_and_validate() -> Self {
        let cli = Self::parse();

        // Validate values provided for `min` and `max`
        if cli.min >= cli.max {
            let mut cmd = Cli::command();
            cmd.error(ErrorKind::InvalidValue, "Argument 'min' must be less than 'max'").exit();
        }

        // Validate `(up|down)` transition with respect to their `(up|down)_sec` value
        if cli.up != Transition::None && cli.up_sec == 0 {
            let mut cmd = Cli::command();
            cmd.error(
                ErrorKind::InvalidValue,
                "Argument 'up-sec' must be greater than 0 when 'up' transition is not 'none'",
            )
            .exit();
        }
        if cli.down != Transition::None && cli.down_sec == 0 {
            let mut cmd = Cli::command();
            cmd.error(
                ErrorKind::InvalidValue,
                "Argument 'down-sec' must be greater than 0 when 'down' transition is not 'none'",
            )
            .exit();
        }

        // Validate that non-zero values are assigned to `min_sec` and `max_sec`
        if cli.min_sec == 0 || cli.max_sec == 0 {
            let mut cmd = Cli::command();
            cmd.error(ErrorKind::InvalidValue, "Arguments 'min-sec/max-sec' must be greater than 0").exit();
        }

        cli
    }

    pub fn verbosity_level(&self) -> i8 {
        self.verbose as i8 - self.quiet as i8
    }
}

/// A simple (key,value) pair of `String`s, useful to be parsed from arguments via [`kv_clap_value_parser`].
pub type KVPair = (String, String);

/// To be used as [`clap::value_parser`] function to create [`KVPair`] values.
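///
/// For example, `"client.id:my-producer"` parses to `("client.id", "my-producer")`,
/// while an input without a `:` separator is rejected with an error.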
119 | #[arg(long = "down-sec", default_value_t = 10, value_name = "SEC")] 120 | pub down_sec: u32, 121 | 122 | /// Verbose logging. 123 | /// 124 | /// * none = 'WARN' 125 | /// * '-v' = 'INFO' 126 | /// * '-vv' = 'DEBUG' 127 | /// * '-vvv' = 'TRACE' 128 | /// 129 | /// Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'. 130 | #[arg(short,long, action = clap::ArgAction::Count, verbatim_doc_comment)] 131 | pub verbose: u8, 132 | 133 | /// Quiet logging. 134 | /// 135 | /// * none = 'WARN' 136 | /// * '-q' = 'ERROR' 137 | /// * '-qq' = 'OFF' 138 | /// 139 | /// Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'. 140 | #[arg(short,long, action = clap::ArgAction::Count, verbatim_doc_comment)] 141 | pub quiet: u8, 142 | } 143 | 144 | impl Cli { 145 | pub fn parse_and_validate() -> Self { 146 | let cli = Self::parse(); 147 | 148 | // Validate values provided for `min` and `max` 149 | if cli.min >= cli.max { 150 | let mut cmd = Cli::command(); 151 | cmd.error(ErrorKind::InvalidValue, "Argument 'min' must be less than 'max'").exit(); 152 | } 153 | 154 | // Validate `(up|down)` transition in respect to their `(up|down)_sec` value 155 | if cli.up != Transition::None && cli.up_sec == 0 { 156 | let mut cmd = Cli::command(); 157 | cmd.error( 158 | ErrorKind::InvalidValue, 159 | "Argument 'up-sec' must be greater than 0 when 'up' transition is not 'none'", 160 | ) 161 | .exit(); 162 | } 163 | if cli.down != Transition::None && cli.down_sec == 0 { 164 | let mut cmd = Cli::command(); 165 | cmd.error( 166 | ErrorKind::InvalidValue, 167 | "Argument 'down-sec' must be greater than 0 when 'down' transition is not 'none'", 168 | ) 169 | .exit(); 170 | } 171 | 172 | // Validate that non-zero values are assigned to `min_sec` and `max_sec` 173 | if cli.min_sec == 0 || cli.max_sec == 0 { 174 | let mut cmd = Cli::command(); 175 | cmd.error(ErrorKind::InvalidValue, "Arguments 'min/max' must be greater than 0").exit(); 176 | } 177 | 178 | cli 179 | } 180 | 181 | pub fn verbosity_level(&self) -> i8 { 182 | self.verbose as i8 - self.quiet as i8 183 | } 184 | } 185 | 186 | /// A simple (key,value) pair of `String`s, useful to be parsed from arguments via [`kv_clap_value_parser`]. 187 | pub type KVPair = (String, String); 188 | 189 | /// To be used as [`clap::value_parser`] function to create [`KVPair`] values. 190 | fn kv_clap_value_parser(kv: &str) -> Result { 191 | let (k, v) = match kv.split_once(':') { 192 | None => { 193 | return Err("Should have 'K:V' format".to_string()); 194 | }, 195 | Some((k, v)) => (k, v), 196 | }; 197 | 198 | Ok((k.to_string(), v.to_string())) 199 | } 200 | -------------------------------------------------------------------------------- /src/transition.rs: -------------------------------------------------------------------------------- 1 | use clap::ValueEnum; 2 | use flo_curves::{Coord2, Coordinate2D}; 3 | 4 | /// It represents a passing from the "min" phase to the "max" phase (i.e. "up" phase), or vice-versa (i.e. "down" phase). 5 | /// 6 | /// A `Transition` is a name we give to a pair of control points used by Bézier Curves. 7 | /// We use [Cubic Bézier curves](https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Cubic_B%C3%A9zier_curves) 8 | /// to describe the transition: given the control points P0, P1, P2 and P3 of a Cubic Bézier, 9 | /// each Transition represents `P1` and `P2` - the 2 middle-control-points of the curve. 
///
/// `P0` and `P3` are instead defined by the phase we are in:
/// the "up" phase means that `P0` will assume the "min" values, while `P3` will assume the "max" values;
/// the "down" phase is the reverse.
///
/// Note that the values of `P1` and `P2` in the documentation below are expressed as `t` values of a
/// Bézier curve (i.e. `0 <= t <= 1`): they are mapped to the final control points,
/// based on the bounding box of `P0` and `P3`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum Transition {
    /// Immediate transition, with no in-between values.
    None,

    /// Linear transition, constant increments between values.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (0,0) and P2 = (1,1)
    /// * down: P1 = (0,1) and P2 = (1,0)
    Linear,

    /// Slow increment at the beginning, accelerates half way through until the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (.5,0) and P2 = (1,1)
    /// * down: P1 = (.5,1) and P2 = (1,0)
    EaseIn,

    /// Fast increment at the beginning, decelerates half way through until the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (0,0) and P2 = (.5,1)
    /// * down: P1 = (0,1) and P2 = (.5,0)
    EaseOut,

    /// Slow increment at the beginning, accelerates half way, decelerates at the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (.5,0) and P2 = (.5,1)
    /// * down: P1 = (.5,1) and P2 = (.5,0)
    EaseInOut,

    /// Fastest increment at the beginning, slowest deceleration close to the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (0,1) and P2 = (0,1)
    /// * down: P1 = (1,1) and P2 = (1,1)
    SpikeIn,

    /// Slowest increment at the beginning, fastest acceleration close to the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (1,0) and P2 = (1,0)
    /// * down: P1 = (0,0) and P2 = (0,0)
    SpikeOut,

    /// Fastest increment at the beginning, slow half way, fastest acceleration close to the end.
    ///
    /// Cubic Bézier P1 and P2 control points:
    ///
    /// * up: P1 = (0,1) and P2 = (1,0)
    /// * down: P1 = (0,0) and P2 = (1,1)
    SpikeInOut,
}

impl Transition {
    pub fn ctrl_pts_up(&self, p0: Coord2, p3: Coord2) -> Option<(Coord2, Coord2)> {
        let (p1_t, p2_t) = match *self {
            Transition::None => {
                return None;
            },

            Transition::Linear => (Coord2(0., 0.), Coord2(1., 1.)),

            Transition::EaseIn => (Coord2(0.5, 0.), Coord2(1., 1.)),
            Transition::EaseOut => (Coord2(0., 0.), Coord2(0.5, 1.)),
            Transition::EaseInOut => (Coord2(0.5, 0.), Coord2(0.5, 1.)),

            Transition::SpikeIn => (Coord2(0., 1.), Coord2(0., 1.)),
            Transition::SpikeOut => (Coord2(1., 0.), Coord2(1., 0.)),
            Transition::SpikeInOut => (Coord2(0., 1.), Coord2(1., 0.)),
        };

        Some(map_p1t_p2t_to_p0_p3(p0, p3, p1_t, p2_t))
    }

    pub fn ctrl_pts_down(&self, p0: Coord2, p3: Coord2) -> Option<(Coord2, Coord2)> {
        let (p1_t, p2_t) = match *self {
            Transition::None => {
                return None;
            },

            Transition::Linear => (Coord2(0., 1.), Coord2(1., 0.)),

            Transition::EaseIn => (Coord2(0.5, 1.), Coord2(1., 0.)),
            Transition::EaseOut => (Coord2(0., 1.), Coord2(0.5, 0.)),
            Transition::EaseInOut => (Coord2(0.5, 1.), Coord2(0.5, 0.)),

            Transition::SpikeIn => (Coord2(1., 1.), Coord2(1., 1.)),
            Transition::SpikeOut => (Coord2(0., 0.), Coord2(0., 0.)),
            Transition::SpikeInOut => (Coord2(0., 0.), Coord2(1., 1.)),
        };

        Some(map_p1t_p2t_to_p0_p3(p0, p3, p1_t, p2_t))
    }
}

/// Find the control points `P1` and `P2`, between `P0` and `P3`, using the `t` value of `P1` and `P2`.
fn map_p1t_p2t_to_p0_p3(p0: Coord2, p3: Coord2, p1_t: Coord2, p2_t: Coord2) -> (Coord2, Coord2) {
    (
        Coord2(between(p0.x(), p3.x(), p1_t.x()), between(p0.y(), p3.y(), p1_t.y())),
        Coord2(between(p0.x(), p3.x(), p2_t.x()), between(p0.y(), p3.y(), p2_t.y())),
    )
}

/// Find in-between value between `a` and `b`, by `t`.
///
/// Note that `t == 0` maps to the `min(a, b)`, while `t == 1` maps to the `max(a,b)`.
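///
/// For example, `between(1.0, 30.0, 0.5)` returns `15.5`, and `between(30.0, 1.0, 0.0)`
/// returns `1.0` (the minimum, regardless of argument order).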
133 | fn between(a: f64, b: f64, t: f64) -> f64 { 134 | let min_val = a.min(b); 135 | let max_val = a.max(b); 136 | 137 | min_val + ((max_val - min_val) * t) 138 | } 139 | 140 | #[cfg(test)] 141 | mod tests { 142 | use super::*; 143 | 144 | #[test] 145 | fn test_none() { 146 | assert_eq!(Transition::None.ctrl_pts_up(Coord2(0., 0.), Coord2(1., 1.)), None); 147 | } 148 | 149 | #[test] 150 | fn test_linear() { 151 | let (p1, p2) = Transition::Linear.ctrl_pts_up(Coord2(1., 2.), Coord2(30., 40.)).unwrap(); 152 | assert_eq!(p1, Coord2(1., 2.)); 153 | assert_eq!(p2, Coord2(30., 40.)); 154 | 155 | let (p1, p2) = Transition::Linear.ctrl_pts_down(Coord2(1., 40.), Coord2(30., 2.)).unwrap(); 156 | assert_eq!(p1, Coord2(1., 40.)); 157 | assert_eq!(p2, Coord2(30., 2.)); 158 | } 159 | 160 | #[test] 161 | fn test_ease_in() { 162 | let (p1, p2) = Transition::EaseIn.ctrl_pts_up(Coord2(1., 2.), Coord2(32., 44.)).unwrap(); 163 | assert_eq!(p1, Coord2(16.5, 2.)); 164 | assert_eq!(p2, Coord2(32., 44.)); 165 | 166 | let (p1, p2) = Transition::EaseIn.ctrl_pts_down(Coord2(1., 44.), Coord2(32., 2.)).unwrap(); 167 | assert_eq!(p1, Coord2(16.5, 44.)); 168 | assert_eq!(p2, Coord2(32., 2.)); 169 | } 170 | 171 | #[test] 172 | fn test_ease_out() { 173 | let (p1, p2) = Transition::EaseOut.ctrl_pts_up(Coord2(1., 2.), Coord2(32., 44.)).unwrap(); 174 | assert_eq!(p1, Coord2(1., 2.)); 175 | assert_eq!(p2, Coord2(16.5, 44.)); 176 | 177 | let (p1, p2) = Transition::EaseOut.ctrl_pts_down(Coord2(1., 44.), Coord2(32., 2.)).unwrap(); 178 | assert_eq!(p1, Coord2(1., 44.)); 179 | assert_eq!(p2, Coord2(16.5, 2.)); 180 | } 181 | 182 | #[test] 183 | fn test_ease_in_out() { 184 | let (p1, p2) = Transition::EaseInOut.ctrl_pts_up(Coord2(11., 3.), Coord2(35., 50.)).unwrap(); 185 | assert_eq!(p1, Coord2(23., 3.)); 186 | assert_eq!(p2, Coord2(23., 50.)); 187 | 188 | let (p1, p2) = Transition::EaseInOut.ctrl_pts_down(Coord2(11., 50.), Coord2(35., 3.)).unwrap(); 189 | assert_eq!(p1, Coord2(23., 50.)); 190 | assert_eq!(p2, Coord2(23., 3.)); 191 | } 192 | 193 | #[test] 194 | fn test_spike_in() { 195 | let (p1, p2) = Transition::SpikeIn.ctrl_pts_up(Coord2(11., 3.), Coord2(35., 50.)).unwrap(); 196 | assert_eq!(p1, Coord2(11., 50.)); 197 | assert_eq!(p2, Coord2(11., 50.)); 198 | 199 | let (p1, p2) = Transition::SpikeIn.ctrl_pts_down(Coord2(11., 50.), Coord2(35., 3.)).unwrap(); 200 | assert_eq!(p1, Coord2(35., 50.)); 201 | assert_eq!(p2, Coord2(35., 50.)); 202 | } 203 | 204 | #[test] 205 | fn test_spike_out() { 206 | let (p1, p2) = Transition::SpikeOut.ctrl_pts_up(Coord2(11., 3.), Coord2(35., 50.)).unwrap(); 207 | assert_eq!(p1, Coord2(35., 3.)); 208 | assert_eq!(p2, Coord2(35., 3.)); 209 | 210 | let (p1, p2) = Transition::SpikeOut.ctrl_pts_down(Coord2(11., 50.), Coord2(35., 3.)).unwrap(); 211 | assert_eq!(p1, Coord2(11., 3.)); 212 | assert_eq!(p2, Coord2(11., 3.)); 213 | } 214 | 215 | #[test] 216 | fn test_spike_in_out() { 217 | let (p1, p2) = Transition::SpikeInOut.ctrl_pts_up(Coord2(11., 3.), Coord2(35., 50.)).unwrap(); 218 | assert_eq!(p1, Coord2(11., 50.)); 219 | assert_eq!(p2, Coord2(35., 3.)); 220 | 221 | let (p1, p2) = Transition::SpikeInOut.ctrl_pts_down(Coord2(11., 50.), Coord2(35., 3.)).unwrap(); 222 | assert_eq!(p1, Coord2(11., 3.)); 223 | assert_eq!(p2, Coord2(35., 50.)); 224 | } 225 | } 226 | -------------------------------------------------------------------------------- /src/workload.rs: -------------------------------------------------------------------------------- 1 | use flo_curves::bezier; 2 | use flo_curves::*; 3 | 4 | use 
crate::transition::Transition; 5 | 6 | /// Represents the amount of "work" to do, at any given time. 7 | /// Time itself is measured in seconds, from `0` to [`std::u64::MAX`]. 8 | /// 9 | /// The amount of work is expressed as a `u32`, referring to the amount of records/sec, 10 | /// and it's retrieved via [`Workload::records_per_sec_at`]. 11 | /// 12 | /// The workload goes through 4 phases: 13 | /// 14 | /// * [`WorkloadPhase::Min`], lasting `min_sec` 15 | /// * [`WorkloadPhase::Up`], lasting `up_sec` 16 | /// * [`WorkloadPhase::Max`], lasting `max_sec` 17 | /// * [`WorkloadPhase::Down`], lasting `down_sec` 18 | /// 19 | /// Given the input at construction time, the workload repeats over and over, with a period 20 | /// equivalent to the sum of `min_sec + up_sec + max_sec + down_sec`: this means that 21 | /// after the [`WorkloadPhase::Down`], the [`WorkloadPhase::Min`] starts again. 22 | #[derive(Debug, Clone, PartialEq)] 23 | pub struct Workload { 24 | /// Minimum amount of records per second 25 | min: u32, 26 | 27 | /// How long to stay at minimum records/sec before ramp-up 28 | min_sec: u32, 29 | 30 | /// Maximum amount of records per second 31 | max: u32, 32 | 33 | /// How long to stay at maximum records/sec, before ramp-down 34 | max_sec: u32, 35 | 36 | /// Ramp-up transition duration 37 | up_sec: u32, 38 | 39 | /// Bézier Curve describing the ramp-up transition. 40 | /// Present if [`up_transition`] is not [`Transition::None`]. 41 | up_curve: Option<bezier::Curve<Coord2>>, 42 | 43 | /// Ramp-down transition duration 44 | down_sec: u32, 45 | 46 | /// Bézier Curve describing the ramp-down transition. 47 | /// Present if [`down_transition`] is not [`Transition::None`]. 48 | down_curve: Option<bezier::Curve<Coord2>>, 49 | } 50 | 51 | /// Describes the phases that a [`Workload`] goes through cyclically.
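/// For example (illustrative): with `min_sec=60`, `up_sec=10`, `max_sec=60` and `down_sec=10`,
/// seconds `0..60` of each cycle are `Min`, `60..70` are `Up`, `70..130` are `Max` and `130..140` are `Down`.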
52 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 53 | pub enum WorkloadPhase { 54 | Min, 55 | Up, 56 | Max, 57 | Down, 58 | } 59 | 60 | impl Workload { 61 | #![allow(clippy::too_many_arguments)] 62 | pub fn new( 63 | min: u32, 64 | min_sec: u32, 65 | max: u32, 66 | max_sec: u32, 67 | up_transition: Transition, 68 | up_sec: u32, 69 | down_transition: Transition, 70 | down_sec: u32, 71 | ) -> Workload { 72 | // P0/P3 for the "up" phase 73 | let up_p0 = Coord2(min_sec as f64, min as f64); 74 | let up_p3 = Coord2((min_sec + up_sec) as f64, max as f64); 75 | 76 | // P0/P3 for the "down" phase 77 | let down_p0 = Coord2((min_sec + up_sec + max_sec) as f64, max as f64); 78 | let down_p3 = Coord2((min_sec + up_sec + max_sec + down_sec) as f64, min as f64); 79 | 80 | // "up" phase duration and curve depend on the corresponding Transition 81 | let (up_sec, up_curve) = if up_transition != Transition::None { 82 | (up_sec, Some(bezier::Curve::from_points(up_p0, up_transition.ctrl_pts_up(up_p0, up_p3).unwrap(), up_p3))) 83 | } else { 84 | (0, None) 85 | }; 86 | 87 | // "down" phase duration and curve depend on the corresponding Transition 88 | let (down_sec, down_curve) = if down_transition != Transition::None { 89 | ( 90 | down_sec, 91 | Some(bezier::Curve::from_points( 92 | down_p0, 93 | down_transition.ctrl_pts_down(down_p0, down_p3).unwrap(), 94 | down_p3, 95 | )), 96 | ) 97 | } else { 98 | (0, None) 99 | }; 100 | 101 | Workload { 102 | min, 103 | min_sec, 104 | max, 105 | max_sec, 106 | up_sec, 107 | up_curve, 108 | down_sec, 109 | down_curve, 110 | } 111 | } 112 | 113 | /// How long the [`WorkloadPhase::Min`] lasts, in seconds 114 | pub fn min_duration_sec(&self) -> u32 { 115 | self.min_sec 116 | } 117 | 118 | /// How long before the [`WorkloadPhase::Max`] starts, in seconds 119 | pub fn before_max_duration_sec(&self) -> u32 { 120 | self.min_sec + self.up_sec 121 | } 122 | 123 | /// How long before the [`WorkloadPhase::Max`] ends, in seconds 124 | pub fn after_max_duration_sec(&self) -> u32 { 125 | self.min_sec + self.up_sec + self.max_sec 126 | } 127 | 128 | /// How long before the [`WorkloadPhase::Down`] ends, in seconds. 129 | /// 130 | /// This also represents the entire length of a workload "cycle": 131 | /// after [`WorkloadPhase::Down`] ends, the [`WorkloadPhase::Min`] starts again. 132 | pub fn overall_duration_sec(&self) -> u32 { 133 | self.min_sec + self.up_sec + self.max_sec + self.down_sec 134 | } 135 | 136 | /// Normalizes the input `sec` from absolute to relative. 137 | /// 138 | /// The [`WorkloadPhase`]s repeat in a loop, so this takes any absolute seconds input and 139 | /// converts it to its relative position in the [`Workload`]. 140 | fn normalize_sec(&self, sec: u64) -> u32 { 141 | // Time "loops", so we normalize the input to never be 142 | // greater than the total `duration_sec`. 143 | (sec % self.overall_duration_sec() as u64) as u32 144 | } 145 | 146 | /// Given the input `sec`, returns the [`WorkloadPhase`] that moment falls in. 147 | pub fn phase_at(&self, sec: u64) -> WorkloadPhase { 148 | let nor_sec = self.normalize_sec(sec); 149 | 150 | if nor_sec < self.min_duration_sec() { 151 | WorkloadPhase::Min 152 | } else if nor_sec < self.before_max_duration_sec() { 153 | WorkloadPhase::Up 154 | } else if nor_sec < self.after_max_duration_sec() { 155 | WorkloadPhase::Max 156 | } else { 157 | WorkloadPhase::Down 158 | } 159 | } 160 | 161 | /// Given the input `sec`, returns the number of records/sec that this `Workload` indicates.
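/// For example (mirroring the unit tests below): with `min=1`, `min_sec=20`, `max=100` and a
/// 3-second linear ramp-up, `records_per_sec_at(21)` returns `27`, one third of the way into the up transition.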
162 | /// 163 | /// The [`WorkloadPhase`]s repeat in a loop, so it takes any absolute seconds input, and 164 | /// returns the amount of records/sec for that moment in time. 165 | pub fn records_per_sec_at(&self, sec: u64) -> u32 { 166 | let nor_sec = self.normalize_sec(sec); 167 | 168 | match self.phase_at(sec) { 169 | WorkloadPhase::Min => self.min, 170 | WorkloadPhase::Up => { 171 | // The corresponding Bézier `t` for `nor_sec` during "up" phase 172 | let nor_sec_t = (nor_sec - self.min_duration_sec()) as f64 / self.up_sec as f64; 173 | 174 | // Return the corresponding Y (amount of records per second) give `t` as X 175 | self.up_curve.unwrap().point_at_pos(nor_sec_t).y().round() as u32 176 | }, 177 | WorkloadPhase::Max => self.max, 178 | WorkloadPhase::Down => { 179 | // The corresponding Bézier `t` for `nor_sec` during "down" phase 180 | let nor_sec_t = (nor_sec - self.after_max_duration_sec()) as f64 / self.down_sec as f64; 181 | 182 | // Return the corresponding Y (amount of records per second) give `t` as X 183 | self.down_curve.unwrap().point_at_pos(nor_sec_t).y().round() as u32 184 | }, 185 | } 186 | } 187 | } 188 | 189 | #[cfg(test)] 190 | mod tests { 191 | use std::collections::HashMap; 192 | 193 | use crate::workload::WorkloadPhase::*; 194 | 195 | use super::*; 196 | 197 | #[test] 198 | fn test_up_linear_down_none() { 199 | let w = Workload::new(1, 20, 100, 5, Transition::Linear, 3, Transition::None, 0); 200 | 201 | // min_sec=20 + up_sec=3 + max_sec=5 + down_sec=0 202 | assert_eq!(28, w.overall_duration_sec()); 203 | 204 | // min at 0-19 205 | for sec in 0u64..=19u64 { 206 | assert_eq!(1, w.records_per_sec_at(sec)); 207 | } 208 | 209 | // up at 20-22 210 | assert_eq!(1, w.records_per_sec_at(20)); 211 | assert_eq!(27, w.records_per_sec_at(21)); 212 | assert_eq!(74, w.records_per_sec_at(22)); 213 | 214 | // max at 23-27 215 | for sec in 23u64..=27u64 { 216 | assert_eq!(100, w.records_per_sec_at(sec)); 217 | } 218 | 219 | // down_sec is 0, so it's time for min again 220 | for sec in 28u64..=47u64 { 221 | assert_eq!(1, w.records_per_sec_at(sec)); 222 | } 223 | 224 | // up at 48-50 225 | assert_eq!(1, w.records_per_sec_at(48)); 226 | assert_eq!(27, w.records_per_sec_at(49)); 227 | assert_eq!(74, w.records_per_sec_at(50)); 228 | 229 | // max at 51-55 230 | for sec in 51u64..=55u64 { 231 | assert_eq!(100, w.records_per_sec_at(sec)); 232 | } 233 | } 234 | 235 | #[test] 236 | fn test_up_spike_out_down_ease_in() { 237 | let w = Workload::new(3, 60, 100, 5, Transition::SpikeOut, 20, Transition::EaseIn, 20); 238 | 239 | let mut occurrences = HashMap::new(); 240 | 241 | let mut prev = 0; 242 | for sec in 0u64..(w.overall_duration_sec() as u64 * 10u64) { 243 | let curr = w.records_per_sec_at(sec); 244 | 245 | // Count the occurrences of a specific phase 246 | let sec_phase = w.phase_at(sec); 247 | occurrences.entry(sec_phase.clone()).and_modify(|counter| *counter += 1).or_insert(1); 248 | 249 | // Given the phase, check that the behaviour is what we expect 250 | match sec_phase { 251 | Min => { 252 | assert_eq!(curr, 3); 253 | }, 254 | Up => { 255 | assert!(curr >= prev); 256 | }, 257 | Max => { 258 | assert_eq!(curr, 100); 259 | }, 260 | Down => { 261 | assert!(curr <= prev); 262 | }, 263 | } 264 | 265 | prev = curr; 266 | } 267 | 268 | // Confirm the occurrences of each phase match expectations 269 | assert_eq!(600, occurrences.get(&Min).cloned().unwrap()); 270 | assert_eq!(200, occurrences.get(&Up).cloned().unwrap()); 271 | assert_eq!(50, occurrences.get(&Max).cloned().unwrap()); 272 | 
assert_eq!(200, occurrences.get(&Down).cloned().unwrap()); 273 | } 274 | } 275 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/generator.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::fs::File; 3 | use std::io::{Error, Read}; 4 | use std::path::PathBuf; 5 | 6 | use rand::distributions::{Alphanumeric, DistString}; 7 | use rand::{thread_rng, Rng}; 8 | use rdkafka::message::{Header, OwnedHeaders}; 9 | use rdkafka::producer::FutureRecord; 10 | 11 | /// Helps to generate a possible value used in [`RecordGenerator`]. 12 | /// 13 | /// Specifically, this is used for the [`RecordGenerator::key_field`] and [`RecordGenerator::payload_field`], 14 | /// to specify what content should be generated for those fields when a Kafka record is generated 15 | /// (via [`RecordGenerator::generate_record`]). 16 | #[derive(Debug, Clone, PartialEq, PartialOrd)] 17 | pub enum ValueGenerator { 18 | /// A user provided string. 19 | String(String), 20 | 21 | /// The content of a file. 22 | File(PathBuf), 23 | 24 | /// A random alphanumeric string. 25 | RandAlphaNum(usize), 26 | 27 | /// A random bytes array. 28 | RandBytes(usize), 29 | 30 | /// A random (signed) integer. 31 | RandInt(i64, i64), 32 | 33 | /// A random float. 34 | RandFloat(f64, f64), 35 | } 36 | 37 | impl ValueGenerator { 38 | /// Generates a `Vec` of bytes containing the value created by this generator, or an error. 
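/// For example (illustrative): `ValueGenerator::String("abc".to_string()).generate()` yields
/// `Ok(b"abc".to_vec())`, while `ValueGenerator::RandBytes(16).generate()` yields 16 random bytes.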
39 | fn generate(&self) -> Result<Vec<u8>, Error> { 40 | match self { 41 | ValueGenerator::String(s) => Ok(s.as_bytes().to_vec()), 42 | ValueGenerator::File(bp) => { 43 | debug!("Loading content of file {:?}", bp); 44 | let mut f = File::open(bp)?; 45 | let mut buf = Vec::new(); 46 | f.read_to_end(&mut buf)?; 47 | 48 | Ok(buf) 49 | }, 50 | ValueGenerator::RandAlphaNum(l) => { 51 | let rand_alpha = Alphanumeric.sample_string(&mut thread_rng(), *l); 52 | 53 | Ok(rand_alpha.as_bytes().to_vec()) 54 | }, 55 | ValueGenerator::RandBytes(l) => { 56 | let random_bytes: Vec<u8> = (0..*l).map(|_| thread_rng().gen::<u8>()).collect(); 57 | 58 | Ok(random_bytes) 59 | }, 60 | ValueGenerator::RandInt(min, max) => { 61 | let random_int = thread_rng().gen_range(*min..=*max); 62 | 63 | Ok(random_int.to_be_bytes().to_vec()) 64 | }, 65 | ValueGenerator::RandFloat(min, max) => { 66 | let random_float = thread_rng().gen_range(*min..=*max); 67 | 68 | Ok(random_float.to_be_bytes().to_vec()) 69 | }, 70 | } 71 | } 72 | 73 | /// Implementation of [`clap::value_parser`], used to create an argument by parsing a user-provided value. 74 | /// 75 | /// The `&str` provided must be in one of the following formats: 76 | /// 77 | /// * `string:STR`: `STR` is parsed to `String` and will be the generated value 78 | /// * `file:PATH`: `PATH` is parsed to a `PathBuf` to an existing file, failing if it can't read its content 79 | /// * `alpha:LENGTH`: `LENGTH` is parsed to the `usize` length of a randomly generated alphanumeric string 80 | /// * `bytes:LENGTH`: `LENGTH` is parsed to the `usize` length of a randomly generated bytes array 81 | /// * `int:MIN-MAX`: `MIN` and `MAX` are parsed to `i64`s of an inclusive range from which to pick a random `i64` 82 | /// * `float:MIN-MAX`: `MIN` and `MAX` are parsed to `f64`s of an inclusive range from which to pick a random `f64` 83 | /// 84 | /// In case of error, it returns a `String` that [`clap`] adds to the error message returned to the user.
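/// For example (mirroring the unit tests below): `ValueGenerator::clap_parser("int:10-100")` yields
/// `Ok(ValueGenerator::RandInt(10, 100))`, while `ValueGenerator::clap_parser("asdasd")` yields
/// `Err("Should have 'TYPE:INPUT' format")`.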
85 | /// 86 | pub fn clap_parser(val_gen_as_str: &str) -> Result<ValueGenerator, String> { 87 | let (val_gen_type, val_gen_input) = match val_gen_as_str.split_once(':') { 88 | None => { 89 | return Err("Should have 'TYPE:INPUT' format".to_string()); 90 | }, 91 | Some((t, c)) => (t, c), 92 | }; 93 | 94 | match val_gen_type { 95 | "string" => Ok(ValueGenerator::String(val_gen_input.to_string())), 96 | "file" => { 97 | let path = PathBuf::from(val_gen_input); 98 | 99 | if !path.exists() || !path.is_file() { 100 | Err(format!("INPUT file '{}' does not exist or is not a file", path.display())) 101 | } else { 102 | Ok(ValueGenerator::File(path)) 103 | } 104 | }, 105 | "alpha" => { 106 | let res = val_gen_input.parse::<usize>(); 107 | 108 | match res { 109 | Err(e) => Err(format!("Failed to parse INPUT 'SIZE' from 'alpha:SIZE': {e}")), 110 | Ok(size) => Ok(ValueGenerator::RandAlphaNum(size)), 111 | } 112 | }, 113 | "bytes" => { 114 | let res = val_gen_input.parse::<usize>(); 115 | 116 | match res { 117 | Err(e) => Err(format!("Failed to parse INPUT 'SIZE' from 'bytes:SIZE': {e}")), 118 | Ok(size) => Ok(ValueGenerator::RandBytes(size)), 119 | } 120 | }, 121 | "int" => match val_gen_input.split_once('-') { 122 | None => Err("Inclusive range should have 'min-max' format".to_string()), 123 | Some((min_str, max_str)) => { 124 | let min = match min_str.parse::<i64>() { 125 | Err(e) => return Err(format!("Failed to parse INPUT 'MIN' from 'int:MIN-MAX': {e}")), 126 | Ok(v) => v, 127 | }; 128 | 129 | let max = match max_str.parse::<i64>() { 130 | Err(e) => return Err(format!("Failed to parse INPUT 'MAX' from 'int:MIN-MAX': {e}")), 131 | Ok(v) => v, 132 | }; 133 | 134 | Ok(ValueGenerator::RandInt(min, max)) 135 | }, 136 | }, 137 | "float" => match val_gen_input.split_once('-') { 138 | None => Err("Inclusive range should have 'min-max' format".to_string()), 139 | Some((min_str, max_str)) => { 140 | let min = match min_str.parse::<f64>() { 141 | Err(e) => return Err(format!("Failed to parse INPUT 'MIN' from 'float:MIN-MAX': {e}")), 142 | Ok(v) => v, 143 | }; 144 | 145 | let max = match max_str.parse::<f64>() { 146 | Err(e) => return Err(format!("Failed to parse INPUT 'MAX' from 'float:MIN-MAX': {e}")), 147 | Ok(v) => v, 148 | }; 149 | 150 | Ok(ValueGenerator::RandFloat(min, max)) 151 | }, 152 | }, 153 | _ => Err(format!("Unsupported TYPE '{val_gen_type}:...'")), 154 | } 155 | } 156 | } 157 | 158 | /// The data of a Kafka Record, as generated by [`RecordGenerator`]. 159 | /// 160 | /// The `key` and `payload` values are `Vec<u8>`, 161 | /// as this is the most basic form of data we can give to the Kafka Producer. 162 | #[derive(Debug, Clone, PartialEq, Eq)] 163 | pub struct GeneratedRecord { 164 | pub topic: String, 165 | pub key: Option<Vec<u8>>, 166 | pub payload: Option<Vec<u8>>, 167 | pub partition: Option<i32>, 168 | pub headers: HashMap<String, String>, 169 | } 170 | 171 | impl GeneratedRecord { 172 | /// Converts the `GeneratedRecord` into a [`rdkafka::producer::FutureRecord`], usable with [`rdkafka::producer::FutureProducer`].
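/// (Note: the returned record borrows its `key` and `payload` from `self`, so the
/// `GeneratedRecord` must outlive the `FutureRecord` built from it.)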
173 | pub fn as_future_record(&self) -> FutureRecord<Vec<u8>, Vec<u8>> { 174 | let mut rec: FutureRecord<Vec<u8>, Vec<u8>> = FutureRecord::to(self.topic.as_str()); 175 | 176 | // Set record key (if available) 177 | rec.key = if let Some(k) = &self.key { 178 | Some(k) 179 | } else { 180 | None 181 | }; 182 | 183 | // Set record payload (if available) 184 | rec.payload = if let Some(p) = &self.payload { 185 | Some(p) 186 | } else { 187 | None 188 | }; 189 | 190 | // Set partition 191 | rec.partition = self.partition; 192 | 193 | // Set headers 194 | let mut rec_headers = OwnedHeaders::new(); 195 | for (k, v) in &self.headers { 196 | rec_headers = rec_headers.insert(Header { 197 | key: k, 198 | value: Some(v), 199 | }); 200 | } 201 | rec.headers = Some(rec_headers); 202 | rec 203 | } 204 | } 205 | 206 | /// Utility to generate records. 207 | #[derive(Debug, Clone, PartialEq)] 208 | pub struct RecordGenerator { 209 | /// Topic the record is destined to. 210 | destination_topic: String, 211 | 212 | /// Generator of the content that will go in the record key. 213 | key_generator: Option<ValueGenerator>, 214 | 215 | /// Offers memoization for the record `key_generator`, depending on the [`ValueGenerator`] used. 216 | /// This is used when the value that goes in the key doesn't change at every record generation. 217 | key_generated_content: Option<Vec<u8>>, 218 | 219 | /// Generator of the content that will go in the record payload. 220 | payload_generator: Option<ValueGenerator>, 221 | 222 | /// Offers memoization for the record `payload_generator`, depending on the [`ValueGenerator`] used. 223 | /// This is used when the value that goes in the payload doesn't change at every record generation. 224 | payload_generated_content: Option<Vec<u8>>, 225 | 226 | /// Headers that will be added to the record. 227 | headers: HashMap<String, String>, 228 | 229 | /// Topic partition the record is destined to. 230 | /// If absent, this will be left to the Kafka Producer partitioner to determine.
destination_partition: Option<i32>, 232 | } 233 | 234 | impl RecordGenerator { 235 | pub fn new(destination_topic: String) -> RecordGenerator { 236 | RecordGenerator { 237 | destination_topic, 238 | key_generator: None, 239 | key_generated_content: None, 240 | payload_generator: None, 241 | payload_generated_content: None, 242 | headers: HashMap::new(), 243 | destination_partition: None, 244 | } 245 | } 246 | 247 | pub fn add_record_header(&mut self, k: String, v: String) { 248 | self.headers.insert(k, v); 249 | } 250 | 251 | pub fn set_key_generator(&mut self, key_generator: ValueGenerator) -> Result<(), Error> { 252 | // Memoize content, if appropriate 253 | self.key_generated_content = match key_generator { 254 | ValueGenerator::String(_) | ValueGenerator::File(_) => Some(key_generator.generate()?), 255 | _ => None, 256 | }; 257 | 258 | self.key_generator = Some(key_generator); 259 | 260 | Ok(()) 261 | } 262 | 263 | pub fn set_payload_generator(&mut self, payload_generator: ValueGenerator) -> Result<(), Error> { 264 | // Memoize content, if appropriate 265 | self.payload_generated_content = match payload_generator { 266 | ValueGenerator::String(_) | ValueGenerator::File(_) => Some(payload_generator.generate()?), 267 | _ => None, 268 | }; 269 | 270 | self.payload_generator = Some(payload_generator); 271 | 272 | Ok(()) 273 | } 274 | 275 | pub fn set_destination_partition(&mut self, partition: i32) { 276 | self.destination_partition = Some(partition); 277 | } 278 | 279 | pub fn generate_record(&self) -> Result<GeneratedRecord, Error> { 280 | let rec = GeneratedRecord { 281 | topic: self.destination_topic.clone(), 282 | key: if let Some(k_mem) = &self.key_generated_content { 283 | Some(k_mem.to_vec()) 284 | } else if let Some(k) = &self.key_generator { 285 | Some(k.generate()?) 286 | } else { 287 | None 288 | }, 289 | payload: if let Some(p_mem) = &self.payload_generated_content { 290 | Some(p_mem.to_vec()) 291 | } else if let Some(p) = &self.payload_generator { 292 | Some(p.generate()?)
293 | } else { 294 | None 295 | }, 296 | partition: self.destination_partition, 297 | headers: self.headers.clone(), 298 | }; 299 | 300 | Ok(rec) 301 | } 302 | } 303 | 304 | #[cfg(test)] 305 | mod tests { 306 | use rdkafka::message::Headers; 307 | 308 | use super::*; 309 | 310 | #[test] 311 | fn test_payload_only() { 312 | let mut generator = RecordGenerator::new("a_topic_name".to_string()); 313 | assert!(generator.set_payload_generator(ValueGenerator::String("a payload content".to_string())).is_ok()); 314 | 315 | let rec = generator.generate_record().unwrap(); 316 | assert_eq!("a_topic_name", rec.topic); 317 | assert_eq!(None, rec.key); 318 | assert_eq!("a payload content".as_bytes(), rec.payload.clone().unwrap()); 319 | assert!(rec.headers.is_empty()); 320 | assert_eq!(None, rec.partition); 321 | 322 | let fut_rec = rec.as_future_record(); 323 | assert_eq!("a_topic_name", fut_rec.topic); 324 | assert!(fut_rec.key.is_none()); 325 | assert!(fut_rec.payload.is_some()); 326 | assert!(fut_rec.partition.is_none()); 327 | assert!(fut_rec.headers.is_some()); 328 | assert_eq!(0, fut_rec.headers.unwrap().count()); 329 | 330 | generator.set_destination_partition(10); 331 | let rec = generator.generate_record().unwrap(); 332 | assert_eq!(Some(10), rec.partition); 333 | 334 | let fut_rec = rec.as_future_record(); 335 | assert_eq!(Some(10), fut_rec.partition); 336 | } 337 | 338 | #[test] 339 | fn test_key_and_headers() { 340 | let mut generator = RecordGenerator::new("another_topic".to_string()); 341 | assert!(generator.set_payload_generator(ValueGenerator::String("another payload".to_string())).is_ok()); 342 | 343 | generator.add_record_header("k1".to_string(), "v1".to_string()); 344 | generator.add_record_header("k2".to_string(), "v2".to_string()); 345 | generator.add_record_header("k3".to_string(), "v3".to_string()); 346 | 347 | assert!(generator.set_key_generator(ValueGenerator::RandInt(10, 10)).is_ok()); 348 | 349 | let rec = generator.generate_record().unwrap(); 350 | assert_eq!("another_topic", rec.topic); 351 | assert_eq!(10u64.to_be_bytes().to_vec(), rec.key.clone().unwrap()); 352 | assert_eq!("another payload".as_bytes(), rec.payload.clone().unwrap()); 353 | 354 | assert_eq!(3, rec.headers.len()); 355 | assert!(rec.headers.contains_key("k1")); 356 | assert!(rec.headers.contains_key("k2")); 357 | assert!(rec.headers.contains_key("k3")); 358 | 359 | assert_eq!(None, rec.partition); 360 | 361 | let fut_rec = rec.as_future_record(); 362 | assert_eq!("another_topic", fut_rec.topic); 363 | assert!(fut_rec.key.is_some()); 364 | assert!(fut_rec.payload.is_some()); 365 | assert!(fut_rec.partition.is_none()); 366 | assert!(fut_rec.headers.is_some()); 367 | assert_eq!(3, fut_rec.headers.unwrap().count()); 368 | } 369 | 370 | #[test] 371 | fn test_file_payload() { 372 | let cargo_toml_path = PathBuf::from("./Cargo.toml"); 373 | let mut generator = RecordGenerator::new("topic_zzz".to_string()); 374 | assert!(generator.set_payload_generator(ValueGenerator::File(cargo_toml_path.clone())).is_ok()); 375 | 376 | let rec = generator.generate_record().unwrap(); 377 | assert_eq!("topic_zzz", rec.topic); 378 | assert_eq!(None, rec.key); 379 | let mut f = File::open(cargo_toml_path).unwrap(); 380 | let mut cargo_toml_content = Vec::new(); 381 | f.read_to_end(&mut cargo_toml_content).unwrap(); 382 | 383 | assert_eq!(cargo_toml_content, rec.payload.unwrap()); 384 | assert!(rec.headers.is_empty()); 385 | assert_eq!(None, rec.partition); 386 | } 387 | 388 | #[test] 389 | fn test_randomizers() { 390 | let mut generator 
= RecordGenerator::new("topic".to_string()); 391 | assert!(generator.set_key_generator(ValueGenerator::RandBytes(20)).is_ok()); 392 | assert!(generator.set_payload_generator(ValueGenerator::RandAlphaNum(20)).is_ok()); 393 | 394 | let rec = generator.generate_record().unwrap(); 395 | assert_eq!(20, rec.key.unwrap().len()); 396 | assert!(std::str::from_utf8(rec.payload.unwrap().as_slice()).is_ok()); 397 | 398 | assert!(generator.set_payload_generator(ValueGenerator::RandInt(123, 125)).is_ok()); 399 | assert!(generator.set_key_generator(ValueGenerator::RandFloat(1.5, 2.0)).is_ok()); 400 | 401 | let rec = generator.generate_record().unwrap(); 402 | let rec_key = f64::from_be_bytes(rec.key.unwrap().as_slice().try_into().unwrap()); 403 | assert!((1.5..=2.0).contains(&rec_key)); 404 | 405 | let rec_payload = i64::from_be_bytes(rec.payload.unwrap().as_slice().try_into().unwrap()); 406 | assert!((123..=125).contains(&rec_payload)); 407 | } 408 | 409 | #[test] 410 | fn test_value_generator_clap_parser() { 411 | let res = ValueGenerator::clap_parser("string:StRiNgA"); 412 | assert!(res.is_ok()); 413 | assert_eq!(ValueGenerator::String("StRiNgA".to_string()), res.unwrap()); 414 | 415 | let res = ValueGenerator::clap_parser("file:Cargo.toml"); 416 | assert!(res.is_ok()); 417 | assert_eq!(ValueGenerator::File(PathBuf::from("Cargo.toml")), res.unwrap()); 418 | 419 | let res = ValueGenerator::clap_parser("alpha:11"); 420 | assert!(res.is_ok()); 421 | assert_eq!(ValueGenerator::RandAlphaNum(11), res.unwrap()); 422 | 423 | let res = ValueGenerator::clap_parser("bytes:21"); 424 | assert!(res.is_ok()); 425 | assert_eq!(ValueGenerator::RandBytes(21), res.unwrap()); 426 | 427 | let res = ValueGenerator::clap_parser("int:10-100"); 428 | assert!(res.is_ok()); 429 | assert_eq!(ValueGenerator::RandInt(10, 100), res.unwrap()); 430 | 431 | let res = ValueGenerator::clap_parser("float:11-213.1"); 432 | assert!(res.is_ok()); 433 | assert_eq!(ValueGenerator::RandFloat(11., 213.1), res.unwrap()); 434 | } 435 | 436 | #[test] 437 | fn test_failure_value_generator_clap_parser() { 438 | let res = ValueGenerator::clap_parser("stringz:StRiNgA"); 439 | assert!(res.is_err()); 440 | assert_eq!("Unsupported TYPE 'stringz:...'", res.unwrap_err()); 441 | 442 | let res = ValueGenerator::clap_parser("asdasd"); 443 | assert!(res.is_err()); 444 | assert_eq!("Should have 'TYPE:INPUT' format", res.unwrap_err()); 445 | 446 | let res = ValueGenerator::clap_parser("file:does_not_exist"); 447 | assert!(res.is_err()); 448 | assert_eq!("INPUT file 'does_not_exist' does not exist or is not a file", res.unwrap_err()); 449 | 450 | let res = ValueGenerator::clap_parser("bytes:gimme_some"); 451 | assert!(res.is_err()); 452 | assert_eq!("Failed to parse INPUT 'SIZE' from 'bytes:SIZE': invalid digit found in string", res.unwrap_err()); 453 | 454 | let res = ValueGenerator::clap_parser("float:123,456"); 455 | assert!(res.is_err()); 456 | assert_eq!("Inclusive range should have 'min-max' format", res.unwrap_err()); 457 | 458 | let res = ValueGenerator::clap_parser("int:abc-asd"); 459 | assert!(res.is_err()); 460 | assert_eq!("Failed to parse INPUT 'MIN' from 'int:MIN-MAX': invalid digit found in string", res.unwrap_err()); 461 | } 462 | } 463 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Ksunami 2 | 3 |
4 | 5 | Waves of Kafka Records! 6 | 7 | [![CI](https://img.shields.io/github/actions/workflow/status/kafkesc/ksunami/ci.yml?branch=main&label=CI%20%28main%29&logo=Github&style=flat-square)](https://github.com/kafkesc/ksunami/actions/workflows/ci.yml) 8 | [![Apache 2.0](https://img.shields.io/crates/l/ksunami?logo=apache&style=flat-square)](https://github.com/kafkesc/ksunami/blob/main/LICENSE) 9 | [![Crates.io downloads](https://img.shields.io/crates/d/ksunami?logo=rust&style=flat-square)](https://crates.io/crates/ksunami) 10 | [![Latest version](https://img.shields.io/crates/v/ksunami?label=latest%20version&logo=rust&style=flat-square)](https://crates.io/crates/ksunami/versions) 11 | 12 |
13 | 14 | ## What's Ksunami? 15 | 16 | Ksunami is a command-line tool to produce a constant, configurable, cyclical stream of (dummy) records 17 | against a [Kafka](https://kafka.apache.org/) Cluster Topic. 18 | 19 | If you are experimenting with scalability and latency against Kafka, and are looking for ways to reproduce a continuous 20 | stream of records, following a specific traffic pattern that repeats periodically, **Ksunami** is the tool for you. 21 | 22 | Ksunami offers a way to set up the production of records, expressing the scenario as a sequence of "phases" 23 | that repeat indefinitely. Record content is configurable but random: the purpose of the tool is to help with 24 | performance and scalability testing of your infrastructure. 25 | 26 | ## Features 27 | 28 | * Production described in 4 "phases" that repeat in a cycle: `min`, `up`, `max` and `down` 29 | * All phases are configurable in terms of _seconds_ (duration) and _records per second_ (workload) 30 | * `up` and `down` can be one of many transitions, each with a specific "shape" (e.g. `linear`, `ease-in`, `spike-out`, ...) 31 | * Records `key` and `payload` are configurable with fixed, from-file and randomly-generated values 32 | * Record headers can be added to each record 33 | * Kafka producer is fully configurable, including selecting a partitioner 34 | * Built on top of the awesome [librdkafka](https://github.com/edenhill/librdkafka) 35 | 36 | ## Getting started 37 | 38 | To install `ksunami`, _currently_ you need to compile it yourself. You need the [Rust Toolchain](https://rustup.rs/) 39 | and then run: 40 | 41 | ```shell 42 | $ cargo install ksunami 43 | ``` 44 | 45 | > **Note** 46 | > We are working to provide [binary releases (i#13)](https://github.com/kafkesc/ksunami/issues/13) and 47 | > a [Homebrew (i#14)](https://github.com/kafkesc/ksunami/issues/14) installation. 48 | 49 | ### In Docker 50 | 51 | Ksunami is now available as a Docker Image: [`kafkesc/ksunami`](https://hub.docker.com/r/kafkesc/ksunami) on the Docker Hub registry. 52 | Both `linux/amd64` and `linux/arm64` images are available, based on Debian slim images. 53 | 54 | The `ENTRYPOINT` is the `ksunami` binary itself, so you can just pass arguments to the container execution. 55 | 56 | ## Usage 57 | 58 | Thanks to [clap](https://crates.io/crates/clap), Ksunami provides out-of-the-box support for _compact_ and 59 | _extended_ usage instructions. 60 | 61 | ### Compact: `ksunami -h` 62 | 63 | ``` 64 | Produce constant, configurable, cyclical waves of Kafka Records 65 | 66 | Usage: ksunami [OPTIONS] --brokers --topic --min --max 67 | 68 | Options: 69 | -b, --brokers Initial Kafka Brokers to connect to (format: 'HOST:PORT,...') 70 | --client-id Client identifier used by the internal Kafka Producer [default: ksunami] 71 | --partitioner Partitioner used by the internal Kafka Producer [default: consistent_random] [possible values: random, consistent, 72 | consistent_random, murmur2, murmur2_random, fnv1a, fnv1a_random] 73 | -c, --config Additional configuration used by the internal Kafka Producer (format: 'CONF_KEY:CONF_VAL') 74 | -t, --topic Destination Topic 75 | -k, --key Records Key (format: 'KEY_TYPE:INPUT'). 76 | -p, --payload Records Payload (format: 'PAYLOAD_TYPE:INPUT').
77 | --partition Destination Topic Partition 78 | --head Records Header(s) (format: 'HEAD_KEY:HEAD_VAL') 79 | --min Minimum amount of records/sec 80 | --min-sec How long to produce at minimum records/sec, before ramp-up [default: 60] 81 | --max Maximum amount of records/sec 82 | --max-sec How long to produce at maximum records/sec, before ramp-down [default: 60] 83 | --up Ramp-up transition from minimum to maximum records/sec [default: linear] [possible values: none, linear, ease-in, ease-out, 84 | ease-in-out, spike-in, spike-out, spike-in-out] 85 | --up-sec How long the ramp-up transition should last [default: 10] 86 | --down Ramp-down transition from maximum to minimum records/sec [default: none] [possible values: none, linear, ease-in, ease-out, 87 | ease-in-out, spike-in, spike-out, spike-in-out] 88 | --down-sec How long the ramp-down transition should last [default: 10] 89 | -v, --verbose... Verbose logging. 90 | -q, --quiet... Quiet logging. 91 | -h, --help Print help information (use `--help` for more detail) 92 | -V, --version Print version information 93 | ``` 94 | 95 | ### Extended: `ksunami --help` 96 | 97 | ``` 98 | Produce constant, configurable, cyclical waves of Kafka Records 99 | 100 | Usage: ksunami [OPTIONS] --brokers --topic --min --max 101 | 102 | Options: 103 | -b, --brokers 104 | Initial Kafka Brokers to connect to (format: 'HOST:PORT,...'). 105 | 106 | Equivalent to '--config=bootstrap.servers:host:port,...'. 107 | 108 | --client-id 109 | Client identifier used by the internal Kafka Producer. 110 | 111 | Equivalent to '--config=client.id:my-client-id'. 112 | 113 | [default: ksunami] 114 | 115 | --partitioner 116 | Partitioner used by the internal Kafka Producer. 117 | 118 | Equivalent to '--config=partitioner:random'. 119 | 120 | [default: consistent_random] 121 | 122 | Possible values: 123 | - random: 124 | Random distribution 125 | - consistent: 126 | CRC32 hash of key (Empty and NULL keys are mapped to single partition) 127 | - consistent_random: 128 | CRC32 hash of key (Empty and NULL keys are randomly partitioned) 129 | - murmur2: 130 | Java Producer compatible Murmur2 hash of key (NULL keys are mapped to single partition) 131 | - murmur2_random: 132 | Java Producer compatible Murmur2 hash of key (NULL keys are randomly partitioned): equivalent to default partitioner in Java Producer 133 | - fnv1a: 134 | FNV-1a hash of key (NULL keys are mapped to single partition) 135 | - fnv1a_random: 136 | FNV-1a hash of key (NULL keys are randomly partitioned) 137 | 138 | -c, --config 139 | Additional configuration used by the internal Kafka Producer (format: 'CONF_KEY:CONF_VAL'). 140 | 141 | To set multiple configurations keys, use this argument multiple times. See: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md. 142 | 143 | -t, --topic 144 | Destination Topic. 145 | 146 | Topic must already exist. 147 | 148 | -k, --key 149 | Records Key (format: 'KEY_TYPE:INPUT'). 150 | 151 | The supported key types are: 152 | 153 | * 'string:STR': STR is a plain string 154 | * 'file:PATH': PATH is a path to an existing file 155 | * 'alpha:LENGTH': LENGTH is the length of a random alphanumeric string 156 | * 'bytes:LENGTH': LENGTH is the length of a random bytes array 157 | * 'int:MIN-MAX': MIN and MAX are limits of an inclusive range from which an integer number is picked 158 | * 'float:MIN-MAX': MIN and MAX are limits of an inclusive range from which a float number is picked 159 | 160 | -p, --payload 161 | Records Payload (format: 'PAYLOAD_TYPE:INPUT'). 
162 | 163 | The supported payload types are: 164 | 165 | * 'string:STR': STR is a plain string 166 | * 'file:PATH': PATH is a path to an existing file 167 | * 'alpha:LENGTH': LENGTH is the length of a random alphanumeric string 168 | * 'bytes:LENGTH': LENGTH is the length of a random bytes array 169 | * 'int:MIN-MAX': MIN and MAX are limits of an inclusive range from which an integer number is picked 170 | * 'float:MIN-MAX': MIN and MAX are limits of an inclusive range from which a float number is picked 171 | 172 | --partition 173 | Destination Topic Partition. 174 | 175 | If not specified (or '-1'), Producer will rely on the Partitioner. See the '--partitioner' argument. 176 | 177 | --head 178 | Records Header(s) (format: 'HEAD_KEY:HEAD_VAL'). 179 | 180 | To set multiple headers, use this argument multiple times. 181 | 182 | --min 183 | Minimum amount of records/sec 184 | 185 | --min-sec 186 | How long to produce at minimum records/sec, before ramp-up 187 | 188 | [default: 60] 189 | 190 | --max 191 | Maximum amount of records/sec 192 | 193 | --max-sec 194 | How long to produce at maximum records/sec, before ramp-down 195 | 196 | [default: 60] 197 | 198 | --up 199 | Ramp-up transition from minimum to maximum records/sec 200 | 201 | [default: linear] 202 | 203 | Possible values: 204 | - none: Immediate transition, with no in-between values 205 | - linear: Linear transition, constant increments between values 206 | - ease-in: Slow increment at the beginning, accelerates half way through until the end 207 | - ease-out: Fast increment at the beginning, decelerates half way through until the end 208 | - ease-in-out: Slow increment at the beginning, accelerates half way, decelerates at the end 209 | - spike-in: Fastest increment at the beginning, slowest deceleration close to the end 210 | - spike-out: Slowest increment at the beginning, fastest acceleration close to the end 211 | - spike-in-out: Fastest increment at the beginning, slow half way, fastest acceleration close to the end 212 | 213 | --up-sec 214 | How long the ramp-up transition should last 215 | 216 | [default: 10] 217 | 218 | --down 219 | Ramp-down transition from maximum to minimum records/sec 220 | 221 | [default: none] 222 | 223 | Possible values: 224 | - none: Immediate transition, with no in-between values 225 | - linear: Linear transition, constant increments between values 226 | - ease-in: Slow increment at the beginning, accelerates half way through until the end 227 | - ease-out: Fast increment at the beginning, decelerates half way through until the end 228 | - ease-in-out: Slow increment at the beginning, accelerates half way, decelerates at the end 229 | - spike-in: Fastest increment at the beginning, slowest deceleration close to the end 230 | - spike-out: Slowest increment at the beginning, fastest acceleration close to the end 231 | - spike-in-out: Fastest increment at the beginning, slow half way, fastest acceleration close to the end 232 | 233 | --down-sec 234 | How long the ramp-down transition should last 235 | 236 | [default: 10] 237 | 238 | -v, --verbose... 239 | Verbose logging. 240 | 241 | * none = 'WARN' 242 | * '-v' = 'INFO' 243 | * '-vv' = 'DEBUG' 244 | * '-vvv' = 'TRACE' 245 | 246 | Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'. 247 | 248 | -q, --quiet... 249 | Quiet logging. 250 | 251 | * none = 'WARN' 252 | * '-q' = 'ERROR' 253 | * '-qq' = 'OFF' 254 | 255 | Alternatively, set environment variable 'KSUNAMI_LOG=(ERROR|WARN|INFO|DEBUG|TRACE|OFF)'. 
256 | 257 | -h, --help 258 | Print help information (use `-h` for a summary) 259 | 260 | -V, --version 261 | Print version information 262 | ``` 263 | 264 | ## Examples 265 | 266 | Here are some examples of things you can do with Ksunami. It's not an exhaustive collection, but it can hopefully give 267 | you a good starting point, and maybe some inspiration. 268 | 269 | > **Note** 270 | > Parameters in the examples are expressed as `{{ PARAMETER_NAME }}`. 271 | 272 | ### Connect to a Kafka cluster requiring [`SASL_SSL`](https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer) 273 | 274 | ```shell 275 | $ ksunami \ 276 | --brokers {{ BOOTSTRAP_BROKERS or BROKER_ENDPOINT }} \ 277 | --config security.protocol:SASL_SSL \ 278 | --config sasl.mechanisms:PLAIN \ 279 | --config sasl.username:{{ USERNAME or API_KEY }} \ 280 | --config sasl.password:{{ PASSWORD or API_SECRET }} \ 281 | --topic {{ TOPIC_NAME }} \ 282 | ... 283 | ``` 284 | 285 | ### Min/Max [log verbosity](#log-verbosity) 286 | 287 | ```shell 288 | # Set logging to TRACE level 289 | $ ksunami ... -vvv 290 | 291 | # Set logging to OFF level 292 | $ ksunami ... -qq 293 | 294 | # Set logging to ERROR level, via env var 295 | KSUNAMI_LOG=ERROR ksunami ... 296 | ``` 297 | 298 | ### Low rec/sec, but spike of 1000x once-a-day, lasting 60 seconds 299 | 300 | ```shell 301 | $ ksunami \ 302 | --topic {{ ONCE_A_DAY_SPIKE_TOPIC }} \ 303 | ... \ 304 | --min-sec 86310 \ # i.e. 24h - 90s 305 | --min 10 \ # most of the day, this topic sees 10 rec/sec 306 | \ 307 | --up-sec 10 \ # transitions from min to max within 10 sec 308 | --up spike-in \ # sudden jump: 10k rec/sec 309 | \ 310 | --max-sec 60 \ # a spike of just 60s 311 | --max 10000 \ # producing at 10k rec/sec 312 | \ 313 | --down-sec 20 \ # transitions from max to min within 20 sec 314 | --down spike-out \ # sudden drop: back to just 10 rec/sec 315 | ... 316 | ``` 317 | 318 | ### Records in a wavy pattern over the 24h cycle 319 | 320 | ```shell 321 | $ ksunami \ 322 | --topic {{ WAVY_TOPIC }} \ 323 | ... \ 324 | --min-sec 21600 \ # first quarter of the day 325 | --min 1000 \ # 1k rec/sec 326 | \ 327 | --up-sec 21600 \ # second quarter of the day 328 | --up ease-in-out \ # stable rise 329 | \ 330 | --max-sec 21600 \ # third quarter of the day 331 | --max 3000 \ # 3k rec/sec 332 | \ 333 | --down-sec 21600 \ # fourth quarter of the day 334 | --down ease-in-out \ # stable decline 335 | ... 336 | ``` 337 | 338 | ### Produce to [random partitions](#partitioner), regardless of key 339 | 340 | ```shell 341 | $ ksunami \ 342 | --topic {{ RANDOMLY_PICKED_PARTITION_TOPIC }} \ 343 | ... \ 344 | --partitioner random 345 | ``` 346 | 347 | ### Produce records with random alphanumeric `key`, but fixed `payload` from file 348 | 349 | ```shell 350 | $ ksunami \ 351 | --topic {{ RANDOM_KEYS_FIXED_PAYLOADS_TOPIC }} \ 352 | ... \ 353 | --key alpha:{{ RANDOM_ALPHANUMERIC_STRING_LENGTH }} \ 354 | --payload file:{{ PATH_TO_LOCAL_FILE }} \ 355 | ... 356 | ``` 357 | 358 | ### Production switches from `min` to `max` (and back) without [transition](#transitions) 359 | 360 | ```shell 361 | $ ksunami \ 362 | --topic {{ NO_TRANSITION_TOPIC }} \ 363 | ... \ 364 | --min-sec 120 \ 365 | --min 100 \ 366 | \ 367 | --up none \ # switch from min to max after 120s 368 | \ 369 | --max-sec 60 \ 370 | --max 1000 \ 371 | \ 372 | --down none \ # switch from max to min after 60s 373 | ...
374 | ``` 375 | 376 | ## Core concepts 377 | 378 | ### The 4 phases 379 | 380 | Ksunami is designed around the idea that the user has a specific "workload pattern" that they want to reproduce 381 | against their Kafka cluster. It might be steady/stable and never changing, or it can be a regular flux of records, 382 | interleaved with dramatic spikes that happen at regular intervals. Or it can be that you have a new customer that will 383 | bring lots more traffic to your Kafka cluster. 384 | 385 | We have elected to describe such a workload in 4 phases, `min, up, max, down`, that repeat indefinitely: 386 | 387 | ![](https://github.com/kafkesc/ksunami/raw/main/images/workload-phases.png) 388 | 389 | Each phase is associated with a `*-sec` duration argument, to choose how long each should be. 390 | Additionally, `min` and `max` are associated with an amount of _records/sec_ (i.e. _workload_), while `up` and `down` 391 | are associated with a _transition_. 392 | 393 | ### Transitions 394 | 395 | When moving between the `min` and `max` phases, the `up` and `down` phases are traversed. Those phases are "transitional": 396 | Ksunami lets you describe _"how"_ the transition between the phases happens. **Each transition has a name**, 397 | and corresponds to a [Cubic Bézier curve](https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Cubic_B%C3%A9zier_curves): 398 | leaving it to the reader to learn about this class of curves, 399 | the short story is that Cubic Bézier curves describe the _interpolation_ across **4 control points**, `P0, P1, P2, P3`. 400 | 401 | Imagine we want to plot an `up` transition between the `min` and `max` phases, on a Cartesian plane. 402 | Time is expressed by `x`, and we consider the interval `x = [0..1]` as the start and end of the transition. 403 | The volume of records produced is instead expressed by `y`, also considered in the interval `y = [0..1]`. 404 | 405 | Given this premise, `P0=(0,0)` and `P3=(1,1)` represent the start and end of the `up` transition; 406 | `P0=(0,1)` and `P3=(1,0)` represent instead the start and end of the `down` transition. 407 | 408 | Our transition curve is _encased_ in the bounding box `(0,0), (1,0), (1,1), (0,1)`, and we can describe various kinds of 409 | curves, by placing `P1` and `P2` within this bounding box.
410 | The following is the current list of transition names that Ksunami supports, plotted both for the `up` and `down` phases:
411 | 
412 | |   Transition   | `--up` | `--down` |
413 | |:--------------:|:------:|:--------:|
414 | | `none`         | - | - |
415 | | `linear`       | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_linear.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_linear.png) |
416 | | `ease-in`      | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_ease-in.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_ease-in.png) |
417 | | `ease-out`     | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_ease-out.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_ease-out.png) |
418 | | `ease-in-out`  | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_ease-in-out.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_ease-in-out.png) |
419 | | `spike-in`     | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_spike-in.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_spike-in.png) |
420 | | `spike-out`    | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_spike-out.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_spike-out.png) |
421 | | `spike-in-out` | ![](https://github.com/kafkesc/ksunami/raw/main/images/up-transition_spike-in-out.png) | ![](https://github.com/kafkesc/ksunami/raw/main/images/down-transition_spike-in-out.png) |
422 | 
423 | Please note: in the pictures above, `P0` and `P3` don't change; all the variations are generated by moving `P1` and `P2`.
424 | 
425 | **Yes!** It's possible to define additional variations, by picking new `P1` and `P2` points,
426 | and adding those as a new value of the `Transition` enum. PRs welcome.
427 | 
428 | ## Configuration in depth
429 | 
430 | To begin, give the [usage](#usage) section a look. If that is not enough, in this section we go deeper
431 | into the most important configuration aspects of Ksunami.
432 | 
433 | ### Configuring the Producer
434 | 
435 | In addition to the obvious `-b, --brokers` for the bootstrap brokers, and `--client-id` for the client identifier,
436 | it's possible to fine-tune the Producer via `--partitioner` and `-c, --config`.
437 | 
438 | #### Partitioner
439 | 
440 | Possible values for the `--partitioner` argument are:
441 | 
442 | | Partitioner name | Description |
443 | |------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------|
444 | | `random`                       | Random distribution |
445 | | `consistent`                   | CRC32 hash of key (empty and NULL keys are mapped to a single partition) |
446 | | `consistent_random` (default)  | CRC32 hash of key (empty and NULL keys are randomly partitioned) |
447 | | `murmur2`                      | Java Producer-compatible Murmur2 hash of key (NULL keys are mapped to a single partition) |
448 | | `murmur2_random`               | Java Producer-compatible Murmur2 hash of key (NULL keys are randomly partitioned); equivalent to the default partitioner in the Java Producer |
449 | | `fnv1a`                        | FNV-1a hash of key (NULL keys are mapped to a single partition) |
450 | | `fnv1a_random`                 | FNV-1a hash of key (NULL keys are randomly partitioned) |
451 | 
452 | **NOTE:** Ksunami, being based on [librdkafka](https://github.com/edenhill/librdkafka), offers "only" the partitioners
453 | provided by said library.
454 | 
455 | For example, to use a _purely random partitioner_:
456 | 
457 | ```shell
458 | $ ksunami ... --partitioner random ...
459 | ```
460 | 
461 | #### Additional configuration
462 | 
463 | As for `-c, --config`, all the values supported by [librdkafka](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md)
464 | are supported by Ksunami's producer.
465 | 
466 | For example, to set a _200ms producer linger_ and to _limit the number of producer send retries to 5_:
467 | 
468 | ```shell
469 | $ ksunami ... -c linger.ms:200 ... --config message.send.max.retries:5 ...
470 | ```
471 | 
472 | ### Records: destination and content
473 | 
474 | You can configure the destination and content of each record produced by Ksunami:
475 | 
476 | * `-t, --topic`: the destination topic to send the record to
477 | * `-k, --key` (optional): the key of the record
478 | * `-p, --payload` (optional): the payload of the record
479 | * `--partition` (optional): the specific partition inside the destination topic
480 | * `--head` (optional): one (or more) header(s) to decorate the record with
481 | 
482 | While for `--topic`, `--partition` and `--head` the input is pretty self-explanatory, `--key` and `--payload` support
483 | a richer set of options.
484 | 
485 | #### Supported `key` and `payload` types
486 | 
487 | | Format | Description |
488 | |----------------:|:---------------------------------------------------------------------------------------------|
489 | | `string:STR`    | `STR` is a plain string |
490 | | `file:PATH`     | `PATH` is a path to an existing file |
491 | | `alpha:LENGTH`  | `LENGTH` is the length of a random alphanumeric string |
492 | | `bytes:LENGTH`  | `LENGTH` is the length of a random byte array |
493 | | `int:MIN-MAX`   | `MIN` and `MAX` are the limits of an inclusive range from which an integer number is picked |
494 | | `float:MIN-MAX` | `MIN` and `MAX` are the limits of an inclusive range from which a float number is picked |
495 | 
496 | This allows a degree of flexibility in the content that is placed inside records.
497 | 
498 | For example, to produce records where the _key is an integer between 1 and 1000_
499 | and the _payload is a random sequence of 100 bytes_:
500 | 
501 | ```shell
502 | $ ksunami ... --key int:1-1000 --payload bytes:100
503 | ```
504 | 
505 | ### Records: amount and duration
506 | 
507 | As seen above when we introduced the [4 phases](#the-4-phases), Ksunami sees a workload pattern as
508 | a set of durations, workload volumes and transitions.
509 | 
510 | #### Min and Max
511 | 
512 | The `min` and `max` phases represent the range of workload that a user wants to describe, and once set up, Ksunami will
513 | cyclically go from `min` to `max` to `min` and so forth. The workload is expressed as _records/sec_, and the duration of
514 | each phase in _seconds_.
515 | 
516 | **Why seconds?** Because a second is small enough to describe any meaningful Kafka workload. Using a smaller unit would
517 | yield no real benefit, while a larger unit would lead to too coarse a workload description.
518 | 
519 | The arguments used to configure `min` and `max`:
520 | 
521 | | Argument | Description | Default |
522 | |------------:|:--------------------------------------------------------------|:-------:|
523 | | `--min`     | Minimum amount of records/sec                                  |         |
524 | | `--min-sec` | How long to produce at minimum records/sec, before ramp-up     | `60`    |
525 | | `--max`     | Maximum amount of records/sec                                  |         |
526 | | `--max-sec` | How long to produce at maximum records/sec, before ramp-down   | `60`    |
527 | 
528 | #### (Ramping) Up and Down
529 | 
530 | Again, as [seen above](#the-4-phases), between the `min` and `max` phases there are 2 transitional phases: `up` and `down`.
531 | 
532 | They exist to describe how the workload mutates as time progresses.
533 | Ksunami offers a collection of [transitions](#transitions), and they are provided as arguments: Ksunami takes care
534 | of taking the `[0..1]` curves shown above and transposing them to the actual records/sec workload the user is after.
535 | 
536 | The arguments used to configure `up` and `down`:
537 | 
538 | | Argument | Description | Default |
539 | |-------------:|:----------------------------------------------------------|:--------:|
540 | | `--up`       | Ramp-up transition from minimum to maximum records/sec    | `linear` |
541 | | `--up-sec`   | How long the ramp-up transition should last               | `10`     |
542 | | `--down`     | Ramp-down transition from maximum to minimum records/sec  | `none`   |
543 | | `--down-sec` | How long the ramp-down transition should last             | `10`     |
544 | 
545 | ### Log verbosity
546 | 
547 | Ksunami follows the long tradition of `-v/-q` to control the verbosity of its logging:
548 | 
549 | | Arguments | Log verbosity level | Default |
550 | |----------:|:--------------------|:-------:|
551 | | `-qq...`  | `OFF`               |         |
552 | | `-q`      | `ERROR`             |         |
553 | | _none_    | `WARN`              | x       |
554 | | `-v`      | `INFO`              |         |
555 | | `-vv`     | `DEBUG`             |         |
556 | | `-vvv...` | `TRACE`             |         |
557 | 
558 | It uses [log](https://crates.io/crates/log) and [env_logger](https://crates.io/crates/env_logger),
559 | so logging can be configured and fine-tuned using the environment variable `KSUNAMI_LOG`.
560 | Please take a look at the [env_logger documentation](https://docs.rs/env_logger/latest/env_logger/#enabling-logging) for
561 | more details.
562 | 
563 | ## License
564 | 
565 | Licensed under either of
566 | 
567 | * Apache License, Version 2.0
568 |   ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
569 | * MIT license
570 |   ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
571 | 
572 | at your option.
573 | 574 | ## Contribution 575 | 576 | Unless you explicitly state otherwise, any contribution intentionally submitted 577 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be 578 | dual licensed as above, without any additional terms or conditions. 579 | 580 | ## Thanks 581 | 582 | * Thanks to [this page](https://www.desmos.com/calculator/d1ofwre0fr) on [Desmos Graphing Calculator](https://www.desmos.com/calculator), 583 | for providing an easy way to plot [Cubic Bézier curves](https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Cubic_B%C3%A9zier_curves). 584 | * Thanks to [flo_curves](https://crates.io/crates/flo_curves) for providing an easy Rust crate to manipulate Bézier curves. 585 | * Thanks to [librdkafka](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md) for being an awesome Kafka library, 586 | used by pretty much all Kafka clients out there, and thanks to the Rust binding [rdkafka](https://crates.io/crates/rdkafka). 587 | * Thanks to [clap](https://crates.io/crates/clap), for being the awesome-est CLI argument parser in existence. 588 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "addr2line" 7 | version = "0.21.0" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" 10 | dependencies = [ 11 | "gimli", 12 | ] 13 | 14 | [[package]] 15 | name = "adler" 16 | version = "1.0.2" 17 | source = "registry+https://github.com/rust-lang/crates.io-index" 18 | checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" 19 | 20 | [[package]] 21 | name = "aho-corasick" 22 | version = "1.1.3" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" 25 | dependencies = [ 26 | "memchr", 27 | ] 28 | 29 | [[package]] 30 | name = "anstream" 31 | version = "0.6.14" 32 | source = "registry+https://github.com/rust-lang/crates.io-index" 33 | checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" 34 | dependencies = [ 35 | "anstyle", 36 | "anstyle-parse", 37 | "anstyle-query", 38 | "anstyle-wincon", 39 | "colorchoice", 40 | "is_terminal_polyfill", 41 | "utf8parse", 42 | ] 43 | 44 | [[package]] 45 | name = "anstyle" 46 | version = "1.0.7" 47 | source = "registry+https://github.com/rust-lang/crates.io-index" 48 | checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" 49 | 50 | [[package]] 51 | name = "anstyle-parse" 52 | version = "0.2.4" 53 | source = "registry+https://github.com/rust-lang/crates.io-index" 54 | checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" 55 | dependencies = [ 56 | "utf8parse", 57 | ] 58 | 59 | [[package]] 60 | name = "anstyle-query" 61 | version = "1.0.3" 62 | source = "registry+https://github.com/rust-lang/crates.io-index" 63 | checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" 64 | dependencies = [ 65 | "windows-sys 0.52.0", 66 | ] 67 | 68 | [[package]] 69 | name = "anstyle-wincon" 70 | version = "3.0.3" 71 | source = "registry+https://github.com/rust-lang/crates.io-index" 72 | checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" 73 | 
dependencies = [ 74 | "anstyle", 75 | "windows-sys 0.52.0", 76 | ] 77 | 78 | [[package]] 79 | name = "autocfg" 80 | version = "1.3.0" 81 | source = "registry+https://github.com/rust-lang/crates.io-index" 82 | checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" 83 | 84 | [[package]] 85 | name = "backtrace" 86 | version = "0.3.71" 87 | source = "registry+https://github.com/rust-lang/crates.io-index" 88 | checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" 89 | dependencies = [ 90 | "addr2line", 91 | "cc", 92 | "cfg-if", 93 | "libc", 94 | "miniz_oxide", 95 | "object", 96 | "rustc-demangle", 97 | ] 98 | 99 | [[package]] 100 | name = "bitflags" 101 | version = "2.5.0" 102 | source = "registry+https://github.com/rust-lang/crates.io-index" 103 | checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" 104 | 105 | [[package]] 106 | name = "cc" 107 | version = "1.0.98" 108 | source = "registry+https://github.com/rust-lang/crates.io-index" 109 | checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" 110 | 111 | [[package]] 112 | name = "cfg-if" 113 | version = "1.0.0" 114 | source = "registry+https://github.com/rust-lang/crates.io-index" 115 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 116 | 117 | [[package]] 118 | name = "cfg_aliases" 119 | version = "0.1.1" 120 | source = "registry+https://github.com/rust-lang/crates.io-index" 121 | checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" 122 | 123 | [[package]] 124 | name = "clap" 125 | version = "4.5.4" 126 | source = "registry+https://github.com/rust-lang/crates.io-index" 127 | checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" 128 | dependencies = [ 129 | "clap_builder", 130 | "clap_derive", 131 | ] 132 | 133 | [[package]] 134 | name = "clap_builder" 135 | version = "4.5.2" 136 | source = "registry+https://github.com/rust-lang/crates.io-index" 137 | checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" 138 | dependencies = [ 139 | "anstream", 140 | "anstyle", 141 | "clap_lex", 142 | "strsim", 143 | "terminal_size", 144 | ] 145 | 146 | [[package]] 147 | name = "clap_derive" 148 | version = "4.5.4" 149 | source = "registry+https://github.com/rust-lang/crates.io-index" 150 | checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" 151 | dependencies = [ 152 | "heck", 153 | "proc-macro2", 154 | "quote", 155 | "syn 2.0.65", 156 | ] 157 | 158 | [[package]] 159 | name = "clap_lex" 160 | version = "0.7.0" 161 | source = "registry+https://github.com/rust-lang/crates.io-index" 162 | checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" 163 | 164 | [[package]] 165 | name = "colorchoice" 166 | version = "1.0.1" 167 | source = "registry+https://github.com/rust-lang/crates.io-index" 168 | checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" 169 | 170 | [[package]] 171 | name = "ctrlc" 172 | version = "3.4.4" 173 | source = "registry+https://github.com/rust-lang/crates.io-index" 174 | checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" 175 | dependencies = [ 176 | "nix", 177 | "windows-sys 0.52.0", 178 | ] 179 | 180 | [[package]] 181 | name = "duct" 182 | version = "0.13.7" 183 | source = "registry+https://github.com/rust-lang/crates.io-index" 184 | checksum = "e4ab5718d1224b63252cd0c6f74f6480f9ffeb117438a2e0f5cf6d9a4798929c" 185 | dependencies = [ 186 | "libc", 
187 | "once_cell", 188 | "os_pipe", 189 | "shared_child", 190 | ] 191 | 192 | [[package]] 193 | name = "either" 194 | version = "1.12.0" 195 | source = "registry+https://github.com/rust-lang/crates.io-index" 196 | checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" 197 | 198 | [[package]] 199 | name = "env_filter" 200 | version = "0.1.0" 201 | source = "registry+https://github.com/rust-lang/crates.io-index" 202 | checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" 203 | dependencies = [ 204 | "log", 205 | "regex", 206 | ] 207 | 208 | [[package]] 209 | name = "env_logger" 210 | version = "0.11.3" 211 | source = "registry+https://github.com/rust-lang/crates.io-index" 212 | checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" 213 | dependencies = [ 214 | "anstream", 215 | "anstyle", 216 | "env_filter", 217 | "humantime", 218 | "log", 219 | ] 220 | 221 | [[package]] 222 | name = "equivalent" 223 | version = "1.0.1" 224 | source = "registry+https://github.com/rust-lang/crates.io-index" 225 | checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" 226 | 227 | [[package]] 228 | name = "errno" 229 | version = "0.3.9" 230 | source = "registry+https://github.com/rust-lang/crates.io-index" 231 | checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" 232 | dependencies = [ 233 | "libc", 234 | "windows-sys 0.52.0", 235 | ] 236 | 237 | [[package]] 238 | name = "flo_curves" 239 | version = "0.7.2" 240 | source = "registry+https://github.com/rust-lang/crates.io-index" 241 | checksum = "21c3da91984f219319fc08a775ffdbc4962d4a1a6b3181967f64b456ac84d2eb" 242 | dependencies = [ 243 | "itertools", 244 | "roots", 245 | "smallvec", 246 | ] 247 | 248 | [[package]] 249 | name = "futures" 250 | version = "0.3.30" 251 | source = "registry+https://github.com/rust-lang/crates.io-index" 252 | checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" 253 | dependencies = [ 254 | "futures-channel", 255 | "futures-core", 256 | "futures-executor", 257 | "futures-io", 258 | "futures-sink", 259 | "futures-task", 260 | "futures-util", 261 | ] 262 | 263 | [[package]] 264 | name = "futures-channel" 265 | version = "0.3.30" 266 | source = "registry+https://github.com/rust-lang/crates.io-index" 267 | checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" 268 | dependencies = [ 269 | "futures-core", 270 | "futures-sink", 271 | ] 272 | 273 | [[package]] 274 | name = "futures-core" 275 | version = "0.3.30" 276 | source = "registry+https://github.com/rust-lang/crates.io-index" 277 | checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" 278 | 279 | [[package]] 280 | name = "futures-executor" 281 | version = "0.3.30" 282 | source = "registry+https://github.com/rust-lang/crates.io-index" 283 | checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" 284 | dependencies = [ 285 | "futures-core", 286 | "futures-task", 287 | "futures-util", 288 | ] 289 | 290 | [[package]] 291 | name = "futures-io" 292 | version = "0.3.30" 293 | source = "registry+https://github.com/rust-lang/crates.io-index" 294 | checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" 295 | 296 | [[package]] 297 | name = "futures-macro" 298 | version = "0.3.30" 299 | source = "registry+https://github.com/rust-lang/crates.io-index" 300 | checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" 301 | dependencies = [ 302 | 
"proc-macro2", 303 | "quote", 304 | "syn 2.0.65", 305 | ] 306 | 307 | [[package]] 308 | name = "futures-sink" 309 | version = "0.3.30" 310 | source = "registry+https://github.com/rust-lang/crates.io-index" 311 | checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" 312 | 313 | [[package]] 314 | name = "futures-task" 315 | version = "0.3.30" 316 | source = "registry+https://github.com/rust-lang/crates.io-index" 317 | checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" 318 | 319 | [[package]] 320 | name = "futures-util" 321 | version = "0.3.30" 322 | source = "registry+https://github.com/rust-lang/crates.io-index" 323 | checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" 324 | dependencies = [ 325 | "futures-channel", 326 | "futures-core", 327 | "futures-io", 328 | "futures-macro", 329 | "futures-sink", 330 | "futures-task", 331 | "memchr", 332 | "pin-project-lite", 333 | "pin-utils", 334 | "slab", 335 | ] 336 | 337 | [[package]] 338 | name = "getrandom" 339 | version = "0.2.15" 340 | source = "registry+https://github.com/rust-lang/crates.io-index" 341 | checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" 342 | dependencies = [ 343 | "cfg-if", 344 | "libc", 345 | "wasi", 346 | ] 347 | 348 | [[package]] 349 | name = "gimli" 350 | version = "0.28.1" 351 | source = "registry+https://github.com/rust-lang/crates.io-index" 352 | checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" 353 | 354 | [[package]] 355 | name = "hashbrown" 356 | version = "0.14.5" 357 | source = "registry+https://github.com/rust-lang/crates.io-index" 358 | checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" 359 | 360 | [[package]] 361 | name = "heck" 362 | version = "0.5.0" 363 | source = "registry+https://github.com/rust-lang/crates.io-index" 364 | checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" 365 | 366 | [[package]] 367 | name = "hermit-abi" 368 | version = "0.3.9" 369 | source = "registry+https://github.com/rust-lang/crates.io-index" 370 | checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" 371 | 372 | [[package]] 373 | name = "humantime" 374 | version = "2.1.0" 375 | source = "registry+https://github.com/rust-lang/crates.io-index" 376 | checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" 377 | 378 | [[package]] 379 | name = "indexmap" 380 | version = "2.2.6" 381 | source = "registry+https://github.com/rust-lang/crates.io-index" 382 | checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" 383 | dependencies = [ 384 | "equivalent", 385 | "hashbrown", 386 | ] 387 | 388 | [[package]] 389 | name = "is_terminal_polyfill" 390 | version = "1.70.0" 391 | source = "registry+https://github.com/rust-lang/crates.io-index" 392 | checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" 393 | 394 | [[package]] 395 | name = "itertools" 396 | version = "0.10.5" 397 | source = "registry+https://github.com/rust-lang/crates.io-index" 398 | checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" 399 | dependencies = [ 400 | "either", 401 | ] 402 | 403 | [[package]] 404 | name = "itoa" 405 | version = "1.0.11" 406 | source = "registry+https://github.com/rust-lang/crates.io-index" 407 | checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" 408 | 409 | [[package]] 410 | name = "krb5-src" 411 | version = "0.3.3+1.19.2" 412 | 
source = "registry+https://github.com/rust-lang/crates.io-index" 413 | checksum = "2f5dae230b7334f85b6a968d8b0549c39e3b6ba11e973d62f4c81bb37cdf73af" 414 | dependencies = [ 415 | "duct", 416 | ] 417 | 418 | [[package]] 419 | name = "ksunami" 420 | version = "0.1.10" 421 | dependencies = [ 422 | "clap", 423 | "ctrlc", 424 | "env_logger", 425 | "flo_curves", 426 | "futures", 427 | "log", 428 | "rand", 429 | "rdkafka", 430 | "tokio", 431 | ] 432 | 433 | [[package]] 434 | name = "libc" 435 | version = "0.2.155" 436 | source = "registry+https://github.com/rust-lang/crates.io-index" 437 | checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" 438 | 439 | [[package]] 440 | name = "libz-sys" 441 | version = "1.1.16" 442 | source = "registry+https://github.com/rust-lang/crates.io-index" 443 | checksum = "5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9" 444 | dependencies = [ 445 | "cc", 446 | "libc", 447 | "pkg-config", 448 | "vcpkg", 449 | ] 450 | 451 | [[package]] 452 | name = "linux-raw-sys" 453 | version = "0.4.14" 454 | source = "registry+https://github.com/rust-lang/crates.io-index" 455 | checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" 456 | 457 | [[package]] 458 | name = "log" 459 | version = "0.4.21" 460 | source = "registry+https://github.com/rust-lang/crates.io-index" 461 | checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" 462 | 463 | [[package]] 464 | name = "memchr" 465 | version = "2.7.2" 466 | source = "registry+https://github.com/rust-lang/crates.io-index" 467 | checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" 468 | 469 | [[package]] 470 | name = "miniz_oxide" 471 | version = "0.7.3" 472 | source = "registry+https://github.com/rust-lang/crates.io-index" 473 | checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" 474 | dependencies = [ 475 | "adler", 476 | ] 477 | 478 | [[package]] 479 | name = "nix" 480 | version = "0.28.0" 481 | source = "registry+https://github.com/rust-lang/crates.io-index" 482 | checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" 483 | dependencies = [ 484 | "bitflags", 485 | "cfg-if", 486 | "cfg_aliases", 487 | "libc", 488 | ] 489 | 490 | [[package]] 491 | name = "num_cpus" 492 | version = "1.16.0" 493 | source = "registry+https://github.com/rust-lang/crates.io-index" 494 | checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" 495 | dependencies = [ 496 | "hermit-abi", 497 | "libc", 498 | ] 499 | 500 | [[package]] 501 | name = "num_enum" 502 | version = "0.5.11" 503 | source = "registry+https://github.com/rust-lang/crates.io-index" 504 | checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" 505 | dependencies = [ 506 | "num_enum_derive", 507 | ] 508 | 509 | [[package]] 510 | name = "num_enum_derive" 511 | version = "0.5.11" 512 | source = "registry+https://github.com/rust-lang/crates.io-index" 513 | checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" 514 | dependencies = [ 515 | "proc-macro-crate", 516 | "proc-macro2", 517 | "quote", 518 | "syn 1.0.109", 519 | ] 520 | 521 | [[package]] 522 | name = "object" 523 | version = "0.32.2" 524 | source = "registry+https://github.com/rust-lang/crates.io-index" 525 | checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" 526 | dependencies = [ 527 | "memchr", 528 | ] 529 | 530 | [[package]] 531 | name = "once_cell" 532 | version = "1.19.0" 533 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 534 | checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" 535 | 536 | [[package]] 537 | name = "openssl-src" 538 | version = "300.2.3+3.2.1" 539 | source = "registry+https://github.com/rust-lang/crates.io-index" 540 | checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843" 541 | dependencies = [ 542 | "cc", 543 | ] 544 | 545 | [[package]] 546 | name = "openssl-sys" 547 | version = "0.9.102" 548 | source = "registry+https://github.com/rust-lang/crates.io-index" 549 | checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" 550 | dependencies = [ 551 | "cc", 552 | "libc", 553 | "openssl-src", 554 | "pkg-config", 555 | "vcpkg", 556 | ] 557 | 558 | [[package]] 559 | name = "os_pipe" 560 | version = "1.1.5" 561 | source = "registry+https://github.com/rust-lang/crates.io-index" 562 | checksum = "57119c3b893986491ec9aa85056780d3a0f3cf4da7cc09dd3650dbd6c6738fb9" 563 | dependencies = [ 564 | "libc", 565 | "windows-sys 0.52.0", 566 | ] 567 | 568 | [[package]] 569 | name = "pin-project-lite" 570 | version = "0.2.14" 571 | source = "registry+https://github.com/rust-lang/crates.io-index" 572 | checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" 573 | 574 | [[package]] 575 | name = "pin-utils" 576 | version = "0.1.0" 577 | source = "registry+https://github.com/rust-lang/crates.io-index" 578 | checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" 579 | 580 | [[package]] 581 | name = "pkg-config" 582 | version = "0.3.30" 583 | source = "registry+https://github.com/rust-lang/crates.io-index" 584 | checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" 585 | 586 | [[package]] 587 | name = "ppv-lite86" 588 | version = "0.2.17" 589 | source = "registry+https://github.com/rust-lang/crates.io-index" 590 | checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" 591 | 592 | [[package]] 593 | name = "proc-macro-crate" 594 | version = "1.3.1" 595 | source = "registry+https://github.com/rust-lang/crates.io-index" 596 | checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" 597 | dependencies = [ 598 | "once_cell", 599 | "toml_edit", 600 | ] 601 | 602 | [[package]] 603 | name = "proc-macro2" 604 | version = "1.0.83" 605 | source = "registry+https://github.com/rust-lang/crates.io-index" 606 | checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43" 607 | dependencies = [ 608 | "unicode-ident", 609 | ] 610 | 611 | [[package]] 612 | name = "quote" 613 | version = "1.0.36" 614 | source = "registry+https://github.com/rust-lang/crates.io-index" 615 | checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" 616 | dependencies = [ 617 | "proc-macro2", 618 | ] 619 | 620 | [[package]] 621 | name = "rand" 622 | version = "0.8.5" 623 | source = "registry+https://github.com/rust-lang/crates.io-index" 624 | checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" 625 | dependencies = [ 626 | "libc", 627 | "rand_chacha", 628 | "rand_core", 629 | ] 630 | 631 | [[package]] 632 | name = "rand_chacha" 633 | version = "0.3.1" 634 | source = "registry+https://github.com/rust-lang/crates.io-index" 635 | checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" 636 | dependencies = [ 637 | "ppv-lite86", 638 | "rand_core", 639 | ] 640 | 641 | [[package]] 642 | name = "rand_core" 643 | version = "0.6.4" 644 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 645 | checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" 646 | dependencies = [ 647 | "getrandom", 648 | ] 649 | 650 | [[package]] 651 | name = "rdkafka" 652 | version = "0.36.2" 653 | source = "registry+https://github.com/rust-lang/crates.io-index" 654 | checksum = "1beea247b9a7600a81d4cc33f659ce1a77e1988323d7d2809c7ed1c21f4c316d" 655 | dependencies = [ 656 | "futures-channel", 657 | "futures-util", 658 | "libc", 659 | "log", 660 | "rdkafka-sys", 661 | "serde", 662 | "serde_derive", 663 | "serde_json", 664 | "slab", 665 | "tokio", 666 | ] 667 | 668 | [[package]] 669 | name = "rdkafka-sys" 670 | version = "4.7.0+2.3.0" 671 | source = "registry+https://github.com/rust-lang/crates.io-index" 672 | checksum = "55e0d2f9ba6253f6ec72385e453294f8618e9e15c2c6aba2a5c01ccf9622d615" 673 | dependencies = [ 674 | "libc", 675 | "libz-sys", 676 | "num_enum", 677 | "openssl-sys", 678 | "pkg-config", 679 | "sasl2-sys", 680 | ] 681 | 682 | [[package]] 683 | name = "regex" 684 | version = "1.10.4" 685 | source = "registry+https://github.com/rust-lang/crates.io-index" 686 | checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" 687 | dependencies = [ 688 | "aho-corasick", 689 | "memchr", 690 | "regex-automata", 691 | "regex-syntax", 692 | ] 693 | 694 | [[package]] 695 | name = "regex-automata" 696 | version = "0.4.6" 697 | source = "registry+https://github.com/rust-lang/crates.io-index" 698 | checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" 699 | dependencies = [ 700 | "aho-corasick", 701 | "memchr", 702 | "regex-syntax", 703 | ] 704 | 705 | [[package]] 706 | name = "regex-syntax" 707 | version = "0.8.3" 708 | source = "registry+https://github.com/rust-lang/crates.io-index" 709 | checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" 710 | 711 | [[package]] 712 | name = "roots" 713 | version = "0.0.8" 714 | source = "registry+https://github.com/rust-lang/crates.io-index" 715 | checksum = "082f11ffa03bbef6c2c6ea6bea1acafaade2fd9050ae0234ab44a2153742b058" 716 | 717 | [[package]] 718 | name = "rustc-demangle" 719 | version = "0.1.24" 720 | source = "registry+https://github.com/rust-lang/crates.io-index" 721 | checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" 722 | 723 | [[package]] 724 | name = "rustix" 725 | version = "0.38.34" 726 | source = "registry+https://github.com/rust-lang/crates.io-index" 727 | checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" 728 | dependencies = [ 729 | "bitflags", 730 | "errno", 731 | "libc", 732 | "linux-raw-sys", 733 | "windows-sys 0.52.0", 734 | ] 735 | 736 | [[package]] 737 | name = "ryu" 738 | version = "1.0.18" 739 | source = "registry+https://github.com/rust-lang/crates.io-index" 740 | checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" 741 | 742 | [[package]] 743 | name = "sasl2-sys" 744 | version = "0.1.22+2.1.28" 745 | source = "registry+https://github.com/rust-lang/crates.io-index" 746 | checksum = "05f2a7f7efd9fc98b3a9033272df10709f5ee3fa0eabbd61a527a3a1ed6bd3c6" 747 | dependencies = [ 748 | "cc", 749 | "duct", 750 | "krb5-src", 751 | "libc", 752 | "pkg-config", 753 | ] 754 | 755 | [[package]] 756 | name = "serde" 757 | version = "1.0.202" 758 | source = "registry+https://github.com/rust-lang/crates.io-index" 759 | checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395" 760 | dependencies = [ 761 | "serde_derive", 
762 | ] 763 | 764 | [[package]] 765 | name = "serde_derive" 766 | version = "1.0.202" 767 | source = "registry+https://github.com/rust-lang/crates.io-index" 768 | checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838" 769 | dependencies = [ 770 | "proc-macro2", 771 | "quote", 772 | "syn 2.0.65", 773 | ] 774 | 775 | [[package]] 776 | name = "serde_json" 777 | version = "1.0.117" 778 | source = "registry+https://github.com/rust-lang/crates.io-index" 779 | checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" 780 | dependencies = [ 781 | "itoa", 782 | "ryu", 783 | "serde", 784 | ] 785 | 786 | [[package]] 787 | name = "shared_child" 788 | version = "1.0.0" 789 | source = "registry+https://github.com/rust-lang/crates.io-index" 790 | checksum = "b0d94659ad3c2137fef23ae75b03d5241d633f8acded53d672decfa0e6e0caef" 791 | dependencies = [ 792 | "libc", 793 | "winapi", 794 | ] 795 | 796 | [[package]] 797 | name = "slab" 798 | version = "0.4.9" 799 | source = "registry+https://github.com/rust-lang/crates.io-index" 800 | checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" 801 | dependencies = [ 802 | "autocfg", 803 | ] 804 | 805 | [[package]] 806 | name = "smallvec" 807 | version = "1.13.2" 808 | source = "registry+https://github.com/rust-lang/crates.io-index" 809 | checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" 810 | 811 | [[package]] 812 | name = "strsim" 813 | version = "0.11.1" 814 | source = "registry+https://github.com/rust-lang/crates.io-index" 815 | checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" 816 | 817 | [[package]] 818 | name = "syn" 819 | version = "1.0.109" 820 | source = "registry+https://github.com/rust-lang/crates.io-index" 821 | checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" 822 | dependencies = [ 823 | "proc-macro2", 824 | "quote", 825 | "unicode-ident", 826 | ] 827 | 828 | [[package]] 829 | name = "syn" 830 | version = "2.0.65" 831 | source = "registry+https://github.com/rust-lang/crates.io-index" 832 | checksum = "d2863d96a84c6439701d7a38f9de935ec562c8832cc55d1dde0f513b52fad106" 833 | dependencies = [ 834 | "proc-macro2", 835 | "quote", 836 | "unicode-ident", 837 | ] 838 | 839 | [[package]] 840 | name = "terminal_size" 841 | version = "0.3.0" 842 | source = "registry+https://github.com/rust-lang/crates.io-index" 843 | checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" 844 | dependencies = [ 845 | "rustix", 846 | "windows-sys 0.48.0", 847 | ] 848 | 849 | [[package]] 850 | name = "tokio" 851 | version = "1.37.0" 852 | source = "registry+https://github.com/rust-lang/crates.io-index" 853 | checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" 854 | dependencies = [ 855 | "backtrace", 856 | "num_cpus", 857 | "pin-project-lite", 858 | "tokio-macros", 859 | ] 860 | 861 | [[package]] 862 | name = "tokio-macros" 863 | version = "2.2.0" 864 | source = "registry+https://github.com/rust-lang/crates.io-index" 865 | checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" 866 | dependencies = [ 867 | "proc-macro2", 868 | "quote", 869 | "syn 2.0.65", 870 | ] 871 | 872 | [[package]] 873 | name = "toml_datetime" 874 | version = "0.6.6" 875 | source = "registry+https://github.com/rust-lang/crates.io-index" 876 | checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" 877 | 878 | [[package]] 879 | name = "toml_edit" 880 | version = "0.19.15" 881 | 
source = "registry+https://github.com/rust-lang/crates.io-index" 882 | checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" 883 | dependencies = [ 884 | "indexmap", 885 | "toml_datetime", 886 | "winnow", 887 | ] 888 | 889 | [[package]] 890 | name = "unicode-ident" 891 | version = "1.0.12" 892 | source = "registry+https://github.com/rust-lang/crates.io-index" 893 | checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" 894 | 895 | [[package]] 896 | name = "utf8parse" 897 | version = "0.2.1" 898 | source = "registry+https://github.com/rust-lang/crates.io-index" 899 | checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" 900 | 901 | [[package]] 902 | name = "vcpkg" 903 | version = "0.2.15" 904 | source = "registry+https://github.com/rust-lang/crates.io-index" 905 | checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" 906 | 907 | [[package]] 908 | name = "wasi" 909 | version = "0.11.0+wasi-snapshot-preview1" 910 | source = "registry+https://github.com/rust-lang/crates.io-index" 911 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 912 | 913 | [[package]] 914 | name = "winapi" 915 | version = "0.3.9" 916 | source = "registry+https://github.com/rust-lang/crates.io-index" 917 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 918 | dependencies = [ 919 | "winapi-i686-pc-windows-gnu", 920 | "winapi-x86_64-pc-windows-gnu", 921 | ] 922 | 923 | [[package]] 924 | name = "winapi-i686-pc-windows-gnu" 925 | version = "0.4.0" 926 | source = "registry+https://github.com/rust-lang/crates.io-index" 927 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 928 | 929 | [[package]] 930 | name = "winapi-x86_64-pc-windows-gnu" 931 | version = "0.4.0" 932 | source = "registry+https://github.com/rust-lang/crates.io-index" 933 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 934 | 935 | [[package]] 936 | name = "windows-sys" 937 | version = "0.48.0" 938 | source = "registry+https://github.com/rust-lang/crates.io-index" 939 | checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" 940 | dependencies = [ 941 | "windows-targets 0.48.5", 942 | ] 943 | 944 | [[package]] 945 | name = "windows-sys" 946 | version = "0.52.0" 947 | source = "registry+https://github.com/rust-lang/crates.io-index" 948 | checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" 949 | dependencies = [ 950 | "windows-targets 0.52.5", 951 | ] 952 | 953 | [[package]] 954 | name = "windows-targets" 955 | version = "0.48.5" 956 | source = "registry+https://github.com/rust-lang/crates.io-index" 957 | checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" 958 | dependencies = [ 959 | "windows_aarch64_gnullvm 0.48.5", 960 | "windows_aarch64_msvc 0.48.5", 961 | "windows_i686_gnu 0.48.5", 962 | "windows_i686_msvc 0.48.5", 963 | "windows_x86_64_gnu 0.48.5", 964 | "windows_x86_64_gnullvm 0.48.5", 965 | "windows_x86_64_msvc 0.48.5", 966 | ] 967 | 968 | [[package]] 969 | name = "windows-targets" 970 | version = "0.52.5" 971 | source = "registry+https://github.com/rust-lang/crates.io-index" 972 | checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" 973 | dependencies = [ 974 | "windows_aarch64_gnullvm 0.52.5", 975 | "windows_aarch64_msvc 0.52.5", 976 | "windows_i686_gnu 0.52.5", 977 | "windows_i686_gnullvm", 978 | "windows_i686_msvc 0.52.5", 979 | 
"windows_x86_64_gnu 0.52.5", 980 | "windows_x86_64_gnullvm 0.52.5", 981 | "windows_x86_64_msvc 0.52.5", 982 | ] 983 | 984 | [[package]] 985 | name = "windows_aarch64_gnullvm" 986 | version = "0.48.5" 987 | source = "registry+https://github.com/rust-lang/crates.io-index" 988 | checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" 989 | 990 | [[package]] 991 | name = "windows_aarch64_gnullvm" 992 | version = "0.52.5" 993 | source = "registry+https://github.com/rust-lang/crates.io-index" 994 | checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" 995 | 996 | [[package]] 997 | name = "windows_aarch64_msvc" 998 | version = "0.48.5" 999 | source = "registry+https://github.com/rust-lang/crates.io-index" 1000 | checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" 1001 | 1002 | [[package]] 1003 | name = "windows_aarch64_msvc" 1004 | version = "0.52.5" 1005 | source = "registry+https://github.com/rust-lang/crates.io-index" 1006 | checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" 1007 | 1008 | [[package]] 1009 | name = "windows_i686_gnu" 1010 | version = "0.48.5" 1011 | source = "registry+https://github.com/rust-lang/crates.io-index" 1012 | checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" 1013 | 1014 | [[package]] 1015 | name = "windows_i686_gnu" 1016 | version = "0.52.5" 1017 | source = "registry+https://github.com/rust-lang/crates.io-index" 1018 | checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" 1019 | 1020 | [[package]] 1021 | name = "windows_i686_gnullvm" 1022 | version = "0.52.5" 1023 | source = "registry+https://github.com/rust-lang/crates.io-index" 1024 | checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" 1025 | 1026 | [[package]] 1027 | name = "windows_i686_msvc" 1028 | version = "0.48.5" 1029 | source = "registry+https://github.com/rust-lang/crates.io-index" 1030 | checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" 1031 | 1032 | [[package]] 1033 | name = "windows_i686_msvc" 1034 | version = "0.52.5" 1035 | source = "registry+https://github.com/rust-lang/crates.io-index" 1036 | checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" 1037 | 1038 | [[package]] 1039 | name = "windows_x86_64_gnu" 1040 | version = "0.48.5" 1041 | source = "registry+https://github.com/rust-lang/crates.io-index" 1042 | checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" 1043 | 1044 | [[package]] 1045 | name = "windows_x86_64_gnu" 1046 | version = "0.52.5" 1047 | source = "registry+https://github.com/rust-lang/crates.io-index" 1048 | checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" 1049 | 1050 | [[package]] 1051 | name = "windows_x86_64_gnullvm" 1052 | version = "0.48.5" 1053 | source = "registry+https://github.com/rust-lang/crates.io-index" 1054 | checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" 1055 | 1056 | [[package]] 1057 | name = "windows_x86_64_gnullvm" 1058 | version = "0.52.5" 1059 | source = "registry+https://github.com/rust-lang/crates.io-index" 1060 | checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" 1061 | 1062 | [[package]] 1063 | name = "windows_x86_64_msvc" 1064 | version = "0.48.5" 1065 | source = "registry+https://github.com/rust-lang/crates.io-index" 1066 | checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" 1067 | 1068 | 
[[package]] 1069 | name = "windows_x86_64_msvc" 1070 | version = "0.52.5" 1071 | source = "registry+https://github.com/rust-lang/crates.io-index" 1072 | checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" 1073 | 1074 | [[package]] 1075 | name = "winnow" 1076 | version = "0.5.40" 1077 | source = "registry+https://github.com/rust-lang/crates.io-index" 1078 | checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" 1079 | dependencies = [ 1080 | "memchr", 1081 | ] 1082 | --------------------------------------------------------------------------------