├── certs
│   └── .gitignore
├── src
│   ├── postgres
│   │   ├── mod.rs
│   │   └── client.rs
│   ├── generated
│   │   ├── mod.rs
│   │   └── messages.rs
│   ├── kafka
│   │   ├── mod.rs
│   │   ├── producer.rs
│   │   └── consumer.rs
│   ├── data
│   │   └── message.proto
│   ├── errors.rs
│   ├── metrics.rs
│   ├── config.rs
│   └── main.rs
├── .gitignore
├── .rusty-hook.toml
├── migrations
│   ├── 20210418201855_create_composite_index.sql
│   ├── 20210418201918_create_hypertable.sql
│   ├── 20210418201824_create_extension_tsdb.sql
│   └── 20210418201409_create_metrics_table.sql
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── linters.yml
│       └── linux.yml
├── rustfmt.toml
├── examples
│   └── simple.rs
├── config
│   └── env.dev
├── docker-compose.yml
├── LICENSE
├── Cargo.toml
├── README.md
└── Makefile

--------------------------------------------------------------------------------
/certs/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/src/postgres/mod.rs:
--------------------------------------------------------------------------------
mod client;
pub use client::DbClient;

--------------------------------------------------------------------------------
/src/generated/mod.rs:
--------------------------------------------------------------------------------
mod messages;
pub use messages::{BatchMessage, Message};

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.*.swp
doc
build
target
Cargo.lock
scratch*
__pycache__

--------------------------------------------------------------------------------
/.rusty-hook.toml:
--------------------------------------------------------------------------------
[hooks]
pre-commit = "cargo check && cargo build"

[logging]
verbose = true

--------------------------------------------------------------------------------
/src/kafka/mod.rs:
--------------------------------------------------------------------------------
mod consumer;
mod producer;
pub use consumer::KafkaConsumer;
pub use producer::KafkaProducer;

--------------------------------------------------------------------------------
/migrations/20210418201855_create_composite_index.sql:
--------------------------------------------------------------------------------
-- Add migration script here
CREATE INDEX ON metrics (name, timestamp DESC);

--------------------------------------------------------------------------------
/migrations/20210418201918_create_hypertable.sql:
--------------------------------------------------------------------------------
-- Add migration script here
SELECT create_hypertable('metrics', 'timestamp');

--------------------------------------------------------------------------------
/migrations/20210418201824_create_extension_tsdb.sql:
--------------------------------------------------------------------------------
-- Add migration script here
CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;

--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
  - package-ecosystem: cargo
    directory: "/"
    schedule:
      interval: daily
      time: "04:00"
    open-pull-requests-limit: 10

--------------------------------------------------------------------------------
/rustfmt.toml:
--------------------------------------------------------------------------------
max_width = 100
newline_style = "Unix"
merge_derives = true
force_explicit_abi = true
edition = "2018"
reorder_imports = true
hard_tabs = true
use_try_shorthand = true

--------------------------------------------------------------------------------
/migrations/20210418201409_create_metrics_table.sql:
--------------------------------------------------------------------------------
-- Add migration script here

CREATE TABLE metrics (
    timestamp TIMESTAMPTZ PRIMARY KEY,
    name TEXT NOT NULL,
    value DOUBLE PRECISION NOT NULL
);

--------------------------------------------------------------------------------
/src/data/message.proto:
--------------------------------------------------------------------------------
syntax = "proto3";

package messages;

message BatchMessage {
  repeated Message multiple_points = 3;
}

message Message {
  int64 timestamp = 1;
  string name = 2;
  float value = 3;
}

--------------------------------------------------------------------------------
/examples/simple.rs:
--------------------------------------------------------------------------------
//! Basic example.
use std::{boxed::Box, error::Error, process};

fn example() -> Result<(), Box<dyn Error>> {
	println!("Hello world");
	Ok(())
}

fn main() {
	if let Err(err) = example() {
		println!("error running example: {}", err);
		process::exit(1);
	}
}

--------------------------------------------------------------------------------
/src/generated/messages.rs:
--------------------------------------------------------------------------------
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BatchMessage {
	#[prost(message, repeated, tag = "3")]
	pub multiple_points: ::prost::alloc::vec::Vec<Message>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Message {
	#[prost(int64, tag = "1")]
	pub timestamp: i64,
	#[prost(string, tag = "2")]
	pub name: ::prost::alloc::string::String,
	#[prost(float, tag = "3")]
	pub value: f32,
}

--------------------------------------------------------------------------------
/config/env.dev:
--------------------------------------------------------------------------------
APPLICATION_POSTGRES_DATABASE_URL="postgresql://postgres:password@localhost:5432/timeseries"
# To ignore an optional setting, simply comment it out.
# Check config.rs for more details about optional vs mandatory params.
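# Note: the APPLICATION_ prefix is stripped when the config is loaded
# (see CONFIG_PREFIX in src/config.rs), so e.g. APPLICATION_KAFKA_TOPIC
# maps to the kafka_topic field of Config.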
#APPLICATION_POSTGRES_CERT_PATH="certs/postgres-ca.pem"

APPLICATION_KAFKA_TOPIC="metrics"
APPLICATION_KAFKA_BROKERS="localhost:9092"

#APPLICATION_KAFKA_USERNAME=""
#APPLICATION_KAFKA_PASSWORD=""
#APPLICATION_KAFKA_CA_CERT_PATH="certs/kafka-ca.pem"

--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
version: "3.5"
services:
  redpanda:
    image: vectorized/redpanda:latest
    container_name: redpanda-c
    ports:
      - "9092:9092"
    restart: always
    command: redpanda start --overprovisioned --smp 1 --memory 1G --reserve-memory 0M --node-id 0 --check=false

  postgres-db:
    image: timescale/timescaledb:latest-pg11
    container_name: postgres-db
    environment:
      POSTGRES_PASSWORD: password
      POSTGRES_DB: timeseries
      POSTGRES_USER: postgres
    ports:
      - "5432:5432"
    restart: always

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2016 haya14busa

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/.github/workflows/linters.yml:
--------------------------------------------------------------------------------
name: Linters
on:
  push:
  pull_request:
  schedule:
    - cron: "0 7 * * *"

jobs:
  clippy:
    name: clippy
    runs-on: ubuntu-latest
    steps:
      - name: Install sasl
        run: sudo apt update -y && sudo apt-get install -y libsasl2-dev

      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          components: clippy
          override: true
      - uses: actions-rs/clippy-check@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          args: --all-features

  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust:
          - stable
    steps:
      - name: Install sasl
        run: sudo apt update -y && sudo apt-get install -y libsasl2-dev

      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          components: rustfmt
          toolchain: ${{ matrix.rust }}
          override: true
      - uses: actions-rs/cargo@v1
        with:
          toolchain: stable
          command: fmt
          args: --all -- --check

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
authors = ["Ankur Srivastava"]
description = "Example service that reads host metrics, publishes them to Kafka as protobuf, and persists them to TimescaleDB."
documentation = "https://docs.rs/kafka-rust-example"
edition = "2018"
homepage = "https://github.com/ansrivas/kafka-rust-example"
keywords = ["kafka", "postgres", "protobuf", "metrics"]
license = "MIT"
name = "kafka_rust_example"
readme = "README.md"
repository = "https://github.com/ansrivas/kafka-rust-example"
version = "0.1.0"

[[bin]]
name = "kafka-rust-example"
path = "src/main.rs"

[build-dependencies]
prost-build = "0.10.3"

[dependencies]
anyhow = "1.0.44"
deadpool-postgres = "0.10.0"
dotenv = "0.15.0"
env_logger = "0.9.0"
envy = "0.4.2"
log = "0.4.14"
serde = "1.0.130"
serde_json = "1.0.67"
structopt = "0.3.23"
sysinfo = "0.27.2"
thiserror = "1.0.29"
postgres-native-tls = "0.5.0"
native-tls = "0.2.8"
# openssl = { version = "0.10", features = ["vendored"] }
uuid = { version = "1.0.0", features = ["v4"] }
prost = "0.10.3"
futures = "0.3.17"
# git = "https://github.com/danburkert/prost"
# rev = "423f5ec5bd165a7007a388edfb2b485d5bbf40c7"

[dependencies.chrono]
features = ["serde"]
version = "0.4.19"

[dependencies.rdkafka]
features = ["cmake-build", "gssapi", "ssl", "tokio"]
version = "0.28.0"
# git = "https://github.com/fede1024/rust-rdkafka"
# rev = "8da55e2c58752d75babb800edc0162b519dd84e2"

[dependencies.tokio]
features = ["rt-multi-thread", "macros", "io-util"]
version = "1.11.0"

[dependencies.tokio-postgres]
features = ["with-chrono-0_4"]
version = "0.7.2"

[dev-dependencies]
rusty-hook = "^0.11.2"

--------------------------------------------------------------------------------
/.github/workflows/linux.yml:
--------------------------------------------------------------------------------
name: Linux

on:
  push:
  pull_request:
  schedule:
    - cron: "0 7 * * *"
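      # Runs once a day at 07:00 UTC, the same schedule as the linters workflow.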
7 * * *" 8 | 9 | jobs: 10 | build_and_test: 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | version: 15 | - stable 16 | - nightly 17 | 18 | name: ${{ matrix.version }} - x86_64-unknown-linux-gnu 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - uses: actions/checkout@master 23 | 24 | - name: Install sasl 25 | run: sudo apt update -y && sudo apt-get install -y libsasl2-dev 26 | 27 | - name: Install ${{ matrix.version }} 28 | uses: actions-rs/toolchain@v1 29 | with: 30 | toolchain: ${{ matrix.version }}-x86_64-unknown-linux-gnu 31 | profile: minimal 32 | override: true 33 | 34 | - name: Generate Cargo.lock 35 | uses: actions-rs/cargo@v1 36 | with: 37 | command: generate-lockfile 38 | 39 | - name: Cache cargo registry 40 | uses: actions/cache@v1 41 | with: 42 | path: ~/.cargo/registry 43 | key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-registry-trimmed-${{ hashFiles('**/Cargo.lock') }} 44 | 45 | - name: Cache cargo index 46 | uses: actions/cache@v1 47 | with: 48 | path: ~/.cargo/git 49 | key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-index-trimmed-${{ hashFiles('**/Cargo.lock') }} 50 | 51 | - name: Cache cargo build 52 | uses: actions/cache@v1 53 | with: 54 | path: target 55 | key: ${{ matrix.version }}-x86_64-unknown-linux-gnu-cargo-build-trimmed-${{ hashFiles('**/Cargo.lock') }} 56 | 57 | - name: Run build 58 | uses: actions-rs/cargo@v1 59 | timeout-minutes: 40 60 | with: 61 | command: build 62 | 63 | 64 | -------------------------------------------------------------------------------- /src/errors.rs: -------------------------------------------------------------------------------- 1 | // MIT License 2 | // 3 | // Copyright (c) 2019 Ankur Srivastava 4 | // 5 | // Permission is hereby granted, free of charge, to any person obtaining a copy 6 | // of this software and associated documentation files (the "Software"), to deal 7 | // in the Software without restriction, including without limitation the rights 8 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | // copies of the Software, and to permit persons to whom the Software is 10 | // furnished to do so, subject to the following conditions: 11 | // 12 | // The above copyright notice and this permission notice shall be included in 13 | // all copies or substantial portions of the Software. 14 | // 15 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | // SOFTWARE. 

use deadpool_postgres::BuildError;
use deadpool_postgres::PoolError;
use std::io;
use thiserror::Error;

/// Default AppError which provides translation from one error type to
/// AppError.
#[derive(Error, Debug)]
pub enum AppError {
	#[error("Failed to get a db-connection from database pool")]
	PoolConn(#[from] PoolError),

	#[error("Failed to build a db-connection pool")]
	PoolBuild(#[from] BuildError),

	#[error("Failed to get a db-connection from internal tokio postgres")]
	TokioConn(#[from] tokio_postgres::Error),

	#[error(transparent)]
	Io(#[from] io::Error),

	#[error(transparent)]
	Tls(#[from] native_tls::Error),
}

--------------------------------------------------------------------------------
/src/kafka/producer.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

use log::{debug, error};
use prost::bytes::BytesMut;
use rdkafka::{
	config::ClientConfig,
	producer::{FutureProducer, FutureRecord},
};
use std::time::Duration;

pub struct KafkaProducer {
	producer: FutureProducer,
}

impl KafkaProducer {
	/// Create a new KafkaProducer connected to the given broker list.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let kproducer = KafkaProducer::new("localhost:9092");
	/// ```
	pub fn new(kafka_brokers: &str) -> KafkaProducer {
		// Create the `FutureProducer` to produce asynchronously.
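		// message.timeout.ms caps how long librdkafka keeps retrying delivery
		// of a queued message before reporting it as failed.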
		let kafka_producer: FutureProducer = ClientConfig::new()
			.set("bootstrap.servers", kafka_brokers)
			.set("message.timeout.ms", "10000")
			.create()
			.expect("Producer creation error");
		KafkaProducer {
			producer: kafka_producer,
		}
	}

	/// Create a new KafkaProducer instance from an already configured
	/// FutureProducer.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let producer = ClientConfig::new()
	///     .set("bootstrap.servers", &conf.kafka_brokers)
	///     .set("message.timeout.ms", "10000")
	///     .create()
	///     .expect("Producer creation error");
	/// let kproducer = KafkaProducer::new_with_producer(producer);
	/// ```
	pub fn new_with_producer(kafka_producer: FutureProducer) -> KafkaProducer {
		KafkaProducer {
			producer: kafka_producer,
		}
	}

	/// Publish a BytesMut record to a given topic on Kafka.
	pub async fn produce(&self, data: BytesMut, topic: &str) {
		let record = FutureRecord::to(topic).key("some key").payload(&data[..]);
		let produce_future = self.producer.send(record, Duration::from_millis(100)).await;
		match produce_future {
			Ok(message) => debug!("Status: {:?}", message),
			Err(_) => error!("Future cancelled"),
		};
	}
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# kafka-rust-example

This is an application which

- reads a bunch of metrics from your machine/docker-container
- publishes them to the Kafka topic `metrics`
- subscribes to the Kafka topic `metrics`
- reads the messages from that topic and writes them to the `metrics` table in Postgres/TimescaleDB

### Why this application?

Simply to make myself aware of the following in the Rust ecosystem:

- Protobuf in Rust
- Kafka in Rust
- Postgres in Rust

### Design:

- The application is divided into two sub-parts: `metrics-publisher` and `metrics-subscriber`. Each of these two subparts is exposed as a command-line entry point.

- `metrics-publisher`:

  - Launches an async task that collects metrics and publishes the data on a tokio::sync::mpsc channel.
  - Another async task listens on this channel and publishes the data to the Kafka topic `metrics`.
  - The messages are protobuf-encoded and are sent out in batches. More details in `src/data/message.proto`.
  - To edit the protobuf-message format, edit the `src/data/message.proto` file and re-generate the definitions using:

    ```
    make generate-proto
    ```

- `metrics-subscriber`:
  - Launches an async task that listens to the Kafka topic `metrics`.
  - Each incoming protobuf message is deserialized and published on an internal tokio::sync::mpsc channel.
  - On receiving messages, the database async task writes them to the database.

### For database migrations

```
cargo install sqlx-cli
make migrations
```

### Configuration:

There are two ways to run the application - dockerized and a local installation. The configuration and migration steps below apply to both.
The default configuration is in `config/env.dev`.
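
Both binaries hand data between async tasks over bounded `tokio::sync::mpsc` channels. As a rough, self-contained sketch of that pattern (not the actual application code, which moves protobuf-encoded `BatchMessage` payloads):

```rust
use tokio::{sync::mpsc, task};

#[tokio::main]
async fn main() {
    // Bounded channel, as used between the collector and the Kafka producer.
    let (tx, mut rx) = mpsc::channel::<Vec<u8>>(100);

    // Producer side: stands in for the metrics-collector task.
    task::spawn(async move {
        for i in 0u8..3 {
            if tx.send(vec![i]).await.is_err() {
                break; // receiver dropped
            }
        }
    });

    // Consumer side: stands in for the task that hands payloads to Kafka
    // (or, in the subscriber, to Postgres).
    while let Some(payload) = rx.recv().await {
        println!("got {} bytes", payload.len());
    }
}
```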

### Local Development Mode:

- For local development please run `docker-compose up -d`
- This will launch a bunch of docker containers locally; check it by running `docker ps`

  ```
  $ docker ps
  CONTAINER ID   IMAGE                               COMMAND                  PORTS                    NAMES
  00886360bf34   timescale/timescaledb:latest-pg11   "docker-entrypoint.s…"   0.0.0.0:5432->5432/tcp   postgres-db
  6022042c83fe   vectorized/redpanda:latest          "/entrypoint.sh redp…"   0.0.0.0:9092->9092/tcp   redpanda-c
  ```

- Run the migration using (ensure you have run `cargo install sqlx-cli`):
  ```
  make migrations
  ```
- After building the project (`cargo build`), you will find a binary inside `target/debug/kafka-rust-example`
- Run the publisher as:
  ```
  RUST_LOG=info APPLICATION_CONFIG_PATH=./config/env.dev ./target/debug/kafka-rust-example metrics-publisher
  ```
- Run the subscriber as:
  ```
  RUST_LOG=debug APPLICATION_CONFIG_PATH=./config/env.dev ./target/debug/kafka-rust-example metrics-subscriber
  ```
- Check rows in DB:
  ```
  RUST_LOG=info APPLICATION_CONFIG_PATH=./config/env.dev ./target/debug/kafka-rust-example check-db-data
  ```

## License

MIT

--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------

IS_TARPAULIN:=$(shell cargo install --list | grep tarpaulin)
IS_CI:=$(CI)
IS_SCCACHE:=""
export DATABASE_URL = postgresql://postgres:password@localhost:5432/timeseries

.DEFAULT_GOAL := help
help: ## Show available options with this Makefile
	@grep -F -h "##" $(MAKEFILE_LIST) | grep -v grep | awk 'BEGIN { FS = ":.*?##" }; { printf "%-18s %s\n", $$1,$$2 }'

.PHONY: install_sccache
install_sccache: ## Check if sccache is installed, else install it.
ifdef IS_CI
	@IS_SCCACHE=$(shell ls $(CI_PROJECT_DIR)/.cargo/bin/ | grep sccache)
else
	@IS_SCCACHE=$(shell sh -c 'type -P sccache')
endif

ifndef IS_SCCACHE
	@echo "*********** Installing sccache since its not found in the path ***********"
	@cargo install sccache
else
	@echo "*********** Found sccache in the path. Expect faster builds/tests ***********"
endif

.PHONY: install_tarpaulin
install_tarpaulin: ## Check if tarpaulin is installed, else install it.
ifndef IS_TARPAULIN
	@echo "*************** Installing tarpaulin as its not found in the path ***********"
	@RUSTFLAGS="--cfg procmacro2_semver_exempt" cargo +nightly install cargo-tarpaulin ;
endif

.PHONY : test-cover
test-cover: install_tarpaulin install_sccache ## Run all the tests with coverage
ifdef IS_CI
	@echo "*********** Running coverage command on CI ***********"
	@RUSTC_WRAPPER=$(CI_PROJECT_DIR)/.cargo/bin/sccache cargo +nightly tarpaulin --root $(CI_PROJECT_DIR) --exclude-files *.cargo/** --verbose
else
	@echo "*********** This is Local server. Running coverage now ***********"
	@RUSTC_WRAPPER=$(HOME)/.cargo/bin/sccache cargo +nightly tarpaulin --verbose
endif

.PHONY : test
test: install_sccache ## Run all the tests
ifdef IS_CI
	@echo "*********** This is CI server. Running tests now ***********"
	@RUSTC_WRAPPER=$(CI_PROJECT_DIR)/.cargo/bin/sccache cargo test -- --test-threads 1 --nocapture
else
	@echo "*********** This is Local Machine. Running tests now ***********"
	@RUSTC_WRAPPER=$(HOME)/.cargo/bin/sccache cargo test -- --test-threads 1 --nocapture
endif

clean: ## Clean the application
	@cargo clean

.PHONY: run_debug
run_debug: ## Run a quick debug build
	cargo build
	RUST_LOG=debug APPLICATION_CONFIG_PATH=./config/env.dev ./target/debug/kafka-rust-example metrics-publisher

.PHONY: cross
cross: ## Install cargo cross for cross compilation
	cargo install cross

.PHONY : build_release
build_release: test install_sccache cross ## Create a release build
	cross build --release --target=x86_64-unknown-linux-musl
	#RUSTC_WRAPPER=$(HOME)/.cargo/bin/sccache RUSTFLAGS='-C link-args=-s' cargo build --release --target=x86_64-unknown-linux-musl

.PHONY: migrations
migrations: ## Run migrations
	sqlx database reset -y
	# sqlx database drop
	# sqlx database create
	# sqlx migrate run

.PHONY : lint
lint: ## Run tests, fmt and clippy on this
	touch src/main.rs && cargo clippy --all && cargo fmt --all

.PHONY : docs
docs: ## Generate the docs for this project. Docs are located in target/doc/kafka_rust_example
	@cargo doc --bin kafka-rust-example --no-deps

.PHONY : docs-open
docs-open: docs ## Generate docs and open with xdg-open
	@xdg-open target/doc/kafka_rust_example/index.html

--------------------------------------------------------------------------------
/src/metrics.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

use crate::generated::Message;
use chrono::prelude::*;
use sysinfo::{DiskExt, System, SystemExt};

#[derive(Default)]
pub struct MetricsGenerator {
	pub client: System,
}

impl MetricsGenerator {
	/// Create a new instance of MetricsGenerator.
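	/// The underlying sysinfo System is created with new_all() so that
	/// disk and memory information is already populated.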
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let mg = MetricsGenerator::new();
	/// ```
	pub fn new() -> Self {
		MetricsGenerator {
			// new_all() also refreshes the disk list and memory stats; a bare
			// System::new() would report an empty disk list and zero memory.
			client: System::new_all(),
		}
	}

	/// Create a metrics message out of given entries.
	/// In case a timestamp is not provided, Utc::now() is used as the timestamp.
	pub(crate) fn create_metrics(name: String, value: f32, timestamp: Option<i64>) -> Message {
		Message {
			timestamp: timestamp.unwrap_or_else(|| Utc::now().timestamp_millis()),
			name,
			value,
		}
	}

	/// Generate disk stats from the running operating system.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let mg = MetricsGenerator::new();
	/// let metrics = mg.disk_stats();
	/// ```
	pub fn disk_stats(&self) -> Vec<Message> {
		let mut messages = vec![];
		for disk in self.client.disks().iter() {
			let metrics = Self::create_metrics(
				disk.name().to_os_string().into_string().unwrap(),
				disk.available_space() as f32,
				None,
			);
			messages.push(metrics);
		}
		messages
	}

	/// Generate used memory from the running operating system.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let mg = MetricsGenerator::new();
	/// let metrics = mg.used_memory();
	/// ```
	pub fn used_memory(&self) -> Vec<Message> {
		let mut messages = vec![];
		let message = Self::create_metrics(
			"used-memory".to_string(),
			self.client.used_memory() as f32,
			None,
		);
		messages.push(message);
		messages
	}
}

#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn test_used_memory() {
		let mg = MetricsGenerator::new();
		let messages = mg.used_memory();
		assert!(
			!messages.is_empty(),
			"Should be able to collect the used memory"
		);

		let message = messages[0].clone();
		assert!(message.name == "used-memory");
		assert!(
			message.value >= 0.0f32,
			"Actual value was {:?}",
			message.value
		);
	}
}

--------------------------------------------------------------------------------
/src/kafka/consumer.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

use futures::StreamExt;
use log::{debug, error, warn};

use prost::bytes::BytesMut;

use rdkafka::{
	config::{ClientConfig, RDKafkaLogLevel},
	consumer::{stream_consumer::StreamConsumer, CommitMode, Consumer},
	message::Message,
};
use tokio::{self, sync::mpsc};

pub struct KafkaConsumer {
	kafka_consumer: StreamConsumer,
}

impl KafkaConsumer {
	/// Create a new KafkaConsumer.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let consumer = KafkaConsumer::new("localhost:9092", "my-unique-group", &["topic1"]);
	/// ```
	pub fn new(kafka_brokers: &str, group_id: &str, topics: &[&str]) -> KafkaConsumer {
		// Create the `StreamConsumer` to consume asynchronously.
		let consumer: StreamConsumer = ClientConfig::new()
			.set("group.id", group_id)
			.set("bootstrap.servers", kafka_brokers)
			.set("enable.partition.eof", "false")
			.set("session.timeout.ms", "6000")
			.set("enable.auto.commit", "true")
			.set_log_level(RDKafkaLogLevel::Debug)
			.create()
			.expect("Consumer creation failed");

		consumer
			.subscribe(topics)
			.expect("Failed to subscribe to specified topics");

		KafkaConsumer {
			kafka_consumer: consumer,
		}
	}

	pub fn new_with_consumer(consumer: StreamConsumer, topics: &[&str]) -> KafkaConsumer {
		consumer
			.subscribe(topics)
			.expect("Failed to subscribe to specified topics");

		KafkaConsumer {
			kafka_consumer: consumer,
		}
	}

	/// Consume the subscribed topics and publish each raw payload to an
	/// internal mpsc channel, to be consumed by another async task which
	/// then writes the data to Postgres.
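	///
	/// # Examples
	/// Basic usage (a sketch; assumes a reachable broker):
	///
	/// ```rust,ignore
	/// let (tx, mut rx) = tokio::sync::mpsc::channel(100);
	/// let consumer = KafkaConsumer::new("localhost:9092", "my-unique-group", &["metrics"]);
	/// consumer.consume(tx).await;
	/// ```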
	pub async fn consume(&self, sender_tx: mpsc::Sender<BytesMut>) {
		debug!("initiating data consumption from kafka-topic");

		let mut message_stream = self.kafka_consumer.stream();
		while let Some(message) = message_stream.next().await {
			match message {
				Err(e) => warn!("Kafka error: {}", e),
				Ok(m) => {
					if let Some(raw_data) = m.payload() {
						debug!(
							"Received message on Kafka {:?} on offset {:?}",
							&raw_data,
							m.offset()
						);
						let payload = BytesMut::from(raw_data);
						if let Err(e) = sender_tx.send(payload).await {
							error!("receiver dropped: {:?}", e);
						}
					} else {
						warn!("Failed to read raw data from kafka topic")
					}

					if let Err(e) = self.kafka_consumer.commit_message(&m, CommitMode::Async) {
						error!("Failed to commit offset to kafka: {:?}", e);
					}
				}
			};
		}
		debug!("Returned from consumer");
	}
}

--------------------------------------------------------------------------------
/src/config.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

use std::env;

use log::info;
use serde::Deserialize;
const DEFAULT_CONFIG_ENV_KEY: &str = "APPLICATION_CONFIG_PATH";
const CONFIG_PREFIX: &str = "APPLICATION_";

struct ConfigFn {}

#[allow(dead_code)]
impl ConfigFn {
	fn fn_false() -> bool {
		false
	}
	fn fn_true() -> bool {
		true
	}
	fn fn_default_host() -> String {
		"0.0.0.0".into()
	}
	fn fn_default_port() -> String {
		"8080".into()
	}
}

#[derive(Deserialize, Debug, Default)]
pub struct Config {
	/// Run the app in debug mode
	#[serde(default = "ConfigFn::fn_true")]
	pub debug: bool,

	/// Set the address to bind the webserver on;
	/// defaults to 0.0.0.0:8080
	#[serde(default = "ConfigFn::fn_default_host")]
	pub host: String,

	/// Default port, soon deprecated.
	#[serde(default = "ConfigFn::fn_default_port")]
	pub port: String,

	/// Kafka topic on which we want to publish the data.
	pub kafka_topic: String,

	/// Kafka brokers to connect to.
	pub kafka_brokers: String,

	/// Kafka username for sasl authentication.
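	/// Optional: SASL is only enabled when the username, password and
	/// CA-cert path are all provided (see create_consumer in main.rs).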
	pub kafka_username: Option<String>,

	/// Kafka password for sasl authentication.
	pub kafka_password: Option<String>,

	/// Kafka ca-cert path for sasl authentication.
	pub kafka_ca_cert_path: Option<String>,

	/// Postgres database url
	pub postgres_database_url: String,

	/// Postgres path to cert.
	pub postgres_cert_path: Option<String>,
}

impl Config {
	// Create a new Config instance by reading from
	// environment variables.
	pub fn new() -> Config {
		// If the environment variable APPLICATION_CONFIG_PATH is set,
		// read the config file from that path, else fall back to .env
		let filename = match env::var(DEFAULT_CONFIG_ENV_KEY) {
			Ok(filepath) => filepath,
			Err(_) => ".env".into(),
		};
		info!("Trying to read the config file from [{}]", &filename);

		dotenv::from_filename(&filename).ok();
		match envy::prefixed(CONFIG_PREFIX).from_env::<Config>() {
			Ok(config) => config,
			Err(e) => panic!("Config file being read: {}. And error {:?}", &filename, e),
		}
	}
}

#[cfg(test)]
mod tests {
	use super::*;

	fn config_eq(a: &Config, b: &Config) -> bool {
		(a.host == b.host) && (a.port == b.port) && (a.debug == b.debug)
	}

	fn vec_compare(va: &[Config], vb: &[Config]) -> bool {
		// zip stops at the shortest
		(va.len() == vb.len()) && va.iter().zip(vb).all(|(a, b)| config_eq(a, b))
	}

	#[test]
	fn test_config_parsing() {
		let json = r#"
		[
			{
				"debug": false,
				"postgres_database_url": "localhost",
				"kafka_brokers": "localhost:9092",
				"kafka_topic": "metrics"
			},
			{
				"host": "127.0.0.1",
				"port": "9080",
				"postgres_database_url": "localhost",
				"kafka_brokers": "localhost:9092",
				"kafka_topic": "metrics"
			}
		]
		"#;
		let config: Vec<Config> = serde_json::from_str(json).unwrap();

		let expected_config: Vec<Config> = vec![
			Config {
				debug: false,
				host: "0.0.0.0".into(),
				port: "8080".into(),
				postgres_database_url: "localhost".into(),
				kafka_brokers: "localhost:9092".into(),
				kafka_topic: "metrics".into(),
				postgres_cert_path: None,
				kafka_ca_cert_path: None,
				kafka_username: None,
				kafka_password: None,
			},
			Config {
				debug: true,
				host: "127.0.0.1".into(),
				port: "9080".into(),
				postgres_database_url: "localhost".into(),
				kafka_brokers: "localhost:9092".into(),
				kafka_topic: "metrics".into(),
				postgres_cert_path: None,
				kafka_ca_cert_path: None,
				kafka_username: None,
				kafka_password: None,
			},
		];
		assert!(vec_compare(&config, &expected_config), "Parsing failed!");
	}

	#[test]
	fn test_config_reading() {
		let path = env::var("CARGO_MANIFEST_DIR");
		let env_file = format!("{}/config/env.dev", path.unwrap());
		env::set_var(DEFAULT_CONFIG_ENV_KEY, env_file);
		let config: Config = Config::new();
		assert!(config.kafka_brokers == "localhost:9092");
		assert!(config.kafka_topic == "metrics");
		assert!(config.debug);
	}
}

--------------------------------------------------------------------------------
/src/postgres/client.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

use crate::{
	errors::AppError,
	generated::{BatchMessage, Message},
};
use chrono::prelude::*;
use deadpool_postgres::{Manager, Pool};
use log::info;
use native_tls::{Certificate, TlsConnector};
use postgres_native_tls::MakeTlsConnector;
use std::fs;
use tokio_postgres::Config;

pub struct DbClient {
	pool: Pool,
}

impl DbClient {
	/// Create a DbClient from host, port, username, password and dbname.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let client = DbClient::new("localhost", "5432", "username", "password", "dbname");
	/// // use this client from this point on.
	/// ```
	pub fn new(host: &str, port: &str, username: &str, password: &str, dbname: &str) -> DbClient {
		let mut cfg = Config::new();
		cfg.host(host);
		cfg.port(port.parse::<u16>().unwrap());
		cfg.user(username);
		cfg.password(password);
		cfg.dbname(dbname);
		let mgr = Manager::new(cfg, tokio_postgres::NoTls);
		let connection_pool = Pool::builder(mgr)
			.max_size(16)
			.build()
			.expect("Failed to create a pool");
		DbClient {
			pool: connection_pool,
		}
	}

	/// Create a DbClient from a connection-string and an optional certificate path.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let conn_string = "postgresql://postgres:password@localhost:5432/timeseries";
	/// let client = DbClient::from(conn_string, path_to_cert).unwrap();
	/// // use this client from this point on.
	/// ```
	pub fn from(conn_string: &str, cert_path: Option<&str>) -> Result<DbClient, AppError> {
		let config = conn_string
			.parse::<Config>()
			.expect("Failed to parse db-connection string");

		let pool = if let Some(cert_path) = cert_path {
			let connector = DbClient::create_tls_connection(cert_path)?;
			let mgr = Manager::new(config, connector);
			Pool::builder(mgr).max_size(16).build()?
		} else {
			let mgr = Manager::new(config, tokio_postgres::NoTls);
			Pool::builder(mgr).max_size(16).build()?
		};

		Ok(DbClient { pool })
	}

	fn create_tls_connection(path: &str) -> Result<MakeTlsConnector, AppError> {
		let cert_file = fs::read(path)?;

		let cert = Certificate::from_pem(&cert_file)?;

		let connector = TlsConnector::builder()
			.add_root_certificate(cert)
			.build()
			.expect("Failed to create a tls connector for Postgres");

		Ok(MakeTlsConnector::new(connector))
	}

	/// Get the current count of rows in the metrics table.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let conn_string = "postgresql://postgres:password@localhost:5432/timeseries";
	/// let client = DbClient::from(conn_string, path_to_cert).unwrap();
	/// client.get_count().await.unwrap();
	/// ```
	pub async fn get_count(&self) -> Result<i64, AppError> {
		let client = self.pool.get().await?;
		let stmt = client.prepare("SELECT COUNT(*) FROM metrics").await?;
		let rows = client.query(&stmt, &[]).await?;
		let value: i64 = rows[0].get(0);
		Ok(value)
	}

	/// Insert a batch message into the database.
	///
	/// # Examples
	/// Basic usage:
	///
	/// ```rust,ignore
	/// let conn_string = "postgresql://postgres:password@localhost:5432/timeseries";
	/// let client = DbClient::from(conn_string, path_to_cert).unwrap();
	/// let batch_message = BatchMessage::default();
	/// client.insert(&batch_message).await.unwrap();
	/// ```
	pub async fn insert(&self, messages: &BatchMessage) -> Result<(), AppError> {
		let client = self.pool.get().await?;
		let stmt = client
			.prepare("INSERT INTO metrics (timestamp, name, value) VALUES ($1, $2, $3)")
			.await?;

		for message in messages.multiple_points.iter() {
			// Timestamps are produced in milliseconds
			// (see MetricsGenerator::create_metrics).
			let ts = DateTime::<Utc>::from_utc(
				NaiveDateTime::from_timestamp_millis(message.timestamp).unwrap(),
				Utc,
			);
			client
				.execute(&stmt, &[&ts, &message.name, &(message.value as f64)])
				.await?;
		}
		info!("Published data to db");
		Ok(())
	}

	/// Insert a single message into the database.
	///
	/// # Examples
	///
	/// ```rust,ignore
	/// let client = DbClient::new("localhost", "5432", "username", "password", "metrics");
	/// let some_message = Message::default();
	/// client.insert_message(&some_message).await.unwrap();
	/// ```
	pub async fn insert_message(&self, message: &Message) -> Result<(), AppError> {
		let client = self.pool.get().await?;
		let stmt = client
			.prepare("INSERT INTO metrics (timestamp, name, value) VALUES ($1, $2, $3)")
			.await?;

		// Same as in insert(): the incoming timestamp is in milliseconds.
		let ts = DateTime::<Utc>::from_utc(
			NaiveDateTime::from_timestamp_millis(message.timestamp).unwrap(),
			Utc,
		);
		client
			.execute(&stmt, &[&ts, &message.name, &(message.value as f64)])
			.await?;
		info!("Published data to db");
		Ok(())
	}

	/// Truncate the table which contains all the metrics.
	///
	/// # Examples
	///
	/// ```rust,ignore
	/// let client = DbClient::new("localhost", "5432", "username", "password", "metrics");
	/// // Clean up the DB first
	/// client.truncate().await.unwrap();
	/// ```
	#[allow(dead_code)]
	pub(crate) async fn truncate(&self) -> Result<(), AppError> {
		let client = self.pool.get().await?;
		let stmt = client.prepare("TRUNCATE TABLE metrics").await?;
		client.execute(&stmt, &[]).await?;

		info!("Truncated the metrics table");
		Ok(())
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::MetricsGenerator;
	#[tokio::test]
	async fn test_insert_single_message() {
		let client = DbClient::new("localhost", "5432", "postgres", "password", "timeseries");

		// Clean up the DB first
		client.truncate().await.unwrap();
		// Now insert a new row
		let message = MetricsGenerator::create_metrics("user".to_string(), 321f32, None);
		client.insert_message(&message).await.unwrap();

		// Now get the count of rows
		let expected = 1;
		let actual = client.get_count().await.unwrap();
		assert!(
			actual == expected,
			"Failed tests expected: {:?}, actual: {:?}",
			expected,
			actual
		);
	}

	#[tokio::test]
	async fn test_insert_batch_message() {
		let client = DbClient::new("localhost", "5432", "postgres", "password", "timeseries");

		// Clean up the DB first
		client.truncate().await.unwrap();

		// Now insert two new rows, with explicit, distinct timestamps so they
		// cannot collide on the timestamp PRIMARY KEY.
		let message1 = MetricsGenerator::create_metrics("user1".to_string(), 321f32, Some(1_000));
		let message2 = MetricsGenerator::create_metrics("user2".to_string(), 321f32, Some(2_000));

		let batch_message = BatchMessage {
			multiple_points: vec![message1, message2],
		};
		client.insert(&batch_message).await.unwrap();

		// Now get the count of rows
		let expected = 2;
		let actual = client.get_count().await.unwrap();
		assert!(
			actual == expected,
			"Failed tests expected: {:?}, actual: {:?}",
			expected,
			actual
		);
	}
}

--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
// MIT License
//
// Copyright (c) 2019 Ankur Srivastava
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

pub mod config;
mod errors;
pub mod generated;
pub mod kafka;
pub mod metrics;
pub mod postgres;

use prost::bytes::BytesMut;

use config::Config;
use uuid::Uuid;

use generated::BatchMessage;
use kafka::{KafkaConsumer, KafkaProducer};
use log::{debug, error, info};
use metrics::MetricsGenerator;
use postgres::DbClient;
use prost::Message as PMessage;
use rdkafka::{
	config::{ClientConfig, RDKafkaLogLevel},
	consumer::stream_consumer::StreamConsumer,
};
use std::sync::Arc;
use structopt::{clap::Shell, StructOpt};
use tokio::{self, sync::mpsc, task, time, time::Duration};

#[derive(Debug, StructOpt)]
pub enum Command {
	#[structopt(name = "metrics-publisher")]
	/// Start publishing the metrics data to a kafka-topic.
	MetricsPublisher,

	#[structopt(name = "check-db-data")]
	/// Check the current count of rows in database.
	CheckDbData,

	#[structopt(name = "metrics-subscriber")]
	/// Subscribe to a kafka-topic and write data to database.
	MetricsSubscriber,
}

#[derive(Debug, StructOpt)]
pub struct Metrics {
	#[structopt(subcommand)]
	pub command: Command,
}

/// Create a consumer based on the given configuration.
///
/// If a CA-cert path, username and password are all provided, a
/// SASL-enabled client is created; otherwise a plain client.
fn create_consumer(conf: Arc<Config>) -> KafkaConsumer {
	let is_tls = conf.kafka_ca_cert_path.is_some()
		&& conf.kafka_password.is_some()
		&& conf.kafka_username.is_some();

	if is_tls {
		info!("TLS is enabled. Will try to create a secure client");
		let username = conf
			.kafka_username
			.as_deref()
			.expect("Kafka username is required.");
		let password = conf
			.kafka_password
			.as_deref()
			.expect("Kafka password is required.");
		let ca_path = conf
			.kafka_ca_cert_path
			.as_deref()
			.expect("Kafka ca certificate is required.");
		let consumer: StreamConsumer = ClientConfig::new()
			.set("group.id", "some-random-id")
			.set("bootstrap.servers", &conf.kafka_brokers)
			.set("enable.partition.eof", "false")
			.set("session.timeout.ms", "6000")
			.set("enable.auto.commit", "true")
			.set("sasl.mechanisms", "PLAIN")
			.set("security.protocol", "SASL_SSL")
			.set("sasl.username", username)
			.set("sasl.password", password)
			.set("ssl.ca.location", ca_path)
			.set_log_level(RDKafkaLogLevel::Debug)
			.create()
			.expect("Consumer creation failed");
		return KafkaConsumer::new_with_consumer(consumer, &[&conf.kafka_topic]);
	}

	let group_id = Uuid::new_v4();
	KafkaConsumer::new(
		&conf.kafka_brokers,
		&group_id.to_string(),
		&[&conf.kafka_topic],
	)
}

/// Create a producer based on the given configuration.
///
/// If a CA-cert path, username and password are all provided, a
/// SASL-enabled client is created; otherwise a plain client.
fn create_producer(conf: Arc<Config>) -> KafkaProducer {
	let is_tls = conf.kafka_ca_cert_path.is_some()
		&& conf.kafka_password.is_some()
		&& conf.kafka_username.is_some();

	if is_tls {
		let username = conf
			.kafka_username
			.as_deref()
			.expect("Kafka username is required.");
		let password = conf
			.kafka_password
			.as_deref()
			.expect("Kafka password is required.");
		let ca_path = conf
			.kafka_ca_cert_path
			.as_deref()
			.expect("Kafka ca certificate is required.");
		let producer = ClientConfig::new()
			.set("bootstrap.servers", &conf.kafka_brokers)
			.set("message.timeout.ms", "10000")
			.set("sasl.mechanisms", "PLAIN")
			.set("security.protocol", "SASL_SSL")
			.set("sasl.username", username)
			.set("sasl.password", password)
			.set("ssl.ca.location", ca_path)
			.create()
			.expect("Producer creation error");
		return KafkaProducer::new_with_producer(producer);
	}

	KafkaProducer::new(&conf.kafka_brokers)
}

/// Handle the message subscription command.
///
/// This subscribes to the kafka-topic on which metrics are being published.
/// Each incoming message is deserialized back to a BatchMessage and
/// published on an internal channel, from which the data is read and
/// written to Postgres.
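/// A failed database insert is retried once, best-effort, before the
/// batch is dropped.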
async fn handle_message_receiving(config: Arc<Config>, dbclient: DbClient) {
	let (dbtx, mut dbrx) = mpsc::channel(100);
	task::spawn(async move {
		info!("Waiting to receive metrics-data on incoming queue.");
		while let Some(raw_data) = dbrx.recv().await {
			debug!("Received data on the incoming channel to write in database");
			if let Ok(bmsg) = BatchMessage::decode(raw_data) {
				if let Err(e) = dbclient.insert(&bmsg).await {
					error!("Failed to write data to the db: {:?}", e);
					// Best-effort single retry; if it fails again the batch is dropped.
					let _ = dbclient.insert(&bmsg).await;
				}
			} else {
				error!("Failed to decode the incoming message from kafka");
			};
		}
	});

	debug!("Starting to consume the data");
	let conf = config.clone();
	let kconsumer = create_consumer(conf);
	kconsumer.consume(dbtx).await;
}

/// Handle the message publishing command.
///
/// This generates metrics, converts them to protobuf messages of type
/// BatchMessage and encodes them to bytes.
/// The bytes are sent to an internal channel which is then consumed
/// by a kafka producer to publish the message to a kafka-topic.
async fn handle_message_publishing(config: Arc<Config>) {
	// Create a mpsc channel to publish data to
	let (tx, mut rx) = mpsc::channel(100);
	let mut batch_messages = BatchMessage::default();

	// Spawn an async task to collect metrics
	task::spawn(async move {
		debug!("Starting to produce the data");

		let mut interval = time::interval(Duration::from_millis(1000));
		loop {
			interval.tick().await;
			// This is in its own scope so that it gets collected and
			// ulimits are respected
			{
				let metrics_generator = MetricsGenerator::new();
				let mut collected = metrics_generator.used_memory();
				let disks = metrics_generator.disk_stats();
				// ...
				// ... simulate some more statistics here and extend them all into the metrics vector
				collected.extend(disks);
				batch_messages.multiple_points = collected;
			}
			let mut buffer = BytesMut::with_capacity(batch_messages.encoded_len());
			batch_messages.encode(&mut buffer).unwrap();

			if let Err(e) = tx.send(buffer).await {
				error!("receiver dropped {e}", e = e);
				return;
			};
		}
	});

	let conf = config.clone();

	// Create a kafka producer
	let kproducer = create_producer(conf);

	// Start reading data in the main thread
	// and publish it to Kafka
	while let Some(data) = rx.recv().await {
		debug!("Received data on the incoming channel");
		kproducer.produce(data, &config.kafka_topic).await;
		info!(
			"Published data successfully on kafka topic: {}",
			&config.kafka_topic
		);
	}
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
	env_logger::init();

	Metrics::clap().gen_completions(env!("CARGO_PKG_NAME"), Shell::Bash, "target");
	let opt = Metrics::from_args();
	debug!("starting up");

	let app_config = Arc::new(Config::new());

	let dbclient = DbClient::from(
		&app_config.postgres_database_url,
		app_config.postgres_cert_path.as_deref(),
	)?;

	match opt.command {
		Command::MetricsPublisher => {
			info!("Started metrics publishing to kafka-topic");
			handle_message_publishing(app_config.clone()).await
		}
		Command::MetricsSubscriber => {
			info!("Subscriber was invoked");
			handle_message_receiving(app_config.clone(), dbclient).await
		}
		Command::CheckDbData => {
			let rows = dbclient.get_count().await?;
			info!("Current count of rows in DB is {:?}", rows);
		}
	};
	Ok(())
}
--------------------------------------------------------------------------------