├── .editorconfig ├── .github ├── dependabot.yml └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Cargo.lock ├── Cargo.toml ├── LICENSE.md ├── README.md ├── build.rs ├── ci ├── cargo-build-test.sh ├── cargo-install-all.sh ├── create-tarball.sh └── solana-version.sh ├── proto └── event.proto ├── rust-toolchain.toml ├── scripts └── update-rust-toolchain.sh └── src ├── config.rs ├── event.rs ├── filter.rs ├── lib.rs ├── plugin.rs ├── prom.rs ├── publisher.rs └── version.rs /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | insert_final_newline = true 5 | end_of_line = lf 6 | 7 | [*.rs] 8 | indent_style = space 9 | indent_size = 4 10 | 11 | [*.proto] 12 | indent_style = space 13 | indent_size = 2 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags: 4 | - 'v*' 5 | pull_request: 6 | paths: 7 | - '.github/workflows/release.yml' 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | release: 14 | runs-on: ubuntu-22.04 15 | steps: 16 | - uses: actions/checkout@v2 17 | 18 | - name: Set env vars 19 | run: | 20 | rust_stable=$(rustc --version | awk '{print $2}') 21 | echo "RUST_STABLE=$rust_stable" | tee -a $GITHUB_ENV 22 | 23 | - name: Install build requirements 24 | if: runner.os == 'Linux' 25 | run: | 26 | sudo apt-get update 27 | sudo apt-get install -y \ 28 | gnupg \ 29 | libudev-dev \ 30 | libsasl2-dev \ 31 | libssl-dev \ 32 | libzstd-dev 33 | echo 'deb 
http://ftp.debian.org/debian stable main' | sudo tee -a /etc/apt/sources.list.d/debian.list 34 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 35 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 0E98404D386FA1D9 36 | sudo apt-get update 37 | sudo apt-get satisfy -f -y "protobuf-compiler (>=3.15)" 38 | 39 | - uses: actions-rs/toolchain@v1 40 | with: 41 | toolchain: ${{ env.RUST_STABLE }} 42 | override: true 43 | profile: minimal 44 | components: rustfmt 45 | 46 | - name: Check Solana version 47 | run: | 48 | echo "CI_TAG=${GITHUB_REF#refs/*/}" >> "$GITHUB_ENV" 49 | echo "CI_OS_NAME=linux" >> "$GITHUB_ENV" 50 | 51 | SOLANA_VERSION="$(./ci/solana-version.sh)" 52 | SOLANA_VERSION="v${SOLANA_VERSION#=}" 53 | echo "SOLANA_VERSION=$SOLANA_VERSION" >> "$GITHUB_ENV" 54 | 55 | - name: Build release tarball 56 | run: ./ci/create-tarball.sh 57 | 58 | - name: Release 59 | uses: softprops/action-gh-release@v1 60 | if: startsWith(github.ref, 'refs/tags/') 61 | with: 62 | body: | 63 | solana-accountsdb-plugin-kafka ${{ env.CI_TAG }} 64 | solana ${{ env.SOLANA_VERSION }} 65 | rust ${{ env.RUST_STABLE }} 66 | files: | 67 | solana-accountsdb-plugin-kafka-release* 68 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # Source: 2 | # https://github.com/solana-labs/solana-accountsdb-plugin-postgres/blob/master/.github/workflows/test.yml 3 | 4 | on: 5 | push: 6 | pull_request: 7 | 8 | env: 9 | CARGO_TERM_COLOR: always 10 | 11 | jobs: 12 | test: 13 | runs-on: ubuntu-22.04 14 | steps: 15 | - uses: actions/checkout@v2 16 | 17 | - name: Set env vars 18 | run: | 19 | rust_stable=$(rustc --version | awk '{print $2}') 20 | echo "RUST_STABLE=$rust_stable" | tee -a $GITHUB_ENV 21 | 22 | - name: Install build requirements 23 | if: runner.os == 'Linux' 24 | run: | 25 | sudo apt-get update 26 | sudo apt-get 
install -y \ 27 | gnupg \ 28 | libudev-dev \ 29 | libsasl2-dev \ 30 | libssl-dev \ 31 | libzstd-dev 32 | echo 'deb http://ftp.debian.org/debian stable main' | sudo tee -a /etc/apt/sources.list.d/debian.list 33 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 34 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 0E98404D386FA1D9 35 | sudo apt-get update 36 | sudo apt-get satisfy -f -y "protobuf-compiler (>=3.15)" 37 | 38 | - uses: actions-rs/toolchain@v1 39 | with: 40 | toolchain: ${{ env.RUST_STABLE }} 41 | override: true 42 | profile: minimal 43 | components: rustfmt, clippy 44 | 45 | - uses: actions/cache@v3 46 | with: 47 | path: | 48 | ~/.cargo/registry 49 | ~/.cargo/git 50 | key: ${{ runner.os }}-cargo-build-${{ hashFiles('**/Cargo.lock', 'rust-toolchain.toml') }}-${{ env.RUST_STABLE }} 51 | 52 | - name: cargo fmt 53 | uses: actions-rs/cargo@v1 54 | with: 55 | command: fmt 56 | args: --all -- --check 57 | 58 | - name: cargo clippy 59 | uses: actions-rs/cargo@v1 60 | with: 61 | command: clippy 62 | args: --workspace --all-targets -- --deny=warnings 63 | 64 | - name: Build 65 | run: ./ci/cargo-build-test.sh 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .idea/ 3 | *.json 4 | !example-config.json 5 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.1.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: mixed-line-ending 8 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = 
"solana-accountsdb-plugin-kafka" 3 | description = "Solana AccountsDb plugin for Kafka" 4 | authors = ["Blockdaemon"] 5 | version = "0.1.8+solana.2.2.7" 6 | edition = "2021" 7 | license = "Apache-2.0" 8 | repository = "https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka" 9 | homepage = "https://blockdaemon.com" 10 | keywords = ["solana", "blockchain", "kafka", "plugin"] 11 | categories = ["cryptography::cryptocurrencies", "database", "network-programming"] 12 | 13 | [lib] 14 | crate-type = ["cdylib", "rlib"] 15 | 16 | [dependencies] 17 | agave-geyser-plugin-interface = { version = "~2.2" } 18 | solana-logger = { version = "*" } 19 | solana-message = { version = "~2.2" } 20 | solana-pubkey = { version = "~2.2" } 21 | solana-transaction-status = { version = "~2.2" } 22 | 23 | hyper = { version = "*", features = ["http1", "server"] } 24 | hyper-util = { version = "*", features = ["tokio", "server"] } 25 | lazy_static = "*" 26 | log = "*" 27 | prometheus = "*" 28 | prost = "*" 29 | rdkafka = { version = "*", features = ["ssl", "sasl"] } 30 | serde = { version = "*", features = ["derive"] } 31 | serde_json = { version = "*" } 32 | tokio = { version = "*", features = ["rt-multi-thread", "time", "macros", "sync"] } 33 | tower = { version = "*", features = ["make"] } 34 | bytes = "*" 35 | http-body-util = "*" 36 | http = "*" 37 | 38 | [build-dependencies] 39 | anyhow = "*" 40 | cargo-lock = "*" 41 | git-version = "*" 42 | prost-build = "*" 43 | vergen = { version = "*", features = ["build", "rustc"] } 44 | 45 | [package.metadata.docs.rs] 46 | targets = ["x86_64-unknown-linux-gnu"] 47 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Solana AccountsDB Plugin for Kafka 2 | 3 | Kafka publisher for use with Solana's [plugin framework](https://docs.solana.com/developing/plugins/geyser-plugins). 4 | 5 | ## Installation 6 | 7 | ### Binary releases 8 | 9 | Find binary releases [here](https://github.com/Blockdaemon/solana-accountsdb-plugin-kafka/releases). 10 | 11 | ### Building from source 12 | 13 | #### Prerequisites 14 | 15 | You will need version 3.15 or later of the protobuf compiler `protoc` installed, since it is required for the `--experimental_allow_proto3_optional` option. 16 | 17 | Note that as of this writing, ubuntu 22.04 still has an obsolete of `protoc`. 
18 | 19 | For ubuntu, CI imports one from debian: 20 | 21 | ```shell 22 | echo 'deb http://ftp.debian.org/debian stable main' | sudo tee -a /etc/apt/sources.list.d/debian.list 23 | sudo apt-get update 24 | sudo apt-get satisfy -f -y "protobuf-compiler (>=3.15)" 25 | ``` 26 | 27 | You may need the appropriate debian keys: 28 | 29 | ```shell 30 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 31 | sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 0E98404D386FA1D9 32 | ``` 33 | 34 | #### Build 35 | 36 | ```shell 37 | cargo build --release 38 | ``` 39 | 40 | - Linux: `./target/release/libsolana_accountsdb_plugin_kafka.so` 41 | - macOS: `./target/release/libsolana_accountsdb_plugin_kafka.dylib` 42 | 43 | **Important:** Solana's plugin interface requires the build environment of the Solana validator and this plugin to be **identical**. 44 | 45 | This includes the Solana version and Rust compiler version. 46 | Loading a plugin targeting wrong versions will result in memory corruption and crashes. 47 | 48 | ## Config 49 | 50 | Config is specified via the plugin's JSON config file. 
51 | 52 | ### Example Config 53 | 54 | ```json 55 | { 56 | "libpath": "target/release/libsolana_accountsdb_plugin_kafka.so", 57 | "kafka": { 58 | "bootstrap.servers": "localhost:9092", 59 | "request.required.acks": "1", 60 | "message.timeout.ms": "30000", 61 | "compression.type": "lz4", 62 | "partitioner": "murmur2_random", 63 | "statistics.interval.ms": "1000" 64 | }, 65 | "shutdown_timeout_ms": 30000, 66 | "filters": [{ 67 | "update_account_topic": "solana.testnet.account_updates", 68 | "slot_status_topic": "solana.testnet.slot_status", 69 | "transaction_topic": "solana.testnet.transactions", 70 | "program_ignores": [ 71 | "Sysvar1111111111111111111111111111111111111", 72 | "Vote111111111111111111111111111111111111111" 73 | ], 74 | "publish_all_accounts": false, 75 | "wrap_messages": false 76 | }] 77 | } 78 | ``` 79 | 80 | ### Reference 81 | 82 | - `libpath`: Path to Kafka plugin 83 | - `kafka`: [`librdkafka` config options](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md). 84 | - `shutdown_timeout_ms`: Time the plugin is given to flush out all messages to Kafka upon exit request. 85 | - `prometheus`: Optional port to provide metrics in Prometheus format. 86 | - `filters`: Vec of filters with next fields: 87 | - `update_account_topic`: Topic name of account updates. Omit to disable. 88 | - `slot_status_topic`: Topic name of slot status update. Omit to disable. 89 | - `transaction_topic`: Topic name of transaction update. Omit to disable. 90 | - `program_ignores`: Account addresses to ignore (see Filtering below). 91 | - `program_filters`: Solana program IDs to include. 92 | - `account_filters`: Solana accounts to include. 93 | - `publish_all_accounts`: Publish all accounts on startup. Omit to disable. 94 | - `include_vote_transactions`: Include Vote transactions. 95 | - `include_failed_transactions`: Include failed transactions. 96 | - `wrap_messages`: Wrap all messages in a unified wrapper object. Omit to disable (see Message Wrapping below). 
97 | 98 | ### Message Keys 99 | 100 | The message types are keyed as follows: 101 | 102 | - **Account update:** account address (public key) 103 | - **Slot status:** slot number 104 | - **Transaction notification:** transaction signature 105 | 106 | ### Filtering 107 | 108 | If `program_ignores` are specified, then these addresses will be filtered out of the account updates 109 | and transaction notifications. More specifically, account update messages for these accounts will not be emitted, 110 | and transaction notifications for any transaction involving these accounts will not be emitted. 111 | 112 | ### Message Wrapping 113 | 114 | In some cases it may be desirable to send multiple types of messages to the same topic, 115 | for instance to preserve relative order. In this case it is helpful if all messages conform to a single schema. 116 | Setting `wrap_messages` to true will wrap all three message types in a uniform wrapper object so that they 117 | conform to a single schema. 118 | 119 | Note that if `wrap_messages` is true, in order to avoid key collision, the message keys are prefixed with a single byte, 120 | which is dependent on the type of the message being wrapped. Account update message keys are prefixed with 121 | 65 (A), slot status keys with 83 (S), and transaction keys with 84 (T). 122 | 123 | ## Buffering 124 | 125 | The Kafka producer acts strictly non-blocking to allow the Solana validator to sync without much induced lag. 126 | This means incoming events from the Solana validator will get buffered and published asynchronously. 127 | 128 | When the publishing buffer is exhausted any additional events will get dropped. 129 | This can happen when Kafka brokers are too slow or the connection to Kafka fails. 130 | Therefore it is crucial to choose a sufficiently large buffer. 
131 | 132 | The buffer size can be controlled using `librdkafka` config options, including: 133 | 134 | - `queue.buffering.max.messages`: Maximum number of messages allowed on the producer queue. 135 | - `queue.buffering.max.kbytes`: Maximum total message size sum allowed on the producer queue. 136 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use cargo_lock::Lockfile; 2 | use vergen::{BuildBuilder, Emitter, RustcBuilder}; 3 | 4 | fn main() -> anyhow::Result<()> { 5 | // Proto 6 | let mut config = prost_build::Config::new(); 7 | let proto_file = "proto/event.proto"; 8 | 9 | println!("cargo:rerun-if-changed={}", proto_file); 10 | 11 | config.boxed(".blockdaemon.solana.accountsdb_plugin_kafka.types.MessageWrapper"); 12 | config.protoc_arg("--experimental_allow_proto3_optional"); 13 | config.compile_protos(&[proto_file], &["proto/"])?; 14 | 15 | // Version metrics 16 | let _ = Emitter::default() 17 | .add_instructions(&BuildBuilder::all_build()?)? 18 | .add_instructions(&RustcBuilder::all_rustc()?)? 
19 | .emit(); 20 | 21 | // vergen git version does not looks cool 22 | println!( 23 | "cargo:rustc-env=GIT_VERSION={}", 24 | git_version::git_version!() 25 | ); 26 | 27 | // Extract Solana version 28 | let lockfile = Lockfile::load("./Cargo.lock")?; 29 | println!( 30 | "cargo:rustc-env=SOLANA_SDK_VERSION={}", 31 | get_pkg_version(&lockfile, "solana-sdk") 32 | ); 33 | 34 | Ok(()) 35 | } 36 | 37 | fn get_pkg_version(lockfile: &Lockfile, pkg_name: &str) -> String { 38 | lockfile 39 | .packages 40 | .iter() 41 | .filter(|pkg| pkg.name.as_str() == pkg_name) 42 | .map(|pkg| pkg.version.to_string()) 43 | .collect::>() 44 | .join(",") 45 | } 46 | -------------------------------------------------------------------------------- /ci/cargo-build-test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Source: 4 | # https://github.com/solana-labs/solana-accountsdb-plugin-postgres/blob/master/ci/cargo-build-test.sh 5 | 6 | set -e 7 | cd "$(dirname "$0")/.." 
8 | 9 | export RUSTFLAGS="-D warnings" 10 | export RUSTBACKTRACE=1 11 | 12 | set -x 13 | 14 | # Build/test all host crates 15 | rust_stable=$(rustc --version | awk '{print $2}') 16 | cargo +"$rust_stable" build 17 | cargo +"$rust_stable" test -- --nocapture 18 | 19 | exit 0 20 | -------------------------------------------------------------------------------- /ci/cargo-install-all.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | usage() { 6 | exitcode=0 7 | if [[ -n "$1" ]]; then 8 | exitcode=1 9 | echo "Error: $*" 10 | fi 11 | cat <] [--debug] 13 | EOF 14 | exit $exitcode 15 | } 16 | 17 | case "$CI_OS_NAME" in 18 | osx) 19 | libExt=dylib 20 | ;; 21 | linux) 22 | libExt=so 23 | ;; 24 | *) 25 | echo CI_OS_NAME unsupported 26 | exit 1 27 | ;; 28 | esac 29 | 30 | maybeRustVersion= 31 | installDir= 32 | buildVariant=release 33 | maybeReleaseFlag=--release 34 | 35 | while [[ -n $1 ]]; do 36 | if [[ ${1:0:1} = - ]]; then 37 | if [[ $1 = --debug ]]; then 38 | maybeReleaseFlag= 39 | buildVariant=debug 40 | shift 41 | else 42 | usage "Unknown option: $1" 43 | fi 44 | elif [[ ${1:0:1} = \+ ]]; then 45 | maybeRustVersion=$1 46 | shift 47 | else 48 | installDir=$1 49 | shift 50 | fi 51 | done 52 | 53 | if [[ -z "$installDir" ]]; then 54 | usage "Install directory not specified" 55 | exit 1 56 | fi 57 | 58 | installDir="$(mkdir -p "$installDir"; cd "$installDir"; pwd)" 59 | 60 | echo "Install location: $installDir ($buildVariant)" 61 | 62 | cd "$(dirname "$0")"/.. 
63 | 64 | SECONDS=0 65 | 66 | mkdir -p "$installDir/lib" 67 | 68 | ( 69 | set -x 70 | # shellcheck disable=SC2086 # Don't want to double quote $rust_version 71 | cargo $maybeRustVersion build $maybeReleaseFlag --lib 72 | ) 73 | 74 | cp -fv "target/$buildVariant/libsolana_accountsdb_plugin_kafka.$libExt" "$installDir"/lib/ 75 | 76 | echo "Done after $SECONDS seconds" 77 | -------------------------------------------------------------------------------- /ci/create-tarball.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | cd "$(dirname "$0")/.." 5 | 6 | case "$CI_OS_NAME" in 7 | osx) 8 | _cputype="$(uname -m)" 9 | if [[ $_cputype = arm64 ]]; then 10 | _cputype=aarch64 11 | fi 12 | TARGET=${_cputype}-apple-darwin 13 | ;; 14 | linux) 15 | TARGET=x86_64-unknown-linux-gnu 16 | ;; 17 | *) 18 | echo CI_OS_NAME unsupported 19 | exit 1 20 | ;; 21 | esac 22 | 23 | RELEASE_BASENAME="${RELEASE_BASENAME:=solana-accountsdb-plugin-kafka-release}" 24 | TARBALL_BASENAME="${TARBALL_BASENAME:="$RELEASE_BASENAME"}" 25 | 26 | echo --- Creating release tarball 27 | ( 28 | set -x 29 | rm -rf "${RELEASE_BASENAME:?}"/ 30 | mkdir "${RELEASE_BASENAME}"/ 31 | 32 | COMMIT="$(git rev-parse HEAD)" 33 | 34 | ( 35 | echo "channel: $CI_TAG" 36 | echo "commit: $COMMIT" 37 | echo "target: $TARGET" 38 | ) > "${RELEASE_BASENAME}"/version.yml 39 | 40 | # Make CHANNEL available to include in the software version information 41 | export CHANNEL 42 | 43 | ci/cargo-install-all.sh stable "${RELEASE_BASENAME}" 44 | 45 | tar cvf "${TARBALL_BASENAME}"-$TARGET.tar "${RELEASE_BASENAME}" 46 | bzip2 "${TARBALL_BASENAME}"-$TARGET.tar 47 | cp "${RELEASE_BASENAME}"/version.yml "${TARBALL_BASENAME}"-$TARGET.yml 48 | ) 49 | 50 | echo --- ok 51 | -------------------------------------------------------------------------------- /ci/solana-version.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env 
syntax = "proto3";

import "google/protobuf/wrappers.proto";

package blockdaemon.solana.accountsdb_plugin_kafka.types;

// Account write notification emitted for account updates that pass the
// plugin's filters.
message UpdateAccountEvent {
  // The slot number when this update was emitted.
  uint64 slot = 1;

  // The Pubkey for the account.
  bytes pubkey = 2;

  // The lamports held by the account.
  uint64 lamports = 3;

  // The Pubkey of the owner program account.
  bytes owner = 4;

  // This account's data contains a loaded program.
  bool executable = 5;

  // The epoch at which this account will next owe rent.
  uint64 rent_epoch = 6;

  // The data held in this account.
  bytes data = 7;

  // A global monotonically increasing atomic number, which can be used
  // to tell the order of the account update. For example, when an
  // account is updated in the same slot multiple times, the update
  // with higher write_version should supersede the one with lower
  // write_version.
  uint64 write_version = 8;

  // First signature of the transaction that caused this account modification.
  optional bytes txn_signature = 9;
}

// Slot lifecycle notification.
message SlotStatusEvent {
  uint64 slot = 1;

  // Parent slot number; 0 when the parent is unknown.
  uint64 parent = 2;

  SlotStatus status = 3;
}

enum SlotStatus {
  // The highest slot of the heaviest fork processed by the node. Ledger state at this slot is
  // not derived from a confirmed or finalized block, but if multiple forks are present, is from
  // the fork the validator believes is most likely to finalize.
  Processed = 0;

  // The highest slot having reached max vote lockout.
  Rooted = 1;

  // The highest slot that has been voted on by supermajority of the cluster, ie. is confirmed.
  Confirmed = 2;

  FirstShredReceived = 3;
  Completed = 4;
  CreatedBank = 5;

  // Sentinel value for slots the validator marked dead.
  Dead = 0xDEAD;
}

// MessageHeader
message MessageHeader {
  uint32 num_required_signatures = 1;
  uint32 num_readonly_signed_accounts = 2;
  uint32 num_readonly_unsigned_accounts = 3;
}

// CompiledInstruction
message CompiledInstruction {
  // Index into the transaction's account-keys list identifying the program.
  uint32 program_id_index = 1;
  repeated uint32 accounts = 2;
  bytes data = 3;
}

// Addresses resolved from address-lookup tables for a v0 transaction.
message LoadedAddresses {
  repeated bytes writable = 1;
  repeated bytes readonly = 2;
}

message MessageAddressTableLookup {
  bytes account_key = 1;
  repeated uint32 writable_indexes = 2;
  repeated uint32 readonly_indexes = 3;
}

// A versioned (v0) transaction message.
message V0Message {
  MessageHeader header = 1;
  repeated bytes account_keys = 2;
  bytes recent_block_hash = 3;
  repeated CompiledInstruction instructions = 4;
  repeated MessageAddressTableLookup address_table_lookup = 5;
}

message V0LoadedMessage {
  V0Message message_ = 1;
  // NOTE(review): "adresses" misspelling kept intentionally — renaming the
  // field would change the generated Rust accessors used elsewhere.
  LoadedAddresses loaded_adresses = 2;
  repeated bool is_writable_account_cache = 3;
}

// A pre-versioning (legacy) transaction message.
message LegacyMessage {
  MessageHeader header = 1;
  repeated bytes account_keys = 2;
  bytes recent_block_hash = 3;
  repeated CompiledInstruction instructions = 4;
}

message LegacyLoadedMessage {
  LegacyMessage message_ = 1;
  repeated bool is_writable_account_cache = 2;
}

// Either a legacy or a v0 loaded message.
message SanitizedMessage {
  oneof message_payload {
    LegacyLoadedMessage legacy = 1;
    V0LoadedMessage v0 = 2;
  }
}

message SanitizedTransaction {
  SanitizedMessage message_ = 1;
  bytes message_hash = 2;
  bool is_simple_vote_transaction = 3;
  repeated bytes signatures = 4;
}

// https://github.com/solana-labs/solana/pull/28430/files
message InnerInstructions {
  uint32 index = 1;
  repeated InnerInstruction instructions = 2;
}

message InnerInstruction {
  CompiledInstruction instruction = 1;
  optional uint32 stack_height = 2;
}

message UiTokenAmount {
  google.protobuf.DoubleValue ui_amount = 1;
  uint32 decimals = 2;
  string amount = 3;
  string ui_amount_string = 4;
}

message TransactionTokenBalance {
  uint32 account_index = 1;
  string mint = 2;
  // NOTE(review): field carries a UiTokenAmount value despite the
  // "_account" name — kept for generated-code compatibility.
  UiTokenAmount ui_token_account = 3;
  string owner = 4;
}

message Reward {
  string pubkey = 1;
  int64 lamports = 2;
  uint64 post_balance = 3;
  // reward_type is an enum, but protobuf will require it to be able to accept any int32.
  int32 reward_type = 4;
  uint32 commission = 5;
}

message TransactionStatusMeta {
  bool is_status_err = 1;
  string error_info = 2;
  uint64 fee = 3;
  repeated uint64 pre_balances = 4;
  repeated uint64 post_balances = 5;
  repeated InnerInstructions inner_instructions = 6;
  repeated string log_messages = 7;
  repeated TransactionTokenBalance pre_token_balances = 8;
  repeated TransactionTokenBalance post_token_balances = 9;
  repeated Reward rewards = 10;
}

// based on solana_accountsdb_plugin_interface::accountsdb_plugin_interface::ReplicaTransactionInfo
message TransactionEvent {
  bytes signature = 1;
  bool is_vote = 2;
  SanitizedTransaction transaction = 3;
  TransactionStatusMeta transaction_status_meta = 4;
  uint64 slot = 5;
  uint64 index = 6;
}

// Envelope used when `wrap_messages` is enabled, so every topic shares a
// single message schema.
message MessageWrapper {
  oneof event_message {
    UpdateAccountEvent account = 1;
    SlotStatusEvent slot = 2;
    TransactionEvent transaction = 3;
  }
}
#!/bin/bash
# Syncs rust-toolchain.toml with the toolchain Agave pins for the
# agave-geyser-plugin-interface version recorded in Cargo.lock.
set -e

# Get agave-geyser-plugin-interface version from Cargo.lock
AGAVE_VERSION=$(grep -A 1 'name = "agave-geyser-plugin-interface"' Cargo.lock | grep version | cut -d'"' -f2)
if [ -z "$AGAVE_VERSION" ]; then
    echo "Could not find agave-geyser-plugin-interface in Cargo.lock" >&2
    exit 1
fi
echo "Found agave-geyser-plugin-interface version: $AGAVE_VERSION"

# Get Agave's rust-toolchain.toml version based on the agave-geyser-plugin-interface version
VERSION=$(curl -s "https://raw.githubusercontent.com/anza-xyz/agave/v$AGAVE_VERSION/rust-toolchain.toml" | grep channel | cut -d'"' -f2)
if [ -z "$VERSION" ]; then
    echo "Could not fetch Agave's rust-toolchain.toml for v$AGAVE_VERSION" >&2
    exit 1
fi
echo "Found Agave's rust-toolchain.toml version: $VERSION"

# Check current version
CURRENT_VERSION=$(grep channel rust-toolchain.toml | cut -d'"' -f2)
echo "Current rust-toolchain.toml version: $CURRENT_VERSION"

if [ "$CURRENT_VERSION" != "$VERSION" ]; then
    echo "Updating rust-toolchain.toml to $VERSION"
    # 'sed -i.bak' works with both GNU sed (Linux) and BSD sed (macOS).
    # The previous "sed -i ''" form is BSD-only: GNU sed treats '' as the
    # script argument and the edit fails on Linux.
    sed -i.bak "s/channel = \".*\"/channel = \"$VERSION\"/" rust-toolchain.toml
    rm -f rust-toolchain.toml.bak
else
    echo "rust-toolchain.toml is already up to date"
fi
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | use { 16 | crate::{prom::StatsThreadedProducerContext, PrometheusService}, 17 | agave_geyser_plugin_interface::geyser_plugin_interface::{ 18 | GeyserPluginError, Result as PluginResult, 19 | }, 20 | rdkafka::{ 21 | config::FromClientConfigAndContext, 22 | error::KafkaResult, 23 | producer::{DefaultProducerContext, ThreadedProducer}, 24 | ClientConfig, 25 | }, 26 | serde::Deserialize, 27 | std::{collections::HashMap, fs::File, io::Result as IoResult, net::SocketAddr, path::Path}, 28 | }; 29 | 30 | /// Plugin config. 31 | #[derive(Debug, Deserialize)] 32 | #[serde(deny_unknown_fields)] 33 | pub struct Config { 34 | #[allow(dead_code)] 35 | libpath: String, 36 | 37 | /// Kafka config. 38 | pub kafka: HashMap, 39 | 40 | /// Graceful shutdown timeout. 41 | #[serde(default)] 42 | pub shutdown_timeout_ms: u64, 43 | 44 | /// Accounts, transactions filters 45 | pub filters: Vec, 46 | 47 | /// Prometheus endpoint. 48 | #[serde(default)] 49 | pub prometheus: Option, 50 | } 51 | 52 | impl Default for Config { 53 | fn default() -> Self { 54 | Self { 55 | libpath: "".to_owned(), 56 | kafka: HashMap::new(), 57 | shutdown_timeout_ms: 30_000, 58 | filters: vec![], 59 | prometheus: None, 60 | } 61 | } 62 | } 63 | 64 | impl Config { 65 | /// Read plugin from JSON file. 
66 | pub fn read_from>(config_path: P) -> PluginResult { 67 | let file = File::open(config_path)?; 68 | let mut this: Self = serde_json::from_reader(file) 69 | .map_err(|e| GeyserPluginError::ConfigFileReadError { msg: e.to_string() })?; 70 | this.fill_defaults(); 71 | Ok(this) 72 | } 73 | 74 | /// Create rdkafka::FutureProducer from config. 75 | pub fn producer(&self) -> KafkaResult> { 76 | let mut config = ClientConfig::new(); 77 | for (k, v) in self.kafka.iter() { 78 | config.set(k, v); 79 | } 80 | ThreadedProducer::from_config_and_context(&config, StatsThreadedProducerContext) 81 | } 82 | 83 | fn set_default(&mut self, k: &'static str, v: &'static str) { 84 | if !self.kafka.contains_key(k) { 85 | self.kafka.insert(k.to_owned(), v.to_owned()); 86 | } 87 | } 88 | 89 | fn fill_defaults(&mut self) { 90 | self.set_default("request.required.acks", "1"); 91 | self.set_default("message.timeout.ms", "30000"); 92 | self.set_default("compression.type", "lz4"); 93 | self.set_default("partitioner", "murmur2_random"); 94 | } 95 | 96 | pub fn create_prometheus(&self) -> IoResult> { 97 | self.prometheus.map(PrometheusService::new).transpose() 98 | } 99 | } 100 | 101 | /// Plugin config. 102 | #[derive(Debug, Deserialize)] 103 | #[serde(deny_unknown_fields, default)] 104 | pub struct ConfigFilter { 105 | /// Kafka topic to send account updates to. 106 | pub update_account_topic: String, 107 | /// Kafka topic to send slot status updates to. 108 | pub slot_status_topic: String, 109 | /// Kafka topic to send transaction to. 110 | pub transaction_topic: String, 111 | /// List of programs to ignore. 112 | pub program_ignores: Vec, 113 | /// List of programs to include 114 | pub program_filters: Vec, 115 | // List of accounts to include 116 | pub account_filters: Vec, 117 | /// Publish all accounts on startup. 118 | pub publish_all_accounts: bool, 119 | /// Publish vote transactions. 120 | pub include_vote_transactions: bool, 121 | /// Publish failed transactions. 
122 | pub include_failed_transactions: bool, 123 | /// Wrap all event message in a single message type. 124 | pub wrap_messages: bool, 125 | } 126 | 127 | impl Default for ConfigFilter { 128 | fn default() -> Self { 129 | Self { 130 | update_account_topic: "".to_owned(), 131 | slot_status_topic: "".to_owned(), 132 | transaction_topic: "".to_owned(), 133 | program_ignores: Vec::new(), 134 | program_filters: Vec::new(), 135 | account_filters: Vec::new(), 136 | publish_all_accounts: false, 137 | include_vote_transactions: true, 138 | include_failed_transactions: true, 139 | wrap_messages: false, 140 | } 141 | } 142 | } 143 | 144 | pub type Producer = ThreadedProducer; 145 | -------------------------------------------------------------------------------- /src/event.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Blockdaemon Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use agave_geyser_plugin_interface::geyser_plugin_interface::SlotStatus as PluginSlotStatus; 16 | 17 | include!(concat!( 18 | env!("OUT_DIR"), 19 | "/blockdaemon.solana.accountsdb_plugin_kafka.types.rs" 20 | )); 21 | 22 | impl From for SlotStatus { 23 | fn from(other: PluginSlotStatus) -> Self { 24 | match other { 25 | PluginSlotStatus::Processed => SlotStatus::Processed, 26 | PluginSlotStatus::Rooted => SlotStatus::Rooted, 27 | PluginSlotStatus::Confirmed => SlotStatus::Confirmed, 28 | PluginSlotStatus::FirstShredReceived => SlotStatus::FirstShredReceived, 29 | PluginSlotStatus::Completed => SlotStatus::Completed, 30 | PluginSlotStatus::CreatedBank => SlotStatus::CreatedBank, 31 | PluginSlotStatus::Dead(_) => SlotStatus::Dead, 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/filter.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Blockdaemon Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use { 16 | crate::ConfigFilter, 17 | solana_pubkey::Pubkey, 18 | std::{collections::HashSet, str::FromStr}, 19 | }; 20 | 21 | pub struct Filter { 22 | pub publish_all_accounts: bool, 23 | pub program_ignores: HashSet<[u8; 32]>, 24 | pub program_filters: HashSet<[u8; 32]>, 25 | pub account_filters: HashSet<[u8; 32]>, 26 | pub include_vote_transactions: bool, 27 | pub include_failed_transactions: bool, 28 | 29 | pub update_account_topic: String, 30 | pub slot_status_topic: String, 31 | pub transaction_topic: String, 32 | 33 | pub wrap_messages: bool, 34 | } 35 | 36 | impl Filter { 37 | pub fn new(config: &ConfigFilter) -> Self { 38 | Self { 39 | publish_all_accounts: config.publish_all_accounts, 40 | program_ignores: config 41 | .program_ignores 42 | .iter() 43 | .flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes())) 44 | .collect(), 45 | program_filters: config 46 | .program_filters 47 | .iter() 48 | .flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes())) 49 | .collect(), 50 | account_filters: config 51 | .account_filters 52 | .iter() 53 | .flat_map(|p| Pubkey::from_str(p).ok().map(|p| p.to_bytes())) 54 | .collect(), 55 | include_vote_transactions: config.include_vote_transactions, 56 | include_failed_transactions: config.include_failed_transactions, 57 | 58 | update_account_topic: config.update_account_topic.clone(), 59 | slot_status_topic: config.slot_status_topic.clone(), 60 | transaction_topic: config.transaction_topic.clone(), 61 | 62 | wrap_messages: config.wrap_messages, 63 | } 64 | } 65 | 66 | pub fn wants_program(&self, program: &[u8]) -> bool { 67 | match <&[u8; 32]>::try_from(program) { 68 | Ok(key) => { 69 | !self.program_ignores.contains(key) 70 | && (self.program_filters.is_empty() || self.program_filters.contains(key)) 71 | } 72 | Err(_error) => true, 73 | } 74 | } 75 | 76 | pub fn wants_account(&self, account: &[u8]) -> bool { 77 | match <&[u8; 32]>::try_from(account) { 78 | Ok(key) => self.account_filters.contains(key), 79 | 
Err(_error) => true, 80 | } 81 | } 82 | 83 | pub fn wants_vote_tx(&self) -> bool { 84 | self.include_vote_transactions 85 | } 86 | 87 | pub fn wants_failed_tx(&self) -> bool { 88 | self.include_failed_transactions 89 | } 90 | } 91 | 92 | #[cfg(test)] 93 | mod tests { 94 | use { 95 | crate::{ConfigFilter, Filter}, 96 | solana_pubkey::Pubkey, 97 | std::str::FromStr, 98 | }; 99 | 100 | #[test] 101 | fn test_filter() { 102 | let config = ConfigFilter { 103 | program_ignores: vec![ 104 | "Sysvar1111111111111111111111111111111111111".to_owned(), 105 | "Vote111111111111111111111111111111111111111".to_owned(), 106 | ], 107 | program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()], 108 | ..Default::default() 109 | }; 110 | 111 | let filter = Filter::new(&config); 112 | assert_eq!(filter.program_ignores.len(), 2); 113 | 114 | assert!(filter.wants_program( 115 | &Pubkey::from_str("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin") 116 | .unwrap() 117 | .to_bytes() 118 | )); 119 | assert!(!filter.wants_program( 120 | &Pubkey::from_str("Vote111111111111111111111111111111111111111") 121 | .unwrap() 122 | .to_bytes() 123 | )); 124 | } 125 | 126 | #[test] 127 | fn test_owner_filter() { 128 | let config = ConfigFilter { 129 | program_ignores: vec![ 130 | "Sysvar1111111111111111111111111111111111111".to_owned(), 131 | "Vote111111111111111111111111111111111111111".to_owned(), 132 | ], 133 | program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()], 134 | ..Default::default() 135 | }; 136 | 137 | let filter = Filter::new(&config); 138 | assert_eq!(filter.program_ignores.len(), 2); 139 | 140 | assert!(filter.wants_program( 141 | &Pubkey::from_str("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin") 142 | .unwrap() 143 | .to_bytes() 144 | )); 145 | assert!(!filter.wants_program( 146 | &Pubkey::from_str("Vote111111111111111111111111111111111111111") 147 | .unwrap() 148 | .to_bytes() 149 | )); 150 | 151 | assert!(!filter.wants_program( 152 | 
&Pubkey::from_str("cndy3Z4yapfJBmL3ShUp5exZKqR3z33thTzeNMm2gRZ") 153 | .unwrap() 154 | .to_bytes() 155 | )); 156 | } 157 | 158 | #[test] 159 | fn test_account_filter() { 160 | let config = ConfigFilter { 161 | program_filters: vec!["9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin".to_owned()], 162 | account_filters: vec!["5KKsLVU6TcbVDK4BS6K1DGDxnh4Q9xjYJ8XaDCG5t8ht".to_owned()], 163 | ..Default::default() 164 | }; 165 | 166 | let filter = Filter::new(&config); 167 | assert_eq!(filter.program_filters.len(), 1); 168 | assert_eq!(filter.account_filters.len(), 1); 169 | 170 | println!("{:?}", filter.account_filters); 171 | println!( 172 | "{:?}", 173 | &Pubkey::from_str("5KKsLVU6TcbVDK4BS6K1DGDxnh4Q9xjYJ8XaDCG5t8ht") 174 | .unwrap() 175 | .to_bytes() 176 | ); 177 | 178 | assert!(filter.wants_program( 179 | &Pubkey::from_str("9xQeWvG816bUx9EPjHmaT23yvVM2ZWbrrpZb9PusVFin") 180 | .unwrap() 181 | .to_bytes() 182 | )); 183 | 184 | assert!(filter.wants_account( 185 | &Pubkey::from_str("5KKsLVU6TcbVDK4BS6K1DGDxnh4Q9xjYJ8XaDCG5t8ht") 186 | .unwrap() 187 | .to_bytes() 188 | )); 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Blockdaemon Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use agave_geyser_plugin_interface::geyser_plugin_interface::GeyserPlugin; 16 | 17 | mod config; 18 | mod event; 19 | mod filter; 20 | mod plugin; 21 | mod prom; 22 | mod publisher; 23 | mod version; 24 | 25 | pub use { 26 | config::{Config, ConfigFilter, Producer}, 27 | event::*, 28 | filter::Filter, 29 | plugin::KafkaPlugin, 30 | prom::PrometheusService, 31 | publisher::Publisher, 32 | }; 33 | 34 | #[no_mangle] 35 | #[allow(improper_ctypes_definitions)] 36 | /// # Safety 37 | /// 38 | /// This function returns a pointer to the Kafka Plugin box implementing trait GeyserPlugin. 39 | /// 40 | /// The Solana validator and this plugin must be compiled with the same Rust compiler version and Solana core version. 41 | /// Loading this plugin with mismatching versions is undefined behavior and will likely cause memory corruption. 42 | pub unsafe extern "C" fn _create_plugin() -> *mut dyn GeyserPlugin { 43 | let plugin = KafkaPlugin::new(); 44 | let plugin: Box = Box::new(plugin); 45 | Box::into_raw(plugin) 46 | } 47 | -------------------------------------------------------------------------------- /src/plugin.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Blockdaemon Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use { 16 | crate::{ 17 | sanitized_message, CompiledInstruction, Config, Filter, InnerInstruction, 18 | InnerInstructions, LegacyLoadedMessage, LegacyMessage, LoadedAddresses, 19 | MessageAddressTableLookup, MessageHeader, PrometheusService, Publisher, Reward, 20 | SanitizedMessage, SanitizedTransaction, SlotStatus, SlotStatusEvent, TransactionEvent, 21 | TransactionStatusMeta, TransactionTokenBalance, UiTokenAmount, UpdateAccountEvent, 22 | V0LoadedMessage, V0Message, 23 | }, 24 | agave_geyser_plugin_interface::geyser_plugin_interface::{ 25 | GeyserPlugin, GeyserPluginError as PluginError, ReplicaAccountInfoV3, 26 | ReplicaAccountInfoVersions, ReplicaTransactionInfoV2, ReplicaTransactionInfoVersions, 27 | Result as PluginResult, SlotStatus as PluginSlotStatus, 28 | }, 29 | log::{debug, error, info, log_enabled}, 30 | rdkafka::util::get_rdkafka_version, 31 | solana_pubkey::Pubkey, 32 | std::fmt::{Debug, Formatter}, 33 | }; 34 | 35 | #[derive(Default)] 36 | pub struct KafkaPlugin { 37 | publisher: Option, 38 | filter: Option>, 39 | prometheus: Option, 40 | } 41 | 42 | impl Debug for KafkaPlugin { 43 | fn fmt(&self, _: &mut Formatter<'_>) -> std::fmt::Result { 44 | Ok(()) 45 | } 46 | } 47 | 48 | impl GeyserPlugin for KafkaPlugin { 49 | fn name(&self) -> &'static str { 50 | "KafkaPlugin" 51 | } 52 | 53 | fn on_load(&mut self, config_file: &str, _: bool) -> PluginResult<()> { 54 | if self.publisher.is_some() { 55 | return Err(PluginError::Custom("plugin already loaded".into())); 56 | } 57 | 58 | solana_logger::setup_with_default("info"); 59 | info!( 60 | "Loading plugin {:?} from config_file {:?}", 61 | self.name(), 62 | config_file 63 | ); 64 | let config = Config::read_from(config_file)?; 65 | 66 | let (version_n, version_s) = get_rdkafka_version(); 67 | info!("rd_kafka_version: {:#08x}, {}", version_n, version_s); 68 | 69 | let producer = config.producer().map_err(|error| { 70 | error!("Failed to create kafka producer: {error:?}"); 71 | 
PluginError::Custom(Box::new(error)) 72 | })?; 73 | info!("Created rdkafka::FutureProducer"); 74 | 75 | let publisher = Publisher::new(producer, &config); 76 | let prometheus = config 77 | .create_prometheus() 78 | .map_err(|error| PluginError::Custom(Box::new(error)))?; 79 | self.publisher = Some(publisher); 80 | self.filter = Some(config.filters.iter().map(Filter::new).collect()); 81 | self.prometheus = prometheus; 82 | info!("Spawned producer"); 83 | 84 | Ok(()) 85 | } 86 | 87 | fn on_unload(&mut self) { 88 | self.publisher = None; 89 | self.filter = None; 90 | if let Some(prometheus) = self.prometheus.take() { 91 | prometheus.shutdown(); 92 | } 93 | } 94 | 95 | fn update_account( 96 | &self, 97 | account: ReplicaAccountInfoVersions, 98 | slot: u64, 99 | is_startup: bool, 100 | ) -> PluginResult<()> { 101 | let filters = self.unwrap_filters(); 102 | if is_startup && filters.iter().all(|filter| !filter.publish_all_accounts) { 103 | return Ok(()); 104 | } 105 | 106 | let info = Self::unwrap_update_account(account); 107 | let publisher = self.unwrap_publisher(); 108 | for filter in filters { 109 | if !filter.update_account_topic.is_empty() { 110 | if !filter.wants_program(info.owner) && !filter.wants_account(info.pubkey) { 111 | Self::log_ignore_account_update(info); 112 | continue; 113 | } 114 | 115 | let event = UpdateAccountEvent { 116 | slot, 117 | pubkey: info.pubkey.to_vec(), 118 | lamports: info.lamports, 119 | owner: info.owner.to_vec(), 120 | executable: info.executable, 121 | rent_epoch: info.rent_epoch, 122 | data: info.data.to_vec(), 123 | write_version: info.write_version, 124 | txn_signature: info.txn.map(|v| v.signature().as_ref().to_owned()), 125 | }; 126 | 127 | publisher 128 | .update_account(event, filter.wrap_messages, &filter.update_account_topic) 129 | .map_err(|e| PluginError::AccountsUpdateError { msg: e.to_string() })?; 130 | } 131 | } 132 | 133 | Ok(()) 134 | } 135 | 136 | fn update_slot_status( 137 | &self, 138 | slot: u64, 139 | parent: 
Option, 140 | status: &PluginSlotStatus, 141 | ) -> PluginResult<()> { 142 | let publisher = self.unwrap_publisher(); 143 | let value = SlotStatus::from(status.clone()); 144 | for filter in self.unwrap_filters() { 145 | if !filter.slot_status_topic.is_empty() { 146 | let event = SlotStatusEvent { 147 | slot, 148 | parent: parent.unwrap_or(0), 149 | status: value.into(), 150 | }; 151 | 152 | publisher 153 | .update_slot_status(event, filter.wrap_messages, &filter.slot_status_topic) 154 | .map_err(|e| PluginError::AccountsUpdateError { msg: e.to_string() })?; 155 | } 156 | } 157 | 158 | Ok(()) 159 | } 160 | 161 | fn notify_transaction( 162 | &self, 163 | transaction: ReplicaTransactionInfoVersions, 164 | slot: u64, 165 | ) -> PluginResult<()> { 166 | let info = Self::unwrap_transaction(transaction); 167 | let publisher = self.unwrap_publisher(); 168 | for filter in self.unwrap_filters() { 169 | if !filter.transaction_topic.is_empty() { 170 | let is_failed = info.transaction_status_meta.status.is_err(); 171 | if (!filter.wants_vote_tx() && info.is_vote) 172 | || (!filter.wants_failed_tx() && is_failed) 173 | { 174 | debug!("Ignoring vote/failed transaction"); 175 | continue; 176 | } 177 | 178 | if !info 179 | .transaction 180 | .message() 181 | .account_keys() 182 | .iter() 183 | .any(|pubkey| { 184 | filter.wants_program(pubkey.as_ref()) 185 | || filter.wants_account(pubkey.as_ref()) 186 | }) 187 | { 188 | debug!("Ignoring transaction {:?}", info.signature); 189 | continue; 190 | } 191 | 192 | let event = Self::build_transaction_event(slot, info); 193 | publisher 194 | .update_transaction(event, filter.wrap_messages, &filter.transaction_topic) 195 | .map_err(|e| PluginError::TransactionUpdateError { msg: e.to_string() })?; 196 | } 197 | } 198 | 199 | Ok(()) 200 | } 201 | 202 | fn account_data_notifications_enabled(&self) -> bool { 203 | let filters = self.unwrap_filters(); 204 | filters 205 | .iter() 206 | .any(|filter| !filter.update_account_topic.is_empty()) 207 | 
} 208 | 209 | fn transaction_notifications_enabled(&self) -> bool { 210 | let filters = self.unwrap_filters(); 211 | filters 212 | .iter() 213 | .any(|filter| !filter.transaction_topic.is_empty()) 214 | } 215 | } 216 | 217 | impl KafkaPlugin { 218 | pub fn new() -> Self { 219 | Default::default() 220 | } 221 | 222 | fn unwrap_publisher(&self) -> &Publisher { 223 | self.publisher.as_ref().expect("publisher is unavailable") 224 | } 225 | 226 | fn unwrap_filters(&self) -> &Vec { 227 | self.filter.as_ref().expect("filter is unavailable") 228 | } 229 | 230 | fn unwrap_update_account(account: ReplicaAccountInfoVersions) -> &ReplicaAccountInfoV3 { 231 | match account { 232 | ReplicaAccountInfoVersions::V0_0_1(_info) => { 233 | panic!("ReplicaAccountInfoVersions::V0_0_1 unsupported, please upgrade your Solana node."); 234 | } 235 | ReplicaAccountInfoVersions::V0_0_2(_info) => { 236 | panic!("ReplicaAccountInfoVersions::V0_0_2 unsupported, please upgrade your Solana node."); 237 | } 238 | ReplicaAccountInfoVersions::V0_0_3(info) => info, 239 | } 240 | } 241 | 242 | fn unwrap_transaction( 243 | transaction: ReplicaTransactionInfoVersions, 244 | ) -> &ReplicaTransactionInfoV2 { 245 | match transaction { 246 | ReplicaTransactionInfoVersions::V0_0_1(_info) => { 247 | panic!("ReplicaTransactionInfoVersions::V0_0_1 unsupported, please upgrade your Solana node."); 248 | } 249 | ReplicaTransactionInfoVersions::V0_0_2(info) => info, 250 | } 251 | } 252 | 253 | fn build_compiled_instruction( 254 | ix: &solana_message::compiled_instruction::CompiledInstruction, 255 | ) -> CompiledInstruction { 256 | CompiledInstruction { 257 | program_id_index: ix.program_id_index as u32, 258 | accounts: ix.clone().accounts.into_iter().map(|v| v as u32).collect(), 259 | data: ix.data.clone(), 260 | } 261 | } 262 | 263 | fn build_inner_instruction( 264 | ix: &solana_transaction_status::InnerInstruction, 265 | ) -> InnerInstruction { 266 | InnerInstruction { 267 | instruction: 
Some(Self::build_compiled_instruction(&ix.instruction)), 268 | stack_height: ix.stack_height, 269 | } 270 | } 271 | 272 | fn build_message_header(header: &solana_message::MessageHeader) -> MessageHeader { 273 | MessageHeader { 274 | num_required_signatures: header.num_required_signatures as u32, 275 | num_readonly_signed_accounts: header.num_readonly_signed_accounts as u32, 276 | num_readonly_unsigned_accounts: header.num_readonly_unsigned_accounts as u32, 277 | } 278 | } 279 | 280 | fn build_transaction_token_balance( 281 | transaction_token_account_balance: solana_transaction_status::TransactionTokenBalance, 282 | ) -> TransactionTokenBalance { 283 | TransactionTokenBalance { 284 | account_index: transaction_token_account_balance.account_index as u32, 285 | ui_token_account: Some(UiTokenAmount { 286 | ui_amount: transaction_token_account_balance.ui_token_amount.ui_amount, 287 | decimals: transaction_token_account_balance.ui_token_amount.decimals as u32, 288 | amount: transaction_token_account_balance.ui_token_amount.amount, 289 | ui_amount_string: transaction_token_account_balance 290 | .ui_token_amount 291 | .ui_amount_string, 292 | }), 293 | mint: transaction_token_account_balance.mint, 294 | owner: transaction_token_account_balance.owner, 295 | } 296 | } 297 | 298 | fn build_transaction_event( 299 | slot: u64, 300 | ReplicaTransactionInfoV2 { 301 | signature, 302 | is_vote, 303 | transaction, 304 | transaction_status_meta, 305 | index, 306 | }: &ReplicaTransactionInfoV2, 307 | ) -> TransactionEvent { 308 | TransactionEvent { 309 | is_vote: *is_vote, 310 | slot, 311 | index: *index as u64, 312 | signature: signature.as_ref().into(), 313 | transaction_status_meta: Some(TransactionStatusMeta { 314 | is_status_err: transaction_status_meta.status.is_err(), 315 | error_info: match &transaction_status_meta.status { 316 | Err(e) => e.to_string(), 317 | Ok(_) => "".to_owned(), 318 | }, 319 | rewards: transaction_status_meta 320 | .rewards 321 | .clone() 322 | .unwrap() 
323 | .into_iter() 324 | .map(|x| Reward { 325 | pubkey: x.pubkey, 326 | lamports: x.lamports, 327 | post_balance: x.post_balance, 328 | reward_type: match x.reward_type { 329 | Some(r) => r as i32, 330 | None => 0, 331 | }, 332 | commission: match x.commission { 333 | Some(v) => v as u32, 334 | None => 0, 335 | }, 336 | }) 337 | .collect(), 338 | fee: transaction_status_meta.fee, 339 | log_messages: match &transaction_status_meta.log_messages { 340 | Some(v) => v.to_owned(), 341 | None => vec![], 342 | }, 343 | inner_instructions: match &transaction_status_meta.inner_instructions { 344 | Some(inners) => inners 345 | .clone() 346 | .into_iter() 347 | .map(|inner| InnerInstructions { 348 | index: inner.index as u32, 349 | instructions: inner 350 | .instructions 351 | .iter() 352 | .map(Self::build_inner_instruction) 353 | .collect(), 354 | }) 355 | .collect(), 356 | None => vec![], 357 | }, 358 | pre_balances: transaction_status_meta.pre_balances.clone(), 359 | post_balances: transaction_status_meta.post_balances.clone(), 360 | pre_token_balances: match &transaction_status_meta.pre_token_balances { 361 | Some(v) => v 362 | .clone() 363 | .into_iter() 364 | .map(Self::build_transaction_token_balance) 365 | .collect(), 366 | None => vec![], 367 | }, 368 | post_token_balances: match &transaction_status_meta.post_token_balances { 369 | Some(v) => v 370 | .clone() 371 | .into_iter() 372 | .map(Self::build_transaction_token_balance) 373 | .collect(), 374 | None => vec![], 375 | }, 376 | }), 377 | transaction: Some(SanitizedTransaction { 378 | message_hash: transaction.message_hash().to_bytes().into(), 379 | is_simple_vote_transaction: transaction.is_simple_vote_transaction(), 380 | message: Some(SanitizedMessage { 381 | message_payload: Some(match transaction.message() { 382 | solana_message::SanitizedMessage::Legacy(lv) => { 383 | sanitized_message::MessagePayload::Legacy(LegacyLoadedMessage { 384 | message: Some(LegacyMessage { 385 | header: 
Some(Self::build_message_header(&lv.message.header)), 386 | account_keys: lv 387 | .message 388 | .account_keys 389 | .clone() 390 | .into_iter() 391 | .map(|k| k.as_ref().into()) 392 | .collect(), 393 | instructions: lv 394 | .message 395 | .instructions 396 | .iter() 397 | .map(Self::build_compiled_instruction) 398 | .collect(), 399 | recent_block_hash: lv.message.recent_blockhash.as_ref().into(), 400 | }), 401 | is_writable_account_cache: (0..(lv.account_keys().len() - 1)) 402 | .map(|i: usize| lv.is_writable(i)) 403 | .collect(), 404 | }) 405 | } 406 | solana_message::SanitizedMessage::V0(v0) => { 407 | sanitized_message::MessagePayload::V0(V0LoadedMessage { 408 | message: Some(V0Message { 409 | header: Some(Self::build_message_header(&v0.message.header)), 410 | account_keys: v0 411 | .message 412 | .account_keys 413 | .clone() 414 | .into_iter() 415 | .map(|k| k.as_ref().into()) 416 | .collect(), 417 | recent_block_hash: v0.message.recent_blockhash.as_ref().into(), 418 | instructions: v0 419 | .message 420 | .instructions 421 | .iter() 422 | .map(Self::build_compiled_instruction) 423 | .collect(), 424 | address_table_lookup: v0 425 | .message 426 | .address_table_lookups 427 | .clone() 428 | .into_iter() 429 | .map(|vf| MessageAddressTableLookup { 430 | account_key: vf.account_key.as_ref().into(), 431 | writable_indexes: vf 432 | .writable_indexes 433 | .iter() 434 | .map(|x| *x as u32) 435 | .collect(), 436 | readonly_indexes: vf 437 | .readonly_indexes 438 | .iter() 439 | .map(|x| *x as u32) 440 | .collect(), 441 | }) 442 | .collect(), 443 | }), 444 | loaded_adresses: Some(LoadedAddresses { 445 | writable: v0 446 | .loaded_addresses 447 | .writable 448 | .clone() 449 | .into_iter() 450 | .map(|x| x.as_ref().into()) 451 | .collect(), 452 | readonly: v0 453 | .loaded_addresses 454 | .readonly 455 | .clone() 456 | .into_iter() 457 | .map(|x| x.as_ref().into()) 458 | .collect(), 459 | }), 460 | is_writable_account_cache: (0..(v0.account_keys().len() - 1)) 461 | 
.map(|i: usize| v0.is_writable(i)) 462 | .collect(), 463 | }) 464 | } 465 | }), 466 | }), 467 | signatures: transaction 468 | .signatures() 469 | .iter() 470 | .copied() 471 | .map(|x| x.as_ref().into()) 472 | .collect(), 473 | }), 474 | } 475 | } 476 | 477 | fn log_ignore_account_update(info: &ReplicaAccountInfoV3) { 478 | if log_enabled!(::log::Level::Debug) { 479 | match <&[u8; 32]>::try_from(info.owner) { 480 | Ok(key) => debug!( 481 | "Ignoring update for account key: {:?}", 482 | Pubkey::new_from_array(*key) 483 | ), 484 | // Err should never happen because wants_account_key only returns false if the input is &[u8; 32] 485 | Err(_err) => debug!("Ignoring update for account key: {:?}", info.owner), 486 | }; 487 | } 488 | } 489 | } 490 | -------------------------------------------------------------------------------- /src/prom.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::version::VERSION as VERSION_INFO, 3 | bytes::Bytes, 4 | http::StatusCode, 5 | http_body_util::Full, 6 | hyper::{body::Incoming, service::service_fn, Request, Response}, 7 | hyper_util::rt::TokioIo, 8 | log::*, 9 | prometheus::{GaugeVec, IntCounterVec, Opts, Registry, TextEncoder}, 10 | rdkafka::{ 11 | client::ClientContext, 12 | producer::{DeliveryResult, ProducerContext}, 13 | statistics::Statistics, 14 | }, 15 | std::{io::Result as IoResult, net::SocketAddr, sync::Once, time::Duration}, 16 | tokio::net::TcpListener, 17 | tokio::runtime::Runtime, 18 | }; 19 | 20 | lazy_static::lazy_static! 
{ 21 | pub static ref REGISTRY: Registry = Registry::new(); 22 | 23 | static ref VERSION: IntCounterVec = IntCounterVec::new( 24 | Opts::new("version", "Plugin version info"), 25 | &["key", "value"] 26 | ).unwrap(); 27 | 28 | pub static ref UPLOAD_ACCOUNTS_TOTAL: IntCounterVec = IntCounterVec::new( 29 | Opts::new("upload_accounts_total", "Status of uploaded accounts"), 30 | &["status"] 31 | ).unwrap(); 32 | 33 | pub static ref UPLOAD_SLOTS_TOTAL: IntCounterVec = IntCounterVec::new( 34 | Opts::new("upload_slots_total", "Status of uploaded slots"), 35 | &["status"] 36 | ).unwrap(); 37 | 38 | pub static ref UPLOAD_TRANSACTIONS_TOTAL: IntCounterVec = IntCounterVec::new( 39 | Opts::new("upload_transactions_total", "Status of uploaded transactions"), 40 | &["status"] 41 | ).unwrap(); 42 | 43 | static ref KAFKA_STATS: GaugeVec = GaugeVec::new( 44 | Opts::new("kafka_stats", "librdkafka metrics"), 45 | &["broker", "metric"] 46 | ).unwrap(); 47 | } 48 | 49 | #[derive(Debug)] 50 | pub struct PrometheusService { 51 | runtime: Runtime, 52 | } 53 | 54 | impl PrometheusService { 55 | pub fn new(address: SocketAddr) -> IoResult { 56 | static REGISTER: Once = Once::new(); 57 | REGISTER.call_once(|| { 58 | macro_rules! 
register { 59 | ($collector:ident) => { 60 | REGISTRY 61 | .register(Box::new($collector.clone())) 62 | .expect("collector can't be registered"); 63 | }; 64 | } 65 | register!(VERSION); 66 | register!(UPLOAD_ACCOUNTS_TOTAL); 67 | register!(UPLOAD_SLOTS_TOTAL); 68 | register!(UPLOAD_TRANSACTIONS_TOTAL); 69 | register!(KAFKA_STATS); 70 | 71 | for (key, value) in &[ 72 | ("version", VERSION_INFO.version), 73 | ("solana", VERSION_INFO.solana), 74 | ("git", VERSION_INFO.git), 75 | ("rustc", VERSION_INFO.rustc), 76 | ("buildts", VERSION_INFO.buildts), 77 | ] { 78 | VERSION 79 | .with_label_values(&[key.to_string(), value.to_string()]) 80 | .inc(); 81 | } 82 | }); 83 | 84 | let runtime = Runtime::new()?; 85 | runtime.spawn(async move { 86 | let listener = TcpListener::bind(address).await.unwrap(); 87 | 88 | loop { 89 | let (stream, _) = match listener.accept().await { 90 | Ok(conn) => conn, 91 | Err(e) => { 92 | error!("Failed to accept connection: {}", e); 93 | continue; 94 | } 95 | }; 96 | 97 | let io = TokioIo::new(stream); 98 | 99 | let service = service_fn(|req: Request| async move { 100 | let response = match req.uri().path() { 101 | "/metrics" => metrics_handler(), 102 | _ => not_found_handler(), 103 | }; 104 | Ok::<_, hyper::Error>(response) 105 | }); 106 | 107 | tokio::task::spawn(async move { 108 | if let Err(err) = hyper::server::conn::http1::Builder::new() 109 | .serve_connection(io, service) 110 | .await 111 | { 112 | error!("Error serving connection: {}", err); 113 | } 114 | }); 115 | } 116 | }); 117 | Ok(PrometheusService { runtime }) 118 | } 119 | 120 | pub fn shutdown(self) { 121 | self.runtime.shutdown_timeout(Duration::from_secs(10)); 122 | } 123 | } 124 | 125 | fn metrics_handler() -> Response> { 126 | let metrics = TextEncoder::new() 127 | .encode_to_string(®ISTRY.gather()) 128 | .unwrap_or_else(|error| { 129 | error!("could not encode custom metrics: {}", error); 130 | String::new() 131 | }); 132 | Response::builder() 133 | 
.body(Full::new(Bytes::from(metrics))) 134 | .unwrap() 135 | } 136 | 137 | fn not_found_handler() -> Response> { 138 | Response::builder() 139 | .status(StatusCode::NOT_FOUND) 140 | .body(Full::new(Bytes::from(""))) 141 | .unwrap() 142 | } 143 | 144 | #[derive(Debug, Default, Clone, Copy)] 145 | pub struct StatsThreadedProducerContext; 146 | 147 | impl ClientContext for StatsThreadedProducerContext { 148 | fn stats(&self, statistics: Statistics) { 149 | for (name, broker) in statistics.brokers { 150 | macro_rules! set_value { 151 | ($name:expr, $value:expr) => { 152 | KAFKA_STATS 153 | .with_label_values(&[&name.to_string(), &$name.to_string()]) 154 | .set($value as f64); 155 | }; 156 | } 157 | 158 | set_value!("outbuf_cnt", broker.outbuf_cnt); 159 | set_value!("outbuf_msg_cnt", broker.outbuf_msg_cnt); 160 | set_value!("waitresp_cnt", broker.waitresp_cnt); 161 | set_value!("waitresp_msg_cnt", broker.waitresp_msg_cnt); 162 | set_value!("tx", broker.tx); 163 | set_value!("txerrs", broker.txerrs); 164 | set_value!("txretries", broker.txretries); 165 | set_value!("req_timeouts", broker.req_timeouts); 166 | 167 | if let Some(window) = broker.int_latency { 168 | set_value!("int_latency.min", window.min); 169 | set_value!("int_latency.max", window.max); 170 | set_value!("int_latency.avg", window.avg); 171 | set_value!("int_latency.sum", window.sum); 172 | set_value!("int_latency.cnt", window.cnt); 173 | set_value!("int_latency.stddev", window.stddev); 174 | set_value!("int_latency.hdrsize", window.hdrsize); 175 | set_value!("int_latency.p50", window.p50); 176 | set_value!("int_latency.p75", window.p75); 177 | set_value!("int_latency.p90", window.p90); 178 | set_value!("int_latency.p95", window.p95); 179 | set_value!("int_latency.p99", window.p99); 180 | set_value!("int_latency.p99_99", window.p99_99); 181 | set_value!("int_latency.outofrange", window.outofrange); 182 | } 183 | 184 | if let Some(window) = broker.outbuf_latency { 185 | set_value!("outbuf_latency.min", 
window.min); 186 | set_value!("outbuf_latency.max", window.max); 187 | set_value!("outbuf_latency.avg", window.avg); 188 | set_value!("outbuf_latency.sum", window.sum); 189 | set_value!("outbuf_latency.cnt", window.cnt); 190 | set_value!("outbuf_latency.stddev", window.stddev); 191 | set_value!("outbuf_latency.hdrsize", window.hdrsize); 192 | set_value!("outbuf_latency.p50", window.p50); 193 | set_value!("outbuf_latency.p75", window.p75); 194 | set_value!("outbuf_latency.p90", window.p90); 195 | set_value!("outbuf_latency.p95", window.p95); 196 | set_value!("outbuf_latency.p99", window.p99); 197 | set_value!("outbuf_latency.p99_99", window.p99_99); 198 | set_value!("outbuf_latency.outofrange", window.outofrange); 199 | } 200 | } 201 | } 202 | } 203 | 204 | impl ProducerContext for StatsThreadedProducerContext { 205 | type DeliveryOpaque = (); 206 | fn delivery(&self, _: &DeliveryResult<'_>, _: Self::DeliveryOpaque) {} 207 | } 208 | -------------------------------------------------------------------------------- /src/publisher.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Blockdaemon Inc. 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | use { 16 | crate::{ 17 | message_wrapper::EventMessage::{self, Account, Slot, Transaction}, 18 | prom::{ 19 | StatsThreadedProducerContext, UPLOAD_ACCOUNTS_TOTAL, UPLOAD_SLOTS_TOTAL, 20 | UPLOAD_TRANSACTIONS_TOTAL, 21 | }, 22 | Config, MessageWrapper, SlotStatusEvent, TransactionEvent, UpdateAccountEvent, 23 | }, 24 | prost::Message, 25 | rdkafka::{ 26 | error::KafkaError, 27 | producer::{BaseRecord, Producer, ThreadedProducer}, 28 | }, 29 | std::time::Duration, 30 | }; 31 | 32 | pub struct Publisher { 33 | producer: ThreadedProducer, 34 | shutdown_timeout: Duration, 35 | } 36 | 37 | impl Publisher { 38 | pub fn new(producer: ThreadedProducer, config: &Config) -> Self { 39 | Self { 40 | producer, 41 | shutdown_timeout: Duration::from_millis(config.shutdown_timeout_ms), 42 | } 43 | } 44 | 45 | pub fn update_account( 46 | &self, 47 | ev: UpdateAccountEvent, 48 | wrap_messages: bool, 49 | topic: &str, 50 | ) -> Result<(), KafkaError> { 51 | let temp_key; 52 | let (key, buf) = if wrap_messages { 53 | temp_key = self.copy_and_prepend(ev.pubkey.as_slice(), 65u8); 54 | (&temp_key, Self::encode_with_wrapper(Account(Box::new(ev)))) 55 | } else { 56 | (&ev.pubkey, ev.encode_to_vec()) 57 | }; 58 | let record = BaseRecord::, _>::to(topic).key(key).payload(&buf); 59 | let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e); 60 | UPLOAD_ACCOUNTS_TOTAL 61 | .with_label_values(&[if result.is_ok() { "success" } else { "failed" }]) 62 | .inc(); 63 | result 64 | } 65 | 66 | pub fn update_slot_status( 67 | &self, 68 | ev: SlotStatusEvent, 69 | wrap_messages: bool, 70 | topic: &str, 71 | ) -> Result<(), KafkaError> { 72 | let temp_key; 73 | let (key, buf) = if wrap_messages { 74 | temp_key = self.copy_and_prepend(&ev.slot.to_le_bytes(), 83u8); 75 | (&temp_key, Self::encode_with_wrapper(Slot(Box::new(ev)))) 76 | } else { 77 | temp_key = ev.slot.to_le_bytes().to_vec(); 78 | (&temp_key, ev.encode_to_vec()) 79 | }; 80 | let record = BaseRecord::, 
_>::to(topic).key(key).payload(&buf); 81 | let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e); 82 | UPLOAD_SLOTS_TOTAL 83 | .with_label_values(&[if result.is_ok() { "success" } else { "failed" }]) 84 | .inc(); 85 | result 86 | } 87 | 88 | pub fn update_transaction( 89 | &self, 90 | ev: TransactionEvent, 91 | wrap_messages: bool, 92 | topic: &str, 93 | ) -> Result<(), KafkaError> { 94 | let temp_key; 95 | let (key, buf) = if wrap_messages { 96 | temp_key = self.copy_and_prepend(ev.signature.as_slice(), 84u8); 97 | ( 98 | &temp_key, 99 | Self::encode_with_wrapper(Transaction(Box::new(ev))), 100 | ) 101 | } else { 102 | (&ev.signature, ev.encode_to_vec()) 103 | }; 104 | let record = BaseRecord::, _>::to(topic).key(key).payload(&buf); 105 | let result = self.producer.send(record).map(|_| ()).map_err(|(e, _)| e); 106 | UPLOAD_TRANSACTIONS_TOTAL 107 | .with_label_values(&[if result.is_ok() { "success" } else { "failed" }]) 108 | .inc(); 109 | result 110 | } 111 | 112 | fn encode_with_wrapper(message: EventMessage) -> Vec { 113 | MessageWrapper { 114 | event_message: Some(message), 115 | } 116 | .encode_to_vec() 117 | } 118 | 119 | fn copy_and_prepend(&self, data: &[u8], prefix: u8) -> Vec { 120 | let mut temp_key = Vec::with_capacity(data.len() + 1); 121 | temp_key.push(prefix); 122 | temp_key.extend_from_slice(data); 123 | temp_key 124 | } 125 | } 126 | 127 | impl Drop for Publisher { 128 | fn drop(&mut self) { 129 | let _ = self.producer.flush(self.shutdown_timeout); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/version.rs: -------------------------------------------------------------------------------- 1 | use {serde::Serialize, std::env}; 2 | 3 | #[derive(Debug, Serialize)] 4 | pub struct Version { 5 | pub version: &'static str, 6 | pub solana: &'static str, 7 | pub git: &'static str, 8 | pub rustc: &'static str, 9 | pub buildts: &'static str, 10 | } 11 | 12 | pub const VERSION: 
/// Build-time version metadata for this plugin, baked in via compile-time
/// environment variables and exposed through the Prometheus `version` metric.
pub const VERSION: Version = Version {
    // Crate version from Cargo.toml.
    version: env!("CARGO_PKG_VERSION"),
    // NOTE(review): presumably exported by build.rs from the solana-sdk
    // dependency version — confirm against build.rs.
    solana: env!("SOLANA_SDK_VERSION"),
    // NOTE(review): presumably a git describe string emitted by build.rs.
    git: env!("GIT_VERSION"),
    // Emitted by the `vergen` build-script crate.
    rustc: env!("VERGEN_RUSTC_SEMVER"),
    buildts: env!("VERGEN_BUILD_TIMESTAMP"),
};