├── .gitignore ├── .mvn └── wrapper │ ├── maven-wrapper.jar │ └── maven-wrapper.properties ├── .settings.xml ├── .travis.yml ├── LICENSE ├── README.md ├── RELEASE.md ├── header.txt ├── mvnw ├── mvnw.cmd ├── opentracing-kafka-client ├── pom.xml └── src │ ├── main │ └── java │ │ └── io │ │ └── opentracing │ │ └── contrib │ │ └── kafka │ │ ├── ClientSpanNameProvider.java │ │ ├── HeadersMapExtractAdapter.java │ │ ├── HeadersMapInjectAdapter.java │ │ ├── SpanDecorator.java │ │ ├── StandardSpanDecorator.java │ │ ├── TracingCallback.java │ │ ├── TracingConsumerInterceptor.java │ │ ├── TracingKafkaConsumer.java │ │ ├── TracingKafkaConsumerBuilder.java │ │ ├── TracingKafkaProducer.java │ │ ├── TracingKafkaProducerBuilder.java │ │ ├── TracingKafkaUtils.java │ │ └── TracingProducerInterceptor.java │ └── test │ └── java │ └── io │ └── opentracing │ └── contrib │ └── kafka │ ├── HeadersMapExtractAdapterTest.java │ ├── OperationNameSpanNameTest.java │ ├── OperationNameTopicSpanNameTest.java │ ├── TopicSpanNameTest.java │ ├── TracingCallbackTest.java │ ├── TracingKafkaTest.java │ └── TracingKafkaUtilsTest.java ├── opentracing-kafka-spring ├── pom.xml └── src │ ├── main │ └── java │ │ └── io │ │ └── opentracing │ │ └── contrib │ │ └── kafka │ │ └── spring │ │ ├── MessageListenerMethodInterceptor.java │ │ ├── TracingConsumerFactory.java │ │ ├── TracingKafkaAspect.java │ │ └── TracingProducerFactory.java │ └── test │ └── java │ └── io │ └── opentracing │ └── contrib │ └── kafka │ └── spring │ ├── Listener.java │ ├── TestConfiguration.java │ └── TracingSpringKafkaTest.java ├── opentracing-kafka-streams ├── pom.xml └── src │ ├── main │ └── java │ │ └── io │ │ └── opentracing │ │ └── contrib │ │ └── kafka │ │ └── streams │ │ └── TracingKafkaClientSupplier.java │ └── test │ └── java │ └── io │ └── opentracing │ └── contrib │ └── kafka │ └── streams │ └── TracingKafkaStreamsTest.java ├── pom.xml └── travis └── publish.sh /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | *.iml 3 | target/ 4 | .project 5 | .classpath 6 | .settings/**/*.* 7 | opentracing-kafka-client/.settings/**/*.* 8 | opentracing-kafka-spring/.settings/**/*.* 9 | opentracing-kafka-streams/.settings/**/*.* -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentracing-contrib/java-kafka-client/31ce5260279c2cadf3d69c0acbd50f024afe4660/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip -------------------------------------------------------------------------------- /.settings.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 21 | 22 | 23 | sonatype 24 | ${env.SONATYPE_USER} 25 | ${env.SONATYPE_PASSWORD} 26 | 27 | 28 | bintray 29 | ${env.BINTRAY_USER} 30 | ${env.BINTRAY_KEY} 31 | 32 | 33 | jfrog-snapshots 34 | ${env.BINTRAY_USER} 35 | ${env.BINTRAY_KEY} 36 | 37 | 38 | github.com 39 | ${env.GH_USER} 40 | ${env.GH_TOKEN} 41 | 42 | 43 | -------------------------------------------------------------------------------- /.travis.yml: 
-------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: trusty 3 | 4 | language: java 5 | jdk: 6 | - oraclejdk8 7 | 8 | cache: 9 | directories: 10 | - $HOME/.m2/repository 11 | 12 | before_install: 13 | # allocate commits to CI, not the owner of the deploy key 14 | - git config user.name "opentracingci" 15 | - git config user.email "opentracingci+opentracing@googlegroups.com" 16 | 17 | # setup https authentication credentials, used by ./mvnw release:prepare 18 | - git config credential.helper "store --file=.git/credentials" 19 | - echo "https://$GH_TOKEN:@github.com" > .git/credentials 20 | 21 | install: 22 | # Override default travis to use the maven wrapper 23 | - ./mvnw install -DskipTests=true -Dmaven.javadoc.skip=true -B -V 24 | 25 | script: 26 | - ./travis/publish.sh 27 | 28 | branches: 29 | except: 30 | - /^[0-9]/ 31 | 32 | after_success: 33 | - mvn jacoco:report coveralls:report 34 | 35 | env: 36 | global: 37 | # Ex. travis encrypt BINTRAY_USER=your_github_account 38 | - secure: "eng7HMp7Zd1X4o59WzsyfTSjvkUGsYLPNKR2RIdZiDXKd/eqTXzrxtQX5BldKtOxA8ZfLF8UWMBE7k9hJq+1eN9fYF6FgLClwZw6OfuLJ6C/kBUne9MA2h5pE+cuV8p6zZsF0AmUM5EKnsP++FmgLijMdMNG8Hx4yTvZeMtCyvF5GZ8jQFXgCnnn8K1h0rwKI7mQH5HPo3rOWd6OCZpRYic0/XIW4CLlY2g/7uUl9IDqBNnFKVbU6sdVrK+O648l5spjGHS9agm/5f1JpNMuVMQ37QlWr+T7HC+X6dhLv9dlqPtXSJp7VWV0AnRgzqc0M5T492+05Ng5nXnX6Fo+lIc+08JCfjq5tDvwuzhKjIepZUBR9jaZwUQxSgeNWv+xwjBJLQuMY16Nyu0f3QUmqnL2toZ+Co6+P0MUlqRTkkCoFKbL2Z/lpTj78Nct/QJnzuafUvCOgyZLYSwniVVP/cqwqJajnhAr2kXP77QiBTKrwxLCISgDwrnnHKwnIBZ6TLpCyoJ/v84c6qagzh/ltcfvJSfDj8LdrJP5sIvOB8tuRqUXWKkZDr5igXPEfUotv+VR82VqPCS8IZgT4Str87UAIU/J5v4NM7gWF207ri0lILjbU4WcVD80AmJHxWpCJHwk0jNnXpHTxSZACoRwNeju4NBkHeKN8McOdU3DOR8=" 39 | # Ex. travis encrypt BINTRAY_KEY=xxx-https://bintray.com/profile/edit-xxx --add 40 | - secure: "FzmEYmwU8RnH5y394X8pFkKjEtvrnMvas4DgoJJRum+8IMMkFcuiHTe1gw+kVQTXtj9q/YQ0rcdhu80A+EllcIKwc7wxf3tysnckF/l3lfSpcCHyiZ/e58yjpXg1XB/yAiVbRKgjubBJHI0RN005/M86PUlEi0HtA01jRgQvr3aQpicPuyRjleIkVx490clyTdAN5HXAJNfalWXcsEIcIl4GmpqA4o0MJ20DBqWrQYp25fgWDP9WYC+8oIQExZUtR/KWXdPWolYRE3tlBj6C6XSUTynaPzjcwveVGvfXAis6+NafowrRi1ohFzbdx3MbKhXxIdfVidw69BP1NMxqkPQaWWXtNEjpNbIMJvNQL/sFGkvsamYbEa1e7VALxMtjR19BYieZdYW4r33FeYH11zVAK3I3EjqpdnaLB6cTwgyWnblzDqIJ5WGWZQiomBE2MwqsYx5FOM0oXbukQlTWR/RxT/97f3nt8GsE5EqPk/uDWAiPY383R2wIY8FaehwA7iq9nKwbr7oQO2ZNtcR8VW0JeFc3b0r4F8uL4GDP7Le3JDe+o6Aoi+SFu+adjVQtRCX4IwH3CznhBP4wkfcTIBLxSVGzW2Fe3YHki6lulf6jBs1Of3VpzdPADADIzd0k1nOUVYZuUQU5IUlbtIdiIXPNhfNCkP5uMHBnIV8lc24=" 41 | # Ex. travis encrypt GH_TOKEN=XXX-https://github.com/settings/tokens-XXX --add 42 | - secure: "ns5to8XeWdzLtfc47Jh4XIIH97SRNaC3Rx6dlvM+7ic+WnVJETMneOcXc1yFfklwUzVj77ImMDCbgt+dE8nsnrKVsXYBT3jhCoz/S/m171ESlSPElrKrWUXDXpbxnsPlu3Ch2rhlkYAlN/AYC0vTwaDQvmKkoc6q7Uhqza8h4i6PxTIDwmv5zgR9EK+DqxoF3gpTv169hlQNXa1eIv5XeskjVdLtLPENxrUyaOJsEHljCEGtF9QZSicm+udSj1dbdoa/Fd5T2cFXvsdHaNZUT5I4WZTvBSw25BqhvkpKePuLV7naDoeUDhqnrBdtb2ZPFiR2icClm8HTpNH6MdTJ6mPWf8gl27P97D7Za0qFgriz9XkiuEpQJ0ukwm62EQmCxsdABB3VvxOG8Y6TUy2aHxDPQZrTszm+2jKxEVRzTlqkUbKaVDG0dMr8OejCML84JSWyYwq1XoO8vZ9gQEkmhY1Gp37SdaHD8qneX/rLiXCw4M/N9+Lx9+NzvaCrq6LpE7YBUznu4TBJFoz+/lv6osS/iviLIIBZM9PRinBn6NxPoeCQ9QzzyWikhRR/OnNyPZUiXI+toYdIt8BwulBvB6oNL7JPyQzmPoUo6+/CeYhHiAq+fQh2kYSSc4F4sowxFKY8DuDz3gMGdyQVds4+LHu523IR20QFaRTJGA9AD0I=" 43 | # Ex. 
travis encrypt SONATYPE_USER=your_sonatype_account 44 | - secure: "VA+eY7OL05AW3KyGiKret73Gz79W/DfQsB2rV68pTK5ioWVl0HyiTxftSZt1nfuaedPy1Tka+nL9hZSKi3K+Yweh3PsAsY2hGAw7brenOXT0c0pFhr4L7ryuCRnt9v+qDdZQY9m3RY97SsD80dytRYeLKoruCyzqJfU95a/5BJFqKpR7+AMrlMcyOP5qMgFwPVTz9w2JmkNPKdkGUsQPE+rW6p5JdOTlhNQi4nxmaCsYPJ42aV9gZUdBoaWfbei3PZLlrFl6Y0HU3ve4MzwUPZ22py/pp8zo310Pg+9oBjEnBtjDAKhJLn74NAIJbU1XqeyE6ybRXY9S7x1BLn+YXolTerCpZZUIc/kJNGfMM6N2qKELrZArE3cGLKOimeEzMVM2GCjRlJWtyTANpaTCH5Zenciiqst6/OPyGHC5zUXrdhzsRV8MHSKWwMTO6XI9dVTODUyJDfhKg9uhfV3wmPUw6pc9zifxiju4tqaGykwFewT8uwZW2uzPBBKYm4GndfeyMm/yR5XIFOADoXk62JmqczYM+yq8r1V/+yv2F4iYvl1YWOTRdDLzxkmj8gJR43+czFsOf4v54kFpLJB3JHWz9LAj0O/lH+JL6IKGcAyAWc47WrHgib/lEp43ERKhI8P7kF+L1Qpifi2uZ1QM0DdvfigbVhJfbMyCFjxvtuw=" 45 | # Ex. travis encrypt SONATYPE_PASSWORD=your_sonatype_password 46 | - secure: "VEV7do1Q4G5LEPwEicnAGMDg6kZKWrE6+HpWOBN8h//cuwRyXu+UcUvPhy5YQx2SSWYJtqem+PC32/3zgVrCpQWx0JPry8h+5V+IAyCtNv3Oi9udvggbY03T+DlpelXZzsUV0Fnbthc3m2uQgnkN6O7vswVLFXN7lO7GJESdEkapmWETp/v/IROsA9cwCBAGCkSgth4AEFUdihToCVd34PwiEPZV6ZmHYF9pZIWJZ+K+b23YiurGibyhdfChTvy6Y5bhG6VSrwofPXigVV5s/jy6QnBmdSYNjrKY9g1/ahFeNVhd/NTJU3SnyaEOJERiGtfw1xvvSFbCO8Hpo5QPx8fkMTXs2xpT9Cc0FuQkBF0kXeFy/lWklgW0vXcvbMo9R9ePTYul2oQtjxCrXApaLm2x9PsOpL7IkNT1acjkcOpBS3AIj2BjS2bSrPinf9eOj3bu8a9mhmG6LowczsqRHxY90JGfWoVGOZC5AvntvPteBRMpW46YY/z8atSNR24Fxp6cXNe6BOY32do+ZRVpHT/hu0jzaxHpYqyriUjWjtPq0rqfWEwrn/dtasBB7c/Xm0Clo1k7ePLK86QVIKGtYXqAE3S8yWD1f690nmLlMly8W1P2AGFhurydda86Oaz5mltsY39t1hLfpE2VGbsoLVJva/6Plt5HwbKI4jFYmrw=" 47 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] [![Released Version][maven-img]][maven] [![Apache-2.0 license](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) 2 | 3 | 4 | # OpenTracing Apache Kafka Client Instrumentation 5 | OpenTracing instrumentation for Apache Kafka Client. 6 | Two solutions are provided: 7 | 1. 
Based on decorated Producer and Consumer 8 | 1. Based on Interceptors 9 | 10 | ## Requirements 11 | 12 | - Java 8 13 | - Kafka 2.2.0 14 | 15 | ## Installation 16 | 17 | ### Kafka Client 18 | 19 | pom.xml 20 | ```xml 21 | <dependency> 22 | <groupId>io.opentracing.contrib</groupId> 23 | <artifactId>opentracing-kafka-client</artifactId> 24 | <version>VERSION</version> 25 | </dependency> 26 | ``` 27 | 28 | ### Kafka Streams 29 | 30 | pom.xml 31 | ```xml 32 | <dependency> 33 | <groupId>io.opentracing.contrib</groupId> 34 | <artifactId>opentracing-kafka-streams</artifactId> 35 | <version>VERSION</version> 36 | </dependency> 37 | ``` 38 | 39 | ### Spring Kafka 40 | 41 | pom.xml 42 | ```xml 43 | <dependency> 44 | <groupId>io.opentracing.contrib</groupId> 45 | <artifactId>opentracing-kafka-spring</artifactId> 46 | <version>VERSION</version> 47 | </dependency> 48 | ``` 49 | 50 | ## Usage 51 | 52 | ```java 53 | 54 | // Instantiate tracer 55 | Tracer tracer = ... 56 | 57 | // Optionally register tracer with GlobalTracer 58 | GlobalTracer.register(tracer); 59 | ``` 60 | 61 | ### Kafka Client 62 | 63 | #### Decorators based solution 64 | 65 | ```java 66 | 67 | // Instantiate KafkaProducer 68 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps); 69 | 70 | // Decorate KafkaProducer with TracingKafkaProducer 71 | TracingKafkaProducer<Integer, String> tracingProducer = new TracingKafkaProducer<>(producer, 72 | tracer); 73 | 74 | // Send 75 | tracingProducer.send(...); 76 | 77 | // Instantiate KafkaConsumer 78 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps); 79 | 80 | // Decorate KafkaConsumer with TracingKafkaConsumer 81 | TracingKafkaConsumer<Integer, String> tracingConsumer = new TracingKafkaConsumer<>(consumer, 82 | tracer); 83 | 84 | // Subscribe 85 | tracingConsumer.subscribe(Collections.singletonList("messages")); 86 | 87 | // Get records 88 | ConsumerRecords<Integer, String> records = tracingConsumer.poll(1000); 89 | 90 | // To retrieve SpanContext from polled record (Consumer side) 91 | ConsumerRecord<Integer, String> record = ... 92 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer); 93 | 94 | ``` 95 | 96 | ##### Custom Span Names for Decorators based solution 97 | The decorator-based solution supports custom span names: pass a BiFunction object as an additional 98 | argument to the TracingKafkaConsumer or TracingKafkaProducer constructor, using either one of the provided BiFunctions or 99 | your own custom one. 100 | 101 | ```java 102 | // Create BiFunction for the KafkaProducer that operates on 103 | // (String operationName, ProducerRecord producerRecord) and 104 | // returns a String to be used as the name 105 | BiFunction<String, ProducerRecord, String> producerSpanNameProvider = 106 | (operationName, producerRecord) -> "CUSTOM_PRODUCER_NAME"; 107 | 108 | // Instantiate KafkaProducer 109 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps); 110 | 111 | // Decorate KafkaProducer with TracingKafkaProducer 112 | TracingKafkaProducer<Integer, String> tracingProducer = new TracingKafkaProducer<>(producer, 113 | tracer, 114 | producerSpanNameProvider); 115 | // Spans created by the tracingProducer will now have "CUSTOM_PRODUCER_NAME" as the span name.
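// A minimal sketch of the same wiring using one of the pre-made providers from
// ClientSpanNameProvider (listed under "Pre-made Span Name Providers" below) instead of
// a custom lambda; PRODUCER_TOPIC names each span after the record's destination topic.
// The variable name and the <Integer, String> types here are illustrative, not part of the library.
TracingKafkaProducer<Integer, String> topicNamedProducer = new TracingKafkaProducer<>(producer,
    tracer,
    ClientSpanNameProvider.PRODUCER_TOPIC);
// e.g. a record sent to topic "messages" produces a span named "messages".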
116 | 117 | 118 | // Create BiFunction for the KafkaConsumer that operates on 119 | // (String operationName, ConsumerRecord consumerRecord) and 120 | // returns a String to be used as the name 121 | BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider = 122 | (operationName, consumerRecord) -> operationName.toUpperCase(); 123 | // Instantiate KafkaConsumer 124 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps); 125 | // Decorate KafkaConsumer with TracingKafkaConsumer, passing in the consumerSpanNameProvider BiFunction 126 | TracingKafkaConsumer<Integer, String> tracingConsumer = new TracingKafkaConsumer<>(consumer, 127 | tracer, 128 | consumerSpanNameProvider); 129 | // Spans created by the tracingConsumer will now have the capitalized operation name as the span name. 130 | // "receive" -> "RECEIVE" 131 | ``` 132 | 133 | 134 | #### Interceptors based solution 135 | ```java 136 | // Register tracer with GlobalTracer: 137 | GlobalTracer.register(tracer); 138 | 139 | // Add TracingProducerInterceptor to sender properties: 140 | senderProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, 141 | TracingProducerInterceptor.class.getName()); 142 | 143 | // Instantiate KafkaProducer 144 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps); 145 | 146 | // Send 147 | producer.send(...); 148 | 149 | // Add TracingConsumerInterceptor to consumer properties: 150 | consumerProps.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, 151 | TracingConsumerInterceptor.class.getName()); 152 | 153 | // Instantiate KafkaConsumer 154 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps); 155 | 156 | // Subscribe 157 | consumer.subscribe(Collections.singletonList("messages")); 158 | 159 | // Get records 160 | ConsumerRecords<Integer, String> records = consumer.poll(1000); 161 | 162 | // To retrieve SpanContext from polled record (Consumer side) 163 | ConsumerRecord<Integer, String> record = ... 164 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer); 165 | 166 | ``` 167 | 168 | 169 | ### Kafka Streams 170 | 171 | ```java 172 | 173 | // Instantiate TracingKafkaClientSupplier 174 | KafkaClientSupplier supplier = new TracingKafkaClientSupplier(tracer); 175 | 176 | // Provide supplier to KafkaStreams 177 | KafkaStreams streams = new KafkaStreams(builder.build(), new StreamsConfig(config), supplier); 178 | streams.start(); 179 | 180 | ``` 181 | 182 | ### Spring Kafka 183 | 184 | ```java 185 | 186 | // Declare Tracer bean 187 | @Bean 188 | public Tracer tracer() { 189 | return ...
190 | } 191 | 192 | 193 | // Decorate ConsumerFactory with TracingConsumerFactory 194 | @Bean 195 | public ConsumerFactory<Integer, String> consumerFactory() { 196 | return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps()), tracer()); 197 | } 198 | 199 | // Decorate ProducerFactory with TracingProducerFactory 200 | @Bean 201 | public ProducerFactory<Integer, String> producerFactory() { 202 | return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(producerProps()), tracer()); 203 | } 204 | 205 | // Use decorated ProducerFactory in KafkaTemplate 206 | @Bean 207 | public KafkaTemplate<Integer, String> kafkaTemplate() { 208 | return new KafkaTemplate<>(producerFactory()); 209 | } 210 | 211 | // Use an aspect to decorate @KafkaListeners 212 | @Bean 213 | public TracingKafkaAspect tracingKafkaAspect() { 214 | return new TracingKafkaAspect(tracer()); 215 | } 216 | ``` 217 | 218 | ##### Custom Span Names for Spring Kafka 219 | The Spring Kafka factory implementations support custom span names: pass a BiFunction object as an additional 220 | argument to the TracingConsumerFactory or TracingProducerFactory constructor, using either one of the provided BiFunctions or 221 | your own custom one. 222 | 223 | ```java 224 | // Create BiFunction for the KafkaProducerFactory that operates on 225 | // (String operationName, ProducerRecord producerRecord) and 226 | // returns a String to be used as the name 227 | BiFunction<String, ProducerRecord, String> producerSpanNameProvider = 228 | (operationName, producerRecord) -> "CUSTOM_PRODUCER_NAME"; 229 | 230 | // Decorate ProducerFactory with TracingProducerFactory, passing in the producerSpanNameProvider BiFunction 231 | @Bean 232 | public ProducerFactory<Integer, String> producerFactory() { 233 | return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(producerProps()), tracer(), producerSpanNameProvider); 234 | } 235 | // Spans created by producers from this factory will now have "CUSTOM_PRODUCER_NAME" as the span name. 236 | 237 | 238 | // Create BiFunction for the KafkaConsumerFactory that operates on 239 | // (String operationName, ConsumerRecord consumerRecord) and 240 | // returns a String to be used as the name 241 | BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider = 242 | (operationName, consumerRecord) -> operationName.toUpperCase(); 243 | 244 | // Decorate ConsumerFactory with TracingConsumerFactory, passing in the consumerSpanNameProvider BiFunction 245 | @Bean 246 | public ConsumerFactory<Integer, String> consumerFactory() { 247 | return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps()), tracer(), consumerSpanNameProvider); 248 | } 249 | // Consumers created by this factory will now use the capitalized operation name as the span name ("receive" -> "RECEIVE"). 250 | ``` 251 | 252 | #### Pre-made Span Name Providers 253 | 254 | The following BiFunctions are already included in the ClientSpanNameProvider class, with `CONSUMER_OPERATION_NAME` and `PRODUCER_OPERATION_NAME` being the defaults if no 255 | spanNameProvider is provided: 256 | 257 | - `CONSUMER_OPERATION_NAME` and `PRODUCER_OPERATION_NAME` : Returns the `operationName` as the span name ("receive" for Consumer, "send" for Producer). 258 | - `CONSUMER_PREFIXED_OPERATION_NAME(String prefix)` and `PRODUCER_PREFIXED_OPERATION_NAME(String prefix)` : Returns a String concatenation of `prefix` and `operationName`. 259 | - `CONSUMER_TOPIC` and `PRODUCER_TOPIC` : Returns the Kafka topic name that the record was pushed to/pulled from (`record.topic()`). 260 | - `CONSUMER_PREFIXED_TOPIC(String prefix)` and `PRODUCER_PREFIXED_TOPIC(String prefix)` : Returns a String concatenation of `prefix` and the Kafka topic name (`record.topic()`). 261 | - `CONSUMER_OPERATION_NAME_TOPIC` and `PRODUCER_OPERATION_NAME_TOPIC` : Returns "`operationName` - `record.topic()`".
262 | - `CONSUMER_PREFIXED_OPERATION_NAME_TOPIC(String prefix)` and `PRODUCER_PREFIXED_OPERATION_NAME_TOPIC(String prefix)` : Returns a String concatenation of `prefix` and "`operationName` - `record.topic()`". 263 | 264 | ## License 265 | 266 | [Apache 2.0 License](./LICENSE). 267 | 268 | [ci-img]: https://travis-ci.org/opentracing-contrib/java-kafka-client.svg?branch=master 269 | [ci]: https://travis-ci.org/opentracing-contrib/java-kafka-client 270 | [cov-img]: https://coveralls.io/repos/github/opentracing-contrib/java-kafka-client/badge.svg?branch=master 271 | [cov]: https://coveralls.io/github/opentracing-contrib/java-kafka-client?branch=master 272 | [maven-img]: https://img.shields.io/maven-central/v/io.opentracing.contrib/opentracing-kafka-client.svg 273 | [maven]: http://search.maven.org/#search%7Cga%7C1%7Copentracing-kafka-client 274 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # OpenTracing Release Process 2 | 3 | This repo uses semantic versions. Please keep this in mind when choosing version numbers. 4 | 5 | For the up-to-date release process, please refer the 6 | [release process from the OpenTracing Java API](https://github.com/opentracing/opentracing-java/blob/master/RELEASE.md). -------------------------------------------------------------------------------- /header.txt: -------------------------------------------------------------------------------- 1 | Copyright ${project.inceptionYear} The OpenTracing Authors 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 4 | in compliance with the License. You may obtain a copy of the License at 5 | 6 | http://www.apache.org/licenses/LICENSE-2.0 7 | 8 | Unless required by applicable law or agreed to in writing, software distributed under the License 9 | is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 10 | or implied. See the License for the specific language governing permissions and limitations under 11 | the License. 12 | -------------------------------------------------------------------------------- /mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 
19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ]; then 38 | 39 | if [ -f /etc/mavenrc ]; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ]; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 50 | cygwin=false 51 | darwin=false 52 | mingw=false 53 | case "$(uname)" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true ;; 56 | Darwin*) 57 | darwin=true 58 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 59 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 60 | if [ -z "$JAVA_HOME" ]; then 61 | if [ -x "/usr/libexec/java_home" ]; then 62 | export JAVA_HOME="$(/usr/libexec/java_home)" 63 | else 64 | export JAVA_HOME="/Library/Java/Home" 65 | fi 66 | fi 67 | ;; 68 | esac 69 | 70 | if [ -z "$JAVA_HOME" ]; then 71 | if [ -r /etc/gentoo-release ]; then 72 | JAVA_HOME=$(java-config --jre-home) 73 | fi 74 | fi 75 | 76 | if [ -z "$M2_HOME" ]; then 77 | ## resolve links - $0 may be a link to maven's home 78 | PRG="$0" 79 | 80 | # need this for relative symlinks 81 | while [ -h "$PRG" ]; do 82 | ls=$(ls -ld "$PRG") 83 | link=$(expr "$ls" : '.*-> \(.*\)$') 84 | if expr "$link" : '/.*' >/dev/null; then 85 | PRG="$link" 86 | else 87 | PRG="$(dirname "$PRG")/$link" 88 | fi 89 | done 90 | 91 | saveddir=$(pwd) 92 | 93 | M2_HOME=$(dirname "$PRG")/.. 94 | 95 | # make it fully qualified 96 | M2_HOME=$(cd "$M2_HOME" && pwd) 97 | 98 | cd "$saveddir" 99 | # echo Using m2 at $M2_HOME 100 | fi 101 | 102 | # For Cygwin, ensure paths are in UNIX format before anything is touched 103 | if $cygwin; then 104 | [ -n "$M2_HOME" ] && 105 | M2_HOME=$(cygpath --unix "$M2_HOME") 106 | [ -n "$JAVA_HOME" ] && 107 | JAVA_HOME=$(cygpath --unix "$JAVA_HOME") 108 | [ -n "$CLASSPATH" ] && 109 | CLASSPATH=$(cygpath --path --unix "$CLASSPATH") 110 | fi 111 | 112 | # For Migwn, ensure paths are in UNIX format before anything is touched 113 | if $mingw; then 114 | [ -n "$M2_HOME" ] && 115 | M2_HOME="$( ( 116 | cd "$M2_HOME" 117 | pwd 118 | ))" 119 | [ -n "$JAVA_HOME" ] && 120 | JAVA_HOME="$( ( 121 | cd "$JAVA_HOME" 122 | pwd 123 | ))" 124 | # TODO classpath? 125 | fi 126 | 127 | if [ -z "$JAVA_HOME" ]; then 128 | javaExecutable="$(which javac)" 129 | if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then 130 | # readlink(1) is not available as standard on Solaris 10. 131 | readLink=$(which readlink) 132 | if [ ! 
$(expr "$readLink" : '\([^ ]*\)') = "no" ]; then 133 | if $darwin; then 134 | javaHome="$(dirname \"$javaExecutable\")" 135 | javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac" 136 | else 137 | javaExecutable="$(readlink -f \"$javaExecutable\")" 138 | fi 139 | javaHome="$(dirname \"$javaExecutable\")" 140 | javaHome=$(expr "$javaHome" : '\(.*\)/bin') 141 | JAVA_HOME="$javaHome" 142 | export JAVA_HOME 143 | fi 144 | fi 145 | fi 146 | 147 | if [ -z "$JAVACMD" ]; then 148 | if [ -n "$JAVA_HOME" ]; then 149 | if [ -x "$JAVA_HOME/jre/sh/java" ]; then 150 | # IBM's JDK on AIX uses strange locations for the executables 151 | JAVACMD="$JAVA_HOME/jre/sh/java" 152 | else 153 | JAVACMD="$JAVA_HOME/bin/java" 154 | fi 155 | else 156 | JAVACMD="$(which java)" 157 | fi 158 | fi 159 | 160 | if [ ! -x "$JAVACMD" ]; then 161 | echo "Error: JAVA_HOME is not defined correctly." >&2 162 | echo " We cannot execute $JAVACMD" >&2 163 | exit 1 164 | fi 165 | 166 | if [ -z "$JAVA_HOME" ]; then 167 | echo "Warning: JAVA_HOME environment variable is not set." 168 | fi 169 | 170 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 171 | 172 | # traverses directory structure from process work directory to filesystem root 173 | # first directory with .mvn subdirectory is considered project base directory 174 | find_maven_basedir() { 175 | 176 | if [ -z "$1" ]; then 177 | echo "Path not specified to find_maven_basedir" 178 | return 1 179 | fi 180 | 181 | basedir="$1" 182 | wdir="$1" 183 | while [ "$wdir" != '/' ]; do 184 | if [ -d "$wdir"/.mvn ]; then 185 | basedir=$wdir 186 | break 187 | fi 188 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 189 | if [ -d "${wdir}" ]; then 190 | wdir=$( 191 | cd "$wdir/.." 192 | pwd 193 | ) 194 | fi 195 | # end of workaround 196 | done 197 | echo "${basedir}" 198 | } 199 | 200 | # concatenates all lines of a file 201 | concat_lines() { 202 | if [ -f "$1" ]; then 203 | echo "$(tr -s '\n' ' ' <"$1")" 204 | fi 205 | } 206 | 207 | BASE_DIR=$(find_maven_basedir "$(pwd)") 208 | if [ -z "$BASE_DIR" ]; then 209 | exit 1 210 | fi 211 | 212 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 213 | echo $MAVEN_PROJECTBASEDIR 214 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 215 | 216 | # For Cygwin, switch paths to Windows format before running java 217 | if $cygwin; then 218 | [ -n "$M2_HOME" ] && 219 | M2_HOME=$(cygpath --path --windows "$M2_HOME") 220 | [ -n "$JAVA_HOME" ] && 221 | JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") 222 | [ -n "$CLASSPATH" ] && 223 | CLASSPATH=$(cygpath --path --windows "$CLASSPATH") 224 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 225 | MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") 226 | fi 227 | 228 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 229 | 230 | exec "$JAVACMD" \ 231 | $MAVEN_OPTS \ 232 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 233 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 234 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 235 | -------------------------------------------------------------------------------- /mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. 
See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' 39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 40 | 41 | @REM set %HOME% to equivalent of $HOME 42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 43 | 44 | @REM Execute a user defined script before this one 45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 49 | :skipRcPre 50 | 51 | @setlocal 52 | 53 | set ERROR_CODE=0 54 | 55 | @REM To isolate internal variables from possible post scripts, we use another setlocal 56 | @setlocal 57 | 58 | @REM ==== START VALIDATION ==== 59 | if not "%JAVA_HOME%" == "" goto OkJHome 60 | 61 | echo. 62 | echo Error: JAVA_HOME not found in your environment. >&2 63 | echo Please set the JAVA_HOME variable in your environment to match the >&2 64 | echo location of your Java installation. >&2 65 | echo. 66 | goto error 67 | 68 | :OkJHome 69 | if exist "%JAVA_HOME%\bin\java.exe" goto init 70 | 71 | echo. 72 | echo Error: JAVA_HOME is set to an invalid directory. >&2 73 | echo JAVA_HOME = "%JAVA_HOME%" >&2 74 | echo Please set the JAVA_HOME variable in your environment to match the >&2 75 | echo location of your Java installation. >&2 76 | echo. 77 | goto error 78 | 79 | @REM ==== END VALIDATION ==== 80 | 81 | :init 82 | 83 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 84 | @REM Fallback to current working directory if not found. 
85 | 86 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 87 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 88 | 89 | set EXEC_DIR=%CD% 90 | set WDIR=%EXEC_DIR% 91 | :findBaseDir 92 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 93 | cd .. 94 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 95 | set WDIR=%CD% 96 | goto findBaseDir 97 | 98 | :baseDirFound 99 | set MAVEN_PROJECTBASEDIR=%WDIR% 100 | cd "%EXEC_DIR%" 101 | goto endDetectBaseDir 102 | 103 | :baseDirNotFound 104 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 105 | cd "%EXEC_DIR%" 106 | 107 | :endDetectBaseDir 108 | 109 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 110 | 111 | @setlocal EnableExtensions EnableDelayedExpansion 112 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 113 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 114 | 115 | :endReadAdditionalConfig 116 | 117 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 118 | 119 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 120 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 121 | 122 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 123 | if ERRORLEVEL 1 goto error 124 | goto end 125 | 126 | :error 127 | set ERROR_CODE=1 128 | 129 | :end 130 | @endlocal & set ERROR_CODE=%ERROR_CODE% 131 | 132 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 133 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 134 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 135 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 136 | :skipRcPost 137 | 138 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 139 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 140 | 141 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 142 | 143 | exit /B %ERROR_CODE% 144 | -------------------------------------------------------------------------------- /opentracing-kafka-client/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 19 | opentracing-kafka-parent 20 | io.opentracing.contrib 21 | 0.1.16-SNAPSHOT 22 | 23 | 4.0.0 24 | 25 | opentracing-kafka-client 26 | ${project.groupId}:${project.artifactId} 27 | OpenTracing Instrumentation for Apache Kafka Client 28 | 29 | 30 | 31 | org.apache.kafka 32 | kafka-clients 33 | provided 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/ClientSpanNameProvider.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | 15 | package io.opentracing.contrib.kafka; 16 | 17 | import java.util.function.BiFunction; 18 | import org.apache.kafka.clients.consumer.ConsumerRecord; 19 | import org.apache.kafka.clients.producer.ProducerRecord; 20 | 21 | /** 22 | * @author Jordan J Lopez Returns a string to be used as the name of the spans, based on the 23 | * operation preformed and the record the span is based off of. 24 | */ 25 | public class ClientSpanNameProvider { 26 | 27 | // Operation Name as Span Name 28 | public static BiFunction CONSUMER_OPERATION_NAME = 29 | (operationName, consumerRecord) -> replaceIfNull(operationName, "unknown"); 30 | public static BiFunction PRODUCER_OPERATION_NAME = 31 | (operationName, producerRecord) -> replaceIfNull(operationName, "unknown"); 32 | 33 | public static BiFunction CONSUMER_PREFIXED_OPERATION_NAME( 34 | final String prefix) { 35 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "") 36 | + replaceIfNull(operationName, "unknown"); 37 | } 38 | 39 | public static BiFunction PRODUCER_PREFIXED_OPERATION_NAME( 40 | final String prefix) { 41 | return (operationName, producerRecord) -> replaceIfNull(prefix, "") 42 | + replaceIfNull(operationName, "unknown"); 43 | } 44 | 45 | // Topic as Span Name 46 | public static BiFunction CONSUMER_TOPIC = 47 | (operationName, consumerRecord) -> replaceIfNull(consumerRecord, "unknown"); 48 | public static BiFunction PRODUCER_TOPIC = 49 | (operationName, producerRecord) -> replaceIfNull(producerRecord, "unknown"); 50 | 51 | public static BiFunction CONSUMER_PREFIXED_TOPIC( 52 | final String prefix) { 53 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "") 54 | + replaceIfNull(consumerRecord, "unknown"); 55 | } 56 | 57 | public static BiFunction PRODUCER_PREFIXED_TOPIC( 58 | final String prefix) { 59 | return (operationName, producerRecord) -> replaceIfNull(prefix, "") 60 | + replaceIfNull(producerRecord, "unknown"); 61 | } 62 | 63 | // Operation Name and Topic as Span Name 64 | public static BiFunction CONSUMER_OPERATION_NAME_TOPIC = 65 | (operationName, consumerRecord) -> replaceIfNull(operationName, "unknown") 66 | + " - " + replaceIfNull(consumerRecord, "unknown"); 67 | public static BiFunction PRODUCER_OPERATION_NAME_TOPIC = 68 | (operationName, producerRecord) -> replaceIfNull(operationName, "unknown") 69 | + " - " + replaceIfNull(producerRecord, "unknown"); 70 | 71 | public static BiFunction CONSUMER_PREFIXED_OPERATION_NAME_TOPIC( 72 | final String prefix) { 73 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "") 74 | + replaceIfNull(operationName, "unknown") 75 | + " - " + replaceIfNull(consumerRecord, "unknown"); 76 | } 77 | 78 | public static BiFunction PRODUCER_PREFIXED_OPERATION_NAME_TOPIC( 79 | final String prefix) { 80 | return (operationName, producerRecord) -> replaceIfNull(prefix, "") 81 | + replaceIfNull(operationName, "unknown") 82 | + " - " + replaceIfNull(producerRecord, "unknown"); 83 | } 84 | 85 | private static String replaceIfNull(String input, String replacement) { 86 | return (input == null) ? replacement : input; 87 | } 88 | 89 | private static String replaceIfNull(ConsumerRecord input, String replacement) { 90 | return ((input == null) ? replacement : input.topic()); 91 | } 92 | 93 | private static String replaceIfNull(ProducerRecord input, String replacement) { 94 | return ((input == null) ? 
replacement : input.topic()); 95 | } 96 | 97 | } 98 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/HeadersMapExtractAdapter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.propagation.TextMap; 17 | import java.nio.charset.StandardCharsets; 18 | import java.util.HashMap; 19 | import java.util.Iterator; 20 | import java.util.Map; 21 | import java.util.Map.Entry; 22 | import org.apache.kafka.common.header.Header; 23 | import org.apache.kafka.common.header.Headers; 24 | 25 | 26 | public class HeadersMapExtractAdapter implements TextMap { 27 | 28 | private final Map map = new HashMap<>(); 29 | 30 | public HeadersMapExtractAdapter(Headers headers) { 31 | for (Header header : headers) { 32 | byte[] headerValue = header.value(); 33 | map.put(header.key(), 34 | headerValue == null ? null : new String(headerValue, StandardCharsets.UTF_8)); 35 | } 36 | } 37 | 38 | @Override 39 | public Iterator> iterator() { 40 | return map.entrySet().iterator(); 41 | } 42 | 43 | @Override 44 | public void put(String key, String value) { 45 | throw new UnsupportedOperationException( 46 | "HeadersMapExtractAdapter should only be used with Tracer.extract()"); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/HeadersMapInjectAdapter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import io.opentracing.propagation.TextMap; 18 | import java.nio.charset.StandardCharsets; 19 | import java.util.Iterator; 20 | import java.util.Map.Entry; 21 | import org.apache.kafka.common.header.Headers; 22 | 23 | public class HeadersMapInjectAdapter implements TextMap { 24 | 25 | private final Headers headers; 26 | 27 | public HeadersMapInjectAdapter(Headers headers) { 28 | this.headers = headers; 29 | } 30 | 31 | @Override 32 | public Iterator> iterator() { 33 | throw new UnsupportedOperationException("iterator should never be used with Tracer.inject()"); 34 | } 35 | 36 | @Override 37 | public void put(String key, String value) { 38 | headers.add(key, value.getBytes(StandardCharsets.UTF_8)); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/SpanDecorator.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.Span; 17 | import org.apache.kafka.clients.consumer.ConsumerRecord; 18 | import org.apache.kafka.clients.producer.ProducerRecord; 19 | 20 | public interface SpanDecorator { 21 | 22 | /** 23 | * Method called before record is sent by producer 24 | */ 25 | void onSend(ProducerRecord record, Span span); 26 | 27 | /** 28 | * Method called when record is received in consumer 29 | */ 30 | void onResponse(ConsumerRecord record, Span span); 31 | 32 | /** 33 | * Method called when an error occurs 34 | */ 35 | void onError(Exception exception, Span span); 36 | 37 | /** 38 | * Gives a SpanDecorator with the standard tags 39 | */ 40 | SpanDecorator STANDARD_TAGS = new StandardSpanDecorator(); 41 | } 42 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/StandardSpanDecorator.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.Span; 17 | import io.opentracing.tag.Tags; 18 | import java.io.PrintWriter; 19 | import java.io.StringWriter; 20 | import java.util.HashMap; 21 | import java.util.Map; 22 | import org.apache.kafka.clients.consumer.ConsumerRecord; 23 | import org.apache.kafka.clients.producer.ProducerRecord; 24 | 25 | class StandardSpanDecorator implements SpanDecorator { 26 | 27 | static final String COMPONENT_NAME = "java-kafka"; 28 | static final String KAFKA_SERVICE = "kafka"; 29 | 30 | public void onSend(ProducerRecord record, Span span) { 31 | setCommonTags(span); 32 | Tags.MESSAGE_BUS_DESTINATION.set(span, record.topic()); 33 | if (record.partition() != null) { 34 | span.setTag("partition", record.partition()); 35 | } 36 | } 37 | 38 | public void onResponse(ConsumerRecord record, Span span) { 39 | setCommonTags(span); 40 | Tags.MESSAGE_BUS_DESTINATION.set(span, record.topic()); 41 | span.setTag("partition", record.partition()); 42 | span.setTag("offset", record.offset()); 43 | } 44 | 45 | public void onError(Exception exception, Span span) { 46 | Tags.ERROR.set(span, Boolean.TRUE); 47 | span.log(errorLogs(exception)); 48 | } 49 | 50 | private static Map errorLogs(Throwable throwable) { 51 | Map errorLogs = new HashMap<>(4); 52 | errorLogs.put("event", Tags.ERROR.getKey()); 53 | errorLogs.put("error.kind", throwable.getClass().getName()); 54 | errorLogs.put("error.object", throwable); 55 | errorLogs.put("message", throwable.getMessage()); 56 | 57 | StringWriter sw = new StringWriter(); 58 | throwable.printStackTrace(new PrintWriter(sw)); 59 | errorLogs.put("stack", sw.toString()); 60 | 61 | return errorLogs; 62 | } 63 | 64 | private static void setCommonTags(Span span) { 65 | Tags.COMPONENT.set(span, COMPONENT_NAME); 66 | Tags.PEER_SERVICE.set(span, KAFKA_SERVICE); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingCallback.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import io.opentracing.Scope; 18 | import io.opentracing.Span; 19 | import io.opentracing.Tracer; 20 | import java.util.Collection; 21 | import java.util.Collections; 22 | import org.apache.kafka.clients.producer.Callback; 23 | import org.apache.kafka.clients.producer.RecordMetadata; 24 | 25 | /** 26 | * Callback executed after the producer has finished sending a message 27 | */ 28 | public class TracingCallback implements Callback { 29 | private final Callback callback; 30 | private Collection spanDecorators; 31 | private final Span span; 32 | private final Tracer tracer; 33 | 34 | public TracingCallback(Callback callback, Span span, Tracer tracer) { 35 | this.callback = callback; 36 | this.span = span; 37 | this.tracer = tracer; 38 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS); 39 | } 40 | 41 | TracingCallback(Callback callback, Span span, Tracer tracer, 42 | Collection spanDecorators) { 43 | this.callback = callback; 44 | this.span = span; 45 | this.tracer = tracer; 46 | this.spanDecorators = spanDecorators; 47 | } 48 | 49 | @Override 50 | public void onCompletion(RecordMetadata metadata, Exception exception) { 51 | if (exception != null) { 52 | for (SpanDecorator decorator : spanDecorators) { 53 | decorator.onError(exception, span); 54 | } 55 | } 56 | 57 | try (Scope ignored = tracer.scopeManager().activate(span)) { 58 | if (callback != null) { 59 | callback.onCompletion(metadata, exception); 60 | } 61 | } finally { 62 | span.finish(); 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingConsumerInterceptor.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.util.GlobalTracer; 17 | import java.util.Map; 18 | import org.apache.kafka.clients.consumer.ConsumerInterceptor; 19 | import org.apache.kafka.clients.consumer.ConsumerRecord; 20 | import org.apache.kafka.clients.consumer.ConsumerRecords; 21 | import org.apache.kafka.clients.consumer.OffsetAndMetadata; 22 | import org.apache.kafka.common.TopicPartition; 23 | 24 | public class TracingConsumerInterceptor implements ConsumerInterceptor { 25 | 26 | @Override 27 | public ConsumerRecords onConsume(ConsumerRecords records) { 28 | for (ConsumerRecord record : records) { 29 | TracingKafkaUtils.buildAndFinishChildSpan(record, GlobalTracer.get()); 30 | } 31 | 32 | return records; 33 | } 34 | 35 | @Override 36 | public void onCommit(Map offsets) { 37 | 38 | } 39 | 40 | @Override 41 | public void close() { 42 | 43 | } 44 | 45 | @Override 46 | public void configure(Map configs) { 47 | 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaConsumer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS; 18 | 19 | import io.opentracing.Tracer; 20 | import java.time.Duration; 21 | import java.util.Collection; 22 | import java.util.Collections; 23 | import java.util.List; 24 | import java.util.Map; 25 | import java.util.Set; 26 | import java.util.concurrent.TimeUnit; 27 | import java.util.function.BiFunction; 28 | import java.util.regex.Pattern; 29 | import org.apache.kafka.clients.consumer.Consumer; 30 | import org.apache.kafka.clients.consumer.ConsumerGroupMetadata; 31 | import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; 32 | import org.apache.kafka.clients.consumer.ConsumerRecord; 33 | import org.apache.kafka.clients.consumer.ConsumerRecords; 34 | import org.apache.kafka.clients.consumer.OffsetAndMetadata; 35 | import org.apache.kafka.clients.consumer.OffsetAndTimestamp; 36 | import org.apache.kafka.clients.consumer.OffsetCommitCallback; 37 | import org.apache.kafka.common.Metric; 38 | import org.apache.kafka.common.MetricName; 39 | import org.apache.kafka.common.PartitionInfo; 40 | import org.apache.kafka.common.TopicPartition; 41 | 42 | public class TracingKafkaConsumer implements Consumer { 43 | 44 | private final Tracer tracer; 45 | private final Consumer consumer; 46 | private Collection spanDecorators; 47 | private final BiFunction consumerSpanNameProvider; 48 | 49 | TracingKafkaConsumer(Consumer consumer, Tracer tracer, 50 | Collection spanDecorators, 51 | BiFunction consumerSpanNameProvider) { 52 | this.consumer = consumer; 53 | this.tracer = tracer; 54 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators); 55 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null) 56 | ? ClientSpanNameProvider.CONSUMER_OPERATION_NAME 57 | : consumerSpanNameProvider; 58 | } 59 | 60 | public TracingKafkaConsumer(Consumer consumer, Tracer tracer) { 61 | this.consumer = consumer; 62 | this.tracer = tracer; 63 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS); 64 | this.consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME; 65 | } 66 | 67 | public TracingKafkaConsumer(Consumer consumer, Tracer tracer, 68 | BiFunction consumerSpanNameProvider) { 69 | this.consumer = consumer; 70 | this.tracer = tracer; 71 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS); 72 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null) 73 | ? 
ClientSpanNameProvider.CONSUMER_OPERATION_NAME 74 | : consumerSpanNameProvider; 75 | } 76 | 77 | @Override 78 | public Set assignment() { 79 | return consumer.assignment(); 80 | } 81 | 82 | @Override 83 | public Set subscription() { 84 | return consumer.subscription(); 85 | } 86 | 87 | @Override 88 | public void subscribe(Collection topics, ConsumerRebalanceListener listener) { 89 | consumer.subscribe(topics, listener); 90 | } 91 | 92 | @Override 93 | public void subscribe(Collection topics) { 94 | consumer.subscribe(topics); 95 | } 96 | 97 | @Override 98 | public void subscribe(Pattern pattern, ConsumerRebalanceListener listener) { 99 | consumer.subscribe(pattern, listener); 100 | } 101 | 102 | @Override 103 | public void subscribe(Pattern pattern) { 104 | consumer.subscribe(pattern); 105 | } 106 | 107 | @Override 108 | public void unsubscribe() { 109 | consumer.unsubscribe(); 110 | } 111 | 112 | @Override 113 | public void assign(Collection partitions) { 114 | consumer.assign(partitions); 115 | } 116 | 117 | @Override 118 | @Deprecated 119 | public ConsumerRecords poll(long timeout) { 120 | ConsumerRecords records = consumer.poll(timeout); 121 | 122 | for (ConsumerRecord record : records) { 123 | TracingKafkaUtils 124 | .buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider, spanDecorators); 125 | } 126 | 127 | return records; 128 | } 129 | 130 | @Override 131 | public ConsumerRecords poll(Duration duration) { 132 | ConsumerRecords records = consumer.poll(duration); 133 | 134 | for (ConsumerRecord record : records) { 135 | TracingKafkaUtils 136 | .buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider, spanDecorators); 137 | } 138 | 139 | return records; 140 | } 141 | 142 | @Override 143 | public void commitSync() { 144 | consumer.commitSync(); 145 | } 146 | 147 | @Override 148 | public void commitSync(Duration duration) { 149 | consumer.commitSync(duration); 150 | } 151 | 152 | @Override 153 | public void commitSync(Map offsets) { 154 | consumer.commitSync(offsets); 155 | } 156 | 157 | @Override 158 | public void commitSync(Map map, Duration duration) { 159 | consumer.commitSync(map, duration); 160 | } 161 | 162 | @Override 163 | public void commitAsync() { 164 | consumer.commitAsync(); 165 | } 166 | 167 | @Override 168 | public void commitAsync(OffsetCommitCallback callback) { 169 | consumer.commitAsync(callback); 170 | } 171 | 172 | @Override 173 | public void commitAsync(Map offsets, 174 | OffsetCommitCallback callback) { 175 | consumer.commitAsync(offsets, callback); 176 | } 177 | 178 | @Override 179 | public void seek(TopicPartition partition, long offset) { 180 | consumer.seek(partition, offset); 181 | } 182 | 183 | @Override 184 | public void seek(TopicPartition partition, OffsetAndMetadata offsetAndMetadata) { 185 | consumer.seek(partition, offsetAndMetadata); 186 | } 187 | 188 | @Override 189 | public void seekToBeginning(Collection partitions) { 190 | consumer.seekToBeginning(partitions); 191 | } 192 | 193 | @Override 194 | public void seekToEnd(Collection partitions) { 195 | consumer.seekToEnd(partitions); 196 | } 197 | 198 | @Override 199 | public long position(TopicPartition partition) { 200 | return consumer.position(partition); 201 | } 202 | 203 | @Override 204 | public long position(TopicPartition topicPartition, Duration duration) { 205 | return consumer.position(topicPartition, duration); 206 | } 207 | 208 | @Override 209 | public OffsetAndMetadata committed(TopicPartition partition) { 210 | return consumer.committed(partition); 211 | } 212 | 213 | 
@Override 214 | public OffsetAndMetadata committed(TopicPartition topicPartition, Duration duration) { 215 | return consumer.committed(topicPartition, duration); 216 | } 217 | 218 | @Override 219 | public Map committed(Set partitions) { 220 | return consumer.committed(partitions); 221 | } 222 | 223 | @Override 224 | public Map committed(Set partitions, 225 | final Duration timeout) { 226 | return consumer.committed(partitions, timeout); 227 | } 228 | 229 | @Override 230 | public Map metrics() { 231 | return consumer.metrics(); 232 | } 233 | 234 | @Override 235 | public List partitionsFor(String topic) { 236 | return consumer.partitionsFor(topic); 237 | } 238 | 239 | @Override 240 | public List partitionsFor(String s, Duration duration) { 241 | return consumer.partitionsFor(s, duration); 242 | } 243 | 244 | @Override 245 | public Map> listTopics() { 246 | return consumer.listTopics(); 247 | } 248 | 249 | @Override 250 | public Map> listTopics(Duration duration) { 251 | return consumer.listTopics(duration); 252 | } 253 | 254 | @Override 255 | public void pause(Collection partitions) { 256 | consumer.pause(partitions); 257 | } 258 | 259 | @Override 260 | public void resume(Collection partitions) { 261 | consumer.resume(partitions); 262 | } 263 | 264 | @Override 265 | public Set paused() { 266 | return consumer.paused(); 267 | } 268 | 269 | @Override 270 | public Map offsetsForTimes( 271 | Map timestampsToSearch) { 272 | return consumer.offsetsForTimes(timestampsToSearch); 273 | } 274 | 275 | @Override 276 | public Map offsetsForTimes(Map map, 277 | Duration duration) { 278 | return consumer.offsetsForTimes(map, duration); 279 | } 280 | 281 | @Override 282 | public Map beginningOffsets(Collection partitions) { 283 | return consumer.beginningOffsets(partitions); 284 | } 285 | 286 | @Override 287 | public Map beginningOffsets(Collection collection, 288 | Duration duration) { 289 | return consumer.beginningOffsets(collection, duration); 290 | } 291 | 292 | @Override 293 | public Map endOffsets(Collection partitions) { 294 | return consumer.endOffsets(partitions); 295 | } 296 | 297 | @Override 298 | public Map endOffsets(Collection collection, 299 | Duration duration) { 300 | return consumer.endOffsets(collection, duration); 301 | } 302 | 303 | @Override 304 | public ConsumerGroupMetadata groupMetadata() { 305 | return consumer.groupMetadata(); 306 | } 307 | 308 | @Override 309 | public void enforceRebalance() { 310 | consumer.enforceRebalance(); 311 | } 312 | 313 | @Override 314 | public void close() { 315 | consumer.close(); 316 | } 317 | 318 | @Override 319 | @Deprecated 320 | public void close(long l, TimeUnit timeUnit) { 321 | consumer.close(l, timeUnit); 322 | } 323 | 324 | @Override 325 | public void close(Duration duration) { 326 | consumer.close(duration); 327 | } 328 | 329 | @Override 330 | public void wakeup() { 331 | consumer.wakeup(); 332 | } 333 | } 334 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaConsumerBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.Tracer; 17 | import java.util.Collection; 18 | import java.util.Collections; 19 | import java.util.function.BiFunction; 20 | import org.apache.kafka.clients.consumer.Consumer; 21 | import org.apache.kafka.clients.consumer.ConsumerRecord; 22 | 23 | public class TracingKafkaConsumerBuilder { 24 | private Collection spanDecorators; 25 | private Consumer consumer; 26 | private Tracer tracer; 27 | private BiFunction consumerSpanNameProvider; 28 | 29 | public TracingKafkaConsumerBuilder(Consumer consumer, Tracer tracer) { 30 | this.tracer = tracer; 31 | this.consumer = consumer; 32 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS); 33 | this.consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME; 34 | } 35 | 36 | public TracingKafkaConsumerBuilder withDecorators(Collection spanDecorators) { 37 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators); 38 | return this; 39 | } 40 | 41 | public TracingKafkaConsumerBuilder withSpanNameProvider( 42 | BiFunction consumerSpanNameProvider) { 43 | this.consumerSpanNameProvider = consumerSpanNameProvider; 44 | return this; 45 | } 46 | 47 | public TracingKafkaConsumer build() { 48 | return new TracingKafkaConsumer<>(consumer, tracer, spanDecorators, consumerSpanNameProvider); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaProducer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS; 17 | 18 | import io.opentracing.Scope; 19 | import io.opentracing.Span; 20 | import io.opentracing.SpanContext; 21 | import io.opentracing.Tracer; 22 | import java.time.Duration; 23 | import java.util.Collection; 24 | import java.util.Collections; 25 | import java.util.List; 26 | import java.util.Map; 27 | import java.util.concurrent.Future; 28 | import java.util.concurrent.TimeUnit; 29 | import java.util.function.BiFunction; 30 | import org.apache.kafka.clients.consumer.ConsumerGroupMetadata; 31 | import org.apache.kafka.clients.consumer.OffsetAndMetadata; 32 | import org.apache.kafka.clients.producer.Callback; 33 | import org.apache.kafka.clients.producer.Producer; 34 | import org.apache.kafka.clients.producer.ProducerRecord; 35 | import org.apache.kafka.clients.producer.RecordMetadata; 36 | import org.apache.kafka.common.Metric; 37 | import org.apache.kafka.common.MetricName; 38 | import org.apache.kafka.common.PartitionInfo; 39 | import org.apache.kafka.common.TopicPartition; 40 | import org.apache.kafka.common.errors.ProducerFencedException; 41 | 42 | public class TracingKafkaProducer implements Producer { 43 | 44 | private Producer producer; 45 | private final Tracer tracer; 46 | private final BiFunction producerSpanNameProvider; 47 | private Collection spanDecorators; 48 | 49 | TracingKafkaProducer(Producer producer, Tracer tracer, 50 | Collection spanDecorators, 51 | BiFunction producerSpanNameProvider) { 52 | this.producer = producer; 53 | this.tracer = tracer; 54 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators); 55 | this.producerSpanNameProvider = (producerSpanNameProvider == null) 56 | ? ClientSpanNameProvider.PRODUCER_OPERATION_NAME 57 | : producerSpanNameProvider; 58 | } 59 | 60 | public TracingKafkaProducer(Producer producer, Tracer tracer) { 61 | this.producer = producer; 62 | this.tracer = tracer; 63 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS); 64 | this.producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME; 65 | } 66 | 67 | public TracingKafkaProducer(Producer producer, Tracer tracer, 68 | BiFunction producerSpanNameProvider) { 69 | this.producer = producer; 70 | this.tracer = tracer; 71 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS); 72 | this.producerSpanNameProvider = (producerSpanNameProvider == null) 73 | ? 
ClientSpanNameProvider.PRODUCER_OPERATION_NAME 74 | : producerSpanNameProvider; 75 | } 76 | 77 | @Override 78 | public void initTransactions() { 79 | producer.initTransactions(); 80 | } 81 | 82 | @Override 83 | public void beginTransaction() throws ProducerFencedException { 84 | producer.beginTransaction(); 85 | } 86 | 87 | @Override 88 | public void sendOffsetsToTransaction(Map offsets, 89 | String consumerGroupId) 90 | throws ProducerFencedException { 91 | producer.sendOffsetsToTransaction(offsets, consumerGroupId); 92 | } 93 | 94 | @Override 95 | public void sendOffsetsToTransaction(Map offsets, 96 | ConsumerGroupMetadata groupMetadata) throws ProducerFencedException { 97 | producer.sendOffsetsToTransaction(offsets, groupMetadata); 98 | } 99 | 100 | @Override 101 | public void commitTransaction() throws ProducerFencedException { 102 | producer.commitTransaction(); 103 | } 104 | 105 | @Override 106 | public void abortTransaction() throws ProducerFencedException { 107 | producer.abortTransaction(); 108 | } 109 | 110 | @Override 111 | public Future send(ProducerRecord record) { 112 | return send(record, null, null); 113 | } 114 | 115 | public Future send(ProducerRecord record, SpanContext parent) { 116 | return send(record, null, parent); 117 | } 118 | 119 | @Override 120 | public Future send(ProducerRecord record, Callback callback) { 121 | return send(record, callback, null); 122 | } 123 | 124 | public Future send(ProducerRecord record, Callback callback, 125 | SpanContext parent) { 126 | /* 127 | // Create wrappedRecord because headers can be read only in record (if record is sent second time) 128 | ProducerRecord wrappedRecord = new ProducerRecord<>(record.topic(), 129 | record.partition(), 130 | record.timestamp(), 131 | record.key(), 132 | record.value(), 133 | record.headers()); 134 | */ 135 | 136 | Span span = TracingKafkaUtils 137 | .buildAndInjectSpan(record, tracer, producerSpanNameProvider, parent, spanDecorators); 138 | try (Scope ignored = tracer.activateSpan(span)) { 139 | Callback wrappedCallback = new TracingCallback(callback, span, tracer, spanDecorators); 140 | return producer.send(record, wrappedCallback); 141 | } 142 | } 143 | 144 | @Override 145 | public void flush() { 146 | producer.flush(); 147 | } 148 | 149 | @Override 150 | public List partitionsFor(String topic) { 151 | return producer.partitionsFor(topic); 152 | } 153 | 154 | @Override 155 | public Map metrics() { 156 | return producer.metrics(); 157 | } 158 | 159 | @Override 160 | public void close() { 161 | producer.close(); 162 | } 163 | 164 | @Override 165 | public void close(Duration duration) { 166 | producer.close(duration); 167 | } 168 | 169 | @Override 170 | public void close(long timeout, TimeUnit timeUnit) { 171 | producer.close(timeout, timeUnit); 172 | } 173 | 174 | } 175 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaProducerBuilder.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.Tracer; 17 | import java.util.Collection; 18 | import java.util.Collections; 19 | import java.util.function.BiFunction; 20 | import org.apache.kafka.clients.producer.Producer; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | 23 | public class TracingKafkaProducerBuilder { 24 | private Collection spanDecorators; 25 | private Producer producer; 26 | private Tracer tracer; 27 | private BiFunction producerSpanNameProvider; 28 | 29 | public TracingKafkaProducerBuilder(Producer producer, Tracer tracer) { 30 | this.tracer = tracer; 31 | this.producer = producer; 32 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS); 33 | this.producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME; 34 | } 35 | 36 | public TracingKafkaProducerBuilder withDecorators(Collection spanDecorators) { 37 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators); 38 | return this; 39 | } 40 | 41 | public TracingKafkaProducerBuilder withSpanNameProvider( 42 | BiFunction producerSpanNameProvider) { 43 | this.producerSpanNameProvider = producerSpanNameProvider; 44 | return this; 45 | } 46 | 47 | public TracingKafkaProducer build() { 48 | return new TracingKafkaProducer<>(producer, tracer, spanDecorators, producerSpanNameProvider); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.References; 17 | import io.opentracing.Span; 18 | import io.opentracing.SpanContext; 19 | import io.opentracing.Tracer; 20 | import io.opentracing.propagation.Format; 21 | import io.opentracing.tag.Tags; 22 | import java.util.Collection; 23 | import java.util.Collections; 24 | import java.util.function.BiFunction; 25 | import org.apache.kafka.clients.consumer.ConsumerRecord; 26 | import org.apache.kafka.clients.producer.ProducerRecord; 27 | import org.apache.kafka.common.header.Headers; 28 | import org.slf4j.Logger; 29 | import org.slf4j.LoggerFactory; 30 | 31 | public class TracingKafkaUtils { 32 | 33 | private static final Logger logger = LoggerFactory.getLogger(TracingKafkaUtils.class); 34 | public static final String TO_PREFIX = "To_"; 35 | public static final String FROM_PREFIX = "From_"; 36 | 37 | /** 38 | * Extract Span Context from record headers 39 | * 40 | * @param headers record headers 41 | * @return span context 42 | */ 43 | public static SpanContext extractSpanContext(Headers headers, Tracer tracer) { 44 | return tracer 45 | .extract(Format.Builtin.TEXT_MAP, new HeadersMapExtractAdapter(headers)); 46 | } 47 | 48 | /** 49 | * Inject Span Context to record headers 50 | * 51 | * @param spanContext Span Context 52 | * @param headers record headers 53 | */ 54 | public static void inject(SpanContext spanContext, Headers headers, 55 | Tracer tracer) { 56 | tracer.inject(spanContext, Format.Builtin.TEXT_MAP, new HeadersMapInjectAdapter(headers)); 57 | } 58 | 59 | public static Span buildAndInjectSpan(ProducerRecord record, Tracer tracer) { 60 | return buildAndInjectSpan(record, tracer, ClientSpanNameProvider.PRODUCER_OPERATION_NAME, null, 61 | Collections.singletonList(SpanDecorator.STANDARD_TAGS)); 62 | } 63 | 64 | public static Span buildAndInjectSpan(ProducerRecord record, Tracer tracer, 65 | BiFunction producerSpanNameProvider, 66 | SpanContext parent) { 67 | return buildAndInjectSpan(record, tracer, producerSpanNameProvider, parent, 68 | Collections.singletonList(SpanDecorator.STANDARD_TAGS)); 69 | } 70 | 71 | public static Span buildAndInjectSpan(ProducerRecord record, Tracer tracer, 72 | BiFunction producerSpanNameProvider, 73 | SpanContext parent, Collection spanDecorators) { 74 | String producerOper = 75 | TO_PREFIX + record.topic(); // <======== It provides better readability in the UI 76 | Tracer.SpanBuilder spanBuilder = tracer 77 | .buildSpan(producerSpanNameProvider.apply(producerOper, record)) 78 | .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_PRODUCER); 79 | 80 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer); 81 | 82 | if (spanContext != null) { 83 | spanBuilder.asChildOf(spanContext); 84 | } else if (parent != null) { 85 | spanBuilder.asChildOf(parent); 86 | } 87 | 88 | Span span = spanBuilder.start(); 89 | 90 | for (SpanDecorator decorator : spanDecorators) { 91 | decorator.onSend(record, span); 92 | } 93 | 94 | try { 95 | TracingKafkaUtils.inject(span.context(), record.headers(), tracer); 96 | } catch (Exception e) { 97 | // it can happen if headers are read only (when record is sent second time) 98 | logger.error("failed to inject span context. 
sending record second time?", e); 99 | } 100 | 101 | return span; 102 | } 103 | 104 | public static void buildAndFinishChildSpan(ConsumerRecord record, Tracer tracer) { 105 | buildAndFinishChildSpan(record, tracer, ClientSpanNameProvider.CONSUMER_OPERATION_NAME, 106 | Collections.singletonList(SpanDecorator.STANDARD_TAGS)); 107 | } 108 | 109 | public static void buildAndFinishChildSpan(ConsumerRecord record, Tracer tracer, 110 | BiFunction consumerSpanNameProvider) { 111 | buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider, 112 | Collections.singletonList(SpanDecorator.STANDARD_TAGS)); 113 | } 114 | 115 | public static void buildAndFinishChildSpan(ConsumerRecord record, Tracer tracer, 116 | BiFunction consumerSpanNameProvider, 117 | Collection spanDecorators) { 118 | SpanContext parentContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer); 119 | String consumerOper = 120 | FROM_PREFIX + record.topic(); // <====== It provides better readability in the UI 121 | Tracer.SpanBuilder spanBuilder = tracer 122 | .buildSpan(consumerSpanNameProvider.apply(consumerOper, record)) 123 | .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER); 124 | 125 | if (parentContext != null) { 126 | spanBuilder.addReference(References.FOLLOWS_FROM, parentContext); 127 | } 128 | 129 | Span span = spanBuilder.start(); 130 | 131 | for (SpanDecorator decorator : spanDecorators) { 132 | decorator.onResponse(record, span); 133 | } 134 | 135 | span.finish(); 136 | 137 | // Inject created span context into record headers for extraction by client to continue span chain 138 | inject(span.context(), record.headers(), tracer); 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingProducerInterceptor.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import io.opentracing.util.GlobalTracer; 17 | import java.util.Map; 18 | import org.apache.kafka.clients.producer.ProducerInterceptor; 19 | import org.apache.kafka.clients.producer.ProducerRecord; 20 | import org.apache.kafka.clients.producer.RecordMetadata; 21 | 22 | public class TracingProducerInterceptor implements ProducerInterceptor { 23 | 24 | @Override 25 | public ProducerRecord onSend(ProducerRecord producerRecord) { 26 | TracingKafkaUtils.buildAndInjectSpan(producerRecord, GlobalTracer.get()).finish(); 27 | return producerRecord; 28 | } 29 | 30 | @Override 31 | public void onAcknowledgement(RecordMetadata recordMetadata, Exception e) { 32 | } 33 | 34 | @Override 35 | public void close() { 36 | 37 | } 38 | 39 | @Override 40 | public void configure(Map map) { 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/HeadersMapExtractAdapterTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | import static org.junit.Assert.assertEquals; 17 | import static org.junit.Assert.assertNotNull; 18 | import static org.junit.Assert.assertNull; 19 | 20 | import java.util.Map.Entry; 21 | import org.apache.kafka.common.header.Headers; 22 | import org.apache.kafka.common.header.internals.RecordHeaders; 23 | import org.junit.Test; 24 | 25 | 26 | public class HeadersMapExtractAdapterTest { 27 | 28 | @Test 29 | public void verifyNullHeaderHandled() { 30 | Headers headers = new RecordHeaders(); 31 | headers.add("test_null_header", null); 32 | HeadersMapExtractAdapter headersMapExtractAdapter = new HeadersMapExtractAdapter(headers); 33 | Entry header = headersMapExtractAdapter.iterator().next(); 34 | assertNotNull(header); 35 | assertEquals(header.getKey(), "test_null_header"); 36 | assertNull(header.getValue()); 37 | 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/OperationNameSpanNameTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. 
See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | 15 | package io.opentracing.contrib.kafka; 16 | 17 | import static org.junit.Assert.assertEquals; 18 | 19 | import java.util.function.BiFunction; 20 | import org.apache.kafka.clients.consumer.ConsumerRecord; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.junit.Test; 23 | 24 | public class OperationNameSpanNameTest { 25 | private final ConsumerRecord consumerRecord = new ConsumerRecord<>( 26 | "example_topic", 0, 0, "KEY", 999); 27 | private final ProducerRecord producerRecord = new ProducerRecord<>( 28 | "example_topic", 0, System.currentTimeMillis(), "KEY", 999); 29 | private BiFunction consumerSpanNameProvider; 30 | private BiFunction producerSpanNameProvider; 31 | 32 | @Test 33 | public void operationNameSpanNameTest() { 34 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME; 35 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME; 36 | 37 | assertEquals("receive", consumerSpanNameProvider.apply("receive", consumerRecord)); 38 | assertEquals("send", producerSpanNameProvider.apply("send", producerRecord)); 39 | 40 | assertEquals("unknown", consumerSpanNameProvider.apply(null, consumerRecord)); 41 | assertEquals("unknown", producerSpanNameProvider.apply(null, producerRecord)); 42 | 43 | assertEquals("receive", consumerSpanNameProvider.apply("receive", null)); 44 | assertEquals("send", producerSpanNameProvider.apply("send", null)); 45 | 46 | assertEquals("unknown", consumerSpanNameProvider.apply(null, null)); 47 | assertEquals("unknown", producerSpanNameProvider.apply(null, null)); 48 | } 49 | 50 | @Test 51 | public void prefixedOperationNameSpanNameTest() { 52 | consumerSpanNameProvider = ClientSpanNameProvider 53 | .CONSUMER_PREFIXED_OPERATION_NAME("KafkaClient: "); 54 | producerSpanNameProvider = ClientSpanNameProvider 55 | .PRODUCER_PREFIXED_OPERATION_NAME("KafkaClient: "); 56 | 57 | assertEquals("KafkaClient: receive", consumerSpanNameProvider.apply("receive", consumerRecord)); 58 | assertEquals("KafkaClient: send", producerSpanNameProvider.apply("send", producerRecord)); 59 | 60 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply(null, consumerRecord)); 61 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply(null, producerRecord)); 62 | 63 | assertEquals("KafkaClient: receive", consumerSpanNameProvider.apply("receive", null)); 64 | assertEquals("KafkaClient: send", producerSpanNameProvider.apply("send", null)); 65 | 66 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply(null, null)); 67 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply(null, null)); 68 | 69 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_OPERATION_NAME(null); 70 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_OPERATION_NAME(null); 71 | 72 | assertEquals("receive", consumerSpanNameProvider.apply("receive", consumerRecord)); 73 | assertEquals("send", producerSpanNameProvider.apply("send", producerRecord)); 74 | 75 | assertEquals("unknown", consumerSpanNameProvider.apply(null, consumerRecord)); 76 | assertEquals("unknown", producerSpanNameProvider.apply(null, producerRecord)); 77 | 78 | assertEquals("receive", consumerSpanNameProvider.apply("receive", null)); 79 | assertEquals("send", producerSpanNameProvider.apply("send", null)); 80 | 81 | assertEquals("unknown", consumerSpanNameProvider.apply(null, null)); 82 
| assertEquals("unknown", producerSpanNameProvider.apply(null, null)); 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/OperationNameTopicSpanNameTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | 15 | package io.opentracing.contrib.kafka; 16 | 17 | import static org.junit.Assert.assertEquals; 18 | 19 | import java.util.function.BiFunction; 20 | import org.apache.kafka.clients.consumer.ConsumerRecord; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.junit.Test; 23 | 24 | public class OperationNameTopicSpanNameTest { 25 | private final ConsumerRecord consumerRecord = new ConsumerRecord<>( 26 | "example_topic", 0, 0, "KEY", 999); 27 | private final ProducerRecord producerRecord = new ProducerRecord<>( 28 | "example_topic", 0, System.currentTimeMillis(), "KEY", 999); 29 | private BiFunction consumerSpanNameProvider; 30 | private BiFunction producerSpanNameProvider; 31 | 32 | @Test 33 | public void operationNameTopicSpanNameTest() { 34 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME_TOPIC; 35 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME_TOPIC; 36 | 37 | assertEquals("receive - example_topic", 38 | consumerSpanNameProvider.apply("receive", consumerRecord)); 39 | assertEquals("send - example_topic", producerSpanNameProvider.apply("send", producerRecord)); 40 | 41 | assertEquals("unknown - example_topic", consumerSpanNameProvider.apply(null, consumerRecord)); 42 | assertEquals("unknown - example_topic", producerSpanNameProvider.apply(null, producerRecord)); 43 | 44 | assertEquals("receive - unknown", consumerSpanNameProvider.apply("receive", null)); 45 | assertEquals("send - unknown", producerSpanNameProvider.apply("send", null)); 46 | 47 | assertEquals("unknown - unknown", consumerSpanNameProvider.apply(null, null)); 48 | assertEquals("unknown - unknown", producerSpanNameProvider.apply(null, null)); 49 | } 50 | 51 | @Test 52 | public void prefixedOperationNameTopicSpanNameTest() { 53 | consumerSpanNameProvider = ClientSpanNameProvider 54 | .CONSUMER_PREFIXED_OPERATION_NAME_TOPIC("KafkaClient: "); 55 | producerSpanNameProvider = ClientSpanNameProvider 56 | .PRODUCER_PREFIXED_OPERATION_NAME_TOPIC("KafkaClient: "); 57 | 58 | assertEquals("KafkaClient: receive - example_topic", 59 | consumerSpanNameProvider.apply("receive", consumerRecord)); 60 | assertEquals("KafkaClient: send - example_topic", 61 | producerSpanNameProvider.apply("send", producerRecord)); 62 | 63 | assertEquals("KafkaClient: unknown - example_topic", 64 | consumerSpanNameProvider.apply(null, consumerRecord)); 65 | assertEquals("KafkaClient: unknown - example_topic", 66 | producerSpanNameProvider.apply(null, producerRecord)); 67 | 68 | 
assertEquals("KafkaClient: receive - unknown", consumerSpanNameProvider.apply("receive", null)); 69 | assertEquals("KafkaClient: send - unknown", producerSpanNameProvider.apply("send", null)); 70 | 71 | assertEquals("KafkaClient: unknown - unknown", consumerSpanNameProvider.apply(null, null)); 72 | assertEquals("KafkaClient: unknown - unknown", producerSpanNameProvider.apply(null, null)); 73 | 74 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_OPERATION_NAME_TOPIC(null); 75 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_OPERATION_NAME_TOPIC(null); 76 | 77 | assertEquals("receive - example_topic", 78 | consumerSpanNameProvider.apply("receive", consumerRecord)); 79 | assertEquals("send - example_topic", producerSpanNameProvider.apply("send", producerRecord)); 80 | 81 | assertEquals("unknown - example_topic", consumerSpanNameProvider.apply(null, consumerRecord)); 82 | assertEquals("unknown - example_topic", producerSpanNameProvider.apply(null, producerRecord)); 83 | 84 | assertEquals("receive - unknown", consumerSpanNameProvider.apply("receive", null)); 85 | assertEquals("send - unknown", producerSpanNameProvider.apply("send", null)); 86 | 87 | assertEquals("unknown - unknown", consumerSpanNameProvider.apply(null, null)); 88 | assertEquals("unknown - unknown", producerSpanNameProvider.apply(null, null)); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TopicSpanNameTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | 15 | package io.opentracing.contrib.kafka; 16 | 17 | import static org.junit.Assert.assertEquals; 18 | 19 | import java.util.function.BiFunction; 20 | import org.apache.kafka.clients.consumer.ConsumerRecord; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.junit.Test; 23 | 24 | public class TopicSpanNameTest { 25 | 26 | private final ConsumerRecord consumerRecord = new ConsumerRecord<>( 27 | "example_topic", 0, 0, "KEY", 999); 28 | private final ProducerRecord producerRecord = new ProducerRecord<>( 29 | "example_topic", 0, System.currentTimeMillis(), "KEY", 999); 30 | private BiFunction consumerSpanNameProvider; 31 | private BiFunction producerSpanNameProvider; 32 | 33 | @Test 34 | public void topicSpanNameTest() { 35 | 36 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_TOPIC; 37 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_TOPIC; 38 | 39 | assertEquals("example_topic", consumerSpanNameProvider.apply("receive", consumerRecord)); 40 | assertEquals("example_topic", producerSpanNameProvider.apply("send", producerRecord)); 41 | 42 | assertEquals("example_topic", consumerSpanNameProvider.apply(null, consumerRecord)); 43 | assertEquals("example_topic", producerSpanNameProvider.apply(null, producerRecord)); 44 | 45 | assertEquals("unknown", consumerSpanNameProvider.apply("receive", null)); 46 | assertEquals("unknown", producerSpanNameProvider.apply("send", null)); 47 | } 48 | 49 | @Test 50 | public void prefixedTopicSpanNameTest() { 51 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_TOPIC("KafkaClient: "); 52 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_TOPIC("KafkaClient: "); 53 | 54 | assertEquals("KafkaClient: example_topic", 55 | consumerSpanNameProvider.apply("receive", consumerRecord)); 56 | assertEquals("KafkaClient: example_topic", 57 | producerSpanNameProvider.apply("send", producerRecord)); 58 | 59 | assertEquals("KafkaClient: example_topic", 60 | consumerSpanNameProvider.apply(null, consumerRecord)); 61 | assertEquals("KafkaClient: example_topic", 62 | producerSpanNameProvider.apply(null, producerRecord)); 63 | 64 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply("receive", null)); 65 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply("send", null)); 66 | 67 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_TOPIC(null); 68 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_TOPIC(null); 69 | 70 | assertEquals("example_topic", consumerSpanNameProvider.apply("receive", consumerRecord)); 71 | assertEquals("example_topic", producerSpanNameProvider.apply("send", producerRecord)); 72 | 73 | assertEquals("example_topic", consumerSpanNameProvider.apply(null, consumerRecord)); 74 | assertEquals("example_topic", producerSpanNameProvider.apply(null, producerRecord)); 75 | 76 | assertEquals("unknown", consumerSpanNameProvider.apply("receive", null)); 77 | assertEquals("unknown", producerSpanNameProvider.apply("send", null)); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TracingCallbackTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. 
You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import static org.junit.Assert.assertEquals; 18 | import static org.junit.Assert.assertNull; 19 | 20 | import io.opentracing.Scope; 21 | import io.opentracing.Span; 22 | import io.opentracing.mock.MockSpan; 23 | import io.opentracing.mock.MockTracer; 24 | import io.opentracing.tag.Tags; 25 | import java.util.Arrays; 26 | import java.util.List; 27 | import org.apache.kafka.clients.consumer.ConsumerRecord; 28 | import org.apache.kafka.clients.producer.ProducerRecord; 29 | import org.junit.Before; 30 | import org.junit.Test; 31 | 32 | 33 | public class TracingCallbackTest { 34 | 35 | private MockTracer mockTracer = new MockTracer(); 36 | 37 | @Before 38 | public void before() { 39 | mockTracer.reset(); 40 | } 41 | 42 | @Test 43 | public void onCompletionWithError() { 44 | Span span = mockTracer.buildSpan("test").start(); 45 | try (Scope ignored = mockTracer.activateSpan(span)) { 46 | TracingCallback callback = new TracingCallback(null, span, mockTracer); 47 | callback.onCompletion(null, new RuntimeException("test")); 48 | } 49 | 50 | List finished = mockTracer.finishedSpans(); 51 | assertEquals(1, finished.size()); 52 | assertEquals(1, finished.get(0).logEntries().size()); 53 | assertEquals(true, finished.get(0).tags().get(Tags.ERROR.getKey())); 54 | } 55 | 56 | @Test 57 | public void onCompletionWithCustomErrorDecorators() { 58 | Span span = mockTracer.buildSpan("test").start(); 59 | try (Scope ignored = mockTracer.activateSpan(span)) { 60 | TracingCallback callback = new TracingCallback(null, span, mockTracer, 61 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator())); 62 | callback.onCompletion(null, new RuntimeException("test")); 63 | } 64 | 65 | List finished = mockTracer.finishedSpans(); 66 | assertEquals(1, finished.size()); 67 | assertEquals(true, finished.get(0).tags().get(Tags.ERROR.getKey())); 68 | assertEquals("overwritten", finished.get(0).tags().get("error.of")); 69 | assertEquals("error-test", finished.get(0).tags().get("new.error.tag")); 70 | } 71 | 72 | @Test 73 | public void onCompletion() { 74 | Span span = mockTracer.buildSpan("test").start(); 75 | try (Scope ignored = mockTracer.activateSpan(span)) { 76 | TracingCallback callback = new TracingCallback(null, span, mockTracer); 77 | callback.onCompletion(null, null); 78 | } 79 | 80 | List finished = mockTracer.finishedSpans(); 81 | assertEquals(1, finished.size()); 82 | assertEquals(0, finished.get(0).logEntries().size()); 83 | assertNull(finished.get(0).tags().get(Tags.ERROR.getKey())); 84 | } 85 | 86 | private SpanDecorator createDecorator() { 87 | return new SpanDecorator() { 88 | @Override 89 | public void onSend(ProducerRecord record, Span span) { 90 | } 91 | 92 | @Override 93 | public void onResponse(ConsumerRecord record, Span span) { 94 | } 95 | 96 | @Override 97 | public void onError(Exception exception, Span span) { 98 | span.setTag("error.of", "overwritten"); 99 | span.setTag("new.error.tag", "error-test"); 100 | } 101 | }; 102 | } 103 | } 
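A minimal usage sketch, not part of the repository sources: it shows one plausible way to assemble the pieces listed above (the builders, a custom SpanDecorator as in TracingCallbackTest, and the topic-based span name providers exercised by the tests) into a traced producer and consumer. The class name TracingUsageExample, the broker address, group id, serializer choices and the "custom.tag" value are illustrative assumptions, and the sketch presumes a concrete OpenTracing tracer has already been registered with GlobalTracer.

import io.opentracing.Span;
import io.opentracing.Tracer;
import io.opentracing.contrib.kafka.ClientSpanNameProvider;
import io.opentracing.contrib.kafka.SpanDecorator;
import io.opentracing.contrib.kafka.TracingKafkaConsumerBuilder;
import io.opentracing.contrib.kafka.TracingKafkaProducerBuilder;
import io.opentracing.util.GlobalTracer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class TracingUsageExample { // hypothetical class, not part of this repository

  public static void main(String[] args) {
    // Assumes some concrete tracer implementation was registered with GlobalTracer beforehand.
    Tracer tracer = GlobalTracer.get();

    // Illustrative producer configuration.
    Properties producerProps = new Properties();
    producerProps.put("bootstrap.servers", "localhost:9092");
    producerProps.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
    producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

    // Extra decorator applied on top of the standard tags, same pattern as in TracingCallbackTest.
    SpanDecorator extraTag = new SpanDecorator() {
      @Override
      public void onSend(ProducerRecord record, Span span) {
        span.setTag("custom.tag", "example");
      }

      @Override
      public void onResponse(ConsumerRecord record, Span span) {
      }

      @Override
      public void onError(Exception exception, Span span) {
      }
    };

    Producer<Integer, String> producer =
        new TracingKafkaProducerBuilder<Integer, String>(
            new KafkaProducer<Integer, String>(producerProps), tracer)
            .withDecorators(Arrays.asList(SpanDecorator.STANDARD_TAGS, extraTag))
            .withSpanNameProvider(ClientSpanNameProvider.PRODUCER_OPERATION_NAME_TOPIC)
            .build();

    // Produces a span named "send - example_topic" carrying the standard tags plus "custom.tag".
    producer.send(new ProducerRecord<>("example_topic", 1, "test"));
    producer.close();

    // Illustrative consumer configuration.
    Properties consumerProps = new Properties();
    consumerProps.put("bootstrap.servers", "localhost:9092");
    consumerProps.put("group.id", "example-group");
    consumerProps.put("auto.offset.reset", "earliest");
    consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
    consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

    Consumer<Integer, String> consumer =
        new TracingKafkaConsumerBuilder<Integer, String>(
            new KafkaConsumer<Integer, String>(consumerProps), tracer)
            .withSpanNameProvider(ClientSpanNameProvider.CONSUMER_OPERATION_NAME_TOPIC)
            .build();

    consumer.subscribe(Collections.singletonList("example_topic"));
    // Each polled record finishes a "receive - example_topic" span that follows from the send span.
    consumer.poll(Duration.ofMillis(500));
    consumer.close();
  }
}

With the topic-based providers the span names match what OperationNameTopicSpanNameTest asserts; leaving withSpanNameProvider out falls back to PRODUCER_OPERATION_NAME / CONSUMER_OPERATION_NAME, i.e. plain "send" and "receive" as asserted in OperationNameSpanNameTest.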
-------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TracingKafkaTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import static org.junit.Assert.assertEquals; 18 | import static org.junit.Assert.assertNotNull; 19 | import static org.junit.Assert.assertNull; 20 | import static org.junit.Assert.assertTrue; 21 | 22 | import io.opentracing.Scope; 23 | import io.opentracing.Span; 24 | import io.opentracing.SpanContext; 25 | import io.opentracing.mock.MockSpan; 26 | import io.opentracing.mock.MockTracer; 27 | import io.opentracing.tag.Tags; 28 | import io.opentracing.util.GlobalTracer; 29 | import java.time.Duration; 30 | import java.util.ArrayList; 31 | import java.util.Arrays; 32 | import java.util.Collection; 33 | import java.util.Collections; 34 | import java.util.List; 35 | import java.util.Map; 36 | import java.util.concurrent.CountDownLatch; 37 | import java.util.concurrent.ExecutorService; 38 | import java.util.concurrent.Executors; 39 | import java.util.concurrent.TimeUnit; 40 | import java.util.function.BiFunction; 41 | import org.apache.kafka.clients.consumer.Consumer; 42 | import org.apache.kafka.clients.consumer.ConsumerConfig; 43 | import org.apache.kafka.clients.consumer.ConsumerRecord; 44 | import org.apache.kafka.clients.consumer.ConsumerRecords; 45 | import org.apache.kafka.clients.consumer.KafkaConsumer; 46 | import org.apache.kafka.clients.consumer.OffsetAndMetadata; 47 | import org.apache.kafka.clients.producer.KafkaProducer; 48 | import org.apache.kafka.clients.producer.Producer; 49 | import org.apache.kafka.clients.producer.ProducerConfig; 50 | import org.apache.kafka.clients.producer.ProducerRecord; 51 | import org.apache.kafka.common.TopicPartition; 52 | import org.junit.Before; 53 | import org.junit.BeforeClass; 54 | import org.junit.ClassRule; 55 | import org.junit.Test; 56 | import org.springframework.kafka.test.rule.EmbeddedKafkaRule; 57 | import org.springframework.kafka.test.utils.KafkaTestUtils; 58 | 59 | public class TracingKafkaTest { 60 | 61 | @ClassRule 62 | public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(2, true, 2, "messages"); 63 | private static final MockTracer mockTracer = new MockTracer(); 64 | 65 | @BeforeClass 66 | public static void init() { 67 | GlobalTracer.registerIfAbsent(mockTracer); 68 | } 69 | 70 | @Before 71 | public void before() { 72 | mockTracer.reset(); 73 | } 74 | 75 | @Test 76 | public void with_interceptors() throws Exception { 77 | Map senderProps = KafkaTestUtils 78 | .producerProps(embeddedKafka.getEmbeddedKafka()); 79 | senderProps 80 | .put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, TracingProducerInterceptor.class.getName()); 81 | KafkaProducer producer = new 
KafkaProducer<>(senderProps); 82 | 83 | producer.send(new ProducerRecord<>("messages", 1, "test")); 84 | 85 | final CountDownLatch latch = new CountDownLatch(1); 86 | createConsumer(latch, 1, true, null); 87 | 88 | producer.close(); 89 | 90 | List mockSpans = mockTracer.finishedSpans(); 91 | assertEquals(2, mockSpans.size()); 92 | checkSpans(mockSpans); 93 | assertNull(mockTracer.activeSpan()); 94 | } 95 | 96 | @Test 97 | public void test() throws Exception { 98 | Producer producer = createTracingProducer(); 99 | 100 | // Send 1 101 | producer.send(new ProducerRecord<>("messages", 1, "test")); 102 | 103 | // Send 2 104 | producer.send(new ProducerRecord<>("messages", 1, "test"), 105 | (metadata, exception) -> assertEquals("messages", metadata.topic())); 106 | 107 | final CountDownLatch latch = new CountDownLatch(2); 108 | createConsumer(latch, 1, false, null); 109 | 110 | producer.close(); 111 | 112 | List mockSpans = mockTracer.finishedSpans(); 113 | assertEquals(4, mockSpans.size()); 114 | checkSpans(mockSpans); 115 | assertNull(mockTracer.activeSpan()); 116 | } 117 | 118 | @Test 119 | public void testWithParentContext() throws Exception { 120 | TracingKafkaProducer producer = createTracingProducer(); 121 | 122 | final MockSpan parent = mockTracer.buildSpan("parent").start(); 123 | 124 | // Send 1 125 | producer.send(new ProducerRecord<>("messages", 1, "test"), parent.context()); 126 | 127 | // Send 2 128 | producer.send(new ProducerRecord<>("messages", 1, "test"), 129 | (metadata, exception) -> assertEquals("messages", metadata.topic()), parent.context()); 130 | 131 | final CountDownLatch latch = new CountDownLatch(2); 132 | createConsumer(latch, 1, false, null); 133 | 134 | producer.close(); 135 | 136 | List mockSpans = mockTracer.finishedSpans(); 137 | assertEquals(4, mockSpans.size()); 138 | checkSpans(mockSpans); 139 | for (MockSpan span : mockSpans) { 140 | assertEquals(parent.context().traceId(), span.context().traceId()); 141 | } 142 | 143 | final List sendSpans = getByOperationNameAll(mockSpans, 144 | TracingKafkaUtils.TO_PREFIX + "messages"); 145 | assertEquals(2, sendSpans.size()); 146 | for (MockSpan sendSpan : sendSpans) { 147 | assertEquals(parent.context().spanId(), sendSpan.parentId()); 148 | } 149 | 150 | parent.finish(); 151 | 152 | assertNull(mockTracer.activeSpan()); 153 | } 154 | 155 | @Test 156 | public void testNotTracedProducer() throws Exception { 157 | Producer producer = createProducer(); 158 | 159 | // Send 1 160 | producer.send(new ProducerRecord<>("messages", 1, "test")); 161 | 162 | // Send 2 163 | producer.send(new ProducerRecord<>("messages", 1, "test"), 164 | (metadata, exception) -> assertEquals("messages", metadata.topic())); 165 | 166 | final CountDownLatch latch = new CountDownLatch(2); 167 | createConsumer(latch, 1, false, null); 168 | 169 | producer.close(); 170 | 171 | List mockSpans = mockTracer.finishedSpans(); 172 | assertEquals(2, mockSpans.size()); 173 | checkSpans(mockSpans); 174 | assertNull(mockTracer.activeSpan()); 175 | } 176 | 177 | @Test 178 | public void testWithTopicNameProvider() throws Exception { 179 | Producer producer = createNameProvidedProducer( 180 | ClientSpanNameProvider.PRODUCER_TOPIC); 181 | 182 | // Send 1 183 | producer.send(new ProducerRecord<>("messages", 1, "test")); 184 | 185 | // Send 2 186 | producer.send(new ProducerRecord<>("messages", 1, "test"), 187 | (metadata, exception) -> assertEquals("messages", metadata.topic())); 188 | 189 | final CountDownLatch latch = new CountDownLatch(2); 190 | createConsumer(latch, 
1, false, ClientSpanNameProvider.CONSUMER_TOPIC); 191 | producer.close(); 192 | 193 | List mockSpans = mockTracer.finishedSpans(); 194 | assertEquals(4, mockSpans.size()); 195 | for (MockSpan mockSpan : mockSpans) { 196 | String operationName = mockSpan.operationName(); 197 | assertEquals("messages", operationName); 198 | String spanKind = (String) mockSpan.tags().get(Tags.SPAN_KIND.getKey()); 199 | assertTrue( 200 | spanKind.equals(Tags.SPAN_KIND_CONSUMER) || spanKind.equals(Tags.SPAN_KIND_PRODUCER)); 201 | } 202 | assertNull(mockTracer.activeSpan()); 203 | } 204 | 205 | @Test 206 | public void with_parent() throws Exception { 207 | Producer producer = createTracingProducer(); 208 | 209 | final MockSpan parent = mockTracer.buildSpan("parent").start(); 210 | try (Scope ignored = mockTracer.activateSpan(parent)) { 211 | producer.send(new ProducerRecord<>("messages", 1, "test")); 212 | } 213 | parent.finish(); 214 | 215 | final CountDownLatch latch = new CountDownLatch(1); 216 | createConsumer(latch, 1, false, null); 217 | 218 | producer.close(); 219 | 220 | List mockSpans = mockTracer.finishedSpans(); 221 | assertEquals(3, mockSpans.size()); 222 | 223 | assertNotNull(parent); 224 | 225 | for (MockSpan span : mockSpans) { 226 | assertEquals(parent.context().traceId(), span.context().traceId()); 227 | } 228 | 229 | MockSpan sendSpan = getByOperationName(mockSpans, TracingKafkaUtils.TO_PREFIX + "messages"); 230 | assertNotNull(sendSpan); 231 | 232 | MockSpan receiveSpan = getByOperationName(mockSpans, 233 | TracingKafkaUtils.FROM_PREFIX + "messages"); 234 | assertNotNull(receiveSpan); 235 | 236 | assertEquals(sendSpan.context().spanId(), receiveSpan.parentId()); 237 | assertEquals(parent.context().spanId(), sendSpan.parentId()); 238 | 239 | assertNull(mockTracer.activeSpan()); 240 | } 241 | 242 | @Test 243 | public void nullKey() throws Exception { 244 | Producer producer = createTracingProducer(); 245 | 246 | ProducerRecord record = new ProducerRecord<>("messages", "test"); 247 | producer.send(record); 248 | 249 | final Map consumerProps = KafkaTestUtils 250 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka()); 251 | consumerProps.put("auto.offset.reset", "earliest"); 252 | 253 | final CountDownLatch latch = new CountDownLatch(1); 254 | createConsumer(latch, null, false, null); 255 | 256 | producer.close(); 257 | } 258 | 259 | @Test 260 | public void testSeekInConsumerAndCloseInProducer() throws InterruptedException { 261 | 262 | Producer producer = createTracingProducer(); 263 | 264 | // Send 1 265 | producer.send(new ProducerRecord<>("messages-for-seek", 1, "test")); 266 | 267 | producer.close(Duration.ofSeconds(40)); 268 | 269 | final CountDownLatch latch = new CountDownLatch(1); 270 | Integer key = 1; 271 | 272 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 273 | 274 | final Map consumerProps = KafkaTestUtils 275 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka()); 276 | consumerProps.put("auto.offset.reset", "earliest"); 277 | 278 | executorService.execute(() -> { 279 | KafkaConsumer kafkaConsumer = new KafkaConsumer<>(consumerProps); 280 | Consumer consumer; 281 | 282 | consumer = new TracingKafkaConsumer<>(kafkaConsumer, mockTracer, null); 283 | 284 | TopicPartition tp = new TopicPartition("messages-for-seek", 0); 285 | consumer.assign(Collections.singletonList(tp)); 286 | 287 | consumer.seek(tp, new OffsetAndMetadata(0)); 288 | 289 | while (latch.getCount() > 0) { 290 | ConsumerRecords records = 
consumer.poll(Duration.ofMillis(100)); 291 | for (ConsumerRecord record : records) { 292 | SpanContext spanContext = TracingKafkaUtils 293 | .extractSpanContext(record.headers(), mockTracer); 294 | assertNotNull(spanContext); 295 | assertEquals("test", record.value()); 296 | assertEquals(key, record.key()); 297 | 298 | consumer.commitSync(); 299 | latch.countDown(); 300 | } 301 | } 302 | kafkaConsumer.close(); 303 | }); 304 | 305 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 306 | } 307 | 308 | @Test 309 | public void testProducerBuilderWithDecorators() throws InterruptedException { 310 | Producer producer = createProducerWithDecorators(null); 311 | 312 | producer.send(new ProducerRecord<>("messages", 1, "test")); 313 | 314 | producer = createProducerWithDecorators( 315 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator())); 316 | 317 | producer.send(new ProducerRecord<>("messages", 1, "test")); 318 | 319 | producer = createProducerWithDecorators(new ArrayList()); 320 | 321 | producer.send(new ProducerRecord<>("messages", 1, "test")); 322 | 323 | final CountDownLatch latch = new CountDownLatch(3); 324 | createConsumer(latch, 1, false, null); 325 | 326 | producer.close(); 327 | 328 | List mockSpans = mockTracer.finishedSpans(); 329 | 330 | // With only standard decorator 331 | MockSpan standardSpan = mockSpans.get(0); 332 | checkSpans(Collections.singletonList(standardSpan)); 333 | assertEquals("kafka", standardSpan.tags().get("peer.service")); 334 | 335 | // With standard and custom decorator 336 | MockSpan customSpan = mockSpans.get(1); 337 | checkSpans(Collections.singletonList(customSpan)); 338 | assertEquals("overwritten", customSpan.tags().get("peer.service")); 339 | assertEquals("new-producer-test", customSpan.tags().get("new.tag.test")); 340 | 341 | // Without any decorator 342 | assertEquals("producer", mockSpans.get(2).tags().get("span.kind")); 343 | } 344 | 345 | @Test 346 | public void testProducerBuilderWithSpanNameProvider() throws InterruptedException { 347 | Producer producer = createProducerWithSpanNameProvider(null); 348 | ProducerRecord record1 = new ProducerRecord<>("messages", 1, "test"); 349 | 350 | producer.send(record1); 351 | 352 | BiFunction operationNameProvider = 353 | (operationName, producerRecord) -> createSpanNameProvider(); 354 | producer = createProducerWithSpanNameProvider(operationNameProvider); 355 | 356 | ProducerRecord record2 = new ProducerRecord<>("messages", 1, "test"); 357 | 358 | producer.send(record2); 359 | 360 | final CountDownLatch latch = new CountDownLatch(2); 361 | createConsumer(latch, 1, false, null); 362 | 363 | producer.close(); 364 | 365 | List mockSpans = mockTracer.finishedSpans(); 366 | 367 | // With standard span name provider 368 | assertEquals("To_" + record1.topic(), mockSpans.get(0).operationName()); 369 | 370 | // With custom span name provider 371 | assertEquals("Test_SpanNameProvider", mockSpans.get(1).operationName()); 372 | } 373 | 374 | @Test 375 | public void testConsumerBuilderWithStandardDecorators() throws InterruptedException { 376 | Producer producer = createTracingProducer(); 377 | producer.send(new ProducerRecord<>("messages", 1, "test")); 378 | 379 | producer.close(); 380 | 381 | assertEquals(1, mockTracer.finishedSpans().size()); 382 | 383 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 384 | final CountDownLatch latch = new CountDownLatch(1); 385 | 386 | executorService.execute(() -> { 387 | Consumer consumer = createConsumerWithDecorators(null); 388 | 389 | while 
(latch.getCount() > 0) { 390 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 391 | for (ConsumerRecord record : records) { 392 | SpanContext spanContext = TracingKafkaUtils 393 | .extractSpanContext(record.headers(), mockTracer); 394 | assertNotNull(spanContext); 395 | assertEquals("test", record.value()); 396 | assertEquals((Integer) 1, record.key()); 397 | 398 | consumer.commitSync(); 399 | latch.countDown(); 400 | } 401 | } 402 | consumer.close(); 403 | }); 404 | 405 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 406 | 407 | List mockSpans = mockTracer.finishedSpans(); 408 | checkSpans(mockSpans); 409 | 410 | MockSpan standardSpan = mockSpans.get(1); 411 | assertEquals("kafka", standardSpan.tags().get("peer.service")); 412 | } 413 | 414 | @Test 415 | public void testConsumerBuilderWithCustomDecorators() throws InterruptedException { 416 | Producer producer = createTracingProducer(); 417 | producer.send(new ProducerRecord<>("messages", 1, "test")); 418 | 419 | producer.close(); 420 | 421 | assertEquals(1, mockTracer.finishedSpans().size()); 422 | 423 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 424 | final CountDownLatch latch = new CountDownLatch(1); 425 | 426 | executorService.execute(() -> { 427 | Consumer consumer = 428 | createConsumerWithDecorators( 429 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator())); 430 | 431 | while (latch.getCount() > 0) { 432 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 433 | for (ConsumerRecord record : records) { 434 | SpanContext spanContext = TracingKafkaUtils 435 | .extractSpanContext(record.headers(), mockTracer); 436 | assertNotNull(spanContext); 437 | assertEquals("test", record.value()); 438 | assertEquals((Integer) 1, record.key()); 439 | 440 | consumer.commitSync(); 441 | latch.countDown(); 442 | } 443 | } 444 | consumer.close(); 445 | }); 446 | 447 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 448 | 449 | List mockSpans = mockTracer.finishedSpans(); 450 | checkSpans(mockSpans); 451 | 452 | MockSpan customSpan = mockSpans.get(1); 453 | assertEquals("overwritten", customSpan.tags().get("peer.service")); 454 | assertEquals("new-consumer-test", customSpan.tags().get("new.tag.test")); 455 | } 456 | 457 | @Test 458 | public void testConsumerBuilderWithoutDecorators() throws InterruptedException { 459 | Producer producer = createTracingProducer(); 460 | producer.send(new ProducerRecord<>("messages", 1, "test")); 461 | 462 | producer.close(); 463 | 464 | assertEquals(1, mockTracer.finishedSpans().size()); 465 | 466 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 467 | final CountDownLatch latch = new CountDownLatch(1); 468 | 469 | executorService.execute(() -> { 470 | Consumer consumer = createConsumerWithDecorators(new ArrayList()); 471 | 472 | while (latch.getCount() > 0) { 473 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 474 | for (ConsumerRecord record : records) { 475 | SpanContext spanContext = TracingKafkaUtils 476 | .extractSpanContext(record.headers(), mockTracer); 477 | assertNotNull(spanContext); 478 | assertEquals("test", record.value()); 479 | assertEquals((Integer) 1, record.key()); 480 | 481 | consumer.commitSync(); 482 | latch.countDown(); 483 | } 484 | } 485 | consumer.close(); 486 | }); 487 | 488 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 489 | 490 | List mockSpans = mockTracer.finishedSpans(); 491 | 492 | MockSpan span = mockSpans.get(1); 493 | assertEquals("consumer", 
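/*
 * Usage sketch (illustrative): the builder tests above compose tracing around plain Kafka
 * clients instead of wrapping them directly. The decorator below mirrors the anonymous one
 * in createDecorator() further down; tracer, producerProps and consumerProps are assumed,
 * and "orders-kafka" is an invented peer.service value.
 *
 *   SpanDecorator peerOverride = new SpanDecorator() {
 *     @Override
 *     public void onSend(ProducerRecord record, Span span) {
 *       span.setTag("peer.service", "orders-kafka");
 *     }
 *     @Override
 *     public void onResponse(ConsumerRecord record, Span span) {
 *       span.setTag("peer.service", "orders-kafka");
 *     }
 *     @Override
 *     public void onError(Exception exception, Span span) {
 *     }
 *   };
 *
 *   Producer<Integer, String> producer =
 *       new TracingKafkaProducerBuilder<>(new KafkaProducer<Integer, String>(producerProps), tracer)
 *           .withDecorators(Arrays.asList(SpanDecorator.STANDARD_TAGS, peerOverride))
 *           .withSpanNameProvider(ClientSpanNameProvider.PRODUCER_TOPIC)
 *           .build();
 *
 *   Consumer<Integer, String> consumer =
 *       new TracingKafkaConsumerBuilder<>(new KafkaConsumer<Integer, String>(consumerProps), tracer)
 *           .withDecorators(Arrays.asList(SpanDecorator.STANDARD_TAGS, peerOverride))
 *           .withSpanNameProvider(ClientSpanNameProvider.CONSUMER_TOPIC)
 *           .build();
 */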
span.tags().get("span.kind")); 494 | } 495 | 496 | @Test 497 | public void testConsumerBuilderWithCustomSpanNameProvider() throws InterruptedException { 498 | Producer producer = createTracingProducer(); 499 | producer.send(new ProducerRecord<>("messages", 1, "test")); 500 | producer.close(); 501 | 502 | assertEquals(1, mockTracer.finishedSpans().size()); 503 | 504 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 505 | final CountDownLatch latch = new CountDownLatch(1); 506 | 507 | executorService.execute(() -> { 508 | BiFunction operationNameProvider = 509 | (operationName, consumerRecord) -> createSpanNameProvider(); 510 | Consumer consumer = createConsumerWithSpanNameProvider( 511 | operationNameProvider); 512 | 513 | while (latch.getCount() > 0) { 514 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 515 | for (ConsumerRecord record : records) { 516 | SpanContext spanContext = TracingKafkaUtils 517 | .extractSpanContext(record.headers(), mockTracer); 518 | assertNotNull(spanContext); 519 | assertEquals("test", record.value()); 520 | assertEquals((Integer) 1, record.key()); 521 | 522 | consumer.commitSync(); 523 | latch.countDown(); 524 | } 525 | } 526 | consumer.close(); 527 | }); 528 | 529 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 530 | 531 | assertEquals("Test_SpanNameProvider", mockTracer.finishedSpans().get(1).operationName()); 532 | } 533 | 534 | @Test 535 | public void testConsumerBuilderWithStandardSpanNameProvider() throws InterruptedException { 536 | Producer producer = createTracingProducer(); 537 | producer.send(new ProducerRecord<>("messages", 1, "test")); 538 | producer.close(); 539 | 540 | assertEquals(1, mockTracer.finishedSpans().size()); 541 | 542 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 543 | final CountDownLatch latch = new CountDownLatch(1); 544 | 545 | executorService.execute(() -> { 546 | Consumer consumer = createConsumerWithSpanNameProvider(null); 547 | 548 | while (latch.getCount() > 0) { 549 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 550 | for (ConsumerRecord record : records) { 551 | SpanContext spanContext = TracingKafkaUtils 552 | .extractSpanContext(record.headers(), mockTracer); 553 | assertNotNull(spanContext); 554 | assertEquals("test", record.value()); 555 | assertEquals((Integer) 1, record.key()); 556 | 557 | consumer.commitSync(); 558 | latch.countDown(); 559 | } 560 | } 561 | consumer.close(); 562 | }); 563 | 564 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 565 | 566 | assertEquals("From_messages", mockTracer.finishedSpans().get(1).operationName()); 567 | } 568 | 569 | private TracingKafkaProducer createTracingProducer() { 570 | return new TracingKafkaProducer<>(createProducer(), mockTracer); 571 | } 572 | 573 | private SpanDecorator createDecorator() { 574 | return new SpanDecorator() { 575 | @Override 576 | public void onSend(ProducerRecord record, Span span) { 577 | span.setTag("peer.service", "overwritten"); 578 | span.setTag("new.tag.test", "new-producer-test"); 579 | } 580 | 581 | @Override 582 | public void onResponse(ConsumerRecord record, Span span) { 583 | span.setTag("peer.service", "overwritten"); 584 | span.setTag("new.tag.test", "new-consumer-test"); 585 | } 586 | 587 | @Override 588 | public void onError(Exception exception, Span span) { 589 | } 590 | }; 591 | } 592 | 593 | private Producer createProducer() { 594 | Map senderProps = KafkaTestUtils 595 | .producerProps(embeddedKafka.getEmbeddedKafka()); 596 | return new 
KafkaProducer<>(senderProps); 597 | } 598 | 599 | private Producer createNameProvidedProducer( 600 | BiFunction producerSpanNameProvider) { 601 | return new TracingKafkaProducer<>(createProducer(), mockTracer, producerSpanNameProvider); 602 | } 603 | 604 | private Consumer createConsumerWithDecorators( 605 | Collection spanDecorators) { 606 | Map consumerProps = KafkaTestUtils 607 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka()); 608 | consumerProps.put("auto.offset.reset", "earliest"); 609 | KafkaConsumer kafkaConsumer = new KafkaConsumer<>(consumerProps); 610 | TracingKafkaConsumerBuilder tracingKafkaConsumerBuilder = 611 | new TracingKafkaConsumerBuilder(kafkaConsumer, mockTracer); 612 | 613 | if (spanDecorators != null) { 614 | tracingKafkaConsumerBuilder = tracingKafkaConsumerBuilder.withDecorators(spanDecorators); 615 | } 616 | TracingKafkaConsumer tracingKafkaConsumer = tracingKafkaConsumerBuilder.build(); 617 | tracingKafkaConsumer.subscribe(Collections.singletonList("messages")); 618 | 619 | return tracingKafkaConsumer; 620 | } 621 | 622 | private Consumer createConsumerWithSpanNameProvider( 623 | BiFunction spanNameProvider) { 624 | Map consumerProps = KafkaTestUtils 625 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka()); 626 | consumerProps.put("auto.offset.reset", "earliest"); 627 | KafkaConsumer kafkaConsumer = new KafkaConsumer<>(consumerProps); 628 | TracingKafkaConsumerBuilder tracingKafkaConsumerBuilder = 629 | new TracingKafkaConsumerBuilder(kafkaConsumer, mockTracer); 630 | 631 | if (spanNameProvider != null) { 632 | tracingKafkaConsumerBuilder = tracingKafkaConsumerBuilder 633 | .withSpanNameProvider(spanNameProvider); 634 | } 635 | TracingKafkaConsumer tracingKafkaConsumer = tracingKafkaConsumerBuilder.build(); 636 | tracingKafkaConsumer.subscribe(Collections.singletonList("messages")); 637 | 638 | return tracingKafkaConsumer; 639 | } 640 | 641 | private Producer createProducerWithDecorators( 642 | Collection spanDecorators) { 643 | Map senderProps = KafkaTestUtils 644 | .producerProps(embeddedKafka.getEmbeddedKafka()); 645 | KafkaProducer kafkaProducer = new KafkaProducer<>(senderProps); 646 | TracingKafkaProducerBuilder tracingKafkaProducerBuilder = 647 | new TracingKafkaProducerBuilder<>(kafkaProducer, mockTracer); 648 | if (spanDecorators != null) { 649 | tracingKafkaProducerBuilder = tracingKafkaProducerBuilder.withDecorators(spanDecorators); 650 | } 651 | 652 | return tracingKafkaProducerBuilder.build(); 653 | } 654 | 655 | private Producer createProducerWithSpanNameProvider( 656 | BiFunction spanNameProvider) { 657 | Map senderProps = KafkaTestUtils 658 | .producerProps(embeddedKafka.getEmbeddedKafka()); 659 | KafkaProducer kafkaProducer = new KafkaProducer<>(senderProps); 660 | TracingKafkaProducerBuilder tracingKafkaProducerBuilder = 661 | new TracingKafkaProducerBuilder<>(kafkaProducer, mockTracer); 662 | if (spanNameProvider != null) { 663 | tracingKafkaProducerBuilder = tracingKafkaProducerBuilder 664 | .withSpanNameProvider(spanNameProvider); 665 | } 666 | 667 | return tracingKafkaProducerBuilder.build(); 668 | } 669 | 670 | private void createConsumer(final CountDownLatch latch, final Integer key, 671 | final boolean withInterceptor, 672 | final BiFunction consumerNameProvider) 673 | throws InterruptedException { 674 | 675 | ExecutorService executorService = Executors.newSingleThreadExecutor(); 676 | 677 | final Map consumerProps = KafkaTestUtils 678 | .consumerProps("sampleRawConsumer", 
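/*
 * Usage sketch (illustrative): besides the wrapper and builder classes, the createConsumer(...)
 * helper just below shows the interceptor route, which only needs a consumer property. Note an
 * assumption here: because the interceptor is instantiated by Kafka itself, it cannot take a
 * Tracer constructor argument, so the tracer is expected to be resolvable globally (e.g.
 * registered via io.opentracing.util.GlobalTracer) rather than passed in as in these tests.
 *
 *   Map<String, Object> props = new HashMap<>(consumerProps);
 *   props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG,
 *       TracingConsumerInterceptor.class.getName());
 *   KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props);
 */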
"false", embeddedKafka.getEmbeddedKafka()); 679 | consumerProps.put("auto.offset.reset", "earliest"); 680 | if (withInterceptor) { 681 | consumerProps.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, 682 | TracingConsumerInterceptor.class.getName()); 683 | } 684 | 685 | executorService.execute(() -> { 686 | KafkaConsumer kafkaConsumer = new KafkaConsumer<>(consumerProps); 687 | Consumer consumer; 688 | if (withInterceptor) { 689 | consumer = kafkaConsumer; 690 | } else { 691 | consumer = new TracingKafkaConsumer<>(kafkaConsumer, mockTracer, consumerNameProvider); 692 | } 693 | consumer.subscribe(Collections.singletonList("messages")); 694 | 695 | while (latch.getCount() > 0) { 696 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 697 | for (ConsumerRecord record : records) { 698 | SpanContext spanContext = TracingKafkaUtils 699 | .extractSpanContext(record.headers(), mockTracer); 700 | assertNotNull(spanContext); 701 | assertEquals("test", record.value()); 702 | if (key != null) { 703 | assertEquals(key, record.key()); 704 | } 705 | consumer.commitSync(); 706 | latch.countDown(); 707 | } 708 | } 709 | kafkaConsumer.close(); 710 | }); 711 | 712 | assertTrue(latch.await(30, TimeUnit.SECONDS)); 713 | 714 | } 715 | 716 | private static String createSpanNameProvider() { 717 | return "Test_SpanNameProvider"; 718 | } 719 | 720 | private void checkSpans(List mockSpans) { 721 | for (MockSpan mockSpan : mockSpans) { 722 | String operationName = mockSpan.operationName(); 723 | if (operationName.equals(TracingKafkaUtils.TO_PREFIX + "messages")) { 724 | assertEquals(Tags.SPAN_KIND_PRODUCER, mockSpan.tags().get(Tags.SPAN_KIND.getKey())); 725 | assertEquals("messages", mockSpan.tags().get(Tags.MESSAGE_BUS_DESTINATION.getKey())); 726 | } else if (operationName.equals(TracingKafkaUtils.FROM_PREFIX + "messages")) { 727 | assertEquals(Tags.SPAN_KIND_CONSUMER, mockSpan.tags().get(Tags.SPAN_KIND.getKey())); 728 | assertEquals(0, mockSpan.tags().get("partition")); 729 | long offset = (Long) mockSpan.tags().get("offset"); 730 | assertTrue(offset >= 0L); 731 | assertEquals("messages", mockSpan.tags().get(Tags.MESSAGE_BUS_DESTINATION.getKey())); 732 | } 733 | assertEquals(StandardSpanDecorator.COMPONENT_NAME, 734 | mockSpan.tags().get(Tags.COMPONENT.getKey())); 735 | assertEquals(0, mockSpan.generatedErrors().size()); 736 | assertTrue(operationName.equals(TracingKafkaUtils.TO_PREFIX + "messages") 737 | || operationName.equals(TracingKafkaUtils.FROM_PREFIX + "messages")); 738 | } 739 | } 740 | 741 | private MockSpan getByOperationName(List spans, String operationName) { 742 | List found = new ArrayList<>(); 743 | for (MockSpan span : spans) { 744 | if (operationName.equals(span.operationName())) { 745 | found.add(span); 746 | } 747 | } 748 | 749 | if (found.size() > 1) { 750 | throw new RuntimeException("Ups, too many spans (" + found.size() + ") with operation name '" 751 | + operationName + "'"); 752 | } 753 | 754 | return found.isEmpty() ? 
null : found.get(0); 755 | } 756 | 757 | private List getByOperationNameAll(List spans, String operationName) { 758 | List found = new ArrayList<>(); 759 | for (MockSpan span : spans) { 760 | if (operationName.equals(span.operationName())) { 761 | found.add(span); 762 | } 763 | } 764 | return found; 765 | } 766 | 767 | } 768 | -------------------------------------------------------------------------------- /opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TracingKafkaUtilsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka; 15 | 16 | 17 | import static org.junit.Assert.assertEquals; 18 | import static org.junit.Assert.assertNull; 19 | import static org.junit.Assert.assertTrue; 20 | 21 | import io.opentracing.mock.MockSpan; 22 | import io.opentracing.mock.MockTracer; 23 | import org.apache.kafka.common.header.Headers; 24 | import org.apache.kafka.common.header.internals.RecordHeaders; 25 | import org.junit.Before; 26 | import org.junit.Test; 27 | 28 | 29 | public class TracingKafkaUtilsTest { 30 | 31 | private MockTracer mockTracer = new MockTracer(); 32 | 33 | @Before 34 | public void before() { 35 | mockTracer.reset(); 36 | } 37 | 38 | @Test 39 | public void inject() { 40 | MockSpan span = mockTracer.buildSpan("test").start(); 41 | Headers headers = new RecordHeaders(); 42 | assertEquals(0, headers.toArray().length); 43 | 44 | TracingKafkaUtils.inject(span.context(), headers, mockTracer); 45 | 46 | assertTrue(headers.toArray().length > 0); 47 | } 48 | 49 | @Test 50 | public void extract() { 51 | MockSpan span = mockTracer.buildSpan("test").start(); 52 | Headers headers = new RecordHeaders(); 53 | TracingKafkaUtils.inject(span.context(), headers, mockTracer); 54 | 55 | MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils 56 | .extractSpanContext(headers, mockTracer); 57 | 58 | assertEquals(span.context().spanId(), spanContext.spanId()); 59 | assertEquals(span.context().traceId(), spanContext.traceId()); 60 | } 61 | 62 | @Test 63 | public void extract_no_context() { 64 | Headers headers = new RecordHeaders(); 65 | 66 | MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils 67 | .extractSpanContext(headers, mockTracer); 68 | assertNull(spanContext); 69 | } 70 | 71 | @Test 72 | public void inject_two_contexts_and_extract() { 73 | MockSpan span = mockTracer.buildSpan("first").start(); 74 | Headers headers = new RecordHeaders(); 75 | assertEquals(0, headers.toArray().length); 76 | 77 | // inject first 78 | TracingKafkaUtils.inject(span.context(), headers, mockTracer); 79 | int headersLength = headers.toArray().length; 80 | assertTrue(headersLength > 0); 81 | 82 | // inject second 83 | MockSpan span2 = mockTracer.buildSpan("second").asChildOf(span.context()).start(); 84 | TracingKafkaUtils.inject(span2.context(), 
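/*
 * Usage sketch (illustrative): outside the tests, extractSpanContext(...) is the piece that
 * application code typically calls itself, e.g. inside a poll loop, to continue the trace
 * started by a tracing producer. The tracer and consumer variables and the "handle-message"
 * operation name are assumptions for the sketch.
 *
 *   for (ConsumerRecord<Integer, String> rec : consumer.poll(Duration.ofMillis(100))) {
 *     SpanContext parentContext = TracingKafkaUtils.extractSpanContext(rec.headers(), tracer);
 *     Tracer.SpanBuilder spanBuilder = tracer.buildSpan("handle-message")
 *         .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER);
 *     if (parentContext != null) {
 *       spanBuilder.asChildOf(parentContext);
 *     }
 *     Span span = spanBuilder.start();
 *     try {
 *       // business logic
 *     } finally {
 *       span.finish();
 *     }
 *   }
 */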
headers, mockTracer); 85 | assertTrue(headers.toArray().length > headersLength); 86 | 87 | // check first 88 | MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils 89 | .extractSpanContext(headers, mockTracer); 90 | assertEquals(span2.context().spanId(), spanContext.spanId()); 91 | assertEquals(span2.context().traceId(), spanContext.traceId()); 92 | } 93 | } -------------------------------------------------------------------------------- /opentracing-kafka-spring/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 19 | opentracing-kafka-parent 20 | io.opentracing.contrib 21 | 0.1.16-SNAPSHOT 22 | 23 | 4.0.0 24 | 25 | opentracing-kafka-spring 26 | ${project.groupId}:${project.artifactId} 27 | OpenTracing Instrumentation for Spring Kafka 28 | 29 | 30 | 31 | org.springframework.kafka 32 | spring-kafka 33 | ${spring.kafka.version} 34 | provided 35 | 36 | 37 | 38 | org.springframework 39 | spring-aspects 40 | ${spring.version} 41 | provided 42 | 43 | 44 | 45 | io.opentracing.contrib 46 | opentracing-kafka-client 47 | 0.1.16-SNAPSHOT 48 | 49 | 50 | 51 | org.awaitility 52 | awaitility 53 | test 54 | 55 | 56 | 57 | com.fasterxml.jackson.core 58 | jackson-databind 59 | 2.10.0 60 | test 61 | 62 | 63 | 64 | 65 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/main/java/io/opentracing/contrib/kafka/spring/MessageListenerMethodInterceptor.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import io.opentracing.References; 17 | import io.opentracing.Scope; 18 | import io.opentracing.Span; 19 | import io.opentracing.SpanContext; 20 | import io.opentracing.Tracer; 21 | import io.opentracing.contrib.kafka.TracingKafkaUtils; 22 | import io.opentracing.tag.Tags; 23 | import org.aopalliance.intercept.MethodInterceptor; 24 | import org.aopalliance.intercept.MethodInvocation; 25 | import org.apache.kafka.clients.consumer.ConsumerRecord; 26 | 27 | class MessageListenerMethodInterceptor implements MethodInterceptor { 28 | 29 | private static final String SPAN_PREFIX = "KafkaListener_"; 30 | 31 | private final Tracer tracer; 32 | 33 | MessageListenerMethodInterceptor(Tracer tracer) { 34 | this.tracer = tracer; 35 | } 36 | 37 | @Override 38 | public Object invoke(MethodInvocation invocation) throws Throwable { 39 | if (!"onMessage".equals(invocation.getMethod().getName())) { 40 | return invocation.proceed(); 41 | } 42 | Object[] arguments = invocation.getArguments(); 43 | ConsumerRecord record = getConsumerRecord(arguments); 44 | if (record == null) { 45 | return invocation.proceed(); 46 | } 47 | 48 | Tracer.SpanBuilder spanBuilder = tracer.buildSpan(SPAN_PREFIX + record.topic()) 49 | .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER); 50 | 51 | SpanContext parentContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer); 52 | if (parentContext != null) { 53 | spanBuilder.addReference(References.FOLLOWS_FROM, parentContext); 54 | } 55 | Span span = spanBuilder.start(); 56 | try (Scope ignored = tracer.activateSpan(span)) { 57 | return invocation.proceed(); 58 | } catch (Exception e) { 59 | Tags.ERROR.set(span, Boolean.TRUE); 60 | throw e; 61 | } finally { 62 | span.finish(); 63 | } 64 | } 65 | 66 | private ConsumerRecord getConsumerRecord(Object[] arguments) { 67 | for (Object object : arguments) { 68 | if (object instanceof ConsumerRecord) { 69 | return (ConsumerRecord) object; 70 | } 71 | } 72 | return null; 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/main/java/io/opentracing/contrib/kafka/spring/TracingConsumerFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS; 17 | 18 | import io.opentracing.Tracer; 19 | import io.opentracing.contrib.kafka.ClientSpanNameProvider; 20 | import io.opentracing.contrib.kafka.SpanDecorator; 21 | import io.opentracing.contrib.kafka.TracingKafkaConsumerBuilder; 22 | import java.util.Collection; 23 | import java.util.Collections; 24 | import java.util.Map; 25 | import java.util.Properties; 26 | import java.util.function.BiFunction; 27 | import org.apache.kafka.clients.consumer.Consumer; 28 | import org.apache.kafka.clients.consumer.ConsumerRecord; 29 | import org.apache.kafka.common.serialization.Deserializer; 30 | import org.springframework.kafka.core.ConsumerFactory; 31 | 32 | public class TracingConsumerFactory implements ConsumerFactory { 33 | 34 | private final ConsumerFactory consumerFactory; 35 | private final Tracer tracer; 36 | private final Collection spanDecorators; 37 | private final BiFunction consumerSpanNameProvider; 38 | 39 | public TracingConsumerFactory(ConsumerFactory consumerFactory, Tracer tracer) { 40 | this(consumerFactory, tracer, null, null); 41 | } 42 | 43 | public TracingConsumerFactory(ConsumerFactory consumerFactory, Tracer tracer, 44 | Collection spanDecorators) { 45 | this(consumerFactory, tracer, spanDecorators, null); 46 | } 47 | 48 | public TracingConsumerFactory(ConsumerFactory consumerFactory, Tracer tracer, 49 | BiFunction consumerSpanNameProvider) { 50 | this(consumerFactory, tracer, null, consumerSpanNameProvider); 51 | } 52 | 53 | public TracingConsumerFactory(ConsumerFactory consumerFactory, Tracer tracer, 54 | Collection spanDecorators, 55 | BiFunction consumerSpanNameProvider) { 56 | this.tracer = tracer; 57 | this.consumerFactory = consumerFactory; 58 | this.spanDecorators = (spanDecorators == null) 59 | ? Collections.singletonList(STANDARD_TAGS) 60 | : spanDecorators; 61 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null) 62 | ? 
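/*
 * Usage sketch (illustrative): this factory is meant to wrap an existing Spring ConsumerFactory
 * in a @Configuration class, mirroring the TestConfiguration further down in this repository.
 * The consumerProps map is assumed to be defined by the application.
 *
 *   @Bean
 *   public ConsumerFactory<Integer, String> consumerFactory(Tracer tracer) {
 *     return new TracingConsumerFactory<>(
 *         new DefaultKafkaConsumerFactory<>(consumerProps), tracer);
 *   }
 */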
ClientSpanNameProvider.CONSUMER_OPERATION_NAME 63 | : consumerSpanNameProvider; 64 | } 65 | 66 | @Override 67 | public Consumer createConsumer() { 68 | return new TracingKafkaConsumerBuilder<>(consumerFactory.createConsumer(), tracer) 69 | .withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider).build(); 70 | } 71 | 72 | @Override 73 | public Consumer createConsumer(String clientIdSuffix) { 74 | return new TracingKafkaConsumerBuilder<>(consumerFactory.createConsumer(clientIdSuffix), tracer) 75 | .withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider).build(); 76 | } 77 | 78 | @Override 79 | public Consumer createConsumer(String groupId, String clientIdSuffix) { 80 | return new TracingKafkaConsumerBuilder<>( 81 | consumerFactory.createConsumer(groupId, clientIdSuffix), tracer) 82 | .withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider).build(); 83 | } 84 | 85 | @Override 86 | public Consumer createConsumer(String groupId, String clientIdPrefix, 87 | String clientIdSuffix) { 88 | return new TracingKafkaConsumerBuilder<>( 89 | consumerFactory.createConsumer(groupId, clientIdPrefix, clientIdSuffix), 90 | tracer).withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider) 91 | .build(); 92 | } 93 | 94 | @Override 95 | public Consumer createConsumer(String groupId, String clientIdPrefix, 96 | String clientIdSuffix, Properties properties) { 97 | return new TracingKafkaConsumerBuilder<>(consumerFactory.createConsumer(groupId, clientIdPrefix, 98 | clientIdSuffix, properties), tracer).withDecorators(spanDecorators) 99 | .withSpanNameProvider(consumerSpanNameProvider).build(); 100 | } 101 | 102 | @Override 103 | public boolean isAutoCommit() { 104 | return consumerFactory.isAutoCommit(); 105 | } 106 | 107 | @Override 108 | public Map getConfigurationProperties() { 109 | return consumerFactory.getConfigurationProperties(); 110 | } 111 | 112 | @Override 113 | public Deserializer getKeyDeserializer() { 114 | return consumerFactory.getKeyDeserializer(); 115 | } 116 | 117 | @Override 118 | public Deserializer getValueDeserializer() { 119 | return consumerFactory.getValueDeserializer(); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/main/java/io/opentracing/contrib/kafka/spring/TracingKafkaAspect.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import io.opentracing.Tracer; 17 | import org.aspectj.lang.ProceedingJoinPoint; 18 | import org.aspectj.lang.annotation.Around; 19 | import org.aspectj.lang.annotation.Aspect; 20 | import org.aspectj.lang.annotation.Pointcut; 21 | import org.springframework.aop.framework.ProxyFactoryBean; 22 | import org.springframework.kafka.listener.AbstractMessageListenerContainer; 23 | import org.springframework.kafka.listener.MessageListener; 24 | import org.springframework.kafka.listener.MessageListenerContainer; 25 | 26 | /** 27 | * Wraps a {@link MessageListener} into a tracing proxy, to support {@link 28 | * org.springframework.kafka.annotation.KafkaListener} beans. 29 | *

30 | * A port of Spring Sleuth implementation. 31 | */ 32 | @Aspect 33 | public class TracingKafkaAspect { 34 | private final Tracer tracer; 35 | 36 | public TracingKafkaAspect(Tracer tracer) { 37 | this.tracer = tracer; 38 | } 39 | 40 | @Pointcut("execution(public * org.springframework.kafka.config.KafkaListenerContainerFactory.createListenerContainer(..))") 41 | private void anyCreateListenerContainer() { 42 | } 43 | 44 | @Pointcut("execution(public * org.springframework.kafka.config.KafkaListenerContainerFactory.createContainer(..))") 45 | private void anyCreateContainer() { 46 | } 47 | 48 | @Around("anyCreateListenerContainer() || anyCreateContainer()") 49 | public Object wrapListenerContainerCreation(ProceedingJoinPoint pjp) throws Throwable { 50 | MessageListenerContainer listener = (MessageListenerContainer) pjp.proceed(); 51 | if (listener instanceof AbstractMessageListenerContainer) { 52 | AbstractMessageListenerContainer container = (AbstractMessageListenerContainer) listener; 53 | Object someMessageListener = container.getContainerProperties().getMessageListener(); 54 | if (someMessageListener instanceof MessageListener) { 55 | container.setupMessageListener(createProxy(someMessageListener)); 56 | } 57 | } 58 | return listener; 59 | } 60 | 61 | Object createProxy(Object bean) { 62 | ProxyFactoryBean factory = new ProxyFactoryBean(); 63 | factory.setProxyTargetClass(true); 64 | factory.addAdvice(new MessageListenerMethodInterceptor(this.tracer)); 65 | factory.setTarget(bean); 66 | return factory.getObject(); 67 | } 68 | 69 | } 70 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/main/java/io/opentracing/contrib/kafka/spring/TracingProducerFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS; 17 | 18 | import io.opentracing.Tracer; 19 | import io.opentracing.contrib.kafka.ClientSpanNameProvider; 20 | import io.opentracing.contrib.kafka.SpanDecorator; 21 | import io.opentracing.contrib.kafka.TracingKafkaProducerBuilder; 22 | 23 | import java.time.Duration; 24 | import java.util.Collection; 25 | import java.util.Collections; 26 | import java.util.Map; 27 | import java.util.function.BiFunction; 28 | import java.util.function.Supplier; 29 | 30 | import org.apache.kafka.clients.producer.Producer; 31 | import org.apache.kafka.clients.producer.ProducerRecord; 32 | import org.apache.kafka.common.serialization.Serializer; 33 | import org.springframework.beans.factory.DisposableBean; 34 | import org.springframework.kafka.core.ProducerFactory; 35 | 36 | public class TracingProducerFactory implements ProducerFactory, DisposableBean { 37 | 38 | private final ProducerFactory producerFactory; 39 | private final Tracer tracer; 40 | private final Collection spanDecorators; 41 | private final BiFunction producerSpanNameProvider; 42 | 43 | public TracingProducerFactory(ProducerFactory producerFactory, Tracer tracer) { 44 | this(producerFactory, tracer, null, null); 45 | } 46 | 47 | public TracingProducerFactory(ProducerFactory producerFactory, Tracer tracer, 48 | Collection spanDecorators) { 49 | this(producerFactory, tracer, spanDecorators, null); 50 | } 51 | 52 | public TracingProducerFactory(ProducerFactory producerFactory, Tracer tracer, 53 | BiFunction producerSpanNameProvider) { 54 | this(producerFactory, tracer, null, producerSpanNameProvider); 55 | } 56 | 57 | public TracingProducerFactory(ProducerFactory producerFactory, Tracer tracer, 58 | Collection spanDecorators, 59 | BiFunction producerSpanNameProvider) { 60 | this.producerFactory = producerFactory; 61 | this.tracer = tracer; 62 | this.spanDecorators = (spanDecorators == null) 63 | ? Collections.singletonList(STANDARD_TAGS) 64 | : spanDecorators; 65 | this.producerSpanNameProvider = (producerSpanNameProvider == null) 66 | ? 
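/*
 * Usage sketch (illustrative): the producer-side counterpart, again mirroring the
 * TestConfiguration further down. A KafkaTemplate built on this factory produces traced
 * sends, and a TracingKafkaAspect bean (with @EnableAspectJAutoProxy on the configuration)
 * adds "KafkaListener_<topic>" spans around @KafkaListener methods. producerProps is an
 * assumed application-provided map.
 *
 *   @Bean
 *   public ProducerFactory<Integer, String> producerFactory(Tracer tracer) {
 *     return new TracingProducerFactory<>(
 *         new DefaultKafkaProducerFactory<>(producerProps), tracer);
 *   }
 *
 *   @Bean
 *   public KafkaTemplate<Integer, String> kafkaTemplate(ProducerFactory<Integer, String> producerFactory) {
 *     return new KafkaTemplate<>(producerFactory);
 *   }
 *
 *   @Bean
 *   public TracingKafkaAspect tracingKafkaAspect(Tracer tracer) {
 *     return new TracingKafkaAspect(tracer);
 *   }
 */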
ClientSpanNameProvider.PRODUCER_OPERATION_NAME 67 | : producerSpanNameProvider; 68 | } 69 | 70 | @Override 71 | public Producer createProducer() { 72 | return new TracingKafkaProducerBuilder<>(producerFactory.createProducer(), tracer) 73 | .withDecorators(spanDecorators) 74 | .withSpanNameProvider(producerSpanNameProvider).build(); 75 | } 76 | 77 | @Override 78 | public Producer createProducer(String txIdPrefix) { 79 | return new TracingKafkaProducerBuilder<>(producerFactory.createProducer(txIdPrefix), tracer) 80 | .withDecorators(spanDecorators).withSpanNameProvider(producerSpanNameProvider).build(); 81 | } 82 | 83 | @Override 84 | public boolean transactionCapable() { 85 | return producerFactory.transactionCapable(); 86 | } 87 | 88 | @Override 89 | public void closeProducerFor(String transactionIdSuffix) { 90 | producerFactory.closeProducerFor(transactionIdSuffix); 91 | } 92 | 93 | @Override 94 | public boolean isProducerPerConsumerPartition() { 95 | return producerFactory.isProducerPerConsumerPartition(); 96 | } 97 | 98 | @Override 99 | public void closeThreadBoundProducer() { 100 | producerFactory.closeThreadBoundProducer(); 101 | } 102 | 103 | @Override 104 | public void destroy() throws Exception { 105 | if (producerFactory instanceof DisposableBean) { 106 | ((DisposableBean) producerFactory).destroy(); 107 | } 108 | } 109 | 110 | @Override 111 | public Producer createNonTransactionalProducer() { 112 | return new TracingKafkaProducerBuilder<>(producerFactory.createNonTransactionalProducer(), tracer) 113 | .withDecorators(spanDecorators).withSpanNameProvider(producerSpanNameProvider).build(); 114 | } 115 | 116 | @Override 117 | public void reset() { 118 | producerFactory.reset(); 119 | } 120 | 121 | @Override 122 | public Map getConfigurationProperties() { 123 | return producerFactory.getConfigurationProperties(); 124 | } 125 | 126 | @Override 127 | public Supplier> getValueSerializerSupplier() { 128 | return producerFactory.getValueSerializerSupplier(); 129 | } 130 | 131 | @Override 132 | public Supplier> getKeySerializerSupplier() { 133 | return producerFactory.getKeySerializerSupplier(); 134 | } 135 | 136 | @Override 137 | public boolean isProducerPerThread() { 138 | return producerFactory.isProducerPerThread(); 139 | } 140 | 141 | @Override 142 | public String getTransactionIdPrefix() { 143 | return producerFactory.getTransactionIdPrefix(); 144 | } 145 | 146 | @Override 147 | public Duration getPhysicalCloseTimeout() { 148 | return producerFactory.getPhysicalCloseTimeout(); 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/test/java/io/opentracing/contrib/kafka/spring/Listener.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import static org.hamcrest.MatcherAssert.assertThat; 17 | import static org.hamcrest.Matchers.notNullValue; 18 | 19 | import io.opentracing.mock.MockTracer; 20 | import org.springframework.beans.factory.annotation.Autowired; 21 | import org.springframework.kafka.annotation.KafkaListener; 22 | import org.springframework.stereotype.Component; 23 | 24 | @Component 25 | public class Listener { 26 | 27 | @Autowired 28 | private MockTracer tracer; 29 | 30 | @KafkaListener(topics = "spring") 31 | public void listen(String message) { 32 | assertThat(tracer.activeSpan(), notNullValue()); 33 | System.out.println(message); 34 | } 35 | 36 | } 37 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/test/java/io/opentracing/contrib/kafka/spring/TestConfiguration.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import static io.opentracing.contrib.kafka.spring.TracingSpringKafkaTest.embeddedKafka; 17 | 18 | import io.opentracing.mock.MockTracer; 19 | import java.util.Map; 20 | import org.springframework.context.annotation.Bean; 21 | import org.springframework.context.annotation.ComponentScan; 22 | import org.springframework.context.annotation.Configuration; 23 | import org.springframework.context.annotation.EnableAspectJAutoProxy; 24 | import org.springframework.kafka.annotation.EnableKafka; 25 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 26 | import org.springframework.kafka.core.ConsumerFactory; 27 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 28 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 29 | import org.springframework.kafka.core.KafkaTemplate; 30 | import org.springframework.kafka.core.ProducerFactory; 31 | import org.springframework.kafka.test.utils.KafkaTestUtils; 32 | 33 | @Configuration 34 | @EnableKafka 35 | @ComponentScan 36 | @EnableAspectJAutoProxy 37 | public class TestConfiguration { 38 | 39 | @Bean 40 | public MockTracer tracer() { 41 | return new MockTracer(); 42 | } 43 | 44 | @Bean 45 | public ConcurrentKafkaListenerContainerFactory 46 | kafkaListenerContainerFactory() { 47 | ConcurrentKafkaListenerContainerFactory factory = 48 | new ConcurrentKafkaListenerContainerFactory<>(); 49 | factory.setConsumerFactory(consumerFactory()); 50 | return factory; 51 | } 52 | 53 | @Bean 54 | public ConsumerFactory consumerFactory() { 55 | final Map consumerProps = KafkaTestUtils 56 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka()); 57 | consumerProps.put("auto.offset.reset", "earliest"); 58 | 59 | return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps), tracer()); 60 | } 61 | 62 | 63 | @Bean 64 | public 
ProducerFactory producerFactory() { 65 | return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>( 66 | KafkaTestUtils.producerProps(embeddedKafka.getEmbeddedKafka())), tracer()); 67 | } 68 | 69 | @Bean 70 | public KafkaTemplate kafkaTemplate() { 71 | return new KafkaTemplate<>(producerFactory()); 72 | } 73 | 74 | @Bean 75 | public TracingKafkaAspect tracingKafkaAspect() { 76 | return new TracingKafkaAspect(tracer()); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /opentracing-kafka-spring/src/test/java/io/opentracing/contrib/kafka/spring/TracingSpringKafkaTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka.spring; 15 | 16 | import static org.awaitility.Awaitility.await; 17 | import static org.hamcrest.MatcherAssert.assertThat; 18 | import static org.hamcrest.Matchers.contains; 19 | import static org.hamcrest.Matchers.greaterThanOrEqualTo; 20 | 21 | import io.opentracing.mock.MockSpan; 22 | import io.opentracing.mock.MockTracer; 23 | import java.util.List; 24 | import java.util.concurrent.Callable; 25 | import java.util.concurrent.TimeUnit; 26 | import org.hamcrest.BaseMatcher; 27 | import org.hamcrest.Description; 28 | import org.junit.Before; 29 | import org.junit.ClassRule; 30 | import org.junit.Test; 31 | import org.junit.runner.RunWith; 32 | import org.springframework.beans.factory.annotation.Autowired; 33 | import org.springframework.kafka.core.KafkaTemplate; 34 | import org.springframework.kafka.test.rule.EmbeddedKafkaRule; 35 | import org.springframework.test.context.ContextConfiguration; 36 | import org.springframework.test.context.junit4.SpringRunner; 37 | 38 | @RunWith(SpringRunner.class) 39 | @ContextConfiguration(classes = {TestConfiguration.class}) 40 | public class TracingSpringKafkaTest { 41 | 42 | @ClassRule 43 | public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(2, true, 2, "spring"); 44 | 45 | @Autowired 46 | private MockTracer mockTracer; 47 | 48 | @Autowired 49 | private KafkaTemplate kafkaTemplate; 50 | 51 | @Before 52 | public void before() { 53 | mockTracer.reset(); 54 | } 55 | 56 | @Test 57 | public void test() { 58 | kafkaTemplate.send("spring", "message"); 59 | 60 | await().atMost(15, TimeUnit.SECONDS).until(reportedSpansSize(), greaterThanOrEqualTo(3)); 61 | 62 | List spans = mockTracer.finishedSpans(); 63 | assertThat(spans, contains( 64 | new SpanMatcher("To_spring"), 65 | new SpanMatcher("From_spring"), 66 | new SpanMatcher("KafkaListener_spring"))); 67 | } 68 | 69 | private Callable reportedSpansSize() { 70 | return () -> mockTracer.finishedSpans().size(); 71 | } 72 | 73 | private static class SpanMatcher extends BaseMatcher { 74 | 75 | private final String operationName; 76 | 77 | private SpanMatcher(String operationName) { 78 | this.operationName = 
operationName; 79 | } 80 | 81 | @Override 82 | public boolean matches(Object actual) { 83 | return actual instanceof MockSpan && operationName 84 | .equals(((MockSpan) actual).operationName()); 85 | } 86 | 87 | @Override 88 | public void describeTo(Description description) { 89 | description.appendText(operationName); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /opentracing-kafka-streams/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 19 | opentracing-kafka-parent 20 | io.opentracing.contrib 21 | 0.1.16-SNAPSHOT 22 | 23 | 4.0.0 24 | 25 | opentracing-kafka-streams 26 | ${project.groupId}:${project.artifactId} 27 | OpenTracing Instrumentation for Apache Kafka Streams 28 | 29 | 30 | 31 | org.apache.kafka 32 | kafka-streams 33 | provided 34 | 35 | 36 | 37 | io.opentracing.contrib 38 | opentracing-kafka-client 39 | 0.1.16-SNAPSHOT 40 | 41 | 42 | 43 | org.awaitility 44 | awaitility 45 | test 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /opentracing-kafka-streams/src/main/java/io/opentracing/contrib/kafka/streams/TracingKafkaClientSupplier.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
13 | */ 14 | package io.opentracing.contrib.kafka.streams; 15 | 16 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS; 17 | 18 | import io.opentracing.Tracer; 19 | import io.opentracing.contrib.kafka.ClientSpanNameProvider; 20 | import io.opentracing.contrib.kafka.SpanDecorator; 21 | import io.opentracing.contrib.kafka.TracingKafkaConsumerBuilder; 22 | import io.opentracing.contrib.kafka.TracingKafkaProducerBuilder; 23 | import io.opentracing.util.GlobalTracer; 24 | import java.util.Collection; 25 | import java.util.Collections; 26 | import java.util.Map; 27 | import java.util.function.BiFunction; 28 | import org.apache.kafka.clients.admin.AdminClient; 29 | import org.apache.kafka.clients.consumer.Consumer; 30 | import org.apache.kafka.clients.consumer.ConsumerRecord; 31 | import org.apache.kafka.clients.consumer.KafkaConsumer; 32 | import org.apache.kafka.clients.producer.KafkaProducer; 33 | import org.apache.kafka.clients.producer.Producer; 34 | import org.apache.kafka.clients.producer.ProducerRecord; 35 | import org.apache.kafka.common.serialization.ByteArrayDeserializer; 36 | import org.apache.kafka.common.serialization.ByteArraySerializer; 37 | import org.apache.kafka.streams.KafkaClientSupplier; 38 | 39 | public class TracingKafkaClientSupplier implements KafkaClientSupplier { 40 | 41 | private final Tracer tracer; 42 | private final Collection spanDecorators; 43 | private final BiFunction consumerSpanNameProvider; 44 | private final BiFunction producerSpanNameProvider; 45 | 46 | public TracingKafkaClientSupplier(Tracer tracer) { 47 | this(tracer, null, null, null); 48 | } 49 | 50 | /** 51 | * GlobalTracer is used to get tracer 52 | */ 53 | public TracingKafkaClientSupplier() { 54 | this(GlobalTracer.get(), null, null, null); 55 | } 56 | 57 | public TracingKafkaClientSupplier(Tracer tracer, Collection spanDecorators) { 58 | this(tracer, spanDecorators, null, null); 59 | } 60 | 61 | public TracingKafkaClientSupplier(Tracer tracer, 62 | BiFunction consumerSpanNameProvider, 63 | BiFunction producerSpanNameProvider) { 64 | this(tracer, null, consumerSpanNameProvider, producerSpanNameProvider); 65 | } 66 | 67 | public TracingKafkaClientSupplier(Tracer tracer, 68 | Collection spanDecorators, 69 | BiFunction consumerSpanNameProvider, 70 | BiFunction producerSpanNameProvider) { 71 | this.tracer = tracer; 72 | this.spanDecorators = (spanDecorators == null) 73 | ? Collections.singletonList(STANDARD_TAGS) 74 | : spanDecorators; 75 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null) 76 | ? ClientSpanNameProvider.CONSUMER_OPERATION_NAME 77 | : consumerSpanNameProvider; 78 | this.producerSpanNameProvider = (producerSpanNameProvider == null) 79 | ? 
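/*
 * Usage sketch (illustrative): the supplier is handed to KafkaStreams so that the internal
 * producers and consumers used by the topology are traced, as the streams test below does.
 * The config Properties (application id, bootstrap servers, default serdes) and the topic
 * names are assumptions for the sketch.
 *
 *   StreamsBuilder builder = new StreamsBuilder();
 *   builder.<Integer, String>stream("stream-test")
 *       .mapValues(value -> value + "-processed")
 *       .to("stream-out");
 *
 *   KafkaStreams streams = new KafkaStreams(builder.build(), config,
 *       new TracingKafkaClientSupplier(tracer));
 *   streams.start();
 */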
ClientSpanNameProvider.PRODUCER_OPERATION_NAME 80 | : producerSpanNameProvider; 81 | } 82 | 83 | /** 84 | * GlobalTracer is used to get tracer 85 | */ 86 | public TracingKafkaClientSupplier( 87 | BiFunction consumerSpanNameProvider, 88 | BiFunction producerSpanNameProvider) { 89 | this(GlobalTracer.get(), null, consumerSpanNameProvider, producerSpanNameProvider); 90 | } 91 | 92 | // This method is required by Kafka Streams >=1.1, and optional for Kafka Streams <1.1 93 | public AdminClient getAdminClient(final Map config) { 94 | // create a new client upon each call; but expect this call to be only triggered once so this should be fine 95 | return AdminClient.create(config); 96 | } 97 | 98 | @Override 99 | public Producer getProducer(Map config) { 100 | return new TracingKafkaProducerBuilder<>( 101 | new KafkaProducer<>(config, new ByteArraySerializer(), new ByteArraySerializer()), 102 | tracer).withDecorators(spanDecorators).withSpanNameProvider(producerSpanNameProvider) 103 | .build(); 104 | } 105 | 106 | @Override 107 | public Consumer getConsumer(Map config) { 108 | return new TracingKafkaConsumerBuilder<>( 109 | new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()), 110 | tracer).withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider) 111 | .build(); 112 | } 113 | 114 | @Override 115 | public Consumer getRestoreConsumer(Map config) { 116 | return new TracingKafkaConsumerBuilder<>( 117 | new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()), 118 | tracer).withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider) 119 | .build(); 120 | } 121 | 122 | @Override 123 | public Consumer getGlobalConsumer(Map config) { 124 | return new TracingKafkaConsumerBuilder<>( 125 | new KafkaConsumer<>(config, new ByteArrayDeserializer(), new ByteArrayDeserializer()), 126 | tracer).withDecorators(spanDecorators).withSpanNameProvider(consumerSpanNameProvider) 127 | .build(); 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /opentracing-kafka-streams/src/test/java/io/opentracing/contrib/kafka/streams/TracingKafkaStreamsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 
/opentracing-kafka-streams/src/test/java/io/opentracing/contrib/kafka/streams/TracingKafkaStreamsTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2020 The OpenTracing Authors 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | * in compliance with the License. You may obtain a copy of the License at 6 | * 7 | * http://www.apache.org/licenses/LICENSE-2.0 8 | * 9 | * Unless required by applicable law or agreed to in writing, software distributed under the License 10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | * or implied. See the License for the specific language governing permissions and limitations under 12 | * the License. 13 | */ 14 | package io.opentracing.contrib.kafka.streams; 15 | 16 | import static org.awaitility.Awaitility.await; 17 | import static org.hamcrest.core.IsEqual.equalTo; 18 | import static org.junit.Assert.assertEquals; 19 | import static org.junit.Assert.assertNull; 20 | import static org.junit.Assert.assertTrue; 21 | 22 | import io.opentracing.contrib.kafka.TracingKafkaProducer; 23 | import io.opentracing.contrib.kafka.TracingKafkaUtils; 24 | import io.opentracing.mock.MockSpan; 25 | import io.opentracing.mock.MockTracer; 26 | import io.opentracing.tag.Tags; 27 | import java.util.List; 28 | import java.util.Map; 29 | import java.util.Properties; 30 | import java.util.concurrent.Callable; 31 | import java.util.concurrent.TimeUnit; 32 | import org.apache.kafka.clients.producer.KafkaProducer; 33 | import org.apache.kafka.clients.producer.Producer; 34 | import org.apache.kafka.clients.producer.ProducerRecord; 35 | import org.apache.kafka.common.serialization.Serde; 36 | import org.apache.kafka.common.serialization.Serdes; 37 | import org.apache.kafka.streams.KafkaStreams; 38 | import org.apache.kafka.streams.KeyValue; 39 | import org.apache.kafka.streams.StreamsBuilder; 40 | import org.apache.kafka.streams.StreamsConfig; 41 | import org.apache.kafka.streams.kstream.KStream; 42 | import org.apache.kafka.streams.kstream.Produced; 43 | import org.junit.Before; 44 | import org.junit.ClassRule; 45 | import org.junit.Test; 46 | import org.springframework.kafka.test.rule.EmbeddedKafkaRule; 47 | import org.springframework.kafka.test.utils.KafkaTestUtils; 48 | 49 | public class TracingKafkaStreamsTest { 50 | 51 | @ClassRule 52 | public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(2, true, 2, "stream-test"); 53 | 54 | private MockTracer mockTracer = new MockTracer(); 55 | 56 | @Before 57 | public void before() { 58 | mockTracer.reset(); 59 | } 60 | 61 | @Test 62 | public void test() { 63 | Map<String, Object> senderProps = KafkaTestUtils 64 | .producerProps(embeddedKafka.getEmbeddedKafka()); 65 | 66 | Properties config = new Properties(); 67 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-app"); 68 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, senderProps.get("bootstrap.servers")); 69 | config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); 70 | config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass()); 71 | 72 | Producer<Integer, String> producer = createProducer(); 73 | ProducerRecord<Integer, String> record = new ProducerRecord<>("stream-test", 1, "test"); 74 | producer.send(record); 75 | 76 | final Serde<String> stringSerde = Serdes.String(); 77 | final Serde<Integer> intSerde = Serdes.Integer(); 78 | 79 | StreamsBuilder builder = new StreamsBuilder(); 80 | KStream<Integer, String> kStream = builder.stream("stream-test"); 81 | 82 | kStream.map((key, value) -> new KeyValue<>(key, value + "map")) 83 | .to("stream-out", Produced.with(intSerde, stringSerde)); 84 | 85 | KafkaStreams streams = new KafkaStreams(builder.build(), config, 86 | new TracingKafkaClientSupplier(mockTracer)); 87 | streams.start(); 88 | 89 | await().atMost(15, TimeUnit.SECONDS).until(reportedSpansSize(), equalTo(3)); 90 | 91 | streams.close(); 92 | producer.close(); 93 | 94 | List<MockSpan> spans = mockTracer.finishedSpans(); 95 | assertEquals(3, spans.size()); 96 | checkSpans(spans); 97 | 98 | assertNull(mockTracer.activeSpan()); 99 | } 100 | 101 | private Producer<Integer, String> createProducer() { 102 | Map<String, Object> senderProps = KafkaTestUtils 103 | .producerProps(embeddedKafka.getEmbeddedKafka()); 104 | KafkaProducer<Integer, String> kafkaProducer = new
KafkaProducer<>(senderProps); 105 | return new TracingKafkaProducer<>(kafkaProducer, mockTracer); 106 | } 107 | 108 | private void checkSpans(List mockSpans) { 109 | for (MockSpan mockSpan : mockSpans) { 110 | String operationName = mockSpan.operationName(); 111 | if (operationName.equals(TracingKafkaUtils.TO_PREFIX + "stream-test")) { 112 | assertEquals(Tags.SPAN_KIND_PRODUCER, mockSpan.tags().get(Tags.SPAN_KIND.getKey())); 113 | String topicName = (String) mockSpan.tags().get(Tags.MESSAGE_BUS_DESTINATION.getKey()); 114 | assertTrue(topicName.equals("stream-out") || topicName.equals("stream-test")); 115 | } else if (operationName.equals(TracingKafkaUtils.FROM_PREFIX + "stream-test")) { 116 | assertEquals(Tags.SPAN_KIND_CONSUMER, mockSpan.tags().get(Tags.SPAN_KIND.getKey())); 117 | assertEquals(0, mockSpan.tags().get("partition")); 118 | long offset = (Long) mockSpan.tags().get("offset"); 119 | assertTrue(offset == 0L || offset == 1L || offset == 2L); 120 | String topicName = (String) mockSpan.tags().get(Tags.MESSAGE_BUS_DESTINATION.getKey()); 121 | assertTrue(topicName.equals("stream-out") || topicName.equals("stream-test")); 122 | } 123 | assertEquals("java-kafka", mockSpan.tags().get(Tags.COMPONENT.getKey())); 124 | assertEquals(0, mockSpan.generatedErrors().size()); 125 | assertTrue(operationName.equals(TracingKafkaUtils.TO_PREFIX + "stream-test") 126 | || operationName.equals(TracingKafkaUtils.FROM_PREFIX + "stream-test") 127 | || operationName.equals(TracingKafkaUtils.FROM_PREFIX + "stream-out") 128 | || operationName.equals(TracingKafkaUtils.TO_PREFIX + "stream-out")); 129 | } 130 | } 131 | 132 | private Callable reportedSpansSize() { 133 | return () -> mockTracer.finishedSpans().size(); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 17 | 18 | 4.0.0 19 | io.opentracing.contrib 20 | opentracing-kafka-parent 21 | pom 22 | 0.1.16-SNAPSHOT 23 | 24 | 25 | opentracing-kafka-client 26 | opentracing-kafka-streams 27 | opentracing-kafka-spring 28 | 29 | 30 | ${project.groupId}:${project.artifactId} 31 | OpenTracing Instrumentation for Apache Kafka Client 32 | https://github.com/opentracing-contrib/java-kafka-client 33 | 2017-2020 34 | 35 | 36 | http://github.com/opentracing-contrib/java-kafka-client 37 | scm:git:https://github.com/opentracing-contrib/java-kafka-client.git 38 | scm:git:https://github.com/opentracing-contrib/java-kafka-client.git 39 | 40 | HEAD 41 | 42 | 43 | 44 | 45 | The Apache Software License, Version 2.0 46 | http://www.apache.org/licenses/LICENSE-2.0.txt 47 | repo 48 | 49 | 50 | 51 | 52 | 53 | malafeev 54 | Sergei Malafeev 55 | sergeymalafeev@gmail.com 56 | 57 | 58 | 59 | 60 | GitHub 61 | https://github.com/opentracing-contrib/java-kafka-client/issues 62 | 63 | 64 | 65 | 1.8 66 | UTF-8 67 | UTF-8 68 | 69 | 0.33.0 70 | 2.6.0 71 | 2.6.1 72 | 5.2.7.RELEASE 73 | 4.3.0 74 | 0.8.5 75 | 76 | 77 | 78 | 79 | 80 | org.apache.kafka 81 | kafka-streams 82 | ${kafka.version} 83 | 84 | 85 | 86 | org.apache.kafka 87 | kafka-clients 88 | ${kafka.version} 89 | 90 | 91 | 92 | org.awaitility 93 | awaitility 94 | 4.0.2 95 | 96 | 97 | 98 | 99 | 100 | 101 | io.opentracing 102 | opentracing-api 103 | ${opentracing.version} 104 | 105 | 106 | 107 | io.opentracing 108 | opentracing-util 109 | ${opentracing.version} 110 | 111 | 112 | 113 | io.opentracing 114 | opentracing-mock 115 | ${opentracing.version} 116 | test 117 | 118 | 119 | 120 | 
org.springframework.kafka 121 | spring-kafka-test 122 | ${spring.kafka.version} 123 | test 124 | 125 | 126 | org.apache.kafka 127 | kafka-clients 128 | 129 | 130 | org.apache.kafka 131 | kafka_2.11 132 | 133 | 134 | org.apache.kafka 135 | kafka_2.12 136 | 137 | 138 | 139 | 140 | 141 | org.apache.kafka 142 | kafka_2.13 143 | ${kafka.version} 144 | test 145 | 146 | 147 | 148 | org.apache.kafka 149 | kafka_2.13 150 | ${kafka.version} 151 | test 152 | test 153 | 154 | 155 | 156 | org.apache.kafka 157 | kafka-clients 158 | ${kafka.version} 159 | test 160 | test 161 | 162 | 163 | 164 | junit 165 | junit 166 | 4.13.1 167 | test 168 | 169 | 170 | 171 | 172 | 173 | 174 | org.apache.maven.plugins 175 | maven-compiler-plugin 176 | 3.8.1 177 | 178 | ${java.version} 179 | ${java.version} 180 | 181 | 182 | 183 | org.apache.maven.plugins 184 | maven-release-plugin 185 | 2.5.3 186 | 187 | false 188 | release 189 | true 190 | @{project.version} 191 | 192 | 193 | 194 | io.zipkin.centralsync-maven-plugin 195 | centralsync-maven-plugin 196 | 0.1.1 197 | 198 | opentracing 199 | maven 200 | opentracing-kafka-client 201 | 202 | 203 | 204 | com.mycila 205 | license-maven-plugin 206 | 3.0 207 | 208 | 209 | SLASHSTAR_STYLE 210 | 211 |
header.txt
212 | true 213 | 214 | LICENSE 215 | mvnw 216 | mvnw.cmd 217 | .mvn/wrapper/maven-wrapper.properties 218 | .coveralls.yml 219 | 220 | 221 | 222 | 223 | 224 | check 225 | 226 | compile 227 | 228 | 229 | 230 | 231 | org.eluder.coveralls 232 | coveralls-maven-plugin 233 | ${coveralls-maven-plugin.version} 234 | 235 | 236 | org.jacoco 237 | jacoco-maven-plugin 238 | ${jacoco-maven-plugin.version} 239 | 240 | 241 | prepare-agent 242 | 243 | prepare-agent 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | bintray 254 | https://api.bintray.com/maven/opentracing/maven/opentracing-kafka-client/;publish=1 255 | 256 | 257 | jfrog-snapshots 258 | http://oss.jfrog.org/artifactory/oss-snapshot-local 259 | 260 | 261 | 262 | 263 | 264 | release 265 | 266 | 267 | 268 | 269 | org.apache.maven.plugins 270 | maven-source-plugin 271 | 3.2.0 272 | 273 | 274 | attach-sources 275 | 276 | jar 277 | 278 | 279 | 280 | 281 | 282 | 283 | org.apache.maven.plugins 284 | maven-javadoc-plugin 285 | 3.1.1 286 | 287 | false 288 | 289 | 290 | 291 | attach-javadocs 292 | 293 | jar 294 | 295 | package 296 | 297 | 298 | 299 | 300 | 301 | 302 | 303 | 304 | 305 | -------------------------------------------------------------------------------- /travis/publish.sh: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2017-2020 The OpenTracing Authors 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except 5 | # in compliance with the License. You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software distributed under the License 10 | # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 11 | # or implied. See the License for the specific language governing permissions and limitations under 12 | # the License. 13 | # 14 | 15 | set -euo pipefail 16 | set -x 17 | 18 | build_started_by_tag() { 19 | if [ "${TRAVIS_TAG}" == "" ]; then 20 | echo "[Publishing] This build was not started by a tag, publishing snapshot" 21 | return 1 22 | else 23 | echo "[Publishing] This build was started by the tag ${TRAVIS_TAG}, publishing release" 24 | return 0 25 | fi 26 | } 27 | 28 | is_pull_request() { 29 | if [ "${TRAVIS_PULL_REQUEST}" != "false" ]; then 30 | echo "[Not Publishing] This is a Pull Request" 31 | return 0 32 | else 33 | echo "[Publishing] This is not a Pull Request" 34 | return 1 35 | fi 36 | } 37 | 38 | is_travis_branch_master() { 39 | if [ "${TRAVIS_BRANCH}" = master ]; then 40 | echo "[Publishing] Travis branch is master" 41 | return 0 42 | else 43 | echo "[Not Publishing] Travis branch is not master" 44 | return 1 45 | fi 46 | } 47 | 48 | check_travis_branch_equals_travis_tag() { 49 | #Weird comparison comparing branch to tag because when you 'git push --tags' 50 | #the branch somehow becomes the tag value 51 | #github issue: https://github.com/travis-ci/travis-ci/issues/1675 52 | if [ "${TRAVIS_BRANCH}" != "${TRAVIS_TAG}" ]; then 53 | echo "Travis branch does not equal Travis tag, which it should, bailing out." 
54 | echo " github issue: https://github.com/travis-ci/travis-ci/issues/1675" 55 | exit 1 56 | else 57 | echo "[Publishing] Branch (${TRAVIS_BRANCH}) same as Tag (${TRAVIS_TAG})" 58 | fi 59 | } 60 | 61 | check_release_tag() { 62 | tag="${TRAVIS_TAG}" 63 | if [[ "$tag" =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then 64 | echo "Build started by version tag $tag. During the release process tags like this" 65 | echo "are created by the 'release' Maven plugin. Nothing to do here." 66 | exit 0 67 | elif [[ ! "$tag" =~ ^release-[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then 68 | echo "You must specify a tag of the format 'release-0.0.0' to release this project." 69 | echo "The provided tag ${tag} doesn't match that. Aborting." 70 | exit 1 71 | fi 72 | } 73 | 74 | is_release_commit() { 75 | project_version=$(./mvnw help:evaluate -N -Dexpression=project.version | grep -v '\[') 76 | if [[ "$project_version" =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$ ]]; then 77 | echo "Build started by release commit $project_version. Will synchronize to maven central." 78 | return 0 79 | else 80 | return 1 81 | fi 82 | } 83 | 84 | release_version() { 85 | echo "${TRAVIS_TAG}" | sed 's/^release-//' 86 | } 87 | 88 | safe_checkout_master() { 89 | # We need to be on a branch for release:perform to be able to create commits, and we want that branch to be master. 90 | # But we also want to make sure that we build and release exactly the tagged version, so we verify that the remote 91 | # master is where our tag is. 92 | git checkout -B master 93 | git fetch origin master:origin/master 94 | commit_local_master="$(git show --pretty='format:%H' master)" 95 | commit_remote_master="$(git show --pretty='format:%H' origin/master)" 96 | if [ "$commit_local_master" != "$commit_remote_master" ]; then 97 | echo "Master on remote 'origin' has commits since the version under release, aborting" 98 | exit 1 99 | fi 100 | } 101 | 102 | #---------------------- 103 | # MAIN 104 | #---------------------- 105 | 106 | if ! is_pull_request && build_started_by_tag; then 107 | check_travis_branch_equals_travis_tag 108 | check_release_tag 109 | fi 110 | 111 | ./mvnw install -nsu 112 | 113 | # If we are on a pull request, our only job is to run tests, which happened above via ./mvnw install 114 | if is_pull_request; then 115 | true 116 | # If we are on master, we will deploy the latest snapshot or release version 117 | # - If a release commit fails to deploy for a transient reason, delete the broken version from bintray and click rebuild 118 | elif is_travis_branch_master; then 119 | ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DskipTests deploy 120 | 121 | # If the deployment succeeded, sync it to Maven Central. Note: this needs to be done once per project, not module, hence -N 122 | if is_release_commit; then 123 | ./mvnw --batch-mode -s ./.settings.xml -nsu -N io.zipkin.centralsync-maven-plugin:centralsync-maven-plugin:sync 124 | fi 125 | 126 | # If we are on a release tag, the following will update any version references and push a version tag for deployment. 127 | elif build_started_by_tag; then 128 | safe_checkout_master 129 | ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DreleaseVersion="$(release_version)" -Darguments="-DskipTests" release:prepare 130 | fi 131 | --------------------------------------------------------------------------------
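For reference, a minimal sketch of how a release would typically be triggered under the conventions in this script; the version number below is a placeholder, and the tag must match the release-x.y.z pattern enforced by check_release_tag:

# Tag the commit to release and push the tag; Travis then runs travis/publish.sh,
# which checks out master and lets the Maven release plugin derive the version from the tag.
git tag release-0.1.16
git push origin release-0.1.16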