├── .gitignore
├── .mvn
│   └── wrapper
│       ├── maven-wrapper.jar
│       └── maven-wrapper.properties
├── .settings.xml
├── .travis.yml
├── LICENSE
├── README.md
├── RELEASE.md
├── header.txt
├── mvnw
├── mvnw.cmd
├── opentracing-kafka-client
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── java
│       │       └── io
│       │           └── opentracing
│       │               └── contrib
│       │                   └── kafka
│       │                       ├── ClientSpanNameProvider.java
│       │                       ├── HeadersMapExtractAdapter.java
│       │                       ├── HeadersMapInjectAdapter.java
│       │                       ├── SpanDecorator.java
│       │                       ├── StandardSpanDecorator.java
│       │                       ├── TracingCallback.java
│       │                       ├── TracingConsumerInterceptor.java
│       │                       ├── TracingKafkaConsumer.java
│       │                       ├── TracingKafkaConsumerBuilder.java
│       │                       ├── TracingKafkaProducer.java
│       │                       ├── TracingKafkaProducerBuilder.java
│       │                       ├── TracingKafkaUtils.java
│       │                       └── TracingProducerInterceptor.java
│       └── test
│           └── java
│               └── io
│                   └── opentracing
│                       └── contrib
│                           └── kafka
│                               ├── HeadersMapExtractAdapterTest.java
│                               ├── OperationNameSpanNameTest.java
│                               ├── OperationNameTopicSpanNameTest.java
│                               ├── TopicSpanNameTest.java
│                               ├── TracingCallbackTest.java
│                               ├── TracingKafkaTest.java
│                               └── TracingKafkaUtilsTest.java
├── opentracing-kafka-spring
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── java
│       │       └── io
│       │           └── opentracing
│       │               └── contrib
│       │                   └── kafka
│       │                       └── spring
│       │                           ├── MessageListenerMethodInterceptor.java
│       │                           ├── TracingConsumerFactory.java
│       │                           ├── TracingKafkaAspect.java
│       │                           └── TracingProducerFactory.java
│       └── test
│           └── java
│               └── io
│                   └── opentracing
│                       └── contrib
│                           └── kafka
│                               └── spring
│                                   ├── Listener.java
│                                   ├── TestConfiguration.java
│                                   └── TracingSpringKafkaTest.java
├── opentracing-kafka-streams
│   ├── pom.xml
│   └── src
│       ├── main
│       │   └── java
│       │       └── io
│       │           └── opentracing
│       │               └── contrib
│       │                   └── kafka
│       │                       └── streams
│       │                           └── TracingKafkaClientSupplier.java
│       └── test
│           └── java
│               └── io
│                   └── opentracing
│                       └── contrib
│                           └── kafka
│                               └── streams
│                                   └── TracingKafkaStreamsTest.java
├── pom.xml
└── travis
    └── publish.sh
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | *.iml
3 | target/
4 | .project
5 | .classpath
6 | .settings/**/*.*
7 | opentracing-kafka-client/.settings/**/*.*
8 | opentracing-kafka-spring/.settings/**/*.*
9 | opentracing-kafka-streams/.settings/**/*.*
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentracing-contrib/java-kafka-client/31ce5260279c2cadf3d69c0acbd50f024afe4660/.mvn/wrapper/maven-wrapper.jar
--------------------------------------------------------------------------------
/.mvn/wrapper/maven-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.5.0/apache-maven-3.5.0-bin.zip
--------------------------------------------------------------------------------
/.settings.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
  <servers>
    <server>
      <id>sonatype</id>
      <username>${env.SONATYPE_USER}</username>
      <password>${env.SONATYPE_PASSWORD}</password>
    </server>
    <server>
      <id>bintray</id>
      <username>${env.BINTRAY_USER}</username>
      <password>${env.BINTRAY_KEY}</password>
    </server>
    <server>
      <id>jfrog-snapshots</id>
      <username>${env.BINTRAY_USER}</username>
      <password>${env.BINTRAY_KEY}</password>
    </server>
    <server>
      <id>github.com</id>
      <username>${env.GH_USER}</username>
      <password>${env.GH_TOKEN}</password>
    </server>
  </servers>
</settings>
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 | dist: trusty
3 |
4 | language: java
5 | jdk:
6 | - oraclejdk8
7 |
8 | cache:
9 | directories:
10 | - $HOME/.m2/repository
11 |
12 | before_install:
13 | # allocate commits to CI, not the owner of the deploy key
14 | - git config user.name "opentracingci"
15 | - git config user.email "opentracingci+opentracing@googlegroups.com"
16 |
17 | # setup https authentication credentials, used by ./mvnw release:prepare
18 | - git config credential.helper "store --file=.git/credentials"
19 | - echo "https://$GH_TOKEN:@github.com" > .git/credentials
20 |
21 | install:
22 | # Override default travis to use the maven wrapper
23 | - ./mvnw install -DskipTests=true -Dmaven.javadoc.skip=true -B -V
24 |
25 | script:
26 | - ./travis/publish.sh
27 |
28 | branches:
29 | except:
30 | - /^[0-9]/
31 |
32 | after_success:
33 | - mvn jacoco:report coveralls:report
34 |
35 | env:
36 | global:
37 | # Ex. travis encrypt BINTRAY_USER=your_github_account
38 | - secure: "eng7HMp7Zd1X4o59WzsyfTSjvkUGsYLPNKR2RIdZiDXKd/eqTXzrxtQX5BldKtOxA8ZfLF8UWMBE7k9hJq+1eN9fYF6FgLClwZw6OfuLJ6C/kBUne9MA2h5pE+cuV8p6zZsF0AmUM5EKnsP++FmgLijMdMNG8Hx4yTvZeMtCyvF5GZ8jQFXgCnnn8K1h0rwKI7mQH5HPo3rOWd6OCZpRYic0/XIW4CLlY2g/7uUl9IDqBNnFKVbU6sdVrK+O648l5spjGHS9agm/5f1JpNMuVMQ37QlWr+T7HC+X6dhLv9dlqPtXSJp7VWV0AnRgzqc0M5T492+05Ng5nXnX6Fo+lIc+08JCfjq5tDvwuzhKjIepZUBR9jaZwUQxSgeNWv+xwjBJLQuMY16Nyu0f3QUmqnL2toZ+Co6+P0MUlqRTkkCoFKbL2Z/lpTj78Nct/QJnzuafUvCOgyZLYSwniVVP/cqwqJajnhAr2kXP77QiBTKrwxLCISgDwrnnHKwnIBZ6TLpCyoJ/v84c6qagzh/ltcfvJSfDj8LdrJP5sIvOB8tuRqUXWKkZDr5igXPEfUotv+VR82VqPCS8IZgT4Str87UAIU/J5v4NM7gWF207ri0lILjbU4WcVD80AmJHxWpCJHwk0jNnXpHTxSZACoRwNeju4NBkHeKN8McOdU3DOR8="
39 | # Ex. travis encrypt BINTRAY_KEY=xxx-https://bintray.com/profile/edit-xxx --add
40 | - secure: "FzmEYmwU8RnH5y394X8pFkKjEtvrnMvas4DgoJJRum+8IMMkFcuiHTe1gw+kVQTXtj9q/YQ0rcdhu80A+EllcIKwc7wxf3tysnckF/l3lfSpcCHyiZ/e58yjpXg1XB/yAiVbRKgjubBJHI0RN005/M86PUlEi0HtA01jRgQvr3aQpicPuyRjleIkVx490clyTdAN5HXAJNfalWXcsEIcIl4GmpqA4o0MJ20DBqWrQYp25fgWDP9WYC+8oIQExZUtR/KWXdPWolYRE3tlBj6C6XSUTynaPzjcwveVGvfXAis6+NafowrRi1ohFzbdx3MbKhXxIdfVidw69BP1NMxqkPQaWWXtNEjpNbIMJvNQL/sFGkvsamYbEa1e7VALxMtjR19BYieZdYW4r33FeYH11zVAK3I3EjqpdnaLB6cTwgyWnblzDqIJ5WGWZQiomBE2MwqsYx5FOM0oXbukQlTWR/RxT/97f3nt8GsE5EqPk/uDWAiPY383R2wIY8FaehwA7iq9nKwbr7oQO2ZNtcR8VW0JeFc3b0r4F8uL4GDP7Le3JDe+o6Aoi+SFu+adjVQtRCX4IwH3CznhBP4wkfcTIBLxSVGzW2Fe3YHki6lulf6jBs1Of3VpzdPADADIzd0k1nOUVYZuUQU5IUlbtIdiIXPNhfNCkP5uMHBnIV8lc24="
41 | # Ex. travis encrypt GH_TOKEN=XXX-https://github.com/settings/tokens-XXX --add
42 | - secure: "ns5to8XeWdzLtfc47Jh4XIIH97SRNaC3Rx6dlvM+7ic+WnVJETMneOcXc1yFfklwUzVj77ImMDCbgt+dE8nsnrKVsXYBT3jhCoz/S/m171ESlSPElrKrWUXDXpbxnsPlu3Ch2rhlkYAlN/AYC0vTwaDQvmKkoc6q7Uhqza8h4i6PxTIDwmv5zgR9EK+DqxoF3gpTv169hlQNXa1eIv5XeskjVdLtLPENxrUyaOJsEHljCEGtF9QZSicm+udSj1dbdoa/Fd5T2cFXvsdHaNZUT5I4WZTvBSw25BqhvkpKePuLV7naDoeUDhqnrBdtb2ZPFiR2icClm8HTpNH6MdTJ6mPWf8gl27P97D7Za0qFgriz9XkiuEpQJ0ukwm62EQmCxsdABB3VvxOG8Y6TUy2aHxDPQZrTszm+2jKxEVRzTlqkUbKaVDG0dMr8OejCML84JSWyYwq1XoO8vZ9gQEkmhY1Gp37SdaHD8qneX/rLiXCw4M/N9+Lx9+NzvaCrq6LpE7YBUznu4TBJFoz+/lv6osS/iviLIIBZM9PRinBn6NxPoeCQ9QzzyWikhRR/OnNyPZUiXI+toYdIt8BwulBvB6oNL7JPyQzmPoUo6+/CeYhHiAq+fQh2kYSSc4F4sowxFKY8DuDz3gMGdyQVds4+LHu523IR20QFaRTJGA9AD0I="
43 | # Ex. travis encrypt SONATYPE_USER=your_sonatype_account
44 | - secure: "VA+eY7OL05AW3KyGiKret73Gz79W/DfQsB2rV68pTK5ioWVl0HyiTxftSZt1nfuaedPy1Tka+nL9hZSKi3K+Yweh3PsAsY2hGAw7brenOXT0c0pFhr4L7ryuCRnt9v+qDdZQY9m3RY97SsD80dytRYeLKoruCyzqJfU95a/5BJFqKpR7+AMrlMcyOP5qMgFwPVTz9w2JmkNPKdkGUsQPE+rW6p5JdOTlhNQi4nxmaCsYPJ42aV9gZUdBoaWfbei3PZLlrFl6Y0HU3ve4MzwUPZ22py/pp8zo310Pg+9oBjEnBtjDAKhJLn74NAIJbU1XqeyE6ybRXY9S7x1BLn+YXolTerCpZZUIc/kJNGfMM6N2qKELrZArE3cGLKOimeEzMVM2GCjRlJWtyTANpaTCH5Zenciiqst6/OPyGHC5zUXrdhzsRV8MHSKWwMTO6XI9dVTODUyJDfhKg9uhfV3wmPUw6pc9zifxiju4tqaGykwFewT8uwZW2uzPBBKYm4GndfeyMm/yR5XIFOADoXk62JmqczYM+yq8r1V/+yv2F4iYvl1YWOTRdDLzxkmj8gJR43+czFsOf4v54kFpLJB3JHWz9LAj0O/lH+JL6IKGcAyAWc47WrHgib/lEp43ERKhI8P7kF+L1Qpifi2uZ1QM0DdvfigbVhJfbMyCFjxvtuw="
45 | # Ex. travis encrypt SONATYPE_PASSWORD=your_sonatype_password
46 | - secure: "VEV7do1Q4G5LEPwEicnAGMDg6kZKWrE6+HpWOBN8h//cuwRyXu+UcUvPhy5YQx2SSWYJtqem+PC32/3zgVrCpQWx0JPry8h+5V+IAyCtNv3Oi9udvggbY03T+DlpelXZzsUV0Fnbthc3m2uQgnkN6O7vswVLFXN7lO7GJESdEkapmWETp/v/IROsA9cwCBAGCkSgth4AEFUdihToCVd34PwiEPZV6ZmHYF9pZIWJZ+K+b23YiurGibyhdfChTvy6Y5bhG6VSrwofPXigVV5s/jy6QnBmdSYNjrKY9g1/ahFeNVhd/NTJU3SnyaEOJERiGtfw1xvvSFbCO8Hpo5QPx8fkMTXs2xpT9Cc0FuQkBF0kXeFy/lWklgW0vXcvbMo9R9ePTYul2oQtjxCrXApaLm2x9PsOpL7IkNT1acjkcOpBS3AIj2BjS2bSrPinf9eOj3bu8a9mhmG6LowczsqRHxY90JGfWoVGOZC5AvntvPteBRMpW46YY/z8atSNR24Fxp6cXNe6BOY32do+ZRVpHT/hu0jzaxHpYqyriUjWjtPq0rqfWEwrn/dtasBB7c/Xm0Clo1k7ePLK86QVIKGtYXqAE3S8yWD1f690nmLlMly8W1P2AGFhurydda86Oaz5mltsY39t1hLfpE2VGbsoLVJva/6Plt5HwbKI4jFYmrw="
47 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov] [![Released Version][maven-img]][maven] [Apache-2.0 License](https://opensource.org/licenses/Apache-2.0)
2 |
3 |
4 | # OpenTracing Apache Kafka Client Instrumentation
5 | OpenTracing instrumentation for Apache Kafka Client.
6 | Two solutions are provided:
7 | 1. Based on decorated Producer and Consumer
8 | 1. Based on Interceptors
9 |
10 | ## Requirements
11 |
12 | - Java 8
13 | - Kafka 2.2.0
14 |
15 | ## Installation
16 |
17 | ### Kafka Client
18 |
19 | pom.xml
20 | ```xml
21 | <dependency>
22 |   <groupId>io.opentracing.contrib</groupId>
23 |   <artifactId>opentracing-kafka-client</artifactId>
24 |   <version>VERSION</version>
25 | </dependency>
26 | ```
27 |
28 | ### Kafka Streams
29 |
30 | pom.xml
31 | ```xml
32 | <dependency>
33 |   <groupId>io.opentracing.contrib</groupId>
34 |   <artifactId>opentracing-kafka-streams</artifactId>
35 |   <version>VERSION</version>
36 | </dependency>
37 | ```
38 |
39 | ### Spring Kafka
40 |
41 | pom.xml
42 | ```xml
43 | <dependency>
44 |   <groupId>io.opentracing.contrib</groupId>
45 |   <artifactId>opentracing-kafka-spring</artifactId>
46 |   <version>VERSION</version>
47 | </dependency>
48 | ```
49 |
50 | ## Usage
51 |
52 | ```java
53 |
54 | // Instantiate tracer
55 | Tracer tracer = ...
56 |
57 | // Optionally register tracer with GlobalTracer
58 | GlobalTracer.register(tracer);
59 | ```
60 |
61 | ### Kafka Client
62 |
63 | #### Decorators based solution
64 |
65 | ```java
66 |
67 | // Instantiate KafkaProducer
68 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
69 |
70 | // Decorate KafkaProducer with TracingKafkaProducer
71 | TracingKafkaProducer<Integer, String> tracingProducer = new TracingKafkaProducer<>(producer,
72 |     tracer);
73 |
74 | // Send
75 | tracingProducer.send(...);
76 |
77 | // Instantiate KafkaConsumer
78 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps);
79 |
80 | // Decorate KafkaConsumer with TracingKafkaConsumer
81 | TracingKafkaConsumer<Integer, String> tracingConsumer = new TracingKafkaConsumer<>(consumer,
82 |     tracer);
83 |
84 | // Subscribe
85 | tracingConsumer.subscribe(Collections.singletonList("messages"));
86 |
87 | // Get records
88 | ConsumerRecords<Integer, String> records = tracingConsumer.poll(1000);
89 |
90 | // To retrieve SpanContext from polled record (Consumer side)
91 | ConsumerRecord<Integer, String> record = ...
92 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
93 |
94 | ```
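
The extracted `SpanContext` can be used to continue the trace on the consumer side. A minimal sketch, reusing the `tracer`, `record` and `spanContext` variables from the snippet above; the operation name "process" and the tag are illustrative, not part of the library:

```java
// Start a follow-up span as a child of the span whose context was carried in the record headers
Span processSpan = tracer.buildSpan("process")
    .asChildOf(spanContext)
    .withTag("kafka.topic", record.topic())
    .start();
try {
  // application-specific processing of the record goes here
} finally {
  processSpan.finish();
}
```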
95 |
96 | ##### Custom Span Names for Decorators based solution
97 | The decorator-based solution supports custom span names: pass a BiFunction as an additional
98 | argument to the TracingKafkaConsumer or TracingKafkaProducer constructor, using either one of
99 | the provided BiFunctions or your own custom one.
100 |
101 | ```java
102 | // Create BiFunction for the KafkaProducer that operates on
103 | // (String operationName, ProducerRecord producerRecord) and
104 | // returns a String to be used as the name
105 | BiFunction<String, ProducerRecord, String> producerSpanNameProvider =
106 |     (operationName, producerRecord) -> "CUSTOM_PRODUCER_NAME";
107 |
108 | // Instantiate KafkaProducer
109 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
110 |
111 | // Decorate KafkaProducer with TracingKafkaProducer
112 | TracingKafkaProducer<Integer, String> tracingProducer = new TracingKafkaProducer<>(producer,
113 |     tracer,
114 |     producerSpanNameProvider);
115 | // Spans created by the tracingProducer will now have "CUSTOM_PRODUCER_NAME" as the span name.
116 |
117 |
118 | // Create BiFunction for the KafkaConsumer that operates on
119 | // (String operationName, ConsumerRecord consumerRecord) and
120 | // returns a String to be used as the name
121 | BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider =
122 |     (operationName, consumerRecord) -> operationName.toUpperCase();
123 | // Instantiate KafkaConsumer
124 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps);
125 | // Decorate KafkaConsumer with TracingKafkaConsumer, passing in the consumerSpanNameProvider BiFunction
126 | TracingKafkaConsumer<Integer, String> tracingConsumer = new TracingKafkaConsumer<>(consumer,
127 |     tracer,
128 |     consumerSpanNameProvider);
129 | // Spans created by the tracingConsumer will now have the capitalized operation name as the span name.
130 | // "receive" -> "RECEIVE"
131 | ```
132 |
133 |
134 | #### Interceptors based solution
135 | ```java
136 | // Register tracer with GlobalTracer:
137 | GlobalTracer.register(tracer);
138 |
139 | // Add TracingProducerInterceptor to sender properties:
140 | senderProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
141 | TracingProducerInterceptor.class.getName());
142 |
143 | // Instantiate KafkaProducer
144 | KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
145 |
146 | // Send
147 | producer.send(...);
148 |
149 | // Add TracingConsumerInterceptor to consumer properties:
150 | consumerProps.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG,
151 | TracingConsumerInterceptor.class.getName());
152 |
153 | // Instantiate KafkaConsumer
154 | KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(consumerProps);
155 |
156 | // Subscribe
157 | consumer.subscribe(Collections.singletonList("messages"));
158 |
159 | // Get records
160 | ConsumerRecords<Integer, String> records = consumer.poll(1000);
161 |
162 | // To retrieve SpanContext from polled record (Consumer side)
163 | ConsumerRecord<Integer, String> record = ...
164 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
165 |
166 | ```
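
For completeness, a minimal sketch of how `senderProps` might be assembled for the interceptor-based setup; the broker address and serializer classes are placeholders for your own configuration, not requirements of this library:

```java
Properties senderProps = new Properties();
senderProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
senderProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
senderProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
// Register the tracing interceptor exactly as shown above
senderProps.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, TracingProducerInterceptor.class.getName());
```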
167 |
168 |
169 | ### Kafka Streams
170 |
171 | ```java
172 |
173 | // Instantiate TracingKafkaClientSupplier
174 | KafkaClientSupplier supplier = new TracingKafkaClientSupplier(tracer);
175 |
176 | // Provide supplier to KafkaStreams
177 | KafkaStreams streams = new KafkaStreams(builder.build(), new StreamsConfig(config), supplier);
178 | streams.start();
179 |
180 | ```
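
For context, the `builder` and `config` used above could be prepared as follows. This is a sketch under assumed topic names, application id and broker address:

```java
Properties config = new Properties();
config.put(StreamsConfig.APPLICATION_ID_CONFIG, "tracing-streams-app");
config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

StreamsBuilder builder = new StreamsBuilder();
// Copy records from one topic to another; the supplier traces the underlying consumers and producers
builder.stream("input-topic").to("output-topic");
```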
181 |
182 | ### Spring Kafka
183 |
184 | ```java
185 |
186 | // Declare Tracer bean
187 | @Bean
188 | public Tracer tracer() {
189 | return ...
190 | }
191 |
192 |
193 | // Decorate ConsumerFactory with TracingConsumerFactory
194 | @Bean
195 | public ConsumerFactory<Integer, String> consumerFactory() {
196 | return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps()), tracer());
197 | }
198 |
199 | // Decorate ProducerFactory with TracingProducerFactory
200 | @Bean
201 | public ProducerFactory<Integer, String> producerFactory() {
202 | return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(producerProps()), tracer());
203 | }
204 |
205 | // Use decorated ProducerFactory in KafkaTemplate
206 | @Bean
207 | public KafkaTemplate<Integer, String> kafkaTemplate() {
208 | return new KafkaTemplate<>(producerFactory());
209 | }
210 |
211 | // Use an aspect to decorate @KafkaListeners
212 | @Bean
213 | public TracingKafkaAspect tracingKafkaAspect() {
214 | return new TracingKafkaAspect(tracer());
215 | }
216 | ```
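
With the beans above in place, producing and consuming work as usual. A hypothetical listener and send call, with topic and payload chosen for illustration only:

```java
// Sending through the KafkaTemplate backed by the traced ProducerFactory creates a span per record
kafkaTemplate.send("messages", 1, "test");

// @KafkaListener methods are traced via TracingKafkaAspect
@KafkaListener(topics = "messages")
public void onMessage(ConsumerRecord<Integer, String> record) {
  // record processing goes here
}
```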
217 |
218 | ##### Custom Span Names for Spring Kafka
219 | The Spring Kafka factory implementations also support custom span names: pass a BiFunction as an additional
220 | argument to the TracingConsumerFactory or TracingProducerFactory constructor, using either one of the provided
221 | BiFunctions or your own custom one.
222 |
223 | ```java
224 | // Create BiFunction for the KafkaProducerFactory that operates on
225 | // (String operationName, ProducerRecord producerRecord) and
226 | // returns a String to be used as the name
227 | BiFunction<String, ProducerRecord, String> producerSpanNameProvider =
228 |     (operationName, producerRecord) -> "CUSTOM_PRODUCER_NAME";
229 |
230 | // Decorate ProducerFactory with TracingProducerFactory, passing in the producerSpanNameProvider BiFunction
231 | @Bean
232 | public ProducerFactory<Integer, String> producerFactory() {
233 |   return new TracingProducerFactory<>(new DefaultKafkaProducerFactory<>(producerProps()), tracer(), producerSpanNameProvider);
234 | }
235 | // Spans created by producers from this factory will now have "CUSTOM_PRODUCER_NAME" as the span name.
236 |
237 |
238 | // Create BiFunction for the KafkaConsumerFactory that operates on
239 | // (String operationName, ConsumerRecord consumerRecord) and
240 | // returns a String to be used as the name
241 | BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider =
242 |     (operationName, consumerRecord) -> operationName.toUpperCase();
243 |
244 | // Decorate ConsumerFactory with TracingConsumerFactory, passing in the consumerSpanNameProvider BiFunction
245 | @Bean
246 | public ConsumerFactory<Integer, String> consumerFactory() {
247 |   return new TracingConsumerFactory<>(new DefaultKafkaConsumerFactory<>(consumerProps()), tracer(), consumerSpanNameProvider);
248 | }
249 | // Spans created by consumers from this factory will now have the capitalized operation name as the span name ("receive" -> "RECEIVE").
250 | ```
251 |
252 | #### Pre-made Span Name Providers
253 |
254 | The following BiFunctions are already included in the ClientSpanNameProvider class, with `CONSUMER_OPERATION_NAME` and `PRODUCER_OPERATION_NAME`
255 | used as the defaults when no spanNameProvider is provided. A short usage example follows the list.
256 |
257 | - `CONSUMER_OPERATION_NAME` and `PRODUCER_OPERATION_NAME` : Returns the `operationName` as the span name ("receive" for the consumer, "send" for the producer).
258 | - `CONSUMER_PREFIXED_OPERATION_NAME(String prefix)` and `PRODUCER_PREFIXED_OPERATION_NAME(String prefix)` : Returns a String concatenation of `prefix` and `operationName`.
259 | - `CONSUMER_TOPIC` and `PRODUCER_TOPIC` : Returns the Kafka topic name that the record was pushed to/pulled from (`record.topic()`).
260 | - `CONSUMER_PREFIXED_TOPIC(String prefix)` and `PRODUCER_PREFIXED_TOPIC(String prefix)` : Returns a String concatenation of `prefix` and the Kafka topic name (`record.topic()`).
261 | - `CONSUMER_OPERATION_NAME_TOPIC` and `PRODUCER_OPERATION_NAME_TOPIC` : Returns "`operationName` - `record.topic()`".
262 | - `CONSUMER_PREFIXED_OPERATION_NAME_TOPIC(String prefix)` and `PRODUCER_PREFIXED_OPERATION_NAME_TOPIC(String prefix)` : Returns a String concatenation of `prefix` and "`operationName` - `record.topic()`".
263 |
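For example, a consumer whose spans are named after the topic can be built with one of the pre-made providers. A minimal sketch, reusing the `consumer` and `tracer` variables from the decorator example above:

```java
// Span names will be the topic the record was pulled from, e.g. "messages"
TracingKafkaConsumer<Integer, String> tracingConsumer = new TracingKafkaConsumer<>(consumer,
    tracer, ClientSpanNameProvider.CONSUMER_TOPIC);
```
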
264 | ## License
265 |
266 | [Apache 2.0 License](./LICENSE).
267 |
268 | [ci-img]: https://travis-ci.org/opentracing-contrib/java-kafka-client.svg?branch=master
269 | [ci]: https://travis-ci.org/opentracing-contrib/java-kafka-client
270 | [cov-img]: https://coveralls.io/repos/github/opentracing-contrib/java-kafka-client/badge.svg?branch=master
271 | [cov]: https://coveralls.io/github/opentracing-contrib/java-kafka-client?branch=master
272 | [maven-img]: https://img.shields.io/maven-central/v/io.opentracing.contrib/opentracing-kafka-client.svg
273 | [maven]: http://search.maven.org/#search%7Cga%7C1%7Copentracing-kafka-client
274 |
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # OpenTracing Release Process
2 |
3 | This repo uses semantic versions. Please keep this in mind when choosing version numbers.
4 |
5 | For the up-to-date release process, please refer to the
6 | [release process from the OpenTracing Java API](https://github.com/opentracing/opentracing-java/blob/master/RELEASE.md).
--------------------------------------------------------------------------------
/header.txt:
--------------------------------------------------------------------------------
1 | Copyright ${project.inceptionYear} The OpenTracing Authors
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
4 | in compliance with the License. You may obtain a copy of the License at
5 |
6 | http://www.apache.org/licenses/LICENSE-2.0
7 |
8 | Unless required by applicable law or agreed to in writing, software distributed under the License
9 | is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
10 | or implied. See the License for the specific language governing permissions and limitations under
11 | the License.
12 |
--------------------------------------------------------------------------------
/mvnw:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | # ----------------------------------------------------------------------------
3 | # Licensed to the Apache Software Foundation (ASF) under one
4 | # or more contributor license agreements. See the NOTICE file
5 | # distributed with this work for additional information
6 | # regarding copyright ownership. The ASF licenses this file
7 | # to you under the Apache License, Version 2.0 (the
8 | # "License"); you may not use this file except in compliance
9 | # with the License. You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing,
14 | # software distributed under the License is distributed on an
15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 | # KIND, either express or implied. See the License for the
17 | # specific language governing permissions and limitations
18 | # under the License.
19 | # ----------------------------------------------------------------------------
20 |
21 | # ----------------------------------------------------------------------------
22 | # Maven2 Start Up Batch script
23 | #
24 | # Required ENV vars:
25 | # ------------------
26 | # JAVA_HOME - location of a JDK home dir
27 | #
28 | # Optional ENV vars
29 | # -----------------
30 | # M2_HOME - location of maven2's installed home dir
31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven
32 | # e.g. to debug Maven itself, use
33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files
35 | # ----------------------------------------------------------------------------
36 |
37 | if [ -z "$MAVEN_SKIP_RC" ]; then
38 |
39 | if [ -f /etc/mavenrc ]; then
40 | . /etc/mavenrc
41 | fi
42 |
43 | if [ -f "$HOME/.mavenrc" ]; then
44 | . "$HOME/.mavenrc"
45 | fi
46 |
47 | fi
48 |
49 | # OS specific support. $var _must_ be set to either true or false.
50 | cygwin=false
51 | darwin=false
52 | mingw=false
53 | case "$(uname)" in
54 | CYGWIN*) cygwin=true ;;
55 | MINGW*) mingw=true ;;
56 | Darwin*)
57 | darwin=true
58 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
59 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
60 | if [ -z "$JAVA_HOME" ]; then
61 | if [ -x "/usr/libexec/java_home" ]; then
62 | export JAVA_HOME="$(/usr/libexec/java_home)"
63 | else
64 | export JAVA_HOME="/Library/Java/Home"
65 | fi
66 | fi
67 | ;;
68 | esac
69 |
70 | if [ -z "$JAVA_HOME" ]; then
71 | if [ -r /etc/gentoo-release ]; then
72 | JAVA_HOME=$(java-config --jre-home)
73 | fi
74 | fi
75 |
76 | if [ -z "$M2_HOME" ]; then
77 | ## resolve links - $0 may be a link to maven's home
78 | PRG="$0"
79 |
80 | # need this for relative symlinks
81 | while [ -h "$PRG" ]; do
82 | ls=$(ls -ld "$PRG")
83 | link=$(expr "$ls" : '.*-> \(.*\)$')
84 | if expr "$link" : '/.*' >/dev/null; then
85 | PRG="$link"
86 | else
87 | PRG="$(dirname "$PRG")/$link"
88 | fi
89 | done
90 |
91 | saveddir=$(pwd)
92 |
93 | M2_HOME=$(dirname "$PRG")/..
94 |
95 | # make it fully qualified
96 | M2_HOME=$(cd "$M2_HOME" && pwd)
97 |
98 | cd "$saveddir"
99 | # echo Using m2 at $M2_HOME
100 | fi
101 |
102 | # For Cygwin, ensure paths are in UNIX format before anything is touched
103 | if $cygwin; then
104 | [ -n "$M2_HOME" ] &&
105 | M2_HOME=$(cygpath --unix "$M2_HOME")
106 | [ -n "$JAVA_HOME" ] &&
107 | JAVA_HOME=$(cygpath --unix "$JAVA_HOME")
108 | [ -n "$CLASSPATH" ] &&
109 | CLASSPATH=$(cygpath --path --unix "$CLASSPATH")
110 | fi
111 |
112 | # For Migwn, ensure paths are in UNIX format before anything is touched
113 | if $mingw; then
114 | [ -n "$M2_HOME" ] &&
115 | M2_HOME="$( (
116 | cd "$M2_HOME"
117 | pwd
118 | ))"
119 | [ -n "$JAVA_HOME" ] &&
120 | JAVA_HOME="$( (
121 | cd "$JAVA_HOME"
122 | pwd
123 | ))"
124 | # TODO classpath?
125 | fi
126 |
127 | if [ -z "$JAVA_HOME" ]; then
128 | javaExecutable="$(which javac)"
129 | if [ -n "$javaExecutable" ] && ! [ "$(expr \"$javaExecutable\" : '\([^ ]*\)')" = "no" ]; then
130 | # readlink(1) is not available as standard on Solaris 10.
131 | readLink=$(which readlink)
132 | if [ ! $(expr "$readLink" : '\([^ ]*\)') = "no" ]; then
133 | if $darwin; then
134 | javaHome="$(dirname \"$javaExecutable\")"
135 | javaExecutable="$(cd \"$javaHome\" && pwd -P)/javac"
136 | else
137 | javaExecutable="$(readlink -f \"$javaExecutable\")"
138 | fi
139 | javaHome="$(dirname \"$javaExecutable\")"
140 | javaHome=$(expr "$javaHome" : '\(.*\)/bin')
141 | JAVA_HOME="$javaHome"
142 | export JAVA_HOME
143 | fi
144 | fi
145 | fi
146 |
147 | if [ -z "$JAVACMD" ]; then
148 | if [ -n "$JAVA_HOME" ]; then
149 | if [ -x "$JAVA_HOME/jre/sh/java" ]; then
150 | # IBM's JDK on AIX uses strange locations for the executables
151 | JAVACMD="$JAVA_HOME/jre/sh/java"
152 | else
153 | JAVACMD="$JAVA_HOME/bin/java"
154 | fi
155 | else
156 | JAVACMD="$(which java)"
157 | fi
158 | fi
159 |
160 | if [ ! -x "$JAVACMD" ]; then
161 | echo "Error: JAVA_HOME is not defined correctly." >&2
162 | echo " We cannot execute $JAVACMD" >&2
163 | exit 1
164 | fi
165 |
166 | if [ -z "$JAVA_HOME" ]; then
167 | echo "Warning: JAVA_HOME environment variable is not set."
168 | fi
169 |
170 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
171 |
172 | # traverses directory structure from process work directory to filesystem root
173 | # first directory with .mvn subdirectory is considered project base directory
174 | find_maven_basedir() {
175 |
176 | if [ -z "$1" ]; then
177 | echo "Path not specified to find_maven_basedir"
178 | return 1
179 | fi
180 |
181 | basedir="$1"
182 | wdir="$1"
183 | while [ "$wdir" != '/' ]; do
184 | if [ -d "$wdir"/.mvn ]; then
185 | basedir=$wdir
186 | break
187 | fi
188 | # workaround for JBEAP-8937 (on Solaris 10/Sparc)
189 | if [ -d "${wdir}" ]; then
190 | wdir=$(
191 | cd "$wdir/.."
192 | pwd
193 | )
194 | fi
195 | # end of workaround
196 | done
197 | echo "${basedir}"
198 | }
199 |
200 | # concatenates all lines of a file
201 | concat_lines() {
202 | if [ -f "$1" ]; then
203 | echo "$(tr -s '\n' ' ' <"$1")"
204 | fi
205 | }
206 |
207 | BASE_DIR=$(find_maven_basedir "$(pwd)")
208 | if [ -z "$BASE_DIR" ]; then
209 | exit 1
210 | fi
211 |
212 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
213 | echo $MAVEN_PROJECTBASEDIR
214 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
215 |
216 | # For Cygwin, switch paths to Windows format before running java
217 | if $cygwin; then
218 | [ -n "$M2_HOME" ] &&
219 | M2_HOME=$(cygpath --path --windows "$M2_HOME")
220 | [ -n "$JAVA_HOME" ] &&
221 | JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME")
222 | [ -n "$CLASSPATH" ] &&
223 | CLASSPATH=$(cygpath --path --windows "$CLASSPATH")
224 | [ -n "$MAVEN_PROJECTBASEDIR" ] &&
225 | MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR")
226 | fi
227 |
228 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
229 |
230 | exec "$JAVACMD" \
231 | $MAVEN_OPTS \
232 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
233 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
234 | ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
235 |
--------------------------------------------------------------------------------
/mvnw.cmd:
--------------------------------------------------------------------------------
1 | @REM ----------------------------------------------------------------------------
2 | @REM Licensed to the Apache Software Foundation (ASF) under one
3 | @REM or more contributor license agreements. See the NOTICE file
4 | @REM distributed with this work for additional information
5 | @REM regarding copyright ownership. The ASF licenses this file
6 | @REM to you under the Apache License, Version 2.0 (the
7 | @REM "License"); you may not use this file except in compliance
8 | @REM with the License. You may obtain a copy of the License at
9 | @REM
10 | @REM http://www.apache.org/licenses/LICENSE-2.0
11 | @REM
12 | @REM Unless required by applicable law or agreed to in writing,
13 | @REM software distributed under the License is distributed on an
14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 | @REM KIND, either express or implied. See the License for the
16 | @REM specific language governing permissions and limitations
17 | @REM under the License.
18 | @REM ----------------------------------------------------------------------------
19 |
20 | @REM ----------------------------------------------------------------------------
21 | @REM Maven2 Start Up Batch script
22 | @REM
23 | @REM Required ENV vars:
24 | @REM JAVA_HOME - location of a JDK home dir
25 | @REM
26 | @REM Optional ENV vars
27 | @REM M2_HOME - location of maven2's installed home dir
28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending
30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
31 | @REM e.g. to debug Maven itself, use
32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
34 | @REM ----------------------------------------------------------------------------
35 |
36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
37 | @echo off
38 | @REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
40 |
41 | @REM set %HOME% to equivalent of $HOME
42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
43 |
44 | @REM Execute a user defined script before this one
45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending
47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
49 | :skipRcPre
50 |
51 | @setlocal
52 |
53 | set ERROR_CODE=0
54 |
55 | @REM To isolate internal variables from possible post scripts, we use another setlocal
56 | @setlocal
57 |
58 | @REM ==== START VALIDATION ====
59 | if not "%JAVA_HOME%" == "" goto OkJHome
60 |
61 | echo.
62 | echo Error: JAVA_HOME not found in your environment. >&2
63 | echo Please set the JAVA_HOME variable in your environment to match the >&2
64 | echo location of your Java installation. >&2
65 | echo.
66 | goto error
67 |
68 | :OkJHome
69 | if exist "%JAVA_HOME%\bin\java.exe" goto init
70 |
71 | echo.
72 | echo Error: JAVA_HOME is set to an invalid directory. >&2
73 | echo JAVA_HOME = "%JAVA_HOME%" >&2
74 | echo Please set the JAVA_HOME variable in your environment to match the >&2
75 | echo location of your Java installation. >&2
76 | echo.
77 | goto error
78 |
79 | @REM ==== END VALIDATION ====
80 |
81 | :init
82 |
83 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
84 | @REM Fallback to current working directory if not found.
85 |
86 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
87 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
88 |
89 | set EXEC_DIR=%CD%
90 | set WDIR=%EXEC_DIR%
91 | :findBaseDir
92 | IF EXIST "%WDIR%"\.mvn goto baseDirFound
93 | cd ..
94 | IF "%WDIR%"=="%CD%" goto baseDirNotFound
95 | set WDIR=%CD%
96 | goto findBaseDir
97 |
98 | :baseDirFound
99 | set MAVEN_PROJECTBASEDIR=%WDIR%
100 | cd "%EXEC_DIR%"
101 | goto endDetectBaseDir
102 |
103 | :baseDirNotFound
104 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
105 | cd "%EXEC_DIR%"
106 |
107 | :endDetectBaseDir
108 |
109 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
110 |
111 | @setlocal EnableExtensions EnableDelayedExpansion
112 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
113 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
114 |
115 | :endReadAdditionalConfig
116 |
117 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
118 |
119 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
120 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
121 |
122 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
123 | if ERRORLEVEL 1 goto error
124 | goto end
125 |
126 | :error
127 | set ERROR_CODE=1
128 |
129 | :end
130 | @endlocal & set ERROR_CODE=%ERROR_CODE%
131 |
132 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
133 | @REM check for post script, once with legacy .bat ending and once with .cmd ending
134 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
135 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
136 | :skipRcPost
137 |
138 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
139 | if "%MAVEN_BATCH_PAUSE%" == "on" pause
140 |
141 | if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
142 |
143 | exit /B %ERROR_CODE%
144 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <parent>
    <artifactId>opentracing-kafka-parent</artifactId>
    <groupId>io.opentracing.contrib</groupId>
    <version>0.1.16-SNAPSHOT</version>
  </parent>
  <modelVersion>4.0.0</modelVersion>

  <artifactId>opentracing-kafka-client</artifactId>
  <name>${project.groupId}:${project.artifactId}</name>
  <description>OpenTracing Instrumentation for Apache Kafka Client</description>

  <dependencies>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <scope>provided</scope>
    </dependency>
  </dependencies>
</project>
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/ClientSpanNameProvider.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 |
15 | package io.opentracing.contrib.kafka;
16 |
17 | import java.util.function.BiFunction;
18 | import org.apache.kafka.clients.consumer.ConsumerRecord;
19 | import org.apache.kafka.clients.producer.ProducerRecord;
20 |
21 | /**
22 | * @author Jordan J Lopez Returns a string to be used as the name of the spans, based on the
23 |  * operation performed and the record the span is based on.
24 | */
25 | public class ClientSpanNameProvider {
26 |
27 | // Operation Name as Span Name
28 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_OPERATION_NAME =
29 | (operationName, consumerRecord) -> replaceIfNull(operationName, "unknown");
30 | public static BiFunction<String, ProducerRecord, String> PRODUCER_OPERATION_NAME =
31 | (operationName, producerRecord) -> replaceIfNull(operationName, "unknown");
32 |
33 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_PREFIXED_OPERATION_NAME(
34 | final String prefix) {
35 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "")
36 | + replaceIfNull(operationName, "unknown");
37 | }
38 |
39 | public static BiFunction<String, ProducerRecord, String> PRODUCER_PREFIXED_OPERATION_NAME(
40 | final String prefix) {
41 | return (operationName, producerRecord) -> replaceIfNull(prefix, "")
42 | + replaceIfNull(operationName, "unknown");
43 | }
44 |
45 | // Topic as Span Name
46 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_TOPIC =
47 | (operationName, consumerRecord) -> replaceIfNull(consumerRecord, "unknown");
48 | public static BiFunction<String, ProducerRecord, String> PRODUCER_TOPIC =
49 | (operationName, producerRecord) -> replaceIfNull(producerRecord, "unknown");
50 |
51 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_PREFIXED_TOPIC(
52 | final String prefix) {
53 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "")
54 | + replaceIfNull(consumerRecord, "unknown");
55 | }
56 |
57 | public static BiFunction<String, ProducerRecord, String> PRODUCER_PREFIXED_TOPIC(
58 | final String prefix) {
59 | return (operationName, producerRecord) -> replaceIfNull(prefix, "")
60 | + replaceIfNull(producerRecord, "unknown");
61 | }
62 |
63 | // Operation Name and Topic as Span Name
64 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_OPERATION_NAME_TOPIC =
65 | (operationName, consumerRecord) -> replaceIfNull(operationName, "unknown")
66 | + " - " + replaceIfNull(consumerRecord, "unknown");
67 | public static BiFunction<String, ProducerRecord, String> PRODUCER_OPERATION_NAME_TOPIC =
68 | (operationName, producerRecord) -> replaceIfNull(operationName, "unknown")
69 | + " - " + replaceIfNull(producerRecord, "unknown");
70 |
71 | public static BiFunction<String, ConsumerRecord, String> CONSUMER_PREFIXED_OPERATION_NAME_TOPIC(
72 | final String prefix) {
73 | return (operationName, consumerRecord) -> replaceIfNull(prefix, "")
74 | + replaceIfNull(operationName, "unknown")
75 | + " - " + replaceIfNull(consumerRecord, "unknown");
76 | }
77 |
78 | public static BiFunction<String, ProducerRecord, String> PRODUCER_PREFIXED_OPERATION_NAME_TOPIC(
79 | final String prefix) {
80 | return (operationName, producerRecord) -> replaceIfNull(prefix, "")
81 | + replaceIfNull(operationName, "unknown")
82 | + " - " + replaceIfNull(producerRecord, "unknown");
83 | }
84 |
85 | private static String replaceIfNull(String input, String replacement) {
86 | return (input == null) ? replacement : input;
87 | }
88 |
89 | private static String replaceIfNull(ConsumerRecord input, String replacement) {
90 | return ((input == null) ? replacement : input.topic());
91 | }
92 |
93 | private static String replaceIfNull(ProducerRecord input, String replacement) {
94 | return ((input == null) ? replacement : input.topic());
95 | }
96 |
97 | }
98 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/HeadersMapExtractAdapter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.propagation.TextMap;
17 | import java.nio.charset.StandardCharsets;
18 | import java.util.HashMap;
19 | import java.util.Iterator;
20 | import java.util.Map;
21 | import java.util.Map.Entry;
22 | import org.apache.kafka.common.header.Header;
23 | import org.apache.kafka.common.header.Headers;
24 |
25 |
26 | public class HeadersMapExtractAdapter implements TextMap {
27 |
28 | private final Map<String, String> map = new HashMap<>();
29 |
30 | public HeadersMapExtractAdapter(Headers headers) {
31 | for (Header header : headers) {
32 | byte[] headerValue = header.value();
33 | map.put(header.key(),
34 | headerValue == null ? null : new String(headerValue, StandardCharsets.UTF_8));
35 | }
36 | }
37 |
38 | @Override
39 | public Iterator<Entry<String, String>> iterator() {
40 | return map.entrySet().iterator();
41 | }
42 |
43 | @Override
44 | public void put(String key, String value) {
45 | throw new UnsupportedOperationException(
46 | "HeadersMapExtractAdapter should only be used with Tracer.extract()");
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/HeadersMapInjectAdapter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 |
17 | import io.opentracing.propagation.TextMap;
18 | import java.nio.charset.StandardCharsets;
19 | import java.util.Iterator;
20 | import java.util.Map.Entry;
21 | import org.apache.kafka.common.header.Headers;
22 |
23 | public class HeadersMapInjectAdapter implements TextMap {
24 |
25 | private final Headers headers;
26 |
27 | public HeadersMapInjectAdapter(Headers headers) {
28 | this.headers = headers;
29 | }
30 |
31 | @Override
32 | public Iterator<Entry<String, String>> iterator() {
33 | throw new UnsupportedOperationException("iterator should never be used with Tracer.inject()");
34 | }
35 |
36 | @Override
37 | public void put(String key, String value) {
38 | headers.add(key, value.getBytes(StandardCharsets.UTF_8));
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/SpanDecorator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.Span;
17 | import org.apache.kafka.clients.consumer.ConsumerRecord;
18 | import org.apache.kafka.clients.producer.ProducerRecord;
19 |
20 | public interface SpanDecorator {
21 |
22 | /**
23 | * Method called before record is sent by producer
24 | */
25 | void onSend(ProducerRecord record, Span span);
26 |
27 | /**
28 | * Method called when record is received in consumer
29 | */
30 | void onResponse(ConsumerRecord record, Span span);
31 |
32 | /**
33 | * Method called when an error occurs
34 | */
35 | void onError(Exception exception, Span span);
36 |
37 | /**
38 | * Gives a SpanDecorator with the standard tags
39 | */
40 | SpanDecorator STANDARD_TAGS = new StandardSpanDecorator();
41 | }
42 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/StandardSpanDecorator.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.Span;
17 | import io.opentracing.tag.Tags;
18 | import java.io.PrintWriter;
19 | import java.io.StringWriter;
20 | import java.util.HashMap;
21 | import java.util.Map;
22 | import org.apache.kafka.clients.consumer.ConsumerRecord;
23 | import org.apache.kafka.clients.producer.ProducerRecord;
24 |
25 | class StandardSpanDecorator implements SpanDecorator {
26 |
27 | static final String COMPONENT_NAME = "java-kafka";
28 | static final String KAFKA_SERVICE = "kafka";
29 |
30 | public void onSend(ProducerRecord record, Span span) {
31 | setCommonTags(span);
32 | Tags.MESSAGE_BUS_DESTINATION.set(span, record.topic());
33 | if (record.partition() != null) {
34 | span.setTag("partition", record.partition());
35 | }
36 | }
37 |
38 | public void onResponse(ConsumerRecord record, Span span) {
39 | setCommonTags(span);
40 | Tags.MESSAGE_BUS_DESTINATION.set(span, record.topic());
41 | span.setTag("partition", record.partition());
42 | span.setTag("offset", record.offset());
43 | }
44 |
45 | public void onError(Exception exception, Span span) {
46 | Tags.ERROR.set(span, Boolean.TRUE);
47 | span.log(errorLogs(exception));
48 | }
49 |
50 |   private static Map<String, Object> errorLogs(Throwable throwable) {
51 |     Map<String, Object> errorLogs = new HashMap<>(4);
52 | errorLogs.put("event", Tags.ERROR.getKey());
53 | errorLogs.put("error.kind", throwable.getClass().getName());
54 | errorLogs.put("error.object", throwable);
55 | errorLogs.put("message", throwable.getMessage());
56 |
57 | StringWriter sw = new StringWriter();
58 | throwable.printStackTrace(new PrintWriter(sw));
59 | errorLogs.put("stack", sw.toString());
60 |
61 | return errorLogs;
62 | }
63 |
64 | private static void setCommonTags(Span span) {
65 | Tags.COMPONENT.set(span, COMPONENT_NAME);
66 | Tags.PEER_SERVICE.set(span, KAFKA_SERVICE);
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingCallback.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 |
17 | import io.opentracing.Scope;
18 | import io.opentracing.Span;
19 | import io.opentracing.Tracer;
20 | import java.util.Collection;
21 | import java.util.Collections;
22 | import org.apache.kafka.clients.producer.Callback;
23 | import org.apache.kafka.clients.producer.RecordMetadata;
24 |
25 | /**
26 | * Callback executed after the producer has finished sending a message
27 | */
28 | public class TracingCallback implements Callback {
29 | private final Callback callback;
30 |   private Collection<SpanDecorator> spanDecorators;
31 | private final Span span;
32 | private final Tracer tracer;
33 |
34 | public TracingCallback(Callback callback, Span span, Tracer tracer) {
35 | this.callback = callback;
36 | this.span = span;
37 | this.tracer = tracer;
38 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS);
39 | }
40 |
41 | TracingCallback(Callback callback, Span span, Tracer tracer,
42 |       Collection<SpanDecorator> spanDecorators) {
43 | this.callback = callback;
44 | this.span = span;
45 | this.tracer = tracer;
46 | this.spanDecorators = spanDecorators;
47 | }
48 |
49 | @Override
50 | public void onCompletion(RecordMetadata metadata, Exception exception) {
51 | if (exception != null) {
52 | for (SpanDecorator decorator : spanDecorators) {
53 | decorator.onError(exception, span);
54 | }
55 | }
56 |
57 | try (Scope ignored = tracer.scopeManager().activate(span)) {
58 | if (callback != null) {
59 | callback.onCompletion(metadata, exception);
60 | }
61 | } finally {
62 | span.finish();
63 | }
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingConsumerInterceptor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.util.GlobalTracer;
17 | import java.util.Map;
18 | import org.apache.kafka.clients.consumer.ConsumerInterceptor;
19 | import org.apache.kafka.clients.consumer.ConsumerRecord;
20 | import org.apache.kafka.clients.consumer.ConsumerRecords;
21 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
22 | import org.apache.kafka.common.TopicPartition;
23 |
24 | public class TracingConsumerInterceptor<K, V> implements ConsumerInterceptor<K, V> {
25 | 
26 |   @Override
27 |   public ConsumerRecords<K, V> onConsume(ConsumerRecords<K, V> records) {
28 |     for (ConsumerRecord<K, V> record : records) {
29 | TracingKafkaUtils.buildAndFinishChildSpan(record, GlobalTracer.get());
30 | }
31 |
32 | return records;
33 | }
34 |
35 | @Override
36 |   public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
37 |
38 | }
39 |
40 | @Override
41 | public void close() {
42 |
43 | }
44 |
45 | @Override
46 |   public void configure(Map<String, ?> configs) {
47 |
48 | }
49 | }
50 |
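
A minimal configuration sketch for the interceptor-based approach (not part of the repository). Bootstrap servers, group id and deserializer classes are placeholder assumptions; the relevant line is registering the interceptor class, after which a span is reported through the tracer registered with GlobalTracer for every record returned by poll():

    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");
    // Register the tracing interceptor on the plain Kafka consumer.
    props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG,
        TracingConsumerInterceptor.class.getName());
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
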
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaConsumer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 |
17 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS;
18 |
19 | import io.opentracing.Tracer;
20 | import java.time.Duration;
21 | import java.util.Collection;
22 | import java.util.Collections;
23 | import java.util.List;
24 | import java.util.Map;
25 | import java.util.Set;
26 | import java.util.concurrent.TimeUnit;
27 | import java.util.function.BiFunction;
28 | import java.util.regex.Pattern;
29 | import org.apache.kafka.clients.consumer.Consumer;
30 | import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
31 | import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
32 | import org.apache.kafka.clients.consumer.ConsumerRecord;
33 | import org.apache.kafka.clients.consumer.ConsumerRecords;
34 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
35 | import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
36 | import org.apache.kafka.clients.consumer.OffsetCommitCallback;
37 | import org.apache.kafka.common.Metric;
38 | import org.apache.kafka.common.MetricName;
39 | import org.apache.kafka.common.PartitionInfo;
40 | import org.apache.kafka.common.TopicPartition;
41 |
42 | public class TracingKafkaConsumer<K, V> implements Consumer<K, V> {
43 | 
44 |   private final Tracer tracer;
45 |   private final Consumer<K, V> consumer;
46 |   private Collection<SpanDecorator> spanDecorators;
47 |   private final BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider;
48 | 
49 |   TracingKafkaConsumer(Consumer<K, V> consumer, Tracer tracer,
50 |       Collection<SpanDecorator> spanDecorators,
51 |       BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider) {
52 | this.consumer = consumer;
53 | this.tracer = tracer;
54 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators);
55 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null)
56 | ? ClientSpanNameProvider.CONSUMER_OPERATION_NAME
57 | : consumerSpanNameProvider;
58 | }
59 |
60 |   public TracingKafkaConsumer(Consumer<K, V> consumer, Tracer tracer) {
61 | this.consumer = consumer;
62 | this.tracer = tracer;
63 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS);
64 | this.consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME;
65 | }
66 |
67 |   public TracingKafkaConsumer(Consumer<K, V> consumer, Tracer tracer,
68 |       BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider) {
69 | this.consumer = consumer;
70 | this.tracer = tracer;
71 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS);
72 | this.consumerSpanNameProvider = (consumerSpanNameProvider == null)
73 | ? ClientSpanNameProvider.CONSUMER_OPERATION_NAME
74 | : consumerSpanNameProvider;
75 | }
76 |
77 | @Override
78 |   public Set<TopicPartition> assignment() {
79 | return consumer.assignment();
80 | }
81 |
82 | @Override
83 |   public Set<String> subscription() {
84 | return consumer.subscription();
85 | }
86 |
87 | @Override
88 |   public void subscribe(Collection<String> topics, ConsumerRebalanceListener listener) {
89 | consumer.subscribe(topics, listener);
90 | }
91 |
92 | @Override
93 |   public void subscribe(Collection<String> topics) {
94 | consumer.subscribe(topics);
95 | }
96 |
97 | @Override
98 | public void subscribe(Pattern pattern, ConsumerRebalanceListener listener) {
99 | consumer.subscribe(pattern, listener);
100 | }
101 |
102 | @Override
103 | public void subscribe(Pattern pattern) {
104 | consumer.subscribe(pattern);
105 | }
106 |
107 | @Override
108 | public void unsubscribe() {
109 | consumer.unsubscribe();
110 | }
111 |
112 | @Override
113 |   public void assign(Collection<TopicPartition> partitions) {
114 | consumer.assign(partitions);
115 | }
116 |
117 | @Override
118 | @Deprecated
119 |   public ConsumerRecords<K, V> poll(long timeout) {
120 |     ConsumerRecords<K, V> records = consumer.poll(timeout);
121 | 
122 |     for (ConsumerRecord<K, V> record : records) {
123 | TracingKafkaUtils
124 | .buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider, spanDecorators);
125 | }
126 |
127 | return records;
128 | }
129 |
130 | @Override
131 |   public ConsumerRecords<K, V> poll(Duration duration) {
132 |     ConsumerRecords<K, V> records = consumer.poll(duration);
133 | 
134 |     for (ConsumerRecord<K, V> record : records) {
135 | TracingKafkaUtils
136 | .buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider, spanDecorators);
137 | }
138 |
139 | return records;
140 | }
141 |
142 | @Override
143 | public void commitSync() {
144 | consumer.commitSync();
145 | }
146 |
147 | @Override
148 | public void commitSync(Duration duration) {
149 | consumer.commitSync(duration);
150 | }
151 |
152 | @Override
153 |   public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets) {
154 | consumer.commitSync(offsets);
155 | }
156 |
157 | @Override
158 |   public void commitSync(Map<TopicPartition, OffsetAndMetadata> map, Duration duration) {
159 | consumer.commitSync(map, duration);
160 | }
161 |
162 | @Override
163 | public void commitAsync() {
164 | consumer.commitAsync();
165 | }
166 |
167 | @Override
168 | public void commitAsync(OffsetCommitCallback callback) {
169 | consumer.commitAsync(callback);
170 | }
171 |
172 | @Override
173 |   public void commitAsync(Map<TopicPartition, OffsetAndMetadata> offsets,
174 | OffsetCommitCallback callback) {
175 | consumer.commitAsync(offsets, callback);
176 | }
177 |
178 | @Override
179 | public void seek(TopicPartition partition, long offset) {
180 | consumer.seek(partition, offset);
181 | }
182 |
183 | @Override
184 | public void seek(TopicPartition partition, OffsetAndMetadata offsetAndMetadata) {
185 | consumer.seek(partition, offsetAndMetadata);
186 | }
187 |
188 | @Override
189 |   public void seekToBeginning(Collection<TopicPartition> partitions) {
190 | consumer.seekToBeginning(partitions);
191 | }
192 |
193 | @Override
194 |   public void seekToEnd(Collection<TopicPartition> partitions) {
195 | consumer.seekToEnd(partitions);
196 | }
197 |
198 | @Override
199 | public long position(TopicPartition partition) {
200 | return consumer.position(partition);
201 | }
202 |
203 | @Override
204 | public long position(TopicPartition topicPartition, Duration duration) {
205 | return consumer.position(topicPartition, duration);
206 | }
207 |
208 | @Override
209 | public OffsetAndMetadata committed(TopicPartition partition) {
210 | return consumer.committed(partition);
211 | }
212 |
213 | @Override
214 | public OffsetAndMetadata committed(TopicPartition topicPartition, Duration duration) {
215 | return consumer.committed(topicPartition, duration);
216 | }
217 |
218 | @Override
219 |   public Map<TopicPartition, OffsetAndMetadata> committed(Set<TopicPartition> partitions) {
220 | return consumer.committed(partitions);
221 | }
222 |
223 | @Override
224 |   public Map<TopicPartition, OffsetAndMetadata> committed(Set<TopicPartition> partitions,
225 | final Duration timeout) {
226 | return consumer.committed(partitions, timeout);
227 | }
228 |
229 | @Override
230 |   public Map<MetricName, ? extends Metric> metrics() {
231 | return consumer.metrics();
232 | }
233 |
234 | @Override
235 |   public List<PartitionInfo> partitionsFor(String topic) {
236 | return consumer.partitionsFor(topic);
237 | }
238 |
239 | @Override
240 |   public List<PartitionInfo> partitionsFor(String s, Duration duration) {
241 | return consumer.partitionsFor(s, duration);
242 | }
243 |
244 | @Override
245 |   public Map<String, List<PartitionInfo>> listTopics() {
246 | return consumer.listTopics();
247 | }
248 |
249 | @Override
250 |   public Map<String, List<PartitionInfo>> listTopics(Duration duration) {
251 | return consumer.listTopics(duration);
252 | }
253 |
254 | @Override
255 |   public void pause(Collection<TopicPartition> partitions) {
256 | consumer.pause(partitions);
257 | }
258 |
259 | @Override
260 |   public void resume(Collection<TopicPartition> partitions) {
261 | consumer.resume(partitions);
262 | }
263 |
264 | @Override
265 |   public Set<TopicPartition> paused() {
266 | return consumer.paused();
267 | }
268 |
269 | @Override
270 |   public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(
271 |       Map<TopicPartition, Long> timestampsToSearch) {
272 | return consumer.offsetsForTimes(timestampsToSearch);
273 | }
274 |
275 | @Override
276 |   public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> map,
277 | Duration duration) {
278 | return consumer.offsetsForTimes(map, duration);
279 | }
280 |
281 | @Override
282 |   public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions) {
283 | return consumer.beginningOffsets(partitions);
284 | }
285 |
286 | @Override
287 |   public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> collection,
288 | Duration duration) {
289 | return consumer.beginningOffsets(collection, duration);
290 | }
291 |
292 | @Override
293 |   public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> partitions) {
294 | return consumer.endOffsets(partitions);
295 | }
296 |
297 | @Override
298 |   public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> collection,
299 | Duration duration) {
300 | return consumer.endOffsets(collection, duration);
301 | }
302 |
303 | @Override
304 | public ConsumerGroupMetadata groupMetadata() {
305 | return consumer.groupMetadata();
306 | }
307 |
308 | @Override
309 | public void enforceRebalance() {
310 | consumer.enforceRebalance();
311 | }
312 |
313 | @Override
314 | public void close() {
315 | consumer.close();
316 | }
317 |
318 | @Override
319 | @Deprecated
320 | public void close(long l, TimeUnit timeUnit) {
321 | consumer.close(l, timeUnit);
322 | }
323 |
324 | @Override
325 | public void close(Duration duration) {
326 | consumer.close(duration);
327 | }
328 |
329 | @Override
330 | public void wakeup() {
331 | consumer.wakeup();
332 | }
333 | }
334 |
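
A minimal usage sketch for the decorating consumer (not part of the repository; the tracer, consumerProps and topic name are assumptions for illustration). Each call to poll() builds and finishes one child span per returned record:

    KafkaConsumer<String, String> plainConsumer = new KafkaConsumer<>(consumerProps);
    TracingKafkaConsumer<String, String> tracingConsumer =
        new TracingKafkaConsumer<>(plainConsumer, tracer);
    tracingConsumer.subscribe(Collections.singletonList("example-topic"));
    ConsumerRecords<String, String> records = tracingConsumer.poll(Duration.ofMillis(100));
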
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaConsumerBuilder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.Tracer;
17 | import java.util.Collection;
18 | import java.util.Collections;
19 | import java.util.function.BiFunction;
20 | import org.apache.kafka.clients.consumer.Consumer;
21 | import org.apache.kafka.clients.consumer.ConsumerRecord;
22 |
23 | public class TracingKafkaConsumerBuilder<K, V> {
24 |   private Collection<SpanDecorator> spanDecorators;
25 |   private Consumer<K, V> consumer;
26 |   private Tracer tracer;
27 |   private BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider;
28 |
29 |   public TracingKafkaConsumerBuilder(Consumer<K, V> consumer, Tracer tracer) {
30 | this.tracer = tracer;
31 | this.consumer = consumer;
32 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS);
33 | this.consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME;
34 | }
35 |
36 |   public TracingKafkaConsumerBuilder<K, V> withDecorators(Collection<SpanDecorator> spanDecorators) {
37 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators);
38 | return this;
39 | }
40 |
41 |   public TracingKafkaConsumerBuilder<K, V> withSpanNameProvider(
42 |       BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider) {
43 | this.consumerSpanNameProvider = consumerSpanNameProvider;
44 | return this;
45 | }
46 |
47 |   public TracingKafkaConsumer<K, V> build() {
48 | return new TracingKafkaConsumer<>(consumer, tracer, spanDecorators, consumerSpanNameProvider);
49 | }
50 | }
51 |
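
A hedged builder example (not part of the repository; plainConsumer, tracer and customDecorator are assumptions for illustration) showing how decorators and a span name provider can be combined:

    TracingKafkaConsumer<String, String> consumer =
        new TracingKafkaConsumerBuilder<>(plainConsumer, tracer)
            .withDecorators(Arrays.asList(SpanDecorator.STANDARD_TAGS, customDecorator))
            .withSpanNameProvider(ClientSpanNameProvider.CONSUMER_OPERATION_NAME_TOPIC)
            .build();
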
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaProducer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import static io.opentracing.contrib.kafka.SpanDecorator.STANDARD_TAGS;
17 |
18 | import io.opentracing.Scope;
19 | import io.opentracing.Span;
20 | import io.opentracing.SpanContext;
21 | import io.opentracing.Tracer;
22 | import java.time.Duration;
23 | import java.util.Collection;
24 | import java.util.Collections;
25 | import java.util.List;
26 | import java.util.Map;
27 | import java.util.concurrent.Future;
28 | import java.util.concurrent.TimeUnit;
29 | import java.util.function.BiFunction;
30 | import org.apache.kafka.clients.consumer.ConsumerGroupMetadata;
31 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
32 | import org.apache.kafka.clients.producer.Callback;
33 | import org.apache.kafka.clients.producer.Producer;
34 | import org.apache.kafka.clients.producer.ProducerRecord;
35 | import org.apache.kafka.clients.producer.RecordMetadata;
36 | import org.apache.kafka.common.Metric;
37 | import org.apache.kafka.common.MetricName;
38 | import org.apache.kafka.common.PartitionInfo;
39 | import org.apache.kafka.common.TopicPartition;
40 | import org.apache.kafka.common.errors.ProducerFencedException;
41 |
42 | public class TracingKafkaProducer<K, V> implements Producer<K, V> {
43 | 
44 |   private Producer<K, V> producer;
45 |   private final Tracer tracer;
46 |   private final BiFunction<String, ProducerRecord, String> producerSpanNameProvider;
47 |   private Collection<SpanDecorator> spanDecorators;
48 | 
49 |   TracingKafkaProducer(Producer<K, V> producer, Tracer tracer,
50 |       Collection<SpanDecorator> spanDecorators,
51 |       BiFunction<String, ProducerRecord, String> producerSpanNameProvider) {
52 | this.producer = producer;
53 | this.tracer = tracer;
54 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators);
55 | this.producerSpanNameProvider = (producerSpanNameProvider == null)
56 | ? ClientSpanNameProvider.PRODUCER_OPERATION_NAME
57 | : producerSpanNameProvider;
58 | }
59 |
60 |   public TracingKafkaProducer(Producer<K, V> producer, Tracer tracer) {
61 | this.producer = producer;
62 | this.tracer = tracer;
63 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS);
64 | this.producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME;
65 | }
66 |
67 |   public TracingKafkaProducer(Producer<K, V> producer, Tracer tracer,
68 |       BiFunction<String, ProducerRecord, String> producerSpanNameProvider) {
69 | this.producer = producer;
70 | this.tracer = tracer;
71 | this.spanDecorators = Collections.singletonList(STANDARD_TAGS);
72 | this.producerSpanNameProvider = (producerSpanNameProvider == null)
73 | ? ClientSpanNameProvider.PRODUCER_OPERATION_NAME
74 | : producerSpanNameProvider;
75 | }
76 |
77 | @Override
78 | public void initTransactions() {
79 | producer.initTransactions();
80 | }
81 |
82 | @Override
83 | public void beginTransaction() throws ProducerFencedException {
84 | producer.beginTransaction();
85 | }
86 |
87 | @Override
88 |   public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets,
89 | String consumerGroupId)
90 | throws ProducerFencedException {
91 | producer.sendOffsetsToTransaction(offsets, consumerGroupId);
92 | }
93 |
94 | @Override
95 |   public void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets,
96 | ConsumerGroupMetadata groupMetadata) throws ProducerFencedException {
97 | producer.sendOffsetsToTransaction(offsets, groupMetadata);
98 | }
99 |
100 | @Override
101 | public void commitTransaction() throws ProducerFencedException {
102 | producer.commitTransaction();
103 | }
104 |
105 | @Override
106 | public void abortTransaction() throws ProducerFencedException {
107 | producer.abortTransaction();
108 | }
109 |
110 | @Override
111 |   public Future<RecordMetadata> send(ProducerRecord<K, V> record) {
112 | return send(record, null, null);
113 | }
114 |
115 |   public Future<RecordMetadata> send(ProducerRecord<K, V> record, SpanContext parent) {
116 | return send(record, null, parent);
117 | }
118 |
119 | @Override
120 |   public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback) {
121 | return send(record, callback, null);
122 | }
123 |
124 |   public Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback,
125 | SpanContext parent) {
126 | /*
127 | // Create wrappedRecord because headers can be read only in record (if record is sent second time)
128 |     ProducerRecord<K, V> wrappedRecord = new ProducerRecord<>(record.topic(),
129 | record.partition(),
130 | record.timestamp(),
131 | record.key(),
132 | record.value(),
133 | record.headers());
134 | */
135 |
136 | Span span = TracingKafkaUtils
137 | .buildAndInjectSpan(record, tracer, producerSpanNameProvider, parent, spanDecorators);
138 | try (Scope ignored = tracer.activateSpan(span)) {
139 | Callback wrappedCallback = new TracingCallback(callback, span, tracer, spanDecorators);
140 | return producer.send(record, wrappedCallback);
141 | }
142 | }
143 |
144 | @Override
145 | public void flush() {
146 | producer.flush();
147 | }
148 |
149 | @Override
150 |   public List<PartitionInfo> partitionsFor(String topic) {
151 | return producer.partitionsFor(topic);
152 | }
153 |
154 | @Override
155 |   public Map<MetricName, ? extends Metric> metrics() {
156 | return producer.metrics();
157 | }
158 |
159 | @Override
160 | public void close() {
161 | producer.close();
162 | }
163 |
164 | @Override
165 | public void close(Duration duration) {
166 | producer.close(duration);
167 | }
168 |
169 | @Override
170 | public void close(long timeout, TimeUnit timeUnit) {
171 | producer.close(timeout, timeUnit);
172 | }
173 |
174 | }
175 |
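
A minimal usage sketch for the decorating producer (not part of the repository; producerProps, tracer and the topic name are assumptions for illustration). send() starts a producer span, injects its context into the record headers, and finishes the span from the wrapped callback:

    KafkaProducer<String, String> plainProducer = new KafkaProducer<>(producerProps);
    TracingKafkaProducer<String, String> tracingProducer =
        new TracingKafkaProducer<>(plainProducer, tracer);
    tracingProducer.send(new ProducerRecord<>("example-topic", "key", "value"));
    tracingProducer.close();
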
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaProducerBuilder.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.Tracer;
17 | import java.util.Collection;
18 | import java.util.Collections;
19 | import java.util.function.BiFunction;
20 | import org.apache.kafka.clients.producer.Producer;
21 | import org.apache.kafka.clients.producer.ProducerRecord;
22 |
23 | public class TracingKafkaProducerBuilder<K, V> {
24 |   private Collection<SpanDecorator> spanDecorators;
25 |   private Producer<K, V> producer;
26 |   private Tracer tracer;
27 |   private BiFunction<String, ProducerRecord, String> producerSpanNameProvider;
28 |
29 |   public TracingKafkaProducerBuilder(Producer<K, V> producer, Tracer tracer) {
30 | this.tracer = tracer;
31 | this.producer = producer;
32 | this.spanDecorators = Collections.singletonList(SpanDecorator.STANDARD_TAGS);
33 | this.producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME;
34 | }
35 |
36 |   public TracingKafkaProducerBuilder<K, V> withDecorators(Collection<SpanDecorator> spanDecorators) {
37 | this.spanDecorators = Collections.unmodifiableCollection(spanDecorators);
38 | return this;
39 | }
40 |
41 |   public TracingKafkaProducerBuilder<K, V> withSpanNameProvider(
42 |       BiFunction<String, ProducerRecord, String> producerSpanNameProvider) {
43 | this.producerSpanNameProvider = producerSpanNameProvider;
44 | return this;
45 | }
46 |
47 |   public TracingKafkaProducer<K, V> build() {
48 | return new TracingKafkaProducer<>(producer, tracer, spanDecorators, producerSpanNameProvider);
49 | }
50 | }
51 |
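
A hedged builder example mirroring the consumer side (not part of the repository; plainProducer, tracer and customDecorator are assumptions for illustration):

    TracingKafkaProducer<String, String> producer =
        new TracingKafkaProducerBuilder<>(plainProducer, tracer)
            .withDecorators(Collections.singletonList(customDecorator))
            .withSpanNameProvider(ClientSpanNameProvider.PRODUCER_OPERATION_NAME_TOPIC)
            .build();
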
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingKafkaUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.References;
17 | import io.opentracing.Span;
18 | import io.opentracing.SpanContext;
19 | import io.opentracing.Tracer;
20 | import io.opentracing.propagation.Format;
21 | import io.opentracing.tag.Tags;
22 | import java.util.Collection;
23 | import java.util.Collections;
24 | import java.util.function.BiFunction;
25 | import org.apache.kafka.clients.consumer.ConsumerRecord;
26 | import org.apache.kafka.clients.producer.ProducerRecord;
27 | import org.apache.kafka.common.header.Headers;
28 | import org.slf4j.Logger;
29 | import org.slf4j.LoggerFactory;
30 |
31 | public class TracingKafkaUtils {
32 |
33 | private static final Logger logger = LoggerFactory.getLogger(TracingKafkaUtils.class);
34 | public static final String TO_PREFIX = "To_";
35 | public static final String FROM_PREFIX = "From_";
36 |
37 | /**
38 | * Extract Span Context from record headers
39 | *
40 | * @param headers record headers
41 | * @return span context
42 | */
43 | public static SpanContext extractSpanContext(Headers headers, Tracer tracer) {
44 | return tracer
45 | .extract(Format.Builtin.TEXT_MAP, new HeadersMapExtractAdapter(headers));
46 | }
47 |
48 | /**
49 | * Inject Span Context to record headers
50 | *
51 | * @param spanContext Span Context
52 | * @param headers record headers
53 | */
54 | public static void inject(SpanContext spanContext, Headers headers,
55 | Tracer tracer) {
56 | tracer.inject(spanContext, Format.Builtin.TEXT_MAP, new HeadersMapInjectAdapter(headers));
57 | }
58 |
59 |   public static <K, V> Span buildAndInjectSpan(ProducerRecord<K, V> record, Tracer tracer) {
60 | return buildAndInjectSpan(record, tracer, ClientSpanNameProvider.PRODUCER_OPERATION_NAME, null,
61 | Collections.singletonList(SpanDecorator.STANDARD_TAGS));
62 | }
63 |
64 |   public static <K, V> Span buildAndInjectSpan(ProducerRecord<K, V> record, Tracer tracer,
65 |       BiFunction<String, ProducerRecord, String> producerSpanNameProvider,
66 | SpanContext parent) {
67 | return buildAndInjectSpan(record, tracer, producerSpanNameProvider, parent,
68 | Collections.singletonList(SpanDecorator.STANDARD_TAGS));
69 | }
70 |
71 |   public static <K, V> Span buildAndInjectSpan(ProducerRecord<K, V> record, Tracer tracer,
72 |       BiFunction<String, ProducerRecord, String> producerSpanNameProvider,
73 |       SpanContext parent, Collection<SpanDecorator> spanDecorators) {
74 | String producerOper =
75 | TO_PREFIX + record.topic(); // <======== It provides better readability in the UI
76 | Tracer.SpanBuilder spanBuilder = tracer
77 | .buildSpan(producerSpanNameProvider.apply(producerOper, record))
78 | .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_PRODUCER);
79 |
80 | SpanContext spanContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
81 |
82 | if (spanContext != null) {
83 | spanBuilder.asChildOf(spanContext);
84 | } else if (parent != null) {
85 | spanBuilder.asChildOf(parent);
86 | }
87 |
88 | Span span = spanBuilder.start();
89 |
90 | for (SpanDecorator decorator : spanDecorators) {
91 | decorator.onSend(record, span);
92 | }
93 |
94 | try {
95 | TracingKafkaUtils.inject(span.context(), record.headers(), tracer);
96 | } catch (Exception e) {
97 | // it can happen if headers are read only (when record is sent second time)
98 | logger.error("failed to inject span context. sending record second time?", e);
99 | }
100 |
101 | return span;
102 | }
103 |
104 |   public static <K, V> void buildAndFinishChildSpan(ConsumerRecord<K, V> record, Tracer tracer) {
105 | buildAndFinishChildSpan(record, tracer, ClientSpanNameProvider.CONSUMER_OPERATION_NAME,
106 | Collections.singletonList(SpanDecorator.STANDARD_TAGS));
107 | }
108 |
109 |   public static <K, V> void buildAndFinishChildSpan(ConsumerRecord<K, V> record, Tracer tracer,
110 |       BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider) {
111 | buildAndFinishChildSpan(record, tracer, consumerSpanNameProvider,
112 | Collections.singletonList(SpanDecorator.STANDARD_TAGS));
113 | }
114 |
115 |   public static <K, V> void buildAndFinishChildSpan(ConsumerRecord<K, V> record, Tracer tracer,
116 |       BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider,
117 |       Collection<SpanDecorator> spanDecorators) {
118 | SpanContext parentContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
119 | String consumerOper =
120 | FROM_PREFIX + record.topic(); // <====== It provides better readability in the UI
121 | Tracer.SpanBuilder spanBuilder = tracer
122 | .buildSpan(consumerSpanNameProvider.apply(consumerOper, record))
123 | .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER);
124 |
125 | if (parentContext != null) {
126 | spanBuilder.addReference(References.FOLLOWS_FROM, parentContext);
127 | }
128 |
129 | Span span = spanBuilder.start();
130 |
131 | for (SpanDecorator decorator : spanDecorators) {
132 | decorator.onResponse(record, span);
133 | }
134 |
135 | span.finish();
136 |
137 | // Inject created span context into record headers for extraction by client to continue span chain
138 | inject(span.context(), record.headers(), tracer);
139 | }
140 | }
141 |
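
A hedged sketch of using these helpers directly in a hand-rolled processing loop (not part of the repository; the tracer and records variables are assumptions for illustration). It continues the trace from the context that the consumer side wrote back into the record headers:

    for (ConsumerRecord<String, String> record : records) {
      SpanContext parentContext = TracingKafkaUtils.extractSpanContext(record.headers(), tracer);
      Span processingSpan = tracer.buildSpan("process")
          .addReference(References.FOLLOWS_FROM, parentContext)
          .start();
      try {
        // application-specific processing of the record goes here
      } finally {
        processingSpan.finish();
      }
    }
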
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/main/java/io/opentracing/contrib/kafka/TracingProducerInterceptor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import io.opentracing.util.GlobalTracer;
17 | import java.util.Map;
18 | import org.apache.kafka.clients.producer.ProducerInterceptor;
19 | import org.apache.kafka.clients.producer.ProducerRecord;
20 | import org.apache.kafka.clients.producer.RecordMetadata;
21 |
22 | public class TracingProducerInterceptor<K, V> implements ProducerInterceptor<K, V> {
23 | 
24 |   @Override
25 |   public ProducerRecord<K, V> onSend(ProducerRecord<K, V> producerRecord) {
26 | TracingKafkaUtils.buildAndInjectSpan(producerRecord, GlobalTracer.get()).finish();
27 | return producerRecord;
28 | }
29 |
30 | @Override
31 | public void onAcknowledgement(RecordMetadata recordMetadata, Exception e) {
32 | }
33 |
34 | @Override
35 | public void close() {
36 |
37 | }
38 |
39 | @Override
40 |   public void configure(Map<String, ?> map) {
41 |
42 | }
43 | }
44 |
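
A minimal configuration sketch for the producer interceptor (not part of the repository; bootstrap servers and serializer classes are placeholder assumptions). Because the interceptor resolves the tracer via GlobalTracer.get(), a tracer has to be registered with GlobalTracer before records are sent:

    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    // Start, inject and finish a span for every record passed to send().
    props.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
        TracingProducerInterceptor.class.getName());
    KafkaProducer<String, String> producer = new KafkaProducer<>(props);
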
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/HeadersMapExtractAdapterTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 | import static org.junit.Assert.assertEquals;
17 | import static org.junit.Assert.assertNotNull;
18 | import static org.junit.Assert.assertNull;
19 |
20 | import java.util.Map.Entry;
21 | import org.apache.kafka.common.header.Headers;
22 | import org.apache.kafka.common.header.internals.RecordHeaders;
23 | import org.junit.Test;
24 |
25 |
26 | public class HeadersMapExtractAdapterTest {
27 |
28 | @Test
29 | public void verifyNullHeaderHandled() {
30 | Headers headers = new RecordHeaders();
31 | headers.add("test_null_header", null);
32 | HeadersMapExtractAdapter headersMapExtractAdapter = new HeadersMapExtractAdapter(headers);
33 |     Entry<String, String> header = headersMapExtractAdapter.iterator().next();
34 | assertNotNull(header);
35 | assertEquals(header.getKey(), "test_null_header");
36 | assertNull(header.getValue());
37 |
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/OperationNameSpanNameTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 |
15 | package io.opentracing.contrib.kafka;
16 |
17 | import static org.junit.Assert.assertEquals;
18 |
19 | import java.util.function.BiFunction;
20 | import org.apache.kafka.clients.consumer.ConsumerRecord;
21 | import org.apache.kafka.clients.producer.ProducerRecord;
22 | import org.junit.Test;
23 |
24 | public class OperationNameSpanNameTest {
25 |   private final ConsumerRecord<String, Integer> consumerRecord = new ConsumerRecord<>(
26 |       "example_topic", 0, 0, "KEY", 999);
27 |   private final ProducerRecord<String, Integer> producerRecord = new ProducerRecord<>(
28 |       "example_topic", 0, System.currentTimeMillis(), "KEY", 999);
29 |   private BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider;
30 |   private BiFunction<String, ProducerRecord, String> producerSpanNameProvider;
31 |
32 | @Test
33 | public void operationNameSpanNameTest() {
34 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME;
35 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME;
36 |
37 | assertEquals("receive", consumerSpanNameProvider.apply("receive", consumerRecord));
38 | assertEquals("send", producerSpanNameProvider.apply("send", producerRecord));
39 |
40 | assertEquals("unknown", consumerSpanNameProvider.apply(null, consumerRecord));
41 | assertEquals("unknown", producerSpanNameProvider.apply(null, producerRecord));
42 |
43 | assertEquals("receive", consumerSpanNameProvider.apply("receive", null));
44 | assertEquals("send", producerSpanNameProvider.apply("send", null));
45 |
46 | assertEquals("unknown", consumerSpanNameProvider.apply(null, null));
47 | assertEquals("unknown", producerSpanNameProvider.apply(null, null));
48 | }
49 |
50 | @Test
51 | public void prefixedOperationNameSpanNameTest() {
52 | consumerSpanNameProvider = ClientSpanNameProvider
53 | .CONSUMER_PREFIXED_OPERATION_NAME("KafkaClient: ");
54 | producerSpanNameProvider = ClientSpanNameProvider
55 | .PRODUCER_PREFIXED_OPERATION_NAME("KafkaClient: ");
56 |
57 | assertEquals("KafkaClient: receive", consumerSpanNameProvider.apply("receive", consumerRecord));
58 | assertEquals("KafkaClient: send", producerSpanNameProvider.apply("send", producerRecord));
59 |
60 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply(null, consumerRecord));
61 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply(null, producerRecord));
62 |
63 | assertEquals("KafkaClient: receive", consumerSpanNameProvider.apply("receive", null));
64 | assertEquals("KafkaClient: send", producerSpanNameProvider.apply("send", null));
65 |
66 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply(null, null));
67 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply(null, null));
68 |
69 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_OPERATION_NAME(null);
70 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_OPERATION_NAME(null);
71 |
72 | assertEquals("receive", consumerSpanNameProvider.apply("receive", consumerRecord));
73 | assertEquals("send", producerSpanNameProvider.apply("send", producerRecord));
74 |
75 | assertEquals("unknown", consumerSpanNameProvider.apply(null, consumerRecord));
76 | assertEquals("unknown", producerSpanNameProvider.apply(null, producerRecord));
77 |
78 | assertEquals("receive", consumerSpanNameProvider.apply("receive", null));
79 | assertEquals("send", producerSpanNameProvider.apply("send", null));
80 |
81 | assertEquals("unknown", consumerSpanNameProvider.apply(null, null));
82 | assertEquals("unknown", producerSpanNameProvider.apply(null, null));
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/OperationNameTopicSpanNameTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 |
15 | package io.opentracing.contrib.kafka;
16 |
17 | import static org.junit.Assert.assertEquals;
18 |
19 | import java.util.function.BiFunction;
20 | import org.apache.kafka.clients.consumer.ConsumerRecord;
21 | import org.apache.kafka.clients.producer.ProducerRecord;
22 | import org.junit.Test;
23 |
24 | public class OperationNameTopicSpanNameTest {
25 |   private final ConsumerRecord<String, Integer> consumerRecord = new ConsumerRecord<>(
26 |       "example_topic", 0, 0, "KEY", 999);
27 |   private final ProducerRecord<String, Integer> producerRecord = new ProducerRecord<>(
28 |       "example_topic", 0, System.currentTimeMillis(), "KEY", 999);
29 |   private BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider;
30 |   private BiFunction<String, ProducerRecord, String> producerSpanNameProvider;
31 |
32 | @Test
33 | public void operationNameTopicSpanNameTest() {
34 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_OPERATION_NAME_TOPIC;
35 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_OPERATION_NAME_TOPIC;
36 |
37 | assertEquals("receive - example_topic",
38 | consumerSpanNameProvider.apply("receive", consumerRecord));
39 | assertEquals("send - example_topic", producerSpanNameProvider.apply("send", producerRecord));
40 |
41 | assertEquals("unknown - example_topic", consumerSpanNameProvider.apply(null, consumerRecord));
42 | assertEquals("unknown - example_topic", producerSpanNameProvider.apply(null, producerRecord));
43 |
44 | assertEquals("receive - unknown", consumerSpanNameProvider.apply("receive", null));
45 | assertEquals("send - unknown", producerSpanNameProvider.apply("send", null));
46 |
47 | assertEquals("unknown - unknown", consumerSpanNameProvider.apply(null, null));
48 | assertEquals("unknown - unknown", producerSpanNameProvider.apply(null, null));
49 | }
50 |
51 | @Test
52 | public void prefixedOperationNameTopicSpanNameTest() {
53 | consumerSpanNameProvider = ClientSpanNameProvider
54 | .CONSUMER_PREFIXED_OPERATION_NAME_TOPIC("KafkaClient: ");
55 | producerSpanNameProvider = ClientSpanNameProvider
56 | .PRODUCER_PREFIXED_OPERATION_NAME_TOPIC("KafkaClient: ");
57 |
58 | assertEquals("KafkaClient: receive - example_topic",
59 | consumerSpanNameProvider.apply("receive", consumerRecord));
60 | assertEquals("KafkaClient: send - example_topic",
61 | producerSpanNameProvider.apply("send", producerRecord));
62 |
63 | assertEquals("KafkaClient: unknown - example_topic",
64 | consumerSpanNameProvider.apply(null, consumerRecord));
65 | assertEquals("KafkaClient: unknown - example_topic",
66 | producerSpanNameProvider.apply(null, producerRecord));
67 |
68 | assertEquals("KafkaClient: receive - unknown", consumerSpanNameProvider.apply("receive", null));
69 | assertEquals("KafkaClient: send - unknown", producerSpanNameProvider.apply("send", null));
70 |
71 | assertEquals("KafkaClient: unknown - unknown", consumerSpanNameProvider.apply(null, null));
72 | assertEquals("KafkaClient: unknown - unknown", producerSpanNameProvider.apply(null, null));
73 |
74 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_OPERATION_NAME_TOPIC(null);
75 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_OPERATION_NAME_TOPIC(null);
76 |
77 | assertEquals("receive - example_topic",
78 | consumerSpanNameProvider.apply("receive", consumerRecord));
79 | assertEquals("send - example_topic", producerSpanNameProvider.apply("send", producerRecord));
80 |
81 | assertEquals("unknown - example_topic", consumerSpanNameProvider.apply(null, consumerRecord));
82 | assertEquals("unknown - example_topic", producerSpanNameProvider.apply(null, producerRecord));
83 |
84 | assertEquals("receive - unknown", consumerSpanNameProvider.apply("receive", null));
85 | assertEquals("send - unknown", producerSpanNameProvider.apply("send", null));
86 |
87 | assertEquals("unknown - unknown", consumerSpanNameProvider.apply(null, null));
88 | assertEquals("unknown - unknown", producerSpanNameProvider.apply(null, null));
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TopicSpanNameTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 |
15 | package io.opentracing.contrib.kafka;
16 |
17 | import static org.junit.Assert.assertEquals;
18 |
19 | import java.util.function.BiFunction;
20 | import org.apache.kafka.clients.consumer.ConsumerRecord;
21 | import org.apache.kafka.clients.producer.ProducerRecord;
22 | import org.junit.Test;
23 |
24 | public class TopicSpanNameTest {
25 |
26 |   private final ConsumerRecord<String, Integer> consumerRecord = new ConsumerRecord<>(
27 |       "example_topic", 0, 0, "KEY", 999);
28 |   private final ProducerRecord<String, Integer> producerRecord = new ProducerRecord<>(
29 |       "example_topic", 0, System.currentTimeMillis(), "KEY", 999);
30 |   private BiFunction<String, ConsumerRecord, String> consumerSpanNameProvider;
31 |   private BiFunction<String, ProducerRecord, String> producerSpanNameProvider;
32 |
33 | @Test
34 | public void topicSpanNameTest() {
35 |
36 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_TOPIC;
37 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_TOPIC;
38 |
39 | assertEquals("example_topic", consumerSpanNameProvider.apply("receive", consumerRecord));
40 | assertEquals("example_topic", producerSpanNameProvider.apply("send", producerRecord));
41 |
42 | assertEquals("example_topic", consumerSpanNameProvider.apply(null, consumerRecord));
43 | assertEquals("example_topic", producerSpanNameProvider.apply(null, producerRecord));
44 |
45 | assertEquals("unknown", consumerSpanNameProvider.apply("receive", null));
46 | assertEquals("unknown", producerSpanNameProvider.apply("send", null));
47 | }
48 |
49 | @Test
50 | public void prefixedTopicSpanNameTest() {
51 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_TOPIC("KafkaClient: ");
52 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_TOPIC("KafkaClient: ");
53 |
54 | assertEquals("KafkaClient: example_topic",
55 | consumerSpanNameProvider.apply("receive", consumerRecord));
56 | assertEquals("KafkaClient: example_topic",
57 | producerSpanNameProvider.apply("send", producerRecord));
58 |
59 | assertEquals("KafkaClient: example_topic",
60 | consumerSpanNameProvider.apply(null, consumerRecord));
61 | assertEquals("KafkaClient: example_topic",
62 | producerSpanNameProvider.apply(null, producerRecord));
63 |
64 | assertEquals("KafkaClient: unknown", consumerSpanNameProvider.apply("receive", null));
65 | assertEquals("KafkaClient: unknown", producerSpanNameProvider.apply("send", null));
66 |
67 | consumerSpanNameProvider = ClientSpanNameProvider.CONSUMER_PREFIXED_TOPIC(null);
68 | producerSpanNameProvider = ClientSpanNameProvider.PRODUCER_PREFIXED_TOPIC(null);
69 |
70 | assertEquals("example_topic", consumerSpanNameProvider.apply("receive", consumerRecord));
71 | assertEquals("example_topic", producerSpanNameProvider.apply("send", producerRecord));
72 |
73 | assertEquals("example_topic", consumerSpanNameProvider.apply(null, consumerRecord));
74 | assertEquals("example_topic", producerSpanNameProvider.apply(null, producerRecord));
75 |
76 | assertEquals("unknown", consumerSpanNameProvider.apply("receive", null));
77 | assertEquals("unknown", producerSpanNameProvider.apply("send", null));
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TracingCallbackTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 |
17 | import static org.junit.Assert.assertEquals;
18 | import static org.junit.Assert.assertNull;
19 |
20 | import io.opentracing.Scope;
21 | import io.opentracing.Span;
22 | import io.opentracing.mock.MockSpan;
23 | import io.opentracing.mock.MockTracer;
24 | import io.opentracing.tag.Tags;
25 | import java.util.Arrays;
26 | import java.util.List;
27 | import org.apache.kafka.clients.consumer.ConsumerRecord;
28 | import org.apache.kafka.clients.producer.ProducerRecord;
29 | import org.junit.Before;
30 | import org.junit.Test;
31 |
32 |
33 | public class TracingCallbackTest {
34 |
35 | private MockTracer mockTracer = new MockTracer();
36 |
37 | @Before
38 | public void before() {
39 | mockTracer.reset();
40 | }
41 |
42 | @Test
43 | public void onCompletionWithError() {
44 | Span span = mockTracer.buildSpan("test").start();
45 | try (Scope ignored = mockTracer.activateSpan(span)) {
46 | TracingCallback callback = new TracingCallback(null, span, mockTracer);
47 | callback.onCompletion(null, new RuntimeException("test"));
48 | }
49 |
50 |     List<MockSpan> finished = mockTracer.finishedSpans();
51 | assertEquals(1, finished.size());
52 | assertEquals(1, finished.get(0).logEntries().size());
53 | assertEquals(true, finished.get(0).tags().get(Tags.ERROR.getKey()));
54 | }
55 |
56 | @Test
57 | public void onCompletionWithCustomErrorDecorators() {
58 | Span span = mockTracer.buildSpan("test").start();
59 | try (Scope ignored = mockTracer.activateSpan(span)) {
60 | TracingCallback callback = new TracingCallback(null, span, mockTracer,
61 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator()));
62 | callback.onCompletion(null, new RuntimeException("test"));
63 | }
64 |
65 |     List<MockSpan> finished = mockTracer.finishedSpans();
66 | assertEquals(1, finished.size());
67 | assertEquals(true, finished.get(0).tags().get(Tags.ERROR.getKey()));
68 | assertEquals("overwritten", finished.get(0).tags().get("error.of"));
69 | assertEquals("error-test", finished.get(0).tags().get("new.error.tag"));
70 | }
71 |
72 | @Test
73 | public void onCompletion() {
74 | Span span = mockTracer.buildSpan("test").start();
75 | try (Scope ignored = mockTracer.activateSpan(span)) {
76 | TracingCallback callback = new TracingCallback(null, span, mockTracer);
77 | callback.onCompletion(null, null);
78 | }
79 |
80 |     List<MockSpan> finished = mockTracer.finishedSpans();
81 | assertEquals(1, finished.size());
82 | assertEquals(0, finished.get(0).logEntries().size());
83 | assertNull(finished.get(0).tags().get(Tags.ERROR.getKey()));
84 | }
85 |
86 | private SpanDecorator createDecorator() {
87 | return new SpanDecorator() {
88 | @Override
89 |       public <K, V> void onSend(ProducerRecord<K, V> record, Span span) {
90 | }
91 |
92 | @Override
93 |       public <K, V> void onResponse(ConsumerRecord<K, V> record, Span span) {
94 | }
95 |
96 | @Override
97 | public void onError(Exception exception, Span span) {
98 | span.setTag("error.of", "overwritten");
99 | span.setTag("new.error.tag", "error-test");
100 | }
101 | };
102 | }
103 | }
--------------------------------------------------------------------------------
/opentracing-kafka-client/src/test/java/io/opentracing/contrib/kafka/TracingKafkaTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017-2020 The OpenTracing Authors
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 | * in compliance with the License. You may obtain a copy of the License at
6 | *
7 | * http://www.apache.org/licenses/LICENSE-2.0
8 | *
9 | * Unless required by applicable law or agreed to in writing, software distributed under the License
10 | * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 | * or implied. See the License for the specific language governing permissions and limitations under
12 | * the License.
13 | */
14 | package io.opentracing.contrib.kafka;
15 |
16 |
17 | import static org.junit.Assert.assertEquals;
18 | import static org.junit.Assert.assertNotNull;
19 | import static org.junit.Assert.assertNull;
20 | import static org.junit.Assert.assertTrue;
21 |
22 | import io.opentracing.Scope;
23 | import io.opentracing.Span;
24 | import io.opentracing.SpanContext;
25 | import io.opentracing.mock.MockSpan;
26 | import io.opentracing.mock.MockTracer;
27 | import io.opentracing.tag.Tags;
28 | import io.opentracing.util.GlobalTracer;
29 | import java.time.Duration;
30 | import java.util.ArrayList;
31 | import java.util.Arrays;
32 | import java.util.Collection;
33 | import java.util.Collections;
34 | import java.util.List;
35 | import java.util.Map;
36 | import java.util.concurrent.CountDownLatch;
37 | import java.util.concurrent.ExecutorService;
38 | import java.util.concurrent.Executors;
39 | import java.util.concurrent.TimeUnit;
40 | import java.util.function.BiFunction;
41 | import org.apache.kafka.clients.consumer.Consumer;
42 | import org.apache.kafka.clients.consumer.ConsumerConfig;
43 | import org.apache.kafka.clients.consumer.ConsumerRecord;
44 | import org.apache.kafka.clients.consumer.ConsumerRecords;
45 | import org.apache.kafka.clients.consumer.KafkaConsumer;
46 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
47 | import org.apache.kafka.clients.producer.KafkaProducer;
48 | import org.apache.kafka.clients.producer.Producer;
49 | import org.apache.kafka.clients.producer.ProducerConfig;
50 | import org.apache.kafka.clients.producer.ProducerRecord;
51 | import org.apache.kafka.common.TopicPartition;
52 | import org.junit.Before;
53 | import org.junit.BeforeClass;
54 | import org.junit.ClassRule;
55 | import org.junit.Test;
56 | import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
57 | import org.springframework.kafka.test.utils.KafkaTestUtils;
58 |
59 | public class TracingKafkaTest {
60 |
61 | @ClassRule
62 | public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(2, true, 2, "messages");
63 | private static final MockTracer mockTracer = new MockTracer();
64 |
65 | @BeforeClass
66 | public static void init() {
67 | GlobalTracer.registerIfAbsent(mockTracer);
68 | }
69 |
70 | @Before
71 | public void before() {
72 | mockTracer.reset();
73 | }
74 |
75 | @Test
76 | public void with_interceptors() throws Exception {
77 |     Map<String, Object> senderProps = KafkaTestUtils
78 | .producerProps(embeddedKafka.getEmbeddedKafka());
79 | senderProps
80 | .put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, TracingProducerInterceptor.class.getName());
81 |     KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
82 |
83 | producer.send(new ProducerRecord<>("messages", 1, "test"));
84 |
85 | final CountDownLatch latch = new CountDownLatch(1);
86 | createConsumer(latch, 1, true, null);
87 |
88 | producer.close();
89 |
90 |     List<MockSpan> mockSpans = mockTracer.finishedSpans();
91 | assertEquals(2, mockSpans.size());
92 | checkSpans(mockSpans);
93 | assertNull(mockTracer.activeSpan());
94 | }
95 |
96 | @Test
97 | public void test() throws Exception {
98 |     Producer<Integer, String> producer = createTracingProducer();
99 |
100 | // Send 1
101 | producer.send(new ProducerRecord<>("messages", 1, "test"));
102 |
103 | // Send 2
104 | producer.send(new ProducerRecord<>("messages", 1, "test"),
105 | (metadata, exception) -> assertEquals("messages", metadata.topic()));
106 |
107 | final CountDownLatch latch = new CountDownLatch(2);
108 | createConsumer(latch, 1, false, null);
109 |
110 | producer.close();
111 |
112 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
113 | assertEquals(4, mockSpans.size());
114 | checkSpans(mockSpans);
115 | assertNull(mockTracer.activeSpan());
116 | }
117 |
118 | @Test
119 | public void testWithParentContext() throws Exception {
120 | TracingKafkaProducer<Integer, String> producer = createTracingProducer();
121 |
122 | final MockSpan parent = mockTracer.buildSpan("parent").start();
123 |
124 | // Send 1
125 | producer.send(new ProducerRecord<>("messages", 1, "test"), parent.context());
126 |
127 | // Send 2
128 | producer.send(new ProducerRecord<>("messages", 1, "test"),
129 | (metadata, exception) -> assertEquals("messages", metadata.topic()), parent.context());
130 |
131 | final CountDownLatch latch = new CountDownLatch(2);
132 | createConsumer(latch, 1, false, null);
133 |
134 | producer.close();
135 |
136 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
137 | assertEquals(4, mockSpans.size());
138 | checkSpans(mockSpans);
139 | for (MockSpan span : mockSpans) {
140 | assertEquals(parent.context().traceId(), span.context().traceId());
141 | }
142 |
143 | final List<MockSpan> sendSpans = getByOperationNameAll(mockSpans,
144 | TracingKafkaUtils.TO_PREFIX + "messages");
145 | assertEquals(2, sendSpans.size());
146 | for (MockSpan sendSpan : sendSpans) {
147 | assertEquals(parent.context().spanId(), sendSpan.parentId());
148 | }
149 |
150 | parent.finish();
151 |
152 | assertNull(mockTracer.activeSpan());
153 | }
154 |
155 | @Test
156 | public void testNotTracedProducer() throws Exception {
157 | Producer<Integer, String> producer = createProducer();
158 |
159 | // Send 1
160 | producer.send(new ProducerRecord<>("messages", 1, "test"));
161 |
162 | // Send 2
163 | producer.send(new ProducerRecord<>("messages", 1, "test"),
164 | (metadata, exception) -> assertEquals("messages", metadata.topic()));
165 |
166 | final CountDownLatch latch = new CountDownLatch(2);
167 | createConsumer(latch, 1, false, null);
168 |
169 | producer.close();
170 |
171 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
172 | assertEquals(2, mockSpans.size());
173 | checkSpans(mockSpans);
174 | assertNull(mockTracer.activeSpan());
175 | }
176 |
177 | @Test
178 | public void testWithTopicNameProvider() throws Exception {
179 | Producer<Integer, String> producer = createNameProvidedProducer(
180 | ClientSpanNameProvider.PRODUCER_TOPIC);
181 |
182 | // Send 1
183 | producer.send(new ProducerRecord<>("messages", 1, "test"));
184 |
185 | // Send 2
186 | producer.send(new ProducerRecord<>("messages", 1, "test"),
187 | (metadata, exception) -> assertEquals("messages", metadata.topic()));
188 |
189 | final CountDownLatch latch = new CountDownLatch(2);
190 | createConsumer(latch, 1, false, ClientSpanNameProvider.CONSUMER_TOPIC);
191 | producer.close();
192 |
193 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
194 | assertEquals(4, mockSpans.size());
195 | for (MockSpan mockSpan : mockSpans) {
196 | String operationName = mockSpan.operationName();
197 | assertEquals("messages", operationName);
198 | String spanKind = (String) mockSpan.tags().get(Tags.SPAN_KIND.getKey());
199 | assertTrue(
200 | spanKind.equals(Tags.SPAN_KIND_CONSUMER) || spanKind.equals(Tags.SPAN_KIND_PRODUCER));
201 | }
202 | assertNull(mockTracer.activeSpan());
203 | }
204 |
205 | @Test
206 | public void with_parent() throws Exception {
207 | Producer<Integer, String> producer = createTracingProducer();
208 |
209 | final MockSpan parent = mockTracer.buildSpan("parent").start();
210 | try (Scope ignored = mockTracer.activateSpan(parent)) {
211 | producer.send(new ProducerRecord<>("messages", 1, "test"));
212 | }
213 | parent.finish();
214 |
215 | final CountDownLatch latch = new CountDownLatch(1);
216 | createConsumer(latch, 1, false, null);
217 |
218 | producer.close();
219 |
220 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
221 | assertEquals(3, mockSpans.size());
222 |
223 | assertNotNull(parent);
224 |
225 | for (MockSpan span : mockSpans) {
226 | assertEquals(parent.context().traceId(), span.context().traceId());
227 | }
228 |
229 | MockSpan sendSpan = getByOperationName(mockSpans, TracingKafkaUtils.TO_PREFIX + "messages");
230 | assertNotNull(sendSpan);
231 |
232 | MockSpan receiveSpan = getByOperationName(mockSpans,
233 | TracingKafkaUtils.FROM_PREFIX + "messages");
234 | assertNotNull(receiveSpan);
235 |
236 | assertEquals(sendSpan.context().spanId(), receiveSpan.parentId());
237 | assertEquals(parent.context().spanId(), sendSpan.parentId());
238 |
239 | assertNull(mockTracer.activeSpan());
240 | }
241 |
242 | @Test
243 | public void nullKey() throws Exception {
244 | Producer<Integer, String> producer = createTracingProducer();
245 |
246 | ProducerRecord<Integer, String> record = new ProducerRecord<>("messages", "test");
247 | producer.send(record);
248 |
249 | final Map<String, Object> consumerProps = KafkaTestUtils
250 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
251 | consumerProps.put("auto.offset.reset", "earliest");
252 |
253 | final CountDownLatch latch = new CountDownLatch(1);
254 | createConsumer(latch, null, false, null);
255 |
256 | producer.close();
257 | }
258 |
259 | @Test
260 | public void testSeekInConsumerAndCloseInProducer() throws InterruptedException {
261 |
262 | Producer<Integer, String> producer = createTracingProducer();
263 |
264 | // Send 1
265 | producer.send(new ProducerRecord<>("messages-for-seek", 1, "test"));
266 |
267 | producer.close(Duration.ofSeconds(40));
268 |
269 | final CountDownLatch latch = new CountDownLatch(1);
270 | Integer key = 1;
271 |
272 | ExecutorService executorService = Executors.newSingleThreadExecutor();
273 |
274 | final Map<String, Object> consumerProps = KafkaTestUtils
275 | .consumerProps("sampleRawConsumer", "false", embeddedKafka.getEmbeddedKafka());
276 | consumerProps.put("auto.offset.reset", "earliest");
277 |
278 | executorService.execute(() -> {
279 | KafkaConsumer<Integer, String> kafkaConsumer = new KafkaConsumer<>(consumerProps);
280 | Consumer<Integer, String> consumer;
281 |
282 | consumer = new TracingKafkaConsumer<>(kafkaConsumer, mockTracer, null);
283 |
284 | TopicPartition tp = new TopicPartition("messages-for-seek", 0);
285 | consumer.assign(Collections.singletonList(tp));
286 |
287 | consumer.seek(tp, new OffsetAndMetadata(0));
288 |
289 | while (latch.getCount() > 0) {
290 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
291 | for (ConsumerRecord<Integer, String> record : records) {
292 | SpanContext spanContext = TracingKafkaUtils
293 | .extractSpanContext(record.headers(), mockTracer);
294 | assertNotNull(spanContext);
295 | assertEquals("test", record.value());
296 | assertEquals(key, record.key());
297 |
298 | consumer.commitSync();
299 | latch.countDown();
300 | }
301 | }
302 | kafkaConsumer.close();
303 | });
304 |
305 | assertTrue(latch.await(30, TimeUnit.SECONDS));
306 | }
307 |
308 | @Test
309 | public void testProducerBuilderWithDecorators() throws InterruptedException {
310 | Producer<Integer, String> producer = createProducerWithDecorators(null);
311 |
312 | producer.send(new ProducerRecord<>("messages", 1, "test"));
313 |
314 | producer = createProducerWithDecorators(
315 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator()));
316 |
317 | producer.send(new ProducerRecord<>("messages", 1, "test"));
318 |
319 | producer = createProducerWithDecorators(new ArrayList<>());
320 |
321 | producer.send(new ProducerRecord<>("messages", 1, "test"));
322 |
323 | final CountDownLatch latch = new CountDownLatch(3);
324 | createConsumer(latch, 1, false, null);
325 |
326 | producer.close();
327 |
328 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
329 |
330 | // With only standard decorator
331 | MockSpan standardSpan = mockSpans.get(0);
332 | checkSpans(Collections.singletonList(standardSpan));
333 | assertEquals("kafka", standardSpan.tags().get("peer.service"));
334 |
335 | // With standard and custom decorator
336 | MockSpan customSpan = mockSpans.get(1);
337 | checkSpans(Collections.singletonList(customSpan));
338 | assertEquals("overwritten", customSpan.tags().get("peer.service"));
339 | assertEquals("new-producer-test", customSpan.tags().get("new.tag.test"));
340 |
341 | // Without any decorator
342 | assertEquals("producer", mockSpans.get(2).tags().get("span.kind"));
343 | }
344 |
345 | @Test
346 | public void testProducerBuilderWithSpanNameProvider() throws InterruptedException {
347 | Producer<Integer, String> producer = createProducerWithSpanNameProvider(null);
348 | ProducerRecord<Integer, String> record1 = new ProducerRecord<>("messages", 1, "test");
349 |
350 | producer.send(record1);
351 |
352 | BiFunction<String, ProducerRecord, String> operationNameProvider =
353 | (operationName, producerRecord) -> createSpanNameProvider();
354 | producer = createProducerWithSpanNameProvider(operationNameProvider);
355 |
356 | ProducerRecord<Integer, String> record2 = new ProducerRecord<>("messages", 1, "test");
357 |
358 | producer.send(record2);
359 |
360 | final CountDownLatch latch = new CountDownLatch(2);
361 | createConsumer(latch, 1, false, null);
362 |
363 | producer.close();
364 |
365 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
366 |
367 | // With standard span name provider
368 | assertEquals("To_" + record1.topic(), mockSpans.get(0).operationName());
369 |
370 | // With custom span name provider
371 | assertEquals("Test_SpanNameProvider", mockSpans.get(1).operationName());
372 | }
373 |
374 | @Test
375 | public void testConsumerBuilderWithStandardDecorators() throws InterruptedException {
376 | Producer<Integer, String> producer = createTracingProducer();
377 | producer.send(new ProducerRecord<>("messages", 1, "test"));
378 |
379 | producer.close();
380 |
381 | assertEquals(1, mockTracer.finishedSpans().size());
382 |
383 | ExecutorService executorService = Executors.newSingleThreadExecutor();
384 | final CountDownLatch latch = new CountDownLatch(1);
385 |
386 | executorService.execute(() -> {
387 | Consumer<Integer, String> consumer = createConsumerWithDecorators(null);
388 |
389 | while (latch.getCount() > 0) {
390 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
391 | for (ConsumerRecord<Integer, String> record : records) {
392 | SpanContext spanContext = TracingKafkaUtils
393 | .extractSpanContext(record.headers(), mockTracer);
394 | assertNotNull(spanContext);
395 | assertEquals("test", record.value());
396 | assertEquals((Integer) 1, record.key());
397 |
398 | consumer.commitSync();
399 | latch.countDown();
400 | }
401 | }
402 | consumer.close();
403 | });
404 |
405 | assertTrue(latch.await(30, TimeUnit.SECONDS));
406 |
407 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
408 | checkSpans(mockSpans);
409 |
410 | MockSpan standardSpan = mockSpans.get(1);
411 | assertEquals("kafka", standardSpan.tags().get("peer.service"));
412 | }
413 |
414 | @Test
415 | public void testConsumerBuilderWithCustomDecorators() throws InterruptedException {
416 | Producer<Integer, String> producer = createTracingProducer();
417 | producer.send(new ProducerRecord<>("messages", 1, "test"));
418 |
419 | producer.close();
420 |
421 | assertEquals(1, mockTracer.finishedSpans().size());
422 |
423 | ExecutorService executorService = Executors.newSingleThreadExecutor();
424 | final CountDownLatch latch = new CountDownLatch(1);
425 |
426 | executorService.execute(() -> {
427 | Consumer<Integer, String> consumer =
428 | createConsumerWithDecorators(
429 | Arrays.asList(SpanDecorator.STANDARD_TAGS, createDecorator()));
430 |
431 | while (latch.getCount() > 0) {
432 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
433 | for (ConsumerRecord<Integer, String> record : records) {
434 | SpanContext spanContext = TracingKafkaUtils
435 | .extractSpanContext(record.headers(), mockTracer);
436 | assertNotNull(spanContext);
437 | assertEquals("test", record.value());
438 | assertEquals((Integer) 1, record.key());
439 |
440 | consumer.commitSync();
441 | latch.countDown();
442 | }
443 | }
444 | consumer.close();
445 | });
446 |
447 | assertTrue(latch.await(30, TimeUnit.SECONDS));
448 |
449 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
450 | checkSpans(mockSpans);
451 |
452 | MockSpan customSpan = mockSpans.get(1);
453 | assertEquals("overwritten", customSpan.tags().get("peer.service"));
454 | assertEquals("new-consumer-test", customSpan.tags().get("new.tag.test"));
455 | }
456 |
457 | @Test
458 | public void testConsumerBuilderWithoutDecorators() throws InterruptedException {
459 | Producer<Integer, String> producer = createTracingProducer();
460 | producer.send(new ProducerRecord<>("messages", 1, "test"));
461 |
462 | producer.close();
463 |
464 | assertEquals(1, mockTracer.finishedSpans().size());
465 |
466 | ExecutorService executorService = Executors.newSingleThreadExecutor();
467 | final CountDownLatch latch = new CountDownLatch(1);
468 |
469 | executorService.execute(() -> {
470 | Consumer<Integer, String> consumer = createConsumerWithDecorators(new ArrayList<>());
471 |
472 | while (latch.getCount() > 0) {
473 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
474 | for (ConsumerRecord<Integer, String> record : records) {
475 | SpanContext spanContext = TracingKafkaUtils
476 | .extractSpanContext(record.headers(), mockTracer);
477 | assertNotNull(spanContext);
478 | assertEquals("test", record.value());
479 | assertEquals((Integer) 1, record.key());
480 |
481 | consumer.commitSync();
482 | latch.countDown();
483 | }
484 | }
485 | consumer.close();
486 | });
487 |
488 | assertTrue(latch.await(30, TimeUnit.SECONDS));
489 |
490 | List<MockSpan> mockSpans = mockTracer.finishedSpans();
491 |
492 | MockSpan span = mockSpans.get(1);
493 | assertEquals("consumer", span.tags().get("span.kind"));
494 | }
495 |
496 | @Test
497 | public void testConsumerBuilderWithCustomSpanNameProvider() throws InterruptedException {
498 | Producer<Integer, String> producer = createTracingProducer();
499 | producer.send(new ProducerRecord<>("messages", 1, "test"));
500 | producer.close();
501 |
502 | assertEquals(1, mockTracer.finishedSpans().size());
503 |
504 | ExecutorService executorService = Executors.newSingleThreadExecutor();
505 | final CountDownLatch latch = new CountDownLatch(1);
506 |
507 | executorService.execute(() -> {
508 | BiFunction<String, ConsumerRecord, String> operationNameProvider =
509 | (operationName, consumerRecord) -> createSpanNameProvider();
510 | Consumer<Integer, String> consumer = createConsumerWithSpanNameProvider(
511 | operationNameProvider);
512 |
513 | while (latch.getCount() > 0) {
514 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
515 | for (ConsumerRecord<Integer, String> record : records) {
516 | SpanContext spanContext = TracingKafkaUtils
517 | .extractSpanContext(record.headers(), mockTracer);
518 | assertNotNull(spanContext);
519 | assertEquals("test", record.value());
520 | assertEquals((Integer) 1, record.key());
521 |
522 | consumer.commitSync();
523 | latch.countDown();
524 | }
525 | }
526 | consumer.close();
527 | });
528 |
529 | assertTrue(latch.await(30, TimeUnit.SECONDS));
530 |
531 | assertEquals("Test_SpanNameProvider", mockTracer.finishedSpans().get(1).operationName());
532 | }
533 |
534 | @Test
535 | public void testConsumerBuilderWithStandardSpanNameProvider() throws InterruptedException {
536 | Producer<Integer, String> producer = createTracingProducer();
537 | producer.send(new ProducerRecord<>("messages", 1, "test"));
538 | producer.close();
539 |
540 | assertEquals(1, mockTracer.finishedSpans().size());
541 |
542 | ExecutorService executorService = Executors.newSingleThreadExecutor();
543 | final CountDownLatch latch = new CountDownLatch(1);
544 |
545 | executorService.execute(() -> {
546 | Consumer<Integer, String> consumer = createConsumerWithSpanNameProvider(null);
547 |
548 | while (latch.getCount() > 0) {
549 | ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(100));
550 | for (ConsumerRecord<Integer, String> record : records) {
551 | SpanContext spanContext = TracingKafkaUtils
552 | .extractSpanContext(record.headers(), mockTracer);
553 | assertNotNull(spanContext);
554 | assertEquals("test", record.value());
555 | assertEquals((Integer) 1, record.key());
556 |
557 | consumer.commitSync();
558 | latch.countDown();
559 | }
560 | }
561 | consumer.close();
562 | });
563 |
564 | assertTrue(latch.await(30, TimeUnit.SECONDS));
565 |
566 | assertEquals("From_messages", mockTracer.finishedSpans().get(1).operationName());
567 | }
568 |
569 | private TracingKafkaProducer<Integer, String> createTracingProducer() {
570 | return new TracingKafkaProducer<>(createProducer(), mockTracer);
571 | }
572 |
573 | private SpanDecorator createDecorator() {
574 | return new SpanDecorator() {
575 | @Override
576 | public <K, V> void onSend(ProducerRecord<K, V> record, Span span) {
577 | span.setTag("peer.service", "overwritten");
578 | span.setTag("new.tag.test", "new-producer-test");
579 | }
580 |
581 | @Override
582 | public <K, V> void onResponse(ConsumerRecord<K, V> record, Span span) {
583 | span.setTag("peer.service", "overwritten");
584 | span.setTag("new.tag.test", "new-consumer-test");
585 | }
586 |
587 | @Override
588 | public void onError(Exception exception, Span span) {
589 | }
590 | };
591 | }
592 |
593 | private Producer<Integer, String> createProducer() {
594 | Map