├── .idea
├── .name
├── sbt.xml
├── codeStyles
│ ├── codeStyleConfig.xml
│ └── Project.xml
├── vcs.xml
├── ClojureProjectResolveSettings.xml
├── modules.xml
├── Kafka_cheat_sheet.iml
├── misc.xml
├── compiler.xml
└── $PRODUCT_WORKSPACE_FILE$
├── Kafka_cheat_sheet.iml
├── pom.xml
├── docker-compose.yml
├── kafka-producer-tracing
├── pom.xml
├── kafka-producer-tracing.iml
└── src
│ └── main
│ └── java
│ └── ProducerTracing.java
├── kafka-consumer-tracing
├── pom.xml
├── kafka-consumer-tracing.iml
└── src
│ └── main
│ └── java
│ └── ConsumerTracing.java
├── kafka-streaming
├── src
│ └── main
│ │ └── java
│ │ └── KafkaStream.java
├── pom.xml
└── kafka-streaming.iml
├── .gitignore
└── README.md
/.idea/.name:
--------------------------------------------------------------------------------
1 | kafka
--------------------------------------------------------------------------------
/Kafka_cheat_sheet.iml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/.idea/sbt.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/codeStyles/codeStyleConfig.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/ClojureProjectResolveSettings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | IDE
5 |
6 |
--------------------------------------------------------------------------------
/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | {
4 | :com.gfredericks.test.chuck.clojure-test/checking :only-indent
5 | :cursive.formatting/align-binding-forms true
6 | :jackdaw.streams/transform :only-indent
7 | }
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | io.github.kafka
8 | kafka
9 | pom
10 | 1.0-SNAPSHOT
11 |
12 | kafka-producer-tracing
13 | kafka-consumer-tracing
14 | kafka-streaming
15 |
16 |
17 |
18 | 1.8
19 | 1.8
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.idea/Kafka_cheat_sheet.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/.idea/$PRODUCT_WORKSPACE_FILE$:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | scala-sdk-2.13.0
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | 11
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 | 11
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # This file uses the version 2 docker-compose file format, described here:
2 | # https://docs.docker.com/compose/compose-file/#version-2
3 | #
4 | # It extends the default configuration from docker-compose.yml to run the
5 | # zipkin-elasticsearch container instead of the zipkin-mysql container.
6 |
7 | version: '2.1'
8 |
9 | services:
10 | zookeeper:
11 | image: zookeeper:3.5
12 | hostname: zookeeper
13 | container_name: zookeeper
14 | ports:
15 | - 2181:2181
16 |
17 | kafka:
18 | image: confluentinc/cp-kafka:5.4.0
19 | hostname: kafka
20 | container_name: kafka
21 | ports:
22 | - 9092:9092
23 | environment:
24 | KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
25 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
26 | KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
27 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
28 | KAFKA_BROKER_ID: 1
29 | KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
30 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
31 | depends_on:
32 | - zookeeper
33 |
34 | # Run Elasticsearch
35 | storage:
36 | image: elasticsearch:7.6.0
37 | container_name: elasticsearch
38 |     # NOTE: port 9200 is already exposed via the "ports" mapping below for
39 |     # testing; remove that mapping to keep Elasticsearch internal-only.
41 | environment:
42 | - discovery.type=single-node
43 | ports:
44 | - 9200:9200
45 |
46 | # Switch storage type to Elasticsearch
47 | zipkin:
48 | image: openzipkin/zipkin-slim
49 | container_name: zipkin
50 | environment:
51 | - STORAGE_TYPE=elasticsearch
52 | # Point the zipkin at the storage backend
53 | - ES_HOSTS=elasticsearch:9200
54 | # Uncomment to see requests to and from elasticsearch
55 | - ES_HTTP_LOGGING=BODY
56 | ports:
57 | - 9411:9411
58 | depends_on:
59 | - storage
60 |
61 |
--------------------------------------------------------------------------------
/kafka-producer-tracing/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | kafka
7 | io.github.kafka
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | kafka-producer-tracing
13 |
14 |
15 |
16 | org.slf4j
17 | slf4j-simple
18 | 1.7.30
19 |
20 |
21 | org.apache.kafka
22 | kafka-clients
23 | 2.4.0
24 |
25 |
26 | io.zipkin.brave
27 | brave
28 | 5.9.5
29 |
30 |
31 | io.zipkin.brave
32 | brave-instrumentation-kafka-clients
33 | 5.9.5
34 |
35 |
36 | io.zipkin.brave
37 | brave-bom
38 | 5.9.5
39 | pom
40 |
41 |
42 | io.zipkin.reporter2
43 | zipkin-sender-urlconnection
44 | 2.12.1
45 |
46 |
47 | io.zipkin.brave
48 | brave-instrumentation-httpclient
49 | 5.9.4
50 |
51 |
52 |
53 |
54 |
--------------------------------------------------------------------------------
/kafka-consumer-tracing/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | kafka
7 | io.github.kafka
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | kafka-consumer-tracing
13 |
14 |
15 |
16 | org.apache.kafka
17 | kafka-clients
18 | 2.4.0
19 |
20 |
21 | org.slf4j
22 | slf4j-simple
23 | 1.7.30
24 |
25 |
26 | io.zipkin.brave
27 | brave
28 | 5.9.5
29 |
30 |
31 | io.zipkin.brave
32 | brave-instrumentation-kafka-clients
33 | 5.9.5
34 |
35 |
36 | io.zipkin.reporter2
37 | zipkin-sender-urlconnection
38 | 2.12.1
39 |
40 |
41 | org.apache.httpcomponents
42 | httpclient
43 | 4.5.11
44 |
45 |
46 | io.zipkin.brave
47 | brave-bom
48 | 5.9.5
49 | pom
50 | import
51 |
52 |
53 |
--------------------------------------------------------------------------------
/kafka-producer-tracing/kafka-producer-tracing.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/kafka-streaming/src/main/java/KafkaStream.java:
--------------------------------------------------------------------------------
1 | import brave.Tracing;
2 | import brave.kafka.streams.KafkaStreamsTracing;
3 | import brave.sampler.Sampler;
4 | import org.apache.kafka.clients.consumer.ConsumerConfig;
5 | import org.apache.kafka.common.serialization.Serdes;
6 | import org.apache.kafka.streams.KafkaStreams;
7 | import org.apache.kafka.streams.StreamsBuilder;
8 | import org.apache.kafka.streams.StreamsConfig;
9 | import org.apache.kafka.streams.kstream.KStream;
10 | import org.apache.kafka.streams.kstream.Produced;
11 | import zipkin2.reporter.AsyncReporter;
12 | import zipkin2.reporter.urlconnection.URLConnectionSender;
13 |
14 | import java.util.Properties;
15 |
16 | public class KafkaStream {
17 | public static void main(String[] args) {
18 |
19 | //CONFIGURE TRACING
20 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
21 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
22 | final Tracing tracing = Tracing.newBuilder().localServiceName("Kafka_Streaming").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
23 | final KafkaStreamsTracing kafkaStreamsTracing = KafkaStreamsTracing.create(tracing);
24 | //END CONFIGURATION
25 |
26 | Properties config = new Properties();
27 | config.put(StreamsConfig.APPLICATION_ID_CONFIG, "stream-application");
28 | config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
29 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
30 | config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
31 | config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
32 |
33 | StreamsBuilder builder = new StreamsBuilder();
34 |
35 | KStream data = builder.stream("test_tracing");
36 | KStream streamData = data.mapValues(v -> v.toUpperCase());
37 | streamData.to("test_tracing_stream", Produced.with(Serdes.String(), Serdes.String()));
38 |
39 | KafkaStreams streams = kafkaStreamsTracing.kafkaStreams(builder.build(), config);
40 | //new KafkaStreams(builder.build(), config);
41 |
42 | streams.cleanUp();
43 | streams.start();
44 |
45 | // shutdown hook to correctly close the streams application
46 | Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/kafka-consumer-tracing/kafka-consumer-tracing.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/kafka-streaming/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 | kafka
7 | io.github.kafka
8 | 1.0-SNAPSHOT
9 |
10 | 4.0.0
11 |
12 | kafka-streaming
13 |
14 |
15 |
16 |
17 | org.apache.kafka
18 | kafka-clients
19 | 2.4.0
20 |
21 |
22 | org.apache.kafka
23 | kafka-streams
24 | 2.4.0
25 |
26 |
27 | org.slf4j
28 | slf4j-simple
29 | 1.7.30
30 |
31 |
32 | io.zipkin.brave
33 | brave
34 | 5.9.5
35 |
36 |
37 | io.zipkin.brave
38 | brave-instrumentation-kafka-clients
39 | 5.9.5
40 |
41 |
42 | io.zipkin.brave
43 | brave-bom
44 | 5.9.5
45 | pom
46 |
47 |
48 | io.zipkin.reporter2
49 | zipkin-sender-urlconnection
50 | 2.12.1
51 |
52 |
53 | io.zipkin.brave
54 | brave-instrumentation-httpclient
55 | 5.9.4
56 |
57 |
58 | org.apache.httpcomponents
59 | httpclient
60 | 4.5.11
61 |
62 |
63 | io.zipkin.brave
64 | brave-instrumentation-kafka-streams
65 | 5.9.5
66 |
67 |
68 |
--------------------------------------------------------------------------------
/kafka-consumer-tracing/src/main/java/ConsumerTracing.java:
--------------------------------------------------------------------------------
1 | import brave.Span;
2 | import brave.Tracer;
3 | import brave.Tracing;
4 | import brave.kafka.clients.KafkaTracing;
5 | import brave.sampler.Sampler;
6 | import org.apache.kafka.clients.consumer.Consumer;
7 | import org.apache.kafka.clients.consumer.ConsumerConfig;
8 | import org.apache.kafka.clients.consumer.ConsumerRecord;
9 | import org.apache.kafka.clients.consumer.ConsumerRecords;
10 | import org.apache.kafka.clients.consumer.KafkaConsumer;
11 | import org.apache.kafka.common.serialization.StringDeserializer;
12 | import org.slf4j.Logger;
13 | import org.slf4j.LoggerFactory;
14 | import zipkin2.reporter.AsyncReporter;
15 | import zipkin2.reporter.urlconnection.URLConnectionSender;
16 |
17 | import java.time.Duration;
18 | import java.util.Collections;
19 | import java.util.Properties;
20 |
21 | public class ConsumerTracing {
22 | public static void main(String[] args) {
23 | final Logger logger = LoggerFactory.getLogger(ConsumerTracing.class);
24 |
25 | Properties properties = new Properties();
26 | properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
27 | properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
28 | properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
29 | properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test_group_application");
30 | properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
31 |
32 | KafkaConsumer consumer = new KafkaConsumer(properties);
33 |
34 | //CONFIGURE TRACING
35 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
36 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
37 | final Tracing tracing = Tracing.newBuilder().localServiceName("Kafka_Consumer").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
38 | final KafkaTracing kafkaTracing = KafkaTracing.newBuilder(tracing).remoteServiceName("kafka").build();
39 | final Tracer tracer = Tracing.currentTracer();
40 | //END CONFIGURATION
41 |
42 | Consumer tracingConsumer = kafkaTracing.consumer(consumer);
43 |
44 | tracingConsumer.subscribe(Collections.singleton("test_tracing_stream"));
45 |
46 | while(true){
47 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100));
48 |
49 | for (ConsumerRecord record: records){
50 | Span span = kafkaTracing.nextSpan(record).name("kafka-to-consumer").start();
51 | span.annotate("Start consuming");
52 |
53 | logger.info("key: " + record.key() + "value: " + record.value());
54 |
55 | span.annotate("Consume finished");
56 | span.finish();
57 | }
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/java,maven,java-web,intellij
3 | # Edit at https://www.gitignore.io/?templates=java,maven,java-web,intellij
4 |
5 | ### Intellij ###
6 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
7 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
8 |
9 | # User-specific stuff
10 | .idea/**/workspace.xml
11 | .idea/**/tasks.xml
12 | .idea/**/usage.statistics.xml
13 | .idea/**/dictionaries
14 | .idea/**/shelf
15 |
16 | # Generated files
17 | .idea/**/contentModel.xml
18 |
19 | # Sensitive or high-churn files
20 | .idea/**/dataSources/
21 | .idea/**/dataSources.ids
22 | .idea/**/dataSources.local.xml
23 | .idea/**/sqlDataSources.xml
24 | .idea/**/dynamic.xml
25 | .idea/**/uiDesigner.xml
26 | .idea/**/dbnavigator.xml
27 |
28 | # Gradle
29 | .idea/**/gradle.xml
30 | .idea/**/libraries
31 |
32 | # Gradle and Maven with auto-import
33 | # When using Gradle or Maven with auto-import, you should exclude module files,
34 | # since they will be recreated, and may cause churn. Uncomment if using
35 | # auto-import.
36 | # .idea/modules.xml
37 | # .idea/*.iml
38 | # .idea/modules
39 | # *.iml
40 | # *.ipr
41 |
42 | # CMake
43 | cmake-build-*/
44 |
45 | # Mongo Explorer plugin
46 | .idea/**/mongoSettings.xml
47 |
48 | # File-based project format
49 | *.iws
50 |
51 | # IntelliJ
52 | out/
53 |
54 | # mpeltonen/sbt-idea plugin
55 | .idea_modules/
56 |
57 | # JIRA plugin
58 | atlassian-ide-plugin.xml
59 |
60 | # Cursive Clojure plugin
61 | .idea/replstate.xml
62 |
63 | # Crashlytics plugin (for Android Studio and IntelliJ)
64 | com_crashlytics_export_strings.xml
65 | crashlytics.properties
66 | crashlytics-build.properties
67 | fabric.properties
68 |
69 | # Editor-based Rest Client
70 | .idea/httpRequests
71 |
72 | # Android studio 3.1+ serialized cache file
73 | .idea/caches/build_file_checksums.ser
74 |
75 | ### Intellij Patch ###
76 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
77 |
78 | # *.iml
79 | # modules.xml
80 | # .idea/misc.xml
81 | # *.ipr
82 |
83 | # Sonarlint plugin
84 | .idea/**/sonarlint/
85 |
86 | # SonarQube Plugin
87 | .idea/**/sonarIssues.xml
88 |
89 | # Markdown Navigator plugin
90 | .idea/**/markdown-navigator.xml
91 | .idea/**/markdown-navigator/
92 |
93 | ### Java ###
94 | # Compiled class file
95 | *.class
96 |
97 | # Log file
98 | *.log
99 |
100 | # BlueJ files
101 | *.ctxt
102 |
103 | # Mobile Tools for Java (J2ME)
104 | .mtj.tmp/
105 |
106 | # Package Files #
107 | *.jar
108 | *.war
109 | *.nar
110 | *.ear
111 | *.zip
112 | *.tar.gz
113 | *.rar
114 |
115 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
116 | hs_err_pid*
117 |
118 | ### Java-Web ###
119 | ## ignoring target file
120 | target/
121 |
122 | ### Maven ###
123 | pom.xml.tag
124 | pom.xml.releaseBackup
125 | pom.xml.versionsBackup
126 | pom.xml.next
127 | release.properties
128 | dependency-reduced-pom.xml
129 | buildNumber.properties
130 | .mvn/timing.properties
131 | .mvn/wrapper/maven-wrapper.jar
132 | .flattened-pom.xml
133 |
134 | # End of https://www.gitignore.io/api/java,maven,java-web,intellij
--------------------------------------------------------------------------------
/kafka-streaming/kafka-streaming.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/kafka-producer-tracing/src/main/java/ProducerTracing.java:
--------------------------------------------------------------------------------
1 | import brave.ScopedSpan;
2 | import brave.Tracer;
3 | import brave.Tracing;
4 | import brave.kafka.clients.KafkaTracing;
5 | import brave.sampler.Sampler;
6 | import org.apache.kafka.clients.producer.KafkaProducer;
7 | import org.apache.kafka.clients.producer.Producer;
8 | import org.apache.kafka.clients.producer.ProducerConfig;
9 | import org.apache.kafka.clients.producer.ProducerRecord;
10 | import org.apache.kafka.common.serialization.StringSerializer;
11 | import org.slf4j.Logger;
12 | import org.slf4j.LoggerFactory;
13 | import zipkin2.reporter.AsyncReporter;
14 | import zipkin2.reporter.urlconnection.URLConnectionSender;
15 |
16 | import java.util.Properties;
17 |
18 | public class ProducerTracing {
19 | private final Logger logger = LoggerFactory.getLogger(ProducerTracing.class.getName());
20 |
21 | public ProducerTracing() {}
22 |
23 | public static void main(String[] args) throws InterruptedException {
24 | new ProducerTracing().run("test_tracing");
25 | }
26 |
27 | private void run(String topic) throws InterruptedException {
28 |
29 | KafkaProducer producer = createKafkaProducer();
30 |
31 | //CONFIGURE TRACING
32 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
33 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
34 | final Tracing tracing = Tracing.newBuilder().localServiceName("Kafka_Producer").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
35 | final KafkaTracing kafkaTracing = KafkaTracing.newBuilder(tracing).remoteServiceName("kafka").build();
36 | final Tracer tracer = Tracing.currentTracer();
37 | //END CONFIGURATION
38 |
39 | final Producer tracedKafkaProducer = kafkaTracing.producer(producer);
40 |
41 | //Shutdown hook
42 | Runtime.getRuntime().addShutdownHook(new Thread(()-> {
43 | logger.info("stopping application...");
44 | logger.info("closing producer...");
45 | tracedKafkaProducer.close();
46 | logger.info("Done");
47 | }));
48 |
49 | //Create record
50 | ProducerRecord record = new ProducerRecord<>(topic, null, "some small test");
51 |
52 | //CREATE SPAN
53 | ScopedSpan span = tracer.startScopedSpan("produce-to-kafka");
54 | span.tag("name", "sending-kafka-record");
55 |
56 | span.annotate("starting operation");
57 |
58 | span.annotate("sending message to kafka");
59 |
60 | tracedKafkaProducer.send(record, (metadata, exception) -> {
61 | if (exception == null) {
62 | logger.info("Received new metadata: \n" +
63 | "Topic: " + metadata.topic() + "\n" +
64 | "Partition: " + metadata.partition() + "\n" +
65 | "Offset: " + metadata.offset()
66 | );
67 | } else {
68 | logger.error("Error while producing: " + exception);
69 | }
70 |
71 | });
72 |
73 | span.annotate("complete operation");
74 | span.finish();
75 | reporter.flush(); // flush method which sends messages to zipkin
76 |
77 | logger.info("End of application");
78 |
79 | }
80 |
81 | private KafkaProducer createKafkaProducer() {
82 |
83 | //Producer config
84 | Properties properties = new Properties();
85 | properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
86 | properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
87 | properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
88 |
89 | return new KafkaProducer<>(properties);
90 | }
91 |
92 | }
93 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kafka Tracing via zipkin
2 | Run `docker-compose up` to start zipkin - elasticsearch - kafka - zookeeper
3 |
4 | Auto topic creation is enabled: run the Producer, Consumer, and Streaming apps in that order to create the topics and produce the first trace.
5 |
6 | Go to localhost:9411 to check the traced record in zipkin UI
7 |
8 | Using the Brave(Java) instrumentation library for zipkin: https://github.com/openzipkin/brave
9 |
10 | Zipkin tracing system saves data into either ElasticSearch or Cassandra.
11 |
12 | ### Zipkin Configuration
13 |
14 | #### Producer
15 | Configure tracing
16 |
17 | ```java
18 | //CONFIGURE TRACING
19 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
20 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
21 | final Tracing tracing = Tracing.newBuilder().localServiceName("simpleProducer_test").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
22 | final KafkaTracing kafkaTracing = KafkaTracing.newBuilder(tracing).remoteServiceName("kafka").build();
23 | final Tracer tracer = Tracing.currentTracer();
24 | //END CONFIGURATION
25 | ```
26 |
27 | Wrap kafka producer in kafka tracing:
28 | `final Producer tracedKafkaProducer = kafkaTracing.producer(producer);`
29 |
30 | Create spans:
31 | - measurements are taken between the annotations
32 | - in producer use the reporter flush to force messages to be sent to zipkin. If producer is too fast, its span will not have time to be sent to zipkin
33 |
34 | ```java
35 | //Create record
36 | ProducerRecord record = new ProducerRecord<>("test_tracing", null, "Test");
37 |
38 | //Create span
39 | ScopedSpan span = tracer.startScopedSpan("produce-to-kafka");
40 | span.tag("name", "sending-kafka-record");
41 | span.annotate("starting operation");
42 | span.annotate("sending message to kafka");
43 |
44 | tracedKafkaProducer.send(record);
45 |
46 | span.annotate("complete operation");
47 | span.finish();
48 | reporter.flush(); // flush method which sends messages to zipkin
49 |
50 | logger.info("End of application");
51 | ```
52 |
53 | #### Consumer
54 |
55 | Same configuration as the producer
56 | - only the `localServiceName` value changes
57 | ```java
58 | //CONFIGURE TRACING
59 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
60 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
61 | final Tracing tracing = Tracing.newBuilder().localServiceName("simpleConsumer_test").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
62 | final KafkaTracing kafkaTracing = KafkaTracing.newBuilder(tracing).remoteServiceName("kafka").build();
63 | final Tracer tracer = Tracing.currentTracer();
64 | //END CONFIGURATION
65 | ```
66 |
67 | Wrap consumer into kafkaTracing
68 |
69 | `Consumer tracingConsumer = kafkaTracing.consumer(consumer);`
70 |
71 | Subscribe the tracing consumer to the topic:
72 | `tracingConsumer.subscribe(Collections.singleton("test_tracing"));`
73 |
74 |
75 | Read data and send spans to zipkin: nextSpan starts sending and span.finish ends it
76 |
77 | ```java
78 | while(true){
79 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100));
80 |
81 | for (ConsumerRecord record: records){
82 | Span span = kafkaTracing.nextSpan(record).name("kafka-to-consumer").start();
83 | span.annotate("Start consuming");
84 |
85 | logger.info("key: " + record.key() + "value: " + record.value());
86 |
87 | span.annotate("Consume finished");
88 | span.finish();
89 | }
90 | }
91 | ```
92 | #### Streaming
93 |
94 | Add the configuration:
95 |
96 | ```java
97 | //CONFIGURE TRACING
98 | final URLConnectionSender sender = URLConnectionSender.newBuilder().endpoint("http://127.0.0.1:9411/api/v2/spans").build();
99 | final AsyncReporter reporter = AsyncReporter.builder(sender).build();
100 | final Tracing tracing = Tracing.newBuilder().localServiceName("Kafka_Streaming").sampler(Sampler.ALWAYS_SAMPLE).spanReporter(reporter).build();
101 | final KafkaStreamsTracing kafkaStreamsTracing = KafkaStreamsTracing.create(tracing);
102 | //END CONFIGURATION
103 | ```
104 | Wrap the KafkaStreams instance with KafkaStreamsTracing:
105 |
106 | `KafkaStreams streams = kafkaStreamsTracing.kafkaStreams(builder.build(), config);`
107 |
--------------------------------------------------------------------------------