├── .idea
│   ├── codeStyles
│   │   └── codeStyleConfig.xml
│   ├── compiler.xml
│   ├── encodings.xml
│   ├── misc.xml
│   ├── sbt.xml
│   ├── vcs.xml
│   └── workspace.xml
├── README.md
├── pom.xml
├── src
│   └── main
│       └── java
│           ├── AssignSeekConsumer.java
│           ├── Consumer.java
│           └── Producer.java
├── target
│   └── classes
│       ├── AssignSeekConsumer.class
│       ├── Consumer$ConsumerRunnable.class
│       ├── Consumer.class
│       ├── META-INF
│       │   └── tutorial-kafka-java.kotlin_module
│       └── Producer.class
└── tutorial-kafka-java.iml

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
By [Dmytro Nasyrov, Founder, CTO at Pharos Production Inc.](https://www.linkedin.com/in/dmytronasyrov/)
And [Pharos Production Inc. - Web3, blockchain, fintech, defi software development services](https://pharosproduction.com)

# Tutorial: Kafka Using Java

## You can find it in our Medium publication
[Pharos Production Medium Article - Kafka Using Java](https://medium.com/pharos-production/kafka-using-java-e10bfeec8638).

Also, you're warmly welcome to say hello to us:
[Pharos Production - Blockchain and FinTech Software Development](https://pharosproduction.com)
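
The classes in this repository assume a Kafka broker running locally on `127.0.0.1:9092` and an existing `user_registered` topic. As a rough sketch of preparing that topic (not part of the repository; the class name `TopicBootstrap` and the single-partition setup are our assumptions), it can be created programmatically with the `AdminClient` that ships in the same `kafka-clients` dependency:

```java
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

class TopicBootstrap {

  public static void main(String[] args) throws ExecutionException, InterruptedException {
    Properties props = new Properties();
    props.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");

    // One partition and a replication factor of 1 are enough for a single local broker.
    NewTopic topic = new NewTopic("user_registered", 1, (short) 1);

    try (AdminClient admin = AdminClient.create(props)) {
      admin.createTopics(Collections.singletonList(topic)).all().get();
    }
  }
}
```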
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.pharosproduction</groupId>
    <artifactId>tutorial-kafka-java</artifactId>
    <version>1.0</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.1.0</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>1.7.25</version>
        </dependency>
    </dependencies>
</project>

--------------------------------------------------------------------------------
/src/main/java/AssignSeekConsumer.java:
--------------------------------------------------------------------------------
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class AssignSeekConsumer {

  public static void main(String[] args) {
    String server = "127.0.0.1:9092";
    String topic = "user_registered";
    long offset = 15L;
    int partitionNum = 0;
    int numOfMessages = 5;

    new AssignSeekConsumer(server, topic).run(offset, partitionNum, numOfMessages);
  }

  // Variables

  private final Logger mLogger = LoggerFactory.getLogger(AssignSeekConsumer.class.getName());
  private final String mBootstrapServer;
  private final String mTopic;

  // Constructor

  private AssignSeekConsumer(String bootstrapServer, String topic) {
    mBootstrapServer = bootstrapServer;
    mTopic = topic;
  }

  // Public

  void run(long offset, int partitionNum, int numOfMessages) {
    Properties props = consumerProps(mBootstrapServer);
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);

    setupConsumer(consumer, offset, partitionNum);
    fetchMessages(consumer, numOfMessages);
  }

  // Private

  private void setupConsumer(KafkaConsumer<String, String> consumer, long offset, int partitionNum) {
    // Assign the consumer to a single partition and jump to the requested offset.
    TopicPartition partition = new TopicPartition(mTopic, partitionNum);
    consumer.assign(Collections.singletonList(partition));
    consumer.seek(partition, offset);
  }

  private void fetchMessages(KafkaConsumer<String, String> consumer, int numOfMessages) {
    int numberOfMessagesRead = 0;
    boolean keepOnReading = true;

    while (keepOnReading) {
      ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));

      for (ConsumerRecord<String, String> record : records) {
        numberOfMessagesRead += 1;

        mLogger.info("Key: " + record.key() + ", Value: " + record.value());
        mLogger.info("Partition: " + record.partition() + ", Offset: " + record.offset());

        if (numberOfMessagesRead >= numOfMessages) {
          keepOnReading = false;
          break;
        }
      }
    }
  }

  private Properties consumerProps(String bootstrapServer) {
    Properties properties = new Properties();
    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
    properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    return properties;
  }
}
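
A note on the class above: `assign()`/`seek()` bypasses consumer-group coordination entirely, which is why no `group.id` is configured. A minimal sketch under the same broker and topic assumptions (the class name `ReplayFromBeginning` and the single five-second poll are ours, not part of the repository) that replays partition 0 from its earliest retained offset instead of a hard-coded one:

```java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

class ReplayFromBeginning {

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
      TopicPartition partition = new TopicPartition("user_registered", 0);
      consumer.assign(Collections.singletonList(partition));
      // seekToBeginning() resets the position to the first offset still retained by the broker.
      consumer.seekToBeginning(Collections.singletonList(partition));

      for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofSeconds(5))) {
        System.out.println(record.offset() + ": " + record.key() + " -> " + record.value());
      }
    }
  }
}
```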
--------------------------------------------------------------------------------
/src/main/java/Consumer.java:
--------------------------------------------------------------------------------
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

class Consumer {

  public static void main(String[] args) {
    String server = "127.0.0.1:9092";
    String groupId = "some_application";
    String topic = "user_registered";

    new Consumer(server, groupId, topic).run();
  }

  // Variables

  private final Logger mLogger = LoggerFactory.getLogger(Consumer.class.getName());
  private final String mBootstrapServer;
  private final String mGroupId;
  private final String mTopic;

  // Constructor

  Consumer(String bootstrapServer, String groupId, String topic) {
    mBootstrapServer = bootstrapServer;
    mGroupId = groupId;
    mTopic = topic;
  }

  // Public

  void run() {
    mLogger.info("Creating consumer thread");

    CountDownLatch latch = new CountDownLatch(1);

    ConsumerRunnable consumerRunnable = new ConsumerRunnable(mBootstrapServer, mGroupId, mTopic, latch);
    Thread thread = new Thread(consumerRunnable);
    thread.start();

    // The shutdown hook wakes the consumer up so the polling loop can exit cleanly.
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
      mLogger.info("Caught shutdown hook");
      consumerRunnable.shutdown();
      await(latch);

      mLogger.info("Application has exited");
    }));

    await(latch);
  }

  // Private

  void await(CountDownLatch latch) {
    try {
      latch.await();
    } catch (InterruptedException e) {
      mLogger.error("Application got interrupted", e);
    } finally {
      mLogger.info("Application is closing");
    }
  }

  // Inner classes

  private class ConsumerRunnable implements Runnable {

    private CountDownLatch mLatch;
    private KafkaConsumer<String, String> mConsumer;

    ConsumerRunnable(String bootstrapServer, String groupId, String topic, CountDownLatch latch) {
      mLatch = latch;

      Properties props = consumerProps(bootstrapServer, groupId);
      mConsumer = new KafkaConsumer<>(props);
      mConsumer.subscribe(Collections.singletonList(topic));
    }

    @Override
    public void run() {
      try {
        while (true) {
          ConsumerRecords<String, String> records = mConsumer.poll(Duration.ofMillis(100));

          for (ConsumerRecord<String, String> record : records) {
            mLogger.info("Key: " + record.key() + ", Value: " + record.value());
            mLogger.info("Partition: " + record.partition() + ", Offset: " + record.offset());
          }
        }
      } catch (WakeupException e) {
        // Thrown by poll() after wakeup() is called; used here as the shutdown signal.
        mLogger.info("Received shutdown signal!");
      } finally {
        mConsumer.close();
        mLatch.countDown();
      }
    }

    void shutdown() {
      mConsumer.wakeup();
    }

    private Properties consumerProps(String bootstrapServer, String groupId) {
      String deserializer = StringDeserializer.class.getName();
      Properties properties = new Properties();
      properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
      properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
      properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserializer);
      properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserializer);
      properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

      return properties;
    }
  }
}
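
The consumer above relies on Kafka's automatic offset commits (the client default). A minimal sketch, reusing the same connection settings (the class name `ManualCommitSketch` and the fixed number of polls are our assumptions, not part of the repository), of turning auto-commit off and committing offsets only after a batch has been processed:

```java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

class ManualCommitSketch {

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "some_application");
    props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Disable auto-commit so offsets only advance after the batch is processed.
    props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
      consumer.subscribe(Collections.singletonList("user_registered"));

      for (int i = 0; i < 10; i++) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
        for (ConsumerRecord<String, String> record : records) {
          System.out.println(record.key() + " -> " + record.value());
        }
        // Synchronously commit the offsets returned by the last poll().
        consumer.commitSync();
      }
    }
  }
}
```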
--------------------------------------------------------------------------------
/src/main/java/Producer.java:
--------------------------------------------------------------------------------
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

class Producer {

  public static void main(String[] args) throws ExecutionException, InterruptedException {
    String server = "127.0.0.1:9092";
    String topic = "user_registered";

    Producer producer = new Producer(server);
    producer.put(topic, "user1", "John");
    producer.put(topic, "user2", "Peter");
    producer.close();
  }

  // Variables

  private final KafkaProducer<String, String> mProducer;
  private final Logger mLogger = LoggerFactory.getLogger(Producer.class);

  // Constructors

  Producer(String bootstrapServer) {
    Properties props = producerProps(bootstrapServer);
    mProducer = new KafkaProducer<>(props);

    mLogger.info("Producer initialized");
  }

  // Public

  void put(String topic, String key, String value) throws ExecutionException, InterruptedException {
    mLogger.info("Put value: " + value + ", for key: " + key);

    ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, value);
    mProducer.send(record, (recordMetadata, e) -> {
      if (e != null) {
        mLogger.error("Error while producing", e);
        return;
      }

      mLogger.info("Received new metadata. Topic: " + recordMetadata.topic()
          + "; Partition: " + recordMetadata.partition()
          + "; Offset: " + recordMetadata.offset()
          + "; Timestamp: " + recordMetadata.timestamp());
    }).get(); // get() blocks until the broker acknowledges the record
  }

  void close() {
    mLogger.info("Closing producer's connection");
    mProducer.close();
  }

  // Private

  private Properties producerProps(String bootstrapServer) {
    String serializer = StringSerializer.class.getName();
    Properties props = new Properties();
    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, serializer);
    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializer);

    return props;
  }
}
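
In `put()` above, `send(record, callback).get()` blocks until the broker acknowledges each record, which keeps the log output in order but serializes the sends. A minimal sketch of the non-blocking style (the class name `AsyncProducerSketch` and the loop of ten records are our assumptions, not part of the repository):

```java
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

class AsyncProducerSketch {

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
      for (int i = 0; i < 10; i++) {
        // send() only enqueues the record; the producer's background I/O thread batches and ships it.
        producer.send(new ProducerRecord<>("user_registered", "user" + i, "name" + i));
      }
      // Closing the producer (via try-with-resources) flushes everything still buffered.
    }
  }
}
```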