├── src
│   ├── main
│   │   ├── java
│   │   │   └── com
│   │   │       └── example
│   │   │           └── kafka
│   │   │               ├── KafkaApplication.java
│   │   │               ├── producer
│   │   │               │   ├── AtMostOnceProducer.java
│   │   │               │   ├── AbstractKafkaProducer.java
│   │   │               │   ├── AtLeastOnceProducer.java
│   │   │               │   └── ExactlyOnceProducer.java
│   │   │               └── config
│   │   │                   └── KafkaConfig.java
│   │   └── resources
│   │       └── application.yml
│   └── test
│       └── java
│           └── com
│               └── example
│                   └── kafka
│                       └── producer
│                           └── KafkaProducerTest.java
├── LICENSE.md
├── .gitignore
├── pom.xml
└── README.md

/src/main/java/com/example/kafka/KafkaApplication.java:
--------------------------------------------------------------------------------
package com.example.kafka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

/**
 * Main Spring Boot application class for the Kafka delivery modes demonstration.
 * This application demonstrates three different Kafka delivery modes:
 * - At-most-once delivery
 * - At-least-once delivery
 * - Exactly-once delivery
 */
@SpringBootApplication
public class KafkaApplication {
    public static void main(String[] args) {
        SpringApplication.run(KafkaApplication.class, args);
    }
}

--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2025 Luis Machado Reis

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
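Note: the repository does not ship a runner that actually exercises the three producers. The sketch below is a hypothetical `CommandLineRunner` (not a file in this project) showing how they could be wired against the `KafkaTemplate` beans from `KafkaConfig` and the topics it declares; the class name and wiring are illustrative assumptions.

```java
package com.example.kafka;

import com.example.kafka.producer.AtLeastOnceProducer;
import com.example.kafka.producer.AtMostOnceProducer;
import com.example.kafka.producer.ExactlyOnceProducer;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;

/**
 * Hypothetical demo wiring (not part of this repository): sends one sample
 * message through each delivery mode at application startup.
 */
@Configuration
public class DeliveryModeDemo {

    @Bean
    CommandLineRunner deliveryModeDemoRunner(
            @Qualifier("atMostOnceTemplate") KafkaTemplate<String, String> atMostOnce,
            @Qualifier("atLeastOnceTemplate") KafkaTemplate<String, String> atLeastOnce,
            @Qualifier("exactlyOnceTemplate") KafkaTemplate<String, String> exactlyOnce) {
        return args -> {
            // Fire-and-forget: acks=0, no retries
            new AtMostOnceProducer<>(atMostOnce, "at-most-once-topic").send("at-most-once sample");
            // Retried on failure: acks=all, may produce duplicates
            new AtLeastOnceProducer<>(atLeastOnce, "at-least-once-topic").send("at-least-once sample");
            // Transactional send with an idempotent producer
            new ExactlyOnceProducer<>(exactlyOnce, "exactly-once-topic").send("exactly-once sample");
        };
    }
}
```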
/src/main/resources/application.yml:
--------------------------------------------------------------------------------
spring:
  kafka:
    # Kafka broker connection settings
    bootstrap-servers: localhost:9092

    # Producer configurations
    producer:
      # Key and value serializers for all producers
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer

      # At-most-once delivery configuration
      at-most-once:
        acks: 0       # No acknowledgment required
        retries: 0    # No retries on failure

      # At-least-once delivery configuration
      at-least-once:
        acks: all     # Wait for all replicas to acknowledge
        retries: 3    # Retry up to 3 times on failure

      # Exactly-once delivery configuration
      exactly-once:
        acks: all                             # Wait for all replicas to acknowledge
        enable-idempotence: true              # Enable idempotent producer
        transaction-id-prefix: exactly-once-  # Prefix for transaction IDs

    # Consumer configurations (if needed)
    consumer:
      group-id: kafka-delivery-modes-group
      auto-offset-reset: earliest
      enable-auto-commit: true
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer

# Logging configuration
logging:
  level:
    org.springframework.kafka: INFO
    com.example.kafka: DEBUG

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Compiled class files
*.class

# Log files
*.log
logs/
*.log.*

# BlueJ files
*.ctxt

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar

# virtual machine crash logs
hs_err_pid*
replay_pid*

# Maven
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
release.properties
dependency-reduced-pom.xml
buildNumber.properties
.mvn/timing.properties
.mvn/wrapper/maven-wrapper.jar

# Gradle
.gradle/
build/
gradle-app.setting
!gradle-wrapper.jar
!gradle-wrapper.properties
.gradletasknamecache

# IntelliJ IDEA
.idea/
*.iws
*.iml
*.ipr
out/

# Eclipse
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/

# NetBeans
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/

# VS Code
.vscode/
*.code-workspace

# Mac OS
.DS_Store
.AppleDouble
.LSOverride

# Windows
Thumbs.db
ehthumbs.db
Desktop.ini

# Application specific
application-*.yml
application-*.properties
!application.yml
!application.properties

# Test reports
test-output/
test-results/
surefire-reports/
failsafe-reports/

# Local environment files
.env
.env.local
.env.*.local

# Temporary files
*.tmp
*.temp
*.swp
*~

--------------------------------------------------------------------------------
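A note on the `at-most-once`, `at-least-once` and `exactly-once` blocks above: they are custom keys, not standard Spring Boot `spring.kafka.producer` properties, so Spring does not apply them automatically; `KafkaConfig` hard-codes the equivalent settings in code. If you wanted to drive those settings from YAML instead, a binding class along the following lines could be used. This is a sketch under that assumption and is not part of the repository.

```java
package com.example.kafka.config;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Hypothetical binding for the custom "at-least-once" block in application.yml.
 * Register it with @EnableConfigurationProperties(AtLeastOnceProperties.class)
 * or @ConfigurationPropertiesScan. Not part of this repository.
 */
@ConfigurationProperties(prefix = "spring.kafka.producer.at-least-once")
public class AtLeastOnceProperties {

    /** Maps the "acks" key, e.g. "all". */
    private String acks = "all";

    /** Maps the "retries" key, e.g. 3. */
    private int retries = 3;

    public String getAcks() {
        return acks;
    }

    public void setAcks(String acks) {
        this.acks = acks;
    }

    public int getRetries() {
        return retries;
    }

    public void setRetries(int retries) {
        this.retries = retries;
    }
}
```

Whether such a class is worth it over the hard-coded values in `KafkaConfig` is a design choice; this repository opts for explicit code.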
/src/main/java/com/example/kafka/producer/AtMostOnceProducer.java:
--------------------------------------------------------------------------------
package com.example.kafka.producer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

import java.util.concurrent.CompletableFuture;

/**
 * Implementation of Kafka producer with at-most-once delivery semantics.
 * Messages are sent without waiting for acknowledgment and no retries are performed.
 *
 * @param <K> The type of the key
 * @param <V> The type of the value
 */
public class AtMostOnceProducer<K, V> extends AbstractKafkaProducer<K, V> {
    private static final Logger logger = LoggerFactory.getLogger(AtMostOnceProducer.class);

    /**
     * Constructs a new AtMostOnceProducer.
     *
     * @param kafkaTemplate The KafkaTemplate to use for sending messages
     * @param topic         The topic to send messages to
     */
    public AtMostOnceProducer(KafkaTemplate<K, V> kafkaTemplate, String topic) {
        super(kafkaTemplate, topic);
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(V message) {
        CompletableFuture<SendResult<K, V>> future = super.send(message);
        addCallback(future, (result, ex) -> {
            if (ex != null) {
                logger.error("Failed to send message", ex);
            } else {
                logger.debug("Message sent successfully: {}", result.getProducerRecord().value());
            }
        });
        return future;
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(K key, V message) {
        CompletableFuture<SendResult<K, V>> future = super.send(key, message);
        addCallback(future, (result, ex) -> {
            if (ex != null) {
                logger.error("Failed to send message", ex);
            } else {
                logger.debug("Message sent successfully: {}", result.getProducerRecord().value());
            }
        });
        return future;
    }
}

--------------------------------------------------------------------------------
/src/main/java/com/example/kafka/producer/AbstractKafkaProducer.java:
--------------------------------------------------------------------------------
package com.example.kafka.producer;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

/**
 * Abstract base class for Kafka producers implementing different delivery modes.
 * Provides common functionality for sending messages to Kafka topics.
 *
 * @param <K> The type of the key
 * @param <V> The type of the value
 */
public abstract class AbstractKafkaProducer<K, V> {

    protected final KafkaTemplate<K, V> kafkaTemplate;
    protected final String topic;

    /**
     * Constructs a new AbstractKafkaProducer with the specified KafkaTemplate and topic.
     *
     * @param kafkaTemplate The KafkaTemplate to use for sending messages
     * @param topic         The topic to send messages to
     */
    protected AbstractKafkaProducer(KafkaTemplate<K, V> kafkaTemplate, String topic) {
        this.kafkaTemplate = kafkaTemplate;
        this.topic = topic;
    }

    /**
     * Sends a message to the configured topic without a key.
     *
     * @param message The message to send
     * @return A CompletableFuture containing the SendResult
     */
    public CompletableFuture<SendResult<K, V>> send(V message) {
        return kafkaTemplate.send(topic, message);
    }

    /**
     * Sends a message to the configured topic with a key.
     *
     * @param key     The key for the message
     * @param message The message to send
     * @return A CompletableFuture containing the SendResult
     */
    public CompletableFuture<SendResult<K, V>> send(K key, V message) {
        return kafkaTemplate.send(topic, key, message);
    }

    /**
     * Adds a callback to handle the result of sending a message.
     *
     * @param future   The CompletableFuture from the send operation
     * @param callback The callback to handle the result
     */
    protected void addCallback(CompletableFuture<SendResult<K, V>> future,
                               BiConsumer<SendResult<K, V>, Throwable> callback) {
        future.whenComplete(callback);
    }
}

--------------------------------------------------------------------------------
/src/main/java/com/example/kafka/producer/AtLeastOnceProducer.java:
--------------------------------------------------------------------------------
package com.example.kafka.producer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

import java.util.concurrent.CompletableFuture;

/**
 * Implementation of Kafka producer with at-least-once delivery semantics.
 * Messages are retried on failure to ensure delivery, but may be delivered multiple times.
 *
 * @param <K> The type of the key
 * @param <V> The type of the value
 */
public class AtLeastOnceProducer<K, V> extends AbstractKafkaProducer<K, V> {
    private static final Logger logger = LoggerFactory.getLogger(AtLeastOnceProducer.class);
    private static final int MAX_RETRIES = 3;

    /**
     * Constructs a new AtLeastOnceProducer.
     *
     * @param kafkaTemplate The KafkaTemplate to use for sending messages
     * @param topic         The topic to send messages to
     */
    public AtLeastOnceProducer(KafkaTemplate<K, V> kafkaTemplate, String topic) {
        super(kafkaTemplate, topic);
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(V message) {
        return sendWithRetry(message, null, 0);
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(K key, V message) {
        return sendWithRetry(message, key, 0);
    }

    private CompletableFuture<SendResult<K, V>> sendWithRetry(V message, K key, int retryCount) {
        CompletableFuture<SendResult<K, V>> future = key != null ? super.send(key, message) : super.send(message);
        addCallback(future, (result, ex) -> {
            if (ex != null) {
                logger.error("Failed to send message: {}", message, ex);
                if (retryCount < MAX_RETRIES) {
                    logger.info("Retrying message. Attempt {}/{}", retryCount + 1, MAX_RETRIES);
                    sendWithRetry(message, key, retryCount + 1);
                } else {
                    logger.error("Max retries ({}) reached for message: {}", MAX_RETRIES, message);
                }
            } else {
                logger.debug("Message sent successfully: {}", result.getProducerRecord().value());
            }
        });
        return future;
    }
}

--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.2.3</version>
    </parent>

    <groupId>com.example</groupId>
    <artifactId>kafka-springboot-blueprint</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <java.version>17</java.version>
        <kafka.version>3.6.2</kafka.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <excludes>
                        <exclude>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

--------------------------------------------------------------------------------
/src/main/java/com/example/kafka/producer/ExactlyOnceProducer.java:
--------------------------------------------------------------------------------
package com.example.kafka.producer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

import java.util.concurrent.CompletableFuture;

/**
 * Implementation of Kafka producer with exactly-once delivery semantics.
 * Uses transactional messaging to ensure messages are delivered exactly once.
 *
 * @param <K> The type of the key
 * @param <V> The type of the value
 */
public class ExactlyOnceProducer<K, V> extends AbstractKafkaProducer<K, V> {
    private static final Logger logger = LoggerFactory.getLogger(ExactlyOnceProducer.class);

    /**
     * Constructs a new ExactlyOnceProducer.
     *
     * @param kafkaTemplate The KafkaTemplate to use for sending messages
     * @param topic         The topic to send messages to
     */
    public ExactlyOnceProducer(KafkaTemplate<K, V> kafkaTemplate, String topic) {
        super(kafkaTemplate, topic);
        kafkaTemplate.setTransactionIdPrefix("txn-");
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(V message) {
        return kafkaTemplate.executeInTransaction(operations -> {
            CompletableFuture<SendResult<K, V>> future = super.send(message);
            addCallback(future, (result, ex) -> {
                if (ex != null) {
                    logger.error("Failed to send message in transaction", ex);
                } else {
                    logger.debug("Message sent successfully in transaction: {}", result.getProducerRecord().value());
                }
            });
            return future;
        });
    }

    @Override
    public CompletableFuture<SendResult<K, V>> send(K key, V message) {
        return kafkaTemplate.executeInTransaction(operations -> {
            CompletableFuture<SendResult<K, V>> future = super.send(key, message);
            addCallback(future, (result, ex) -> {
                if (ex != null) {
                    logger.error("Failed to send message in transaction", ex);
                } else {
                    logger.debug("Message sent successfully in transaction: {}", result.getProducerRecord().value());
                }
            });
            return future;
        });
    }
}

--------------------------------------------------------------------------------
/src/test/java/com/example/kafka/producer/KafkaProducerTest.java:
--------------------------------------------------------------------------------
package com.example.kafka.producer;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.kafka.core.KafkaOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

import java.util.concurrent.CompletableFuture;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;

@ExtendWith(MockitoExtension.class)
class KafkaProducerTest {

    private static final String TOPIC = "test-topic";
    private static final String MESSAGE = "test message";

    @Mock
    private KafkaTemplate<String, String> kafkaTemplate;

    private AtMostOnceProducer<String, String> atMostOnceProducer;
    private AtLeastOnceProducer<String, String> atLeastOnceProducer;
    private ExactlyOnceProducer<String, String> exactlyOnceProducer;

    @BeforeEach
    void setUp() {
        atMostOnceProducer = new AtMostOnceProducer<>(kafkaTemplate, TOPIC);
        atLeastOnceProducer = new AtLeastOnceProducer<>(kafkaTemplate, TOPIC);
        exactlyOnceProducer = new ExactlyOnceProducer<>(kafkaTemplate, TOPIC);
    }

    @Test
    void testAtMostOnceProducer() {
        SendResult<String, String> sendResult = mock(SendResult.class);
        CompletableFuture<SendResult<String, String>> future = CompletableFuture.completedFuture(sendResult);
        when(kafkaTemplate.send(eq(TOPIC), eq(MESSAGE))).thenReturn(future);

        atMostOnceProducer.send(MESSAGE);

        verify(kafkaTemplate).send(eq(TOPIC), eq(MESSAGE));
    }

    @Test
    void testAtLeastOnceProducer() {
        SendResult<String, String> sendResult = mock(SendResult.class);
        CompletableFuture<SendResult<String, String>> future = CompletableFuture.completedFuture(sendResult);
        when(kafkaTemplate.send(eq(TOPIC), eq(MESSAGE))).thenReturn(future);

        atLeastOnceProducer.send(MESSAGE);

        verify(kafkaTemplate).send(eq(TOPIC), eq(MESSAGE));
    }

    @Test
    void testExactlyOnceProducer() {
        SendResult<String, String> sendResult = mock(SendResult.class);
        CompletableFuture<SendResult<String, String>> future = CompletableFuture.completedFuture(sendResult);
        when(kafkaTemplate.send(eq(TOPIC), eq(MESSAGE))).thenReturn(future);
        when(kafkaTemplate.executeInTransaction(any())).thenAnswer(invocation -> {
            KafkaOperations.OperationsCallback<String, String, ?> callback = invocation.getArgument(0);
            return callback.doInOperations(kafkaTemplate);
        });

        exactlyOnceProducer.send(MESSAGE);

        verify(kafkaTemplate).executeInTransaction(any());
    }

    @Test
    void testAtLeastOnceProducerRetry() {
        SendResult<String, String> sendResult = mock(SendResult.class);
        CompletableFuture<SendResult<String, String>> failureFuture = new CompletableFuture<>();
        failureFuture.completeExceptionally(new RuntimeException("Test failure"));
        CompletableFuture<SendResult<String, String>> successFuture = CompletableFuture.completedFuture(sendResult);

        when(kafkaTemplate.send(eq(TOPIC), eq(MESSAGE)))
                .thenReturn(failureFuture)
                .thenReturn(successFuture);

        atLeastOnceProducer.send(MESSAGE);

        verify(kafkaTemplate, times(2)).send(eq(TOPIC), eq(MESSAGE));
    }
}

--------------------------------------------------------------------------------
/src/main/java/com/example/kafka/config/KafkaConfig.java:
--------------------------------------------------------------------------------
package com.example.kafka.config;

import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Configuration class for Kafka producers and topics.
 * Configures three different KafkaTemplates for different delivery semantics:
 * - At-most-once delivery
 * - At-least-once delivery
 * - Exactly-once delivery
 */
@Configuration
public class KafkaConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    /**
     * Creates a KafkaTemplate configured for at-most-once delivery.
     * Messages may be lost but will never be delivered more than once.
     */
    @Bean
    public KafkaTemplate<String, String> atMostOnceTemplate() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.ACKS_CONFIG, "0");  // No acknowledgment required
        configProps.put(ProducerConfig.RETRIES_CONFIG, 0); // No retries

        ProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(configProps);
        return new KafkaTemplate<>(factory);
    }

    /**
     * Creates a KafkaTemplate configured for at-least-once delivery.
     * Messages will be delivered at least once, with retries on failure.
     */
    @Bean
    public KafkaTemplate<String, String> atLeastOnceTemplate() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.ACKS_CONFIG, "all"); // Wait for all replicas
        configProps.put(ProducerConfig.RETRIES_CONFIG, 3);  // Retry up to 3 times

        ProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(configProps);
        return new KafkaTemplate<>(factory);
    }

    /**
     * Creates a KafkaTemplate configured for exactly-once delivery.
     * Uses idempotent producer and transaction management.
     */
    @Bean
    public KafkaTemplate<String, String> exactlyOnceTemplate() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.ACKS_CONFIG, "all");              // Wait for all replicas
        configProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); // Enable idempotence
        configProps.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "exactly-once-producer");

        ProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(configProps);
        KafkaTemplate<String, String> template = new KafkaTemplate<>(factory);
        template.setTransactionIdPrefix("exactly-once-");
        return template;
    }

    /**
     * Creates a topic for at-most-once delivery messages.
     */
    @Bean
    public NewTopic atMostOnceTopic() {
        return new NewTopic("at-most-once-topic", 3, (short) 3);
    }

    /**
     * Creates a topic for at-least-once delivery messages.
     */
    @Bean
    public NewTopic atLeastOnceTopic() {
        return new NewTopic("at-least-once-topic", 3, (short) 3);
    }

    /**
     * Creates a topic for exactly-once delivery messages.
     */
    @Bean
    public NewTopic exactlyOnceTopic() {
        return new NewTopic("exactly-once-topic", 3, (short) 3);
    }
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Kafka Spring Boot Blueprint

![Java](https://img.shields.io/badge/Java-17-blue)
![Maven](https://img.shields.io/badge/Maven-3-blue)
![Spring Boot](https://img.shields.io/badge/Spring%20Boot-3.2.3-blue)
![JUnit](https://img.shields.io/badge/JUnit-5-blue)
![Mockito](https://img.shields.io/badge/Mockito-blue)
![Kafka](https://img.shields.io/badge/Kafka-3.x-red)

This project demonstrates three different Kafka delivery modes using Spring Boot:
- At-most-once delivery
- At-least-once delivery
- Exactly-once delivery

## Getting Started

### Cloning the Repository

```bash
git clone git@github.com:luismr/kafka-springboot-blueprint.git
cd kafka-springboot-blueprint
```

## Project Structure

The project consists of the following main components:

1. **Abstract Producer**: `AbstractKafkaProducer` - Base class providing common functionality for all producers
2. **Delivery Mode Implementations**:
   - `AtMostOnceProducer` - Messages may be lost but never delivered more than once
   - `AtLeastOnceProducer` - Messages will be delivered at least once, with retries
   - `ExactlyOnceProducer` - Messages will be delivered exactly once, with no duplicates

3. **Configuration**:
   - `KafkaConfig` - Configures Kafka templates and topics
   - `application.yml` - Application properties with detailed comments

## Prerequisites

- Java 17 or later
- Maven 3.6 or later
- Apache Kafka 3.x
- Spring Boot 3.x

## Configuration

The application can be configured using `application.yml` or environment variables.

### Using application.yml

```yaml
spring:
  kafka:
    bootstrap-servers: localhost:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # Delivery mode specific configurations
      at-most-once:
        acks: 0
      at-least-once:
        acks: all
        retries: 3
        retry-backoff-ms: 1000
      exactly-once:
        acks: all
        enable.idempotence: true
        transaction.timeout.ms: 30000
        max.in.flight.requests.per.connection: 5
```

### Using Environment Variables

You can configure the Kafka bootstrap servers using environment variables on different operating systems:

#### Linux/macOS
```bash
export SPRING_KAFKA_BOOTSTRAP_SERVERS=localhost:9092
```

#### Windows (Command Prompt)
```cmd
set SPRING_KAFKA_BOOTSTRAP_SERVERS=localhost:9092
```

#### Windows (PowerShell)
```powershell
$env:SPRING_KAFKA_BOOTSTRAP_SERVERS="localhost:9092"
```

## Building the Project

```bash
mvn clean install
```

## Running Tests

```bash
mvn clean test
```

## Delivery Modes

### At-most-once Delivery

- Messages may be lost but will never be delivered more than once
- No acknowledgment required from brokers
- No retries on failure
- Best for scenarios where message loss is acceptable

### At-least-once Delivery

- Messages will be delivered at least once
- Acknowledgment required from all replicas
- Retries on failure
- May result in duplicate messages
- Best for scenarios where duplicates are acceptable but message loss is not

### Exactly-once Delivery

- Messages will be delivered exactly once
- Uses idempotent producer and transaction management
- No duplicates and no message loss
- Best for scenarios requiring strict message delivery guarantees

## Delivery Mode Configuration

The `application.yml` file contains detailed configuration for each delivery mode:

```yaml
spring:
  kafka:
    bootstrap-servers: localhost:9092
    producer:
      # At-most-once configuration
      at-most-once:
        acks: 0
        retries: 0

      # At-least-once configuration
      at-least-once:
        acks: all
        retries: 3

      # Exactly-once configuration
      exactly-once:
        acks: all
        enable-idempotence: true
        transaction-id-prefix: exactly-once-
```

## Testing

The project includes unit tests for each producer implementation:
- `testAtMostOnceProducer()`
- `testAtLeastOnceProducer()`
- `testExactlyOnceProducer()`
- `testAtLeastOnceProducerRetry()`

Tests use Mockito to mock the KafkaTemplate and verify producer behavior.

## Contributing

Contributions are welcome! Please feel free to submit a Pull Request. For major changes, please open an issue first to discuss what you would like to change.

1. Fork the repository
2. Create your feature branch (`git checkout -b feature/AmazingFeature`)
3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)
4. Push to the branch (`git push origin feature/AmazingFeature`)
5. Open a Pull Request

Please make sure to update tests as appropriate and follow the existing code style.

## Running Kafka Locally

### Single Node Setup

For local development, you can run a single Kafka broker using Docker:

```bash
docker run -d \
  --name kafka \
  -p 9092:9092 \
  -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \
  -e KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 \
  -e KAFKA_TRANSACTION_STATE_LOG_MIN_ISR=1 \
  -e KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR=1 \
  -e KAFKA_NODE_ID=0 \
  -e KAFKA_PROCESS_ROLES=controller,broker \
  -e KAFKA_CONTROLLER_QUORUM_VOTERS=0@kafka:9093 \
  -e KAFKA_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093 \
  -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \
  -e KAFKA_CONTROLLER_LISTENER_NAMES=CONTROLLER \
  -e KAFKA_INTER_BROKER_LISTENER_NAME=PLAINTEXT \
  -e KAFKA_AUTO_CREATE_TOPICS_ENABLE=true \
  confluentinc/cp-kafka:7.5.1
```

This setup:
- Uses KRaft (Kafka Raft metadata mode) instead of Zookeeper
- Exposes port 9092 for client connections
- Enables auto topic creation
- Configures the broker for local development
- Uses the Confluent Kafka image

### Cluster Setup

A Docker Compose setup for running a Kafka cluster locally is available at [kafka-cluster-docker-compose](https://github.com/luismr/kafka-cluster-docker-compose). This setup includes:
- Multiple Kafka brokers using KRaft (Kafka Raft metadata mode)
- Network configuration for local development

## License

This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details.

--------------------------------------------------------------------------------
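To verify that the producers actually publish to the broker started above, one option is to tail a topic with the console consumer. This assumes the single-node container named `kafka` from the Running Kafka Locally section and the topic names created by `KafkaConfig`:

```bash
# Read everything published to the at-least-once demo topic
docker exec -it kafka kafka-console-consumer \
  --bootstrap-server localhost:9092 \
  --topic at-least-once-topic \
  --from-beginning
```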