├── src ├── test │ ├── resources │ │ ├── application-component-test.yml │ │ ├── application-test.yml │ │ └── logback-test.xml │ └── java │ │ └── demo │ │ └── kafka │ │ └── streams │ │ ├── util │ │ └── TestEventData.java │ │ ├── processor │ │ └── PaymentTopologyTest.java │ │ ├── component │ │ └── KafkaStreamsCT.java │ │ └── integration │ │ └── KafkaStreamsPaymentIntegrationTest.java └── main │ ├── java │ └── demo │ │ └── kafka │ │ └── streams │ │ ├── processor │ │ ├── Currency.java │ │ ├── Rails.java │ │ └── PaymentTopology.java │ │ ├── mapper │ │ ├── MappingException.java │ │ └── JsonMapper.java │ │ ├── exception │ │ └── KafkaStreamsDemoException.java │ │ ├── KafkaStreamsDemoApplication.java │ │ ├── event │ │ └── PaymentEvent.java │ │ ├── properties │ │ └── KafkaStreamsDemoProperties.java │ │ ├── serdes │ │ ├── PaymentSerdes.java │ │ ├── JsonSerializer.java │ │ └── JsonDeserializer.java │ │ ├── controller │ │ ├── TopologyController.java │ │ └── BalanceController.java │ │ └── KafkaStreamsDemoConfiguration.java │ └── resources │ └── application.yml ├── Dockerfile ├── .gitignore ├── README.md ├── pom.xml └── LICENSE /src/test/resources/application-component-test.yml: -------------------------------------------------------------------------------- 1 | --- 2 | kafka: 3 | bootstrap-servers: kafka:9092 4 | -------------------------------------------------------------------------------- /src/test/resources/application-test.yml: -------------------------------------------------------------------------------- 1 | kafka: 2 | bootstrap-servers: ${spring.embedded.kafka.brokers} 3 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/processor/Currency.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.processor; 2 | 3 | public enum Currency { 4 | GBP, 5 | USD 6 | } 7 | -------------------------------------------------------------------------------- 
/Dockerfile:
--------------------------------------------------------------------------------
FROM openjdk:17.0.2-jdk-slim
ARG JAR_FILE=target/*.jar
COPY ${JAR_FILE} app.jar
ENTRYPOINT ["sh", "-c", "java ${JAVA_OPTS} -jar /app.jar"]
--------------------------------------------------------------------------------
/src/main/java/demo/kafka/streams/processor/Rails.java:
--------------------------------------------------------------------------------
package demo.kafka.streams.processor;

/**
 * Bank rails a payment can be routed over. FOO and BAR are the rails the
 * topology supports for outbound publish; XXX is an unsupported rails that
 * the topology filters out (used by tests to exercise that filtering).
 */
public enum Rails {
    BANK_RAILS_FOO,
    BANK_RAILS_BAR,
    BANK_RAILS_XXX
}
--------------------------------------------------------------------------------
/src/main/java/demo/kafka/streams/mapper/MappingException.java:
--------------------------------------------------------------------------------
package demo.kafka.streams.mapper;

/**
 * Unchecked exception raised by {@code JsonMapper} when mapping between JSON
 * and objects fails.
 */
public class MappingException extends RuntimeException {

    /**
     * Wrap the underlying mapping failure.
     */
    public MappingException(Throwable t) {
        super(t);
    }

    /**
     * Wrap the underlying mapping failure with a context message describing
     * what was being mapped, improving diagnosability of failures.
     */
    public MappingException(String message, Throwable t) {
        super(message, t);
    }
}
--------------------------------------------------------------------------------
/src/main/java/demo/kafka/streams/exception/KafkaStreamsDemoException.java:
--------------------------------------------------------------------------------
package demo.kafka.streams.exception;

/**
 * Unchecked application exception for the Kafka Streams demo.
 */
public class KafkaStreamsDemoException extends RuntimeException {

    /**
     * Wrap the underlying cause.
     */
    public KafkaStreamsDemoException(Throwable cause) {
        super(cause);
    }

    /**
     * Wrap the underlying cause with a context message, so callers can record
     * what operation failed rather than only the root cause.
     */
    public KafkaStreamsDemoException(String message, Throwable cause) {
        super(message, cause);
    }
}
--------------------------------------------------------------------------------
/src/main/resources/application.yml:
--------------------------------------------------------------------------------
spring:
  application:
    name: kafka-streams-demo

kafka:
  bootstrap-servers: kafka:9092

server:
  port: 9001

kafkastreamsdemo:
  id: demo
  paymentInboundTopic: "payment-topic"
  railsFooOutboundTopic: "rails-foo-topic"
  railsBarOutboundTopic: "rails-bar-topic"
-------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/KafkaStreamsDemoApplication.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class KafkaStreamsDemoApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(KafkaStreamsDemoApplication.class, args); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/event/PaymentEvent.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.event; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | @Builder 9 | @Data 10 | @NoArgsConstructor 11 | @AllArgsConstructor 12 | public class PaymentEvent { 13 | 14 | private String paymentId; 15 | 16 | private Long amount; 17 | 18 | private String currency; 19 | 20 | private String toAccount; 21 | 22 | private String 
fromAccount; 23 | 24 | private String rails; 25 | } 26 | -------------------------------------------------------------------------------- /src/test/java/demo/kafka/streams/util/TestEventData.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.util; 2 | 3 | import demo.kafka.streams.event.PaymentEvent; 4 | 5 | public class TestEventData { 6 | 7 | public static PaymentEvent buildPaymentEvent(String id, Long amount, String currency, String fromAccount, String toAccount, String rails) { 8 | return PaymentEvent.builder() 9 | .paymentId(id) 10 | .amount(amount) 11 | .currency(currency) 12 | .fromAccount(fromAccount) 13 | .toAccount(toAccount) 14 | .rails(rails) 15 | .build(); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/properties/KafkaStreamsDemoProperties.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.properties; 2 | 3 | import javax.validation.constraints.NotNull; 4 | 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | import org.springframework.boot.context.properties.ConfigurationProperties; 8 | import org.springframework.context.annotation.Configuration; 9 | import org.springframework.validation.annotation.Validated; 10 | 11 | @Configuration 12 | @ConfigurationProperties("kafkastreamsdemo") 13 | @Getter 14 | @Setter 15 | @Validated 16 | public class KafkaStreamsDemoProperties { 17 | @NotNull private String id; 18 | @NotNull private String paymentInboundTopic; 19 | @NotNull private String railsFooOutboundTopic; 20 | @NotNull private String railsBarOutboundTopic; 21 | } 22 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/serdes/PaymentSerdes.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.serdes; 2 | 3 | 
import demo.kafka.streams.event.PaymentEvent; 4 | import org.apache.kafka.common.serialization.Serde; 5 | import org.apache.kafka.common.serialization.Serdes; 6 | 7 | /** 8 | * Requires the WrapperSerdes to allow this to be added as the default serdes config in the KafkaStreams configuration. 9 | */ 10 | public final class PaymentSerdes extends Serdes.WrapperSerde { 11 | 12 | public PaymentSerdes() { 13 | super(new JsonSerializer<>(), new JsonDeserializer<>(PaymentEvent.class)); 14 | } 15 | 16 | public static Serde serdes() { 17 | JsonSerializer serializer = new JsonSerializer<>(); 18 | JsonDeserializer deserializer = new JsonDeserializer<>(PaymentEvent.class); 19 | return Serdes.serdeFrom(serializer, deserializer); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/controller/TopologyController.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.controller; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.http.ResponseEntity; 5 | import org.springframework.kafka.config.StreamsBuilderFactoryBean; 6 | import org.springframework.web.bind.annotation.GetMapping; 7 | import org.springframework.web.bind.annotation.RequestMapping; 8 | import org.springframework.web.bind.annotation.RestController; 9 | 10 | @RestController 11 | @RequestMapping("/v1/kafka-streams") 12 | public class TopologyController { 13 | 14 | @Autowired 15 | private StreamsBuilderFactoryBean factoryBean; 16 | 17 | /** 18 | * Endpoint providing a description of the topology. 
19 | */ 20 | @GetMapping("/topology") 21 | public ResponseEntity getTopology() { 22 | return ResponseEntity.ok(factoryBean.getTopology().describe().toString()); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/serdes/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.serdes; 2 | 3 | import java.nio.charset.StandardCharsets; 4 | import java.util.Map; 5 | 6 | import demo.kafka.streams.mapper.JsonMapper; 7 | import org.apache.kafka.common.errors.SerializationException; 8 | import org.apache.kafka.common.serialization.Serializer; 9 | 10 | public class JsonSerializer implements Serializer { 11 | 12 | public JsonSerializer() { 13 | } 14 | 15 | @Override 16 | public void configure(Map props, boolean isKey) { 17 | } 18 | 19 | @Override 20 | public byte[] serialize(String topic, T data) { 21 | if (data == null) 22 | return null; 23 | 24 | try { 25 | return JsonMapper.writeToJson(data).getBytes(StandardCharsets.UTF_8); 26 | } catch (Exception e) { 27 | throw new SerializationException("Error serializing JSON message", e); 28 | } 29 | } 30 | 31 | @Override 32 | public void close() { 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/serdes/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.serdes; 2 | 3 | import java.nio.charset.StandardCharsets; 4 | import java.util.Map; 5 | 6 | import demo.kafka.streams.mapper.JsonMapper; 7 | import org.apache.kafka.common.errors.SerializationException; 8 | import org.apache.kafka.common.serialization.Deserializer; 9 | 10 | public class JsonDeserializer implements Deserializer { 11 | 12 | private Class destinationClass; 13 | 14 | public JsonDeserializer(Class destinationClass) { 15 | this.destinationClass = 
destinationClass; 16 | } 17 | 18 | @Override 19 | public void configure(Map props, boolean isKey) { 20 | } 21 | 22 | @Override 23 | public T deserialize(String topic, byte[] bytes) { 24 | if (bytes == null) 25 | return null; 26 | 27 | try { 28 | return JsonMapper.readFromJson(new String(bytes, StandardCharsets.UTF_8), destinationClass); 29 | } catch (Exception e) { 30 | throw new SerializationException("Error deserializing message", e); 31 | } 32 | } 33 | 34 | @Override 35 | public void close() { 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/mapper/JsonMapper.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.mapper; 2 | 3 | import com.fasterxml.jackson.databind.DeserializationFeature; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.SerializationFeature; 6 | 7 | public class JsonMapper { 8 | 9 | private static final ObjectMapper objectMapper = new ObjectMapper(); 10 | 11 | static { 12 | objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); 13 | objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); 14 | objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); 15 | objectMapper.configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false); 16 | objectMapper.findAndRegisterModules(); 17 | } 18 | 19 | /** 20 | * Map the given JSON String to the required class type. 21 | */ 22 | public static T readFromJson(String json, Class clazz) throws MappingException { 23 | try { 24 | return objectMapper.readValue(json, clazz); 25 | } catch (Exception e) { 26 | throw new MappingException(e); 27 | } 28 | } 29 | 30 | /** 31 | * Map the given Object to a JSON String. 
32 | */ 33 | public static String writeToJson(Object obj) throws MappingException { 34 | try { 35 | return objectMapper.writeValueAsString(obj); 36 | } catch (Exception e) { 37 | throw new MappingException(e); 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/controller/BalanceController.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.controller; 2 | 3 | import org.apache.kafka.streams.KafkaStreams; 4 | import org.apache.kafka.streams.StoreQueryParameters; 5 | import org.apache.kafka.streams.state.QueryableStoreTypes; 6 | import org.apache.kafka.streams.state.ReadOnlyKeyValueStore; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.http.ResponseEntity; 9 | import org.springframework.kafka.config.StreamsBuilderFactoryBean; 10 | import org.springframework.web.bind.annotation.GetMapping; 11 | import org.springframework.web.bind.annotation.PathVariable; 12 | import org.springframework.web.bind.annotation.RequestMapping; 13 | import org.springframework.web.bind.annotation.RestController; 14 | 15 | @RestController 16 | @RequestMapping("/v1/kafka-streams") 17 | public class BalanceController { 18 | 19 | @Autowired 20 | private StreamsBuilderFactoryBean factoryBean; 21 | 22 | @GetMapping("/balance/{account}") 23 | public ResponseEntity getAccountBalance(@PathVariable String account) { 24 | KafkaStreams kafkaStreams = factoryBean.getKafkaStreams(); 25 | ReadOnlyKeyValueStore balances = kafkaStreams.store( 26 | StoreQueryParameters.fromNameAndType("balance", QueryableStoreTypes.keyValueStore()) 27 | ); 28 | ResponseEntity response; 29 | if(balances.get(account)==null) { 30 | response = ResponseEntity.notFound().build(); 31 | } else { 32 | response = ResponseEntity.ok(balances.get(account)); 33 | } 34 | return response; 35 | } 36 | } 37 | 
-------------------------------------------------------------------------------- /src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | %d{HH:mm:ss.SSS} %highlight(%-5level) %cyan(%logger{18}) - %msg%n 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kafka Streams Project 2 | 3 | Spring Boot application demonstrating usage of the Kafka Streams. 4 | 5 | This repo accompanies the following series of articles on Kafka Streams: 6 | 7 | - [Kafka Streams: Introduction](https://medium.com/lydtech-consulting/kafka-streams-introduction-d7e5421feb1b) 8 | - [Kafka Streams: Spring Boot Demo](https://medium.com/lydtech-consulting/kafka-streams-spring-boot-demo-ff0e74e08c9c) 9 | - [Kafka Streams: Testing](https://medium.com/lydtech-consulting/kafka-streams-testing-f263f216808f) 10 | 11 | ## Integration Tests 12 | 13 | Run integration tests with `mvn clean test` 14 | 15 | The tests demonstrate streaming payments events that are filtered and transformed, and results emitted to outbound topics. 16 | 17 | The account balances are tracked in a state store, which is exposed via REST endpoint allowing querying the current values. 18 | 19 | ## Component Tests 20 | 21 | Build Spring Boot application jar: 22 | 23 | ``` 24 | mvn clean install 25 | ``` 26 | 27 | Build Docker container: 28 | 29 | ``` 30 | docker build -t ct/kafka-streams-demo:latest . 31 | ``` 32 | 33 | Assumes `ct` is used as the container prefix for the component tests (which is the default but can be overridden). 
34 | 35 | Run tests: 36 | 37 | ``` 38 | mvn test -Pcomponent 39 | ``` 40 | 41 | Run tests leaving containers running (for further test runs): 42 | 43 | ``` 44 | mvn test -Pcomponent -Dcontainers.stayup 45 | ``` 46 | 47 | ### Inspecting Kafka Topics 48 | 49 | View consumer groups: 50 | `docker exec -it ct-kafka /bin/sh /usr/bin/kafka-consumer-groups --bootstrap-server localhost:9092 --list` 51 | 52 | Inspect consumer group: 53 | `docker exec -it ct-kafka /bin/sh /usr/bin/kafka-consumer-groups --bootstrap-server localhost:9092 --describe --group kafka-streams-demo` 54 | 55 | View topics: 56 | `docker exec -it ct-kafka /bin/sh /usr/bin/kafka-topics --bootstrap-server localhost:9092 --list` 57 | 58 | Inspect topic: 59 | `docker exec -it ct-kafka /bin/sh /usr/bin/kafka-topics --bootstrap-server localhost:9092 --describe --topic payment-event` 60 | 61 | View messages on topic: 62 | `docker exec -it ct-kafka /bin/sh /usr/bin/kafka-console-consumer --bootstrap-server localhost:9092 --topic payment-event --from-beginning` 63 | 64 | ### Docker Commands 65 | 66 | Manual clean up (if left containers up): 67 | `docker rm -f $(docker ps -aq)` 68 | -------------------------------------------------------------------------------- /src/main/java/demo/kafka/streams/KafkaStreamsDemoConfiguration.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.clients.producer.ProducerConfig; 9 | import org.apache.kafka.common.serialization.Serdes; 10 | import org.apache.kafka.common.serialization.StringDeserializer; 11 | import org.apache.kafka.common.serialization.StringSerializer; 12 | import org.springframework.beans.factory.annotation.Value; 13 | import org.springframework.context.annotation.Bean; 14 | import 
org.springframework.context.annotation.ComponentScan; 15 | import org.springframework.context.annotation.Configuration; 16 | import org.springframework.kafka.annotation.EnableKafkaStreams; 17 | import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration; 18 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 19 | import org.springframework.kafka.config.KafkaStreamsConfiguration; 20 | import org.springframework.kafka.core.ConsumerFactory; 21 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 22 | import org.springframework.kafka.core.DefaultKafkaProducerFactory; 23 | import org.springframework.kafka.core.KafkaTemplate; 24 | import org.springframework.kafka.core.ProducerFactory; 25 | 26 | import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG; 27 | import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; 28 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; 29 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; 30 | 31 | @Slf4j 32 | @ComponentScan(basePackages = {"demo.kafka.streams"}) 33 | @Configuration 34 | @EnableKafkaStreams 35 | public class KafkaStreamsDemoConfiguration { 36 | 37 | @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME) 38 | public KafkaStreamsConfiguration kafkaStreamsConfig(@Value("${kafka.bootstrap-servers}") final String bootstrapServers) { 39 | Map props = new HashMap<>(); 40 | props.put(APPLICATION_ID_CONFIG, "kafka-streams-demo"); 41 | props.put(BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); 42 | // Use to specify location of stateful store. 
43 | // props.put(STATE_DIR_CONFIG,"./rocksdb"); 44 | props.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 45 | // This default value serdes for Long type was required for the KTable aggregation step 46 | props.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Long().getClass().getName()); 47 | return new KafkaStreamsConfiguration(props); 48 | } 49 | 50 | @Bean 51 | public ConcurrentKafkaListenerContainerFactory kafkaListenerContainerFactory(final ConsumerFactory consumerFactory) { 52 | final ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory(); 53 | factory.setConsumerFactory(consumerFactory); 54 | return factory; 55 | } 56 | 57 | @Bean 58 | public KafkaTemplate kafkaTemplate(final ProducerFactory producerFactory) { 59 | return new KafkaTemplate<>(producerFactory); 60 | } 61 | 62 | @Bean 63 | public ConsumerFactory consumerFactory(@Value("${kafka.bootstrap-servers}") final String bootstrapServers) { 64 | final Map config = new HashMap<>(); 65 | config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); 66 | config.put(ConsumerConfig.GROUP_ID_CONFIG, "demo-kafka"); 67 | config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 68 | config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 69 | return new DefaultKafkaConsumerFactory<>(config); 70 | } 71 | 72 | @Bean 73 | public ProducerFactory producerFactory(@Value("${kafka.bootstrap-servers}") final String bootstrapServers) { 74 | final Map config = new HashMap<>(); 75 | config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); 76 | config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 77 | config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); 78 | return new DefaultKafkaProducerFactory<>(config); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- 
/src/main/java/demo/kafka/streams/processor/PaymentTopology.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.processor; 2 | 3 | import java.util.Arrays; 4 | import java.util.List; 5 | 6 | import demo.kafka.streams.event.PaymentEvent; 7 | import demo.kafka.streams.properties.KafkaStreamsDemoProperties; 8 | import demo.kafka.streams.serdes.PaymentSerdes; 9 | import lombok.RequiredArgsConstructor; 10 | import lombok.extern.slf4j.Slf4j; 11 | import org.apache.kafka.common.serialization.Serde; 12 | import org.apache.kafka.common.serialization.Serdes; 13 | import org.apache.kafka.streams.KeyValue; 14 | import org.apache.kafka.streams.StreamsBuilder; 15 | import org.apache.kafka.streams.kstream.Aggregator; 16 | import org.apache.kafka.streams.kstream.Consumed; 17 | import org.apache.kafka.streams.kstream.Grouped; 18 | import org.apache.kafka.streams.kstream.Initializer; 19 | import org.apache.kafka.streams.kstream.KStream; 20 | import org.apache.kafka.streams.kstream.Materialized; 21 | import org.apache.kafka.streams.kstream.Produced; 22 | import org.springframework.beans.factory.annotation.Autowired; 23 | import org.springframework.stereotype.Component; 24 | 25 | @Component 26 | @Slf4j 27 | @RequiredArgsConstructor 28 | public class PaymentTopology { 29 | 30 | @Autowired 31 | private final KafkaStreamsDemoProperties properties; 32 | 33 | private static List SUPPORTED_RAILS = Arrays.asList(Rails.BANK_RAILS_FOO.name(), Rails.BANK_RAILS_BAR.name()); 34 | 35 | private static final Serde STRING_SERDE = Serdes.String(); 36 | private static final Serde LONG_SERDE = Serdes.Long(); 37 | 38 | @Autowired 39 | public void buildPipeline(StreamsBuilder streamsBuilder) { 40 | 41 | KStream messageStream = streamsBuilder 42 | .stream(properties.getPaymentInboundTopic(), Consumed.with(STRING_SERDE, PaymentSerdes.serdes())) 43 | .peek((key, payment) -> log.info("Payment event received with key=" + key + ", payment=" + 
payment)) 44 | 45 | // Filter out unsupported bank rails. 46 | .filter((key, payment) -> SUPPORTED_RAILS.contains(payment.getRails())) 47 | .peek((key, value) -> log.info("Filtered payment event received with key=" + key + ", value=" + value)); 48 | 49 | // Branch based on currency in order to perform any FX. 50 | KStream[] currenciesBranches = messageStream.branch( 51 | (key, payment) -> payment.getCurrency().equals(Currency.GBP.name()), 52 | (key, payment) -> payment.getCurrency().equals(Currency.USD.name()) 53 | ); 54 | KStream fxStream = currenciesBranches[1].mapValues( 55 | // Use mapValues() as we are transforming the payment, but not changing the key. 56 | (payment) -> { 57 | // Perform FX conversion. 58 | double usdToGbpRate = 0.8; 59 | PaymentEvent transformedPayment = PaymentEvent.builder() 60 | .paymentId(payment.getPaymentId()) 61 | .amount(Math.round(payment.getAmount() * usdToGbpRate)) 62 | .currency(Currency.GBP.name()) 63 | .fromAccount(payment.getFromAccount()) 64 | .toAccount(payment.getToAccount()) 65 | .rails(payment.getRails()) 66 | .build(); 67 | return transformedPayment; 68 | }); 69 | 70 | // Merge the payment streams back together. 71 | KStream mergedStreams = currenciesBranches[0].merge(fxStream) 72 | .peek((key, value) -> log.info("Merged payment event received with key=" + key + ", value=" + value)); 73 | 74 | // Create the KTable stateful store to track account balances. 75 | mergedStreams 76 | .map((key, payment) -> new KeyValue<>(payment.getFromAccount(), payment.getAmount())) 77 | .groupByKey(Grouped.with(STRING_SERDE, LONG_SERDE)) 78 | .aggregate(new Initializer() { 79 | @Override 80 | public Long apply() { 81 | return 0L; 82 | } 83 | }, new Aggregator() { 84 | @Override 85 | public Long apply(final String key, final Long value, final Long aggregate) { 86 | return aggregate + value; 87 | } 88 | }, Materialized.with(STRING_SERDE, LONG_SERDE).as("balance")); 89 | 90 | // Branch based on bank rails for outbound publish. 
91 | KStream[] railsBranches = mergedStreams.branch( 92 | (key, payment) -> payment.getRails().equals(Rails.BANK_RAILS_FOO.name()), 93 | (key, payment) -> payment.getRails().equals(Rails.BANK_RAILS_BAR.name())); 94 | 95 | // Publish outbound events. 96 | railsBranches[0].to(properties.getRailsFooOutboundTopic(), Produced.with(STRING_SERDE, PaymentSerdes.serdes())); 97 | railsBranches[1].to(properties.getRailsBarOutboundTopic(), Produced.with(STRING_SERDE, PaymentSerdes.serdes())); 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/test/java/demo/kafka/streams/processor/PaymentTopologyTest.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.processor; 2 | 3 | import java.util.Properties; 4 | import java.util.UUID; 5 | 6 | import demo.kafka.streams.event.PaymentEvent; 7 | import demo.kafka.streams.properties.KafkaStreamsDemoProperties; 8 | import demo.kafka.streams.serdes.PaymentSerdes; 9 | import org.apache.kafka.common.serialization.Serdes; 10 | import org.apache.kafka.common.serialization.StringDeserializer; 11 | import org.apache.kafka.common.serialization.StringSerializer; 12 | import org.apache.kafka.streams.KeyValue; 13 | import org.apache.kafka.streams.StreamsBuilder; 14 | import org.apache.kafka.streams.TestInputTopic; 15 | import org.apache.kafka.streams.TestOutputTopic; 16 | import org.apache.kafka.streams.Topology; 17 | import org.apache.kafka.streams.TopologyTestDriver; 18 | import org.apache.kafka.streams.state.KeyValueStore; 19 | import org.junit.jupiter.api.BeforeEach; 20 | import org.junit.jupiter.api.Test; 21 | 22 | import static demo.kafka.streams.processor.Rails.BANK_RAILS_BAR; 23 | import static demo.kafka.streams.processor.Rails.BANK_RAILS_FOO; 24 | import static demo.kafka.streams.processor.Rails.BANK_RAILS_XXX; 25 | import static demo.kafka.streams.util.TestEventData.buildPaymentEvent; 26 | import static 
org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG; 27 | import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG; 28 | import static org.hamcrest.MatcherAssert.assertThat; 29 | import static org.hamcrest.Matchers.equalTo; 30 | import static org.hamcrest.Matchers.hasItems; 31 | import static org.hamcrest.Matchers.nullValue; 32 | import static org.mockito.Mockito.mock; 33 | import static org.mockito.Mockito.when; 34 | 35 | class PaymentTopologyTest { 36 | 37 | private PaymentTopology paymentTopology; 38 | private KafkaStreamsDemoProperties properties; 39 | 40 | private static final String PAYMENT_INBOUND_TOPIC = "payment-topic"; 41 | private static final String RAILS_FOO_OUTBOUND_TOPIC = "rails-foo-topic"; 42 | private static final String RAILS_BAR_OUTBOUND_TOPIC = "rails-BAR-topic"; 43 | 44 | // GBP Accounts. 45 | private static final String ACCOUNT_GBP_ABC = "ABC-"+UUID.randomUUID(); 46 | private static final String ACCOUNT_GBP_DEF = "DEF-"+UUID.randomUUID(); 47 | 48 | // USD Accounts. 
49 | private static final String ACCOUNT_USD_XYZ = "XYZ-"+UUID.randomUUID(); 50 | 51 | @BeforeEach 52 | void setUp() { 53 | properties = mock(KafkaStreamsDemoProperties.class); 54 | when(properties.getPaymentInboundTopic()).thenReturn(PAYMENT_INBOUND_TOPIC); 55 | when(properties.getRailsFooOutboundTopic()).thenReturn(RAILS_FOO_OUTBOUND_TOPIC); 56 | when(properties.getRailsBarOutboundTopic()).thenReturn(RAILS_BAR_OUTBOUND_TOPIC); 57 | 58 | paymentTopology = new PaymentTopology(properties); 59 | } 60 | 61 | @Test 62 | void testPaymentTopology() { 63 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 64 | paymentTopology.buildPipeline(streamsBuilder); 65 | Topology topology = streamsBuilder.build(); 66 | 67 | Properties streamsConfiguration = new Properties(); 68 | streamsConfiguration.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 69 | streamsConfiguration.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Long().getClass().getName()); 70 | 71 | TopologyTestDriver topologyTestDriver = new TopologyTestDriver(topology, streamsConfiguration); 72 | TestInputTopic inputTopic = topologyTestDriver 73 | .createInputTopic(PAYMENT_INBOUND_TOPIC, new StringSerializer(), PaymentSerdes.serdes().serializer()); 74 | 75 | TestOutputTopic railsFooOutputTopic = topologyTestDriver 76 | .createOutputTopic(RAILS_FOO_OUTBOUND_TOPIC, new StringDeserializer(), PaymentSerdes.serdes().deserializer()); 77 | TestOutputTopic railsBarOutputTopic = topologyTestDriver 78 | .createOutputTopic(RAILS_BAR_OUTBOUND_TOPIC, new StringDeserializer(), PaymentSerdes.serdes().deserializer()); 79 | 80 | // Three payments via FOO rails from ABC to DEF, total 210 GBP. 
81 | PaymentEvent payment1 = buildPaymentEvent(UUID.randomUUID().toString(), 82 | 100L, 83 | "GBP", 84 | ACCOUNT_GBP_ABC, 85 | ACCOUNT_GBP_DEF, 86 | BANK_RAILS_FOO.name()); 87 | inputTopic.pipeInput(payment1.getPaymentId(), payment1); 88 | PaymentEvent payment2 = buildPaymentEvent(UUID.randomUUID().toString(), 89 | 50L, 90 | "GBP", 91 | ACCOUNT_GBP_ABC, 92 | ACCOUNT_GBP_DEF, 93 | BANK_RAILS_FOO.name()); 94 | inputTopic.pipeInput(payment2.getPaymentId(), payment2); 95 | PaymentEvent payment3 = buildPaymentEvent(UUID.randomUUID().toString(), 96 | 60L, 97 | "GBP", 98 | ACCOUNT_GBP_ABC, 99 | ACCOUNT_GBP_DEF, 100 | BANK_RAILS_FOO.name()); 101 | inputTopic.pipeInput(payment3.getPaymentId(), payment3); 102 | 103 | // Payment on an unsupported rails should be filtered out. 104 | PaymentEvent payment4 = buildPaymentEvent(UUID.randomUUID().toString(), 105 | 1200L, 106 | "GBP", 107 | ACCOUNT_GBP_ABC, 108 | ACCOUNT_GBP_DEF, 109 | BANK_RAILS_XXX.name()); 110 | inputTopic.pipeInput(payment4.getPaymentId(), payment4); 111 | 112 | // Payment from a USD account will require FX. 113 | PaymentEvent payment5 = buildPaymentEvent(UUID.randomUUID().toString(), 114 | 1000L, // Converts to 800 GBP. 115 | "USD", 116 | ACCOUNT_USD_XYZ, 117 | ACCOUNT_GBP_DEF, 118 | BANK_RAILS_BAR.name()); 119 | inputTopic.pipeInput(payment5.getPaymentId(), payment5); 120 | 121 | // Assert the outbound rails topics have the expected events. 122 | assertThat(railsFooOutputTopic.readKeyValuesToList(), 123 | hasItems( 124 | KeyValue.pair(payment1.getPaymentId(), payment1), 125 | KeyValue.pair(payment2.getPaymentId(), payment2), 126 | KeyValue.pair(payment3.getPaymentId(), payment3) 127 | )); 128 | 129 | // Expected event after FX transform. 130 | PaymentEvent payment5fx = buildPaymentEvent(payment5.getPaymentId(), 131 | 800L, 132 | "GBP", // Converted from 1000 USD. 
133 | payment5.getFromAccount(), 134 | payment5.getToAccount(), 135 | payment5.getRails()); 136 | assertThat(railsBarOutputTopic.readKeyValuesToList(), 137 | hasItems( 138 | KeyValue.pair(payment5.getPaymentId(), payment5fx) 139 | )); 140 | 141 | // Expect the balances are correctly aggregated in the state store. 142 | KeyValueStore balanceStore = topologyTestDriver.getKeyValueStore("balance"); 143 | assertThat(balanceStore.get(ACCOUNT_GBP_ABC), equalTo(210L)); // Payments: 100 + 60 + 50. 144 | assertThat(balanceStore.get(ACCOUNT_GBP_DEF), nullValue()); // No payments from this account. 145 | assertThat(balanceStore.get(ACCOUNT_USD_XYZ), equalTo(800L)); // 1000 USD * 0.8 FX. 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | org.springframework.boot 7 | spring-boot-starter-parent 8 | 2.5.2 9 | 10 | 11 | demo.kafka 12 | kafka-streams 13 | 1.3.0 14 | kafka-streams-demo 15 | Demo Kafka Streams 16 | 17 | 11 18 | 2.5.2 19 | 2.7.8 20 | 2.7.1 21 | 1.16.3 22 | 3.2.12 23 | 24 | 25 | false 26 | 27 | 28 | 29 | org.springframework.boot 30 | spring-boot-starter 31 | ${spring.boot.version} 32 | 33 | 34 | org.springframework.boot 35 | spring-boot-starter-web 36 | ${spring.boot.version} 37 | 38 | 39 | org.springframework.boot 40 | spring-boot-starter-actuator 41 | ${spring.boot.version} 42 | 43 | 44 | org.springframework.kafka 45 | spring-kafka 46 | ${spring.kafka.version} 47 | 48 | 49 | org.apache.kafka 50 | kafka-streams 51 | ${kafka.streams.version} 52 | 53 | 54 | 55 | javax.validation 56 | validation-api 57 | 2.0.1.Final 58 | 59 | 60 | org.hibernate.validator 61 | hibernate-validator 62 | 6.0.18.Final 63 | 64 | 65 | 66 | org.projectlombok 67 | lombok 68 | 1.18.26 69 | 70 | 71 | 72 | org.springframework.boot 73 | spring-boot-starter-test 74 | ${spring.boot.version} 75 | test 76 | 77 | 78 | org.junit.vintage 79 | 
junit-vintage-engine 80 | 81 | 82 | 83 | 84 | org.springframework.cloud 85 | spring-cloud-contract-wiremock 86 | 3.0.4 87 | test 88 | 89 | 90 | org.springframework.kafka 91 | spring-kafka-test 92 | ${spring.kafka.version} 93 | test 94 | 95 | 96 | org.junit.platform 97 | junit-platform-launcher 98 | 1.8.1 99 | test 100 | 101 | 102 | org.awaitility 103 | awaitility 104 | 3.0.0 105 | test 106 | 107 | 108 | dev.lydtech 109 | component-test-framework 110 | 1.4.0 111 | test 112 | 113 | 114 | 115 | org.testcontainers 116 | testcontainers 117 | ${testcontainers.version} 118 | 119 | 120 | org.slf4j 121 | slf4j-api 122 | 123 | 124 | org.apache.httpcomponents 125 | httpclient 126 | 127 | 128 | net.java.dev.jna 129 | jna-platform 130 | 131 | 132 | 133 | 134 | org.testcontainers 135 | kafka 136 | ${testcontainers.version} 137 | test 138 | 139 | 140 | com.squareup.okhttp3 141 | okhttp 142 | 4.9.0 143 | test 144 | 145 | 146 | 147 | commons-io 148 | commons-io 149 | 2.11.0 150 | test 151 | 152 | 153 | io.rest-assured 154 | rest-assured 155 | test 156 | 157 | 158 | org.apache.kafka 159 | kafka-streams-test-utils 160 | ${kafka.streams.version} 161 | test 162 | 163 | 164 | com.github.docker-java 165 | docker-java-core 166 | ${com.github.docker-java.version} 167 | test 168 | 169 | 170 | com.github.docker-java 171 | docker-java-transport-httpclient5 172 | ${com.github.docker-java.version} 173 | test 174 | 175 | 176 | com.github.docker-java 177 | docker-java-api 178 | ${com.github.docker-java.version} 179 | test 180 | 181 | 182 | 183 | 184 | 185 | 186 | org.springframework.boot 187 | spring-boot-maven-plugin 188 | 189 | 190 | org.apache.maven.plugins 191 | maven-surefire-plugin 192 | 3.0.0-M9 193 | 194 | 195 | 196 | 197 | 198 | 199 | component 200 | 201 | 202 | 203 | org.apache.maven.plugins 204 | maven-surefire-plugin 205 | 3.0.0-M9 206 | 207 | 208 | *CT.* 209 | 210 | 211 | ${containers.stayup} 212 | 213 | 214 | ${project.name} 215 | 1 216 | false 217 | ct 218 | true 219 | 
payment-topic,rails-foo-topic,rails-bar-topic 220 | 5 221 | false 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | -------------------------------------------------------------------------------- /src/test/java/demo/kafka/streams/component/KafkaStreamsCT.java: -------------------------------------------------------------------------------- 1 | package demo.kafka.streams.component; 2 | 3 | import java.time.Duration; 4 | import java.util.Arrays; 5 | import java.util.List; 6 | import java.util.Random; 7 | import java.util.UUID; 8 | import java.util.concurrent.atomic.AtomicInteger; 9 | import java.util.concurrent.atomic.AtomicLong; 10 | import java.util.stream.IntStream; 11 | 12 | import demo.kafka.streams.event.PaymentEvent; 13 | import dev.lydtech.component.framework.client.kafka.KafkaClient; 14 | import dev.lydtech.component.framework.client.service.ServiceClient; 15 | import dev.lydtech.component.framework.extension.TestContainersSetupExtension; 16 | import dev.lydtech.component.framework.mapper.JsonMapper; 17 | import io.restassured.RestAssured; 18 | import lombok.extern.slf4j.Slf4j; 19 | import org.apache.kafka.clients.consumer.Consumer; 20 | import org.apache.kafka.clients.consumer.ConsumerRecord; 21 | import org.junit.jupiter.api.AfterEach; 22 | import org.junit.jupiter.api.BeforeEach; 23 | import org.junit.jupiter.api.Test; 24 | import org.junit.jupiter.api.extension.ExtendWith; 25 | 26 | import static demo.kafka.streams.processor.Currency.GBP; 27 | import static demo.kafka.streams.processor.Currency.USD; 28 | import static demo.kafka.streams.processor.Rails.BANK_RAILS_BAR; 29 | import static demo.kafka.streams.processor.Rails.BANK_RAILS_FOO; 30 | import static demo.kafka.streams.util.TestEventData.buildPaymentEvent; 31 | import static io.restassured.RestAssured.get; 32 | import static org.hamcrest.MatcherAssert.assertThat; 33 | import static org.hamcrest.Matchers.equalTo; 34 | import static org.junit.jupiter.api.Assertions.fail; 35 | 36 | @Slf4j 37 | 
@ExtendWith(TestContainersSetupExtension.class) 38 | public class KafkaStreamsCT { 39 | 40 | private final static String GROUP_ID = "KafkaStreamsComponentTest"; 41 | private final static String PAYMENT_TOPIC = "payment-topic"; 42 | private final static String RAILS_FOO_TOPIC = "rails-foo-topic"; 43 | private final static String RAILS_BAR_TOPIC = "rails-bar-topic"; 44 | 45 | // Accounts. 46 | private static final String ACCOUNT_XXX = "XXX-"+UUID.randomUUID(); 47 | private static final String ACCOUNT_YYY = "YYY-"+UUID.randomUUID(); 48 | private static final String ACCOUNT_ZZZ = "ZZZ-"+UUID.randomUUID(); 49 | 50 | // One destination account for all tests. 51 | private static final String ACCOUNT_DEST = UUID.randomUUID().toString(); 52 | 53 | private Consumer fooRailsConsumer; 54 | private Consumer barRailsConsumer; 55 | 56 | private static final Random RANDOM = new Random(); 57 | 58 | @BeforeEach 59 | public void setup() { 60 | RestAssured.baseURI = ServiceClient.getInstance().getBaseUrl(); 61 | 62 | fooRailsConsumer = KafkaClient.getInstance().createConsumer(GROUP_ID, RAILS_FOO_TOPIC); 63 | barRailsConsumer = KafkaClient.getInstance().createConsumer(GROUP_ID, RAILS_BAR_TOPIC); 64 | 65 | // Clear the topics. 66 | fooRailsConsumer.poll(Duration.ofSeconds(1)); 67 | barRailsConsumer.poll(Duration.ofSeconds(1)); 68 | } 69 | 70 | @AfterEach 71 | public void tearDown() { 72 | fooRailsConsumer.close(); 73 | barRailsConsumer.close(); 74 | } 75 | 76 | /** 77 | * Send in a single payment that is routed through to the FOO Rails topic. 78 | * 79 | * Verify the balance state is correct via the endpoint. 
80 | */ 81 | @Test 82 | public void testPaymentStreamsProcessing_SinglePayment_FooRails_NonFx() throws Exception { 83 | PaymentEvent payment = buildPaymentEvent(UUID.randomUUID().toString(), 84 | 100L, 85 | "GBP", 86 | ACCOUNT_XXX, 87 | ACCOUNT_DEST, 88 | BANK_RAILS_FOO.name()); 89 | KafkaClient.getInstance().sendMessage(PAYMENT_TOPIC, payment.getPaymentId(), JsonMapper.writeToJson(payment)); 90 | List> outboundEvents = KafkaClient.getInstance().consumeAndAssert("SinglePayment_FooRails_NonFx", fooRailsConsumer, 1, 3); 91 | PaymentEvent result = JsonMapper.readFromJson(outboundEvents.get(0).value(), PaymentEvent.class); 92 | assertThat(result.getPaymentId(), equalTo(payment.getPaymentId())); 93 | assertThat(result.getRails(), equalTo(BANK_RAILS_FOO.name())); 94 | assertThat(result.getAmount(), equalTo(100L)); 95 | assertThat(result.getCurrency(), equalTo("GBP")); 96 | 97 | get("/v1/kafka-streams/balance/"+ACCOUNT_XXX).then().assertThat() 98 | .statusCode(200) 99 | .and() 100 | .body(equalTo("100")); 101 | } 102 | 103 | /** 104 | * Send in a single payment that requires an FX rate transform, and is routed through to the BAR Rails topic. 105 | * 106 | * Verify the balance state is correct via the endpoint. 
107 | */ 108 | @Test 109 | public void testPaymentStreamsProcessing_SinglePayment_BarRails_Fx() throws Exception { 110 | PaymentEvent payment = buildPaymentEvent(UUID.randomUUID().toString(), 111 | 100L, 112 | "USD", 113 | ACCOUNT_YYY, 114 | ACCOUNT_DEST, 115 | BANK_RAILS_BAR.name()); 116 | KafkaClient.getInstance().sendMessage(PAYMENT_TOPIC, payment.getPaymentId(), JsonMapper.writeToJson(payment)); 117 | List> outboundEvents = KafkaClient.getInstance().consumeAndAssert("SinglePayment_BarRails_Fx", barRailsConsumer, 1, 3); 118 | PaymentEvent result = JsonMapper.readFromJson(outboundEvents.get(0).value(), PaymentEvent.class); 119 | assertThat(result.getPaymentId(), equalTo(payment.getPaymentId())); 120 | assertThat(result.getRails(), equalTo(BANK_RAILS_BAR.name())); 121 | // FX hardcoded as 0.8. 122 | assertThat(result.getAmount(), equalTo(80L)); 123 | // Currency converted to GBP. 124 | assertThat(result.getCurrency(), equalTo("GBP")); 125 | 126 | get("/v1/kafka-streams/balance/"+ACCOUNT_YYY).then().assertThat() 127 | .statusCode(200) 128 | .and() 129 | .body(equalTo("80")); 130 | } 131 | 132 | /** 133 | * Send in many payments with a mixture of amounts, currencies and rails. 134 | */ 135 | @Test 136 | public void testPaymentStreamsProcessing_MultiplePayments() throws Exception { 137 | 138 | // The number of payments to send. 139 | int totalPayments = 1000; 140 | 141 | // Amount range to use. 142 | Long minAmount = 10L; 143 | Long maxAmount = 10000L; 144 | 145 | // Currencies to choose from. 146 | List currencies = Arrays.asList(GBP.name(), USD.name()); 147 | 148 | // Rails to choose from. 149 | List rails = Arrays.asList(BANK_RAILS_FOO.name(), BANK_RAILS_BAR.name()); 150 | 151 | // Track where payments are sent to. 
152 | AtomicInteger fooRailsCount = new AtomicInteger(0); 153 | AtomicInteger barRailsCount = new AtomicInteger(0); 154 | AtomicLong balance = new AtomicLong(0); 155 | 156 | IntStream.range(0, totalPayments).parallel().forEach(i -> { 157 | PaymentEvent payment = buildPaymentEvent(UUID.randomUUID().toString(), 158 | minAmount + (long) (Math.random() * (maxAmount - minAmount)), 159 | currencies.get(RANDOM.nextInt(2)), 160 | ACCOUNT_ZZZ, 161 | ACCOUNT_DEST, 162 | rails.get(RANDOM.nextInt(2))); 163 | try { 164 | KafkaClient.getInstance().sendMessage(PAYMENT_TOPIC, payment.getPaymentId(), JsonMapper.writeToJson(payment)); 165 | 166 | // Track payments and totals from the randomised data. 167 | if(payment.getRails().equals(BANK_RAILS_FOO.name())) { 168 | fooRailsCount.incrementAndGet(); 169 | } else{ 170 | barRailsCount.incrementAndGet(); 171 | } 172 | if(payment.getCurrency().equals(USD.name())) { 173 | // Calculate the GBP amount. 174 | balance.addAndGet(Math.round(payment.getAmount() * 0.8)); 175 | } else{ 176 | balance.addAndGet(payment.getAmount()); 177 | } 178 | } catch (Exception e) { 179 | fail(e); 180 | } 181 | }); 182 | 183 | log.info("Expecting "+fooRailsCount.get()+" FOO Rails payments and "+barRailsCount.get()+" BAR Rails payments."); 184 | List> fooOutboundEvents = KafkaClient.getInstance().consumeAndAssert("MultiplePayments", fooRailsConsumer, fooRailsCount.get(), 3); 185 | List> barOutboundEvents = KafkaClient.getInstance().consumeAndAssert("MultiplePayments", barRailsConsumer, barRailsCount.get(), 3); 186 | assertThat((fooOutboundEvents.size() + barOutboundEvents.size()), equalTo(totalPayments)); 187 | 188 | get("/v1/kafka-streams/balance/"+ACCOUNT_ZZZ).then().assertThat() 189 | .statusCode(200) 190 | .and() 191 | .body(equalTo(String.valueOf(balance.get()))); 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/test/java/demo/kafka/streams/integration/KafkaStreamsPaymentIntegrationTest.java: 
--------------------------------------------------------------------------------
package demo.kafka.streams.integration;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import demo.kafka.streams.KafkaStreamsDemoConfiguration;
import demo.kafka.streams.event.PaymentEvent;
import demo.kafka.streams.mapper.JsonMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Header;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.handler.annotation.Headers;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;

import static demo.kafka.streams.processor.Rails.BANK_RAILS_BAR;
import static demo.kafka.streams.processor.Rails.BANK_RAILS_FOO;
import static demo.kafka.streams.processor.Rails.BANK_RAILS_XXX;
import static demo.kafka.streams.util.TestEventData.buildPaymentEvent;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

/**
 * Integration test for the payment streams topology against an embedded Kafka broker.
 *
 * Payments sent to the inbound topic are routed to the FOO/BAR rails topics (captured by the
 * test listeners below), and the aggregated balances are asserted via the REST endpoint.
 */
@Slf4j
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = { KafkaStreamsDemoConfiguration.class } )
@EmbeddedKafka(controlledShutdown = true, topics = { "payment-topic", "rails-foo-topic", "rails-bar-topic" })
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
@ActiveProfiles("test")
public class KafkaStreamsPaymentIntegrationTest {

    private final static String PAYMENT_TEST_TOPIC = "payment-topic";

    @Autowired
    private KafkaTemplate<String, String> testKafkaTemplate;

    @Autowired
    private EmbeddedKafkaBroker embeddedKafkaBroker;

    @Autowired
    private KafkaListenerEndpointRegistry registry;

    @Autowired
    private TestRestTemplate restTemplate;

    @Autowired
    private KafkaFooRailsListener fooRailsReceiver;

    @Autowired
    private KafkaBarRailsListener barRailsReceiver;

    // GBP Accounts.
    private static final String ACCOUNT_GBP_ABC = "ABC-"+UUID.randomUUID();
    private static final String ACCOUNT_GBP_DEF = "DEF-"+UUID.randomUUID();

    // USD Accounts.
    private static final String ACCOUNT_USD_XYZ = "XYZ-"+UUID.randomUUID();

    @Configuration
    static class TestConfig {

        @Bean
        public KafkaFooRailsListener fooRailsReceiver() {
            return new KafkaFooRailsListener();
        }

        @Bean
        public KafkaBarRailsListener barRailsReceiver() {
            return new KafkaBarRailsListener();
        }
    }

    /** Captures events routed to the FOO rails topic, tracking the count and total amount. */
    public static class KafkaFooRailsListener {
        AtomicInteger counter = new AtomicInteger(0);
        AtomicLong total = new AtomicLong(0);

        @KafkaListener(groupId = "KafkaStreamsIntegrationTest", topics = "rails-foo-topic", autoStartup = "true")
        void receive(@Payload final String payload, @Headers final MessageHeaders headers) {
            log.debug("KafkaFooRailsListener - Received message: {}", payload);
            PaymentEvent payment = JsonMapper.readFromJson(payload, PaymentEvent.class);
            total.addAndGet(payment.getAmount());
            counter.incrementAndGet();
        }
    }

    /** Captures events routed to the BAR rails topic, tracking the count and total amount. */
    public static class KafkaBarRailsListener {
        AtomicInteger counter = new AtomicInteger(0);
        AtomicLong total = new AtomicLong(0);

        @KafkaListener(groupId = "KafkaStreamsIntegrationTest", topics = "rails-bar-topic", autoStartup = "true")
        void receive(@Payload final String payload, @Headers final MessageHeaders headers) {
            log.debug("KafkaBarRailsListener - Received message: {}", payload);
            PaymentEvent payment = JsonMapper.readFromJson(payload, PaymentEvent.class);
            total.addAndGet(payment.getAmount());
            counter.incrementAndGet();
        }
    }

    @BeforeEach
    public void setUp() {
        // Wait until the partitions are assigned before sending any events.
        registry.getListenerContainers().forEach(container ->
                ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic()));

        // Reset the counters AND the running totals between tests (previously only the
        // counters were reset, so totals could leak across test methods).
        fooRailsReceiver.counter.set(0);
        barRailsReceiver.counter.set(0);
        fooRailsReceiver.total.set(0);
        barRailsReceiver.total.set(0);
    }

    /**
     * Send a number of payments to the inbound payments topic.
     *
     * They will be processed and outbound events sent to two different topics, based on the payment rails specified.
     *
     * This test has listeners for both the outbound topics, so the expected events can be asserted.
     *
     * The test then calls the balance endpoint to ensure the aggregated amounts are correct.
     */
    @Test
    public void testKafkaStreams() throws Exception {

        // Three payments via FOO rails from ABC to DEF, total 210 GBP.
        PaymentEvent payment1 = buildPaymentEvent(UUID.randomUUID().toString(),
                100L,
                "GBP",
                ACCOUNT_GBP_ABC,
                ACCOUNT_GBP_DEF,
                BANK_RAILS_FOO.name());
        sendMessage(PAYMENT_TEST_TOPIC, payment1);
        PaymentEvent payment2 = buildPaymentEvent(UUID.randomUUID().toString(),
                50L,
                "GBP",
                ACCOUNT_GBP_ABC,
                ACCOUNT_GBP_DEF,
                BANK_RAILS_FOO.name());
        sendMessage(PAYMENT_TEST_TOPIC, payment2);
        PaymentEvent payment3 = buildPaymentEvent(UUID.randomUUID().toString(),
                60L,
                "GBP",
                ACCOUNT_GBP_ABC,
                ACCOUNT_GBP_DEF,
                BANK_RAILS_FOO.name());
        sendMessage(PAYMENT_TEST_TOPIC, payment3);

        // Payment on an unsupported rails should be filtered out.
        PaymentEvent payment4 = buildPaymentEvent(UUID.randomUUID().toString(),
                1200L,
                "GBP",
                ACCOUNT_GBP_ABC,
                ACCOUNT_GBP_DEF,
                BANK_RAILS_XXX.name());
        sendMessage(PAYMENT_TEST_TOPIC, payment4);

        // Payment from a USD account will require FX.
        PaymentEvent payment5 = buildPaymentEvent(UUID.randomUUID().toString(),
                1000L, // Converts to 800 GBP.
                "USD",
                ACCOUNT_USD_XYZ,
                ACCOUNT_GBP_DEF,
                BANK_RAILS_BAR.name());
        sendMessage(PAYMENT_TEST_TOPIC, payment5);

        Awaitility.await().atMost(10, TimeUnit.SECONDS).pollDelay(100, TimeUnit.MILLISECONDS)
                .until(fooRailsReceiver.counter::get, equalTo(3));
        Awaitility.await().atMost(10, TimeUnit.SECONDS).pollDelay(100, TimeUnit.MILLISECONDS)
                .until(barRailsReceiver.counter::get, equalTo(1));

        // Assert total amounts received by the rails are as expected.
        assertThat(fooRailsReceiver.total.get(), equalTo(210L));
        assertThat(barRailsReceiver.total.get(), equalTo(800L));

        // Query the stateful balance stores for two accounts.
        ResponseEntity<String> responseAbc = restTemplate.getForEntity("/v1/kafka-streams/balance/"+ACCOUNT_GBP_ABC, String.class);
        assertThat(responseAbc.getStatusCode(), equalTo(HttpStatus.OK));
        assertThat(responseAbc.getBody(), equalTo("210")); // Payments: 100 + 60 + 50.

        ResponseEntity<String> responseDef = restTemplate.getForEntity("/v1/kafka-streams/balance/"+ACCOUNT_GBP_DEF, String.class);
        assertThat(responseDef.getStatusCode(), equalTo(HttpStatus.NOT_FOUND)); // No payments from this account.

        ResponseEntity<String> responseXyz = restTemplate.getForEntity("/v1/kafka-streams/balance/"+ACCOUNT_USD_XYZ, String.class);
        assertThat(responseXyz.getStatusCode(), equalTo(HttpStatus.OK));
        assertThat(responseXyz.getBody(), equalTo("800")); // 1000 USD * 0.8 FX.
    }

    /**
     * Test the topology description endpoint is working.
     *
     * Capture the topology body and use:
     * https://zz85.github.io/kafka-streams-viz/
     * to visualise the topology.
     */
    @Test
    public void testTopology() throws Exception {
        ResponseEntity<String> topology = restTemplate.getForEntity("/v1/kafka-streams/topology/", String.class);
        assertThat(topology.getStatusCode(), equalTo(HttpStatus.OK));
        assertThat(topology.getBody(), containsString("topics: [payment-topic]"));
        log.info(topology.getBody());
    }

    /**
     * Send the given payment event to the given topic, blocking until the send completes.
     *
     * Fully generic types replace the previous raw ProducerRecord and unchecked SendResult cast.
     */
    private SendResult<String, String> sendMessage(String topic, PaymentEvent event) throws Exception {
        String payload = JsonMapper.writeToJson(event);
        List<Header> headers = new ArrayList<>();
        final ProducerRecord<String, String> record = new ProducerRecord<>(topic, null, event.getPaymentId(), payload, headers);

        final SendResult<String, String> result = testKafkaTemplate.send(record).get();
        final RecordMetadata metadata = result.getRecordMetadata();

        log.debug("Sent record(key={} value={}) meta(topic={}, partition={}, offset={})",
                record.key(), record.value(), metadata.topic(), metadata.partition(), metadata.offset());

        return result;
    }
}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------