├── .env.example
├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── docker-upload.yml
├── .gitignore
├── Dockerfile
├── README.md
├── docker
│   ├── .env
│   ├── conf
│   │   ├── postgres-connector.json
│   │   └── run.sh
│   └── docker-compose.yaml
├── pom.xml
└── src
    └── main
        ├── java
        │   └── com
        │       └── example
        │           └── dataanalysermicroservice
        │               ├── DataAnalyserMicroserviceApplication.java
        │               ├── config
        │               │   ├── BeanConfig.java
        │               │   ├── KafkaConfig.java
        │               │   ├── LocalDateTimeDeserializer.java
        │               │   └── TextXpath.java
        │               ├── model
        │               │   └── Data.java
        │               ├── repository
        │               │   └── DataRepository.java
        │               └── service
        │                   ├── KafkaDataReceiver.java
        │                   ├── KafkaDataReceiverImpl.java
        │                   ├── KafkaDataService.java
        │                   └── KafkaDataServiceImpl.java
        └── resources
            ├── application.yaml
            ├── kafka
            │   └── consumer.xml
            └── liquibase
                ├── changesets
                │   └── 001_create_tables.sql
                └── db.changelog.yaml
/.env.example:
--------------------------------------------------------------------------------
1 | HOST=localhost:5437
2 | POSTGRES_DB=sensor_data
3 | POSTGRES_USERNAME=postgres
4 | POSTGRES_PASSWORD=postgres
5 | KAFKA_BOOTSTRAP_SERVERS=localhost:9092
6 | KAFKA_SUBSCRIBED_TOPICS=data-temperature,data-power,data-voltage
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "maven"
4 | directory: "/"
5 | schedule:
6 | interval: "daily"
7 |
--------------------------------------------------------------------------------
/.github/workflows/docker-upload.yml:
--------------------------------------------------------------------------------
1 | name: Docker Image CI
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout
12 | uses: actions/checkout@v3
13 | - name: Login to Docker Hub
14 | uses: docker/login-action@v2
15 | with:
16 | username: ${{ secrets.DOCKERHUB_USERNAME }}
17 | password: ${{ secrets.DOCKERHUB_TOKEN }}
18 | - name: Set up Docker Buildx
19 | uses: docker/setup-buildx-action@v2
20 | - name: Build and Push to Docker Hub
21 | uses: mr-smithers-excellent/docker-build-push@v5
22 | with:
23 | image: ${{ secrets.DOCKERHUB_USERNAME }}/data-analyser-microservice
24 | tags: 0.0.$GITHUB_RUN_NUMBER, latest
25 | dockerfile: Dockerfile
26 | registry: docker.io
27 | username: ${{ secrets.DOCKERHUB_USERNAME }}
28 | password: ${{ secrets.DOCKERHUB_TOKEN }}
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | HELP.md
2 | target/
3 | !.mvn/wrapper/maven-wrapper.jar
4 | !**/src/main/**/target/
5 | !**/src/test/**/target/
6 |
7 | ### STS ###
8 | .apt_generated
9 | .classpath
10 | .factorypath
11 | .project
12 | .settings
13 | .springBeans
14 | .sts4-cache
15 |
16 | ### IntelliJ IDEA ###
17 | .idea
18 | *.iws
19 | *.iml
20 | *.ipr
21 |
22 | ### NetBeans ###
23 | /nbproject/private/
24 | /nbbuild/
25 | /dist/
26 | /nbdist/
27 | /.nb-gradle/
28 | build/
29 | !**/src/main/**/build/
30 | !**/src/test/**/build/
31 |
32 | ### VS Code ###
33 | .vscode/
34 |
35 | .env
36 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM maven:3.8.5-openjdk-17 AS build
2 | COPY pom.xml .
3 | RUN mvn dependency:go-offline
4 | COPY /src /src
5 | RUN mvn clean package -DskipTests
6 |
7 | FROM openjdk:17-jdk-slim
8 | COPY --from=build /target/*.jar application.jar
9 | EXPOSE 8082
10 | ENTRYPOINT ["java", "-jar", "application.jar"]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Data analyser microservice
2 |
3 | This is a data analyser microservice
4 | for the [YouTube course](https://www.youtube.com/playlist?list=PL3Ur78l82EFBhKojbSO26BVqQ7n4AthHC).
5 |
6 | This application receives data
7 | from the [Data generator service](https://github.com/IlyaLisov/data-generator-microservice)
8 | via Apache Kafka.
9 |
10 | Next, the data is processed
11 | by the [Data store service](https://github.com/IlyaLisov/data-store-microservice).
12 |
13 | ### Usage
14 |
15 | To start the application you need to pass variables to the `.env` file.
16 |
17 | You can use the example `.env.example` file with some predefined environment variables.
18 |
19 | You can find the Docker compose file in `docker/docker-compose.yaml`.
20 |
21 | The application runs on port `8082`.
22 |
23 | Non-essential features (checkstyle, build checks, DTO validation) are
24 | omitted.
25 |
26 | Right after startup the application will try to connect to Apache Kafka and begin
27 | listening to the topics from `KAFKA_SUBSCRIBED_TOPICS`.
28 |
29 | ### Docker
30 |
31 | You can run all course applications via `docker-compose.yaml` from the `docker`
32 | folder.
33 |
34 | It contains all needed configs.
35 |
36 | **NOTE**: after Debezium Connect has started, apply the source config manually from inside the Debezium container (the `docker/conf/` folder is mounted there at `/on-startup/`).
37 |
38 | ```shell
39 | cd /on-startup/
40 |
41 | sh run.sh
42 | ```
43 |
44 | Note that all services must be in the same network to communicate with each
45 | other.
46 |
47 | Debezium needs a different group id than the one Kafka uses, so the default
48 | values in `.env` are 1 and 2.
49 |
50 | Debezium is configured to push messages to the `data` topic via the routing
51 | transform in its configuration.
--------------------------------------------------------------------------------
/docker/.env:
--------------------------------------------------------------------------------
1 | HOST=postgres:5432
2 | POSTGRES_USER=postgres
3 | POSTGRES_USERNAME=postgres
4 | POSTGRES_PASSWORD=postgres
5 | POSTGRES_DB=sensor_data
6 |
7 | REDIS_HOST=redis
8 | REDIS_PORT=6379
9 |
10 | KAFKA_BOOTSTRAP_SERVERS=kafka:9092
11 | KAFKA_SUBSCRIBED_TOPICS=data-temperature,data-power,data-voltage
12 |
13 | KAFKA_BROKER_ID=1
14 | DEBEZIUM_KAFKA_BROKER_ID=2
15 | KAFKA_ZOOKEEPER_CONNECT="zookeeper:2181"
16 | KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092
17 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,PLAINTEXT_INTERNAL:PLAINTEXT
18 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
19 |
20 | ZOOKEEPER_CLIENT_PORT=2181
21 | ZOOKEEPER_TICK_TIME=2000
--------------------------------------------------------------------------------
/docker/conf/postgres-connector.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "pg-connector",
3 | "config": {
4 | "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
5 | "plugin.name": "pgoutput",
6 | "tasks.max": "1",
7 | "topic.prefix": "pg-replica",
8 | "database.hostname": "postgres",
9 | "database.port": "5432",
10 | "database.user": "postgres",
11 | "database.password": "postgres",
12 | "database.dbname": "sensor_data",
13 | "database.server.id": "184054",
14 | "table.include.list": "public.data",
15 | "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
16 | "transforms": "unwrap,route",
17 | "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
18 | "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
19 | "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)",
20 | "transforms.route.replacement": "$3"
21 | }
22 | }
--------------------------------------------------------------------------------
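The `transforms.route` pair above is what produces the single `data` topic mentioned in the README: Debezium emits change events to `pg-replica.public.data`, and the RegexRouter rewrites the topic name to its third dot-separated segment. A minimal sketch of the same rewrite using only `java.util.regex` (the class name and sample topic are illustrative):

```java
import java.util.regex.Pattern;

public class RouteRegexDemo {

    public static void main(String[] args) {
        // Same pattern and replacement as transforms.route above
        Pattern route = Pattern.compile("([^.]+)\\.([^.]+)\\.([^.]+)");
        // Debezium topic name: <topic.prefix>.<schema>.<table>
        String source = "pg-replica.public.data";
        // "$3" keeps only the table name, so consumers read from "data"
        System.out.println(route.matcher(source).replaceAll("$3"));
    }

}
```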
/docker/conf/run.sh:
--------------------------------------------------------------------------------
1 | curl -i -X POST -H "Accept:application/json" -H \
2 | "Content-Type:application/json" http://localhost:8083/connectors/ -d \
3 | @postgres-connector.json
--------------------------------------------------------------------------------
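The script registers the connector through the Kafka Connect REST API. If `curl` is unavailable, the same POST can be issued from Java; a minimal sketch with `java.net.http` (the URL and file name mirror `run.sh`, the class itself is illustrative):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class RegisterConnector {

    public static void main(String[] args) throws Exception {
        // POST the connector config to the Connect REST API, as run.sh does
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8083/connectors/"))
                .header("Accept", "application/json")
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofFile(
                        Path.of("postgres-connector.json")))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }

}
```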
/docker/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3.0'
2 |
3 | networks:
4 | app:
5 | driver: bridge
6 |
7 | volumes:
8 | db-data:
9 | driver: local
10 | redis-data:
11 | driver: local
12 | kafka-data:
13 | driver: local
14 | zookeeper-data:
15 | driver: local
16 | debezium-data:
17 | driver: local
18 |
19 | services:
20 | generator:
21 | image: 'ilyalisov/data-generator-microservice:latest'
22 | networks:
23 | - app
24 | depends_on:
25 | - kafka
26 | environment:
27 | - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
28 | - KAFKA_SUBSCRIBED_TOPICS=${KAFKA_SUBSCRIBED_TOPICS}
29 | ports:
30 | - '8081:8081'
31 |
32 | analyser:
33 | image: 'ilyalisov/data-analyser-microservice:latest'
34 | networks:
35 | - app
36 | depends_on:
37 | - postgres
38 | - kafka
39 | environment:
40 | - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
41 | - KAFKA_SUBSCRIBED_TOPICS=${KAFKA_SUBSCRIBED_TOPICS}
42 | - HOST=${HOST}
43 | - POSTGRES_DB=${POSTGRES_DB}
44 | - POSTGRES_USERNAME=${POSTGRES_USERNAME}
45 | - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
46 | ports:
47 | - '8082:8082'
48 |
49 | store:
50 | image: 'ilyalisov/data-store-microservice:latest'
51 | networks:
52 | - app
53 | depends_on:
54 | - debezium
55 | - redis
56 | - kafka
57 | environment:
58 | - REDIS_HOST=${REDIS_HOST}
59 | - REDIS_PORT=${REDIS_PORT}
60 | - KAFKA_BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
61 | - KAFKA_BROKER_ID=${KAFKA_BROKER_ID}
62 | ports:
63 | - '8083:8083'
64 |
65 | zookeeper:
66 | image: 'confluentinc/cp-zookeeper:latest'
67 | networks:
68 | - app
69 | environment:
70 | - ZOOKEEPER_CLIENT_PORT=${ZOOKEEPER_CLIENT_PORT}
71 | - ZOOKEEPER_TICK_TIME=${ZOOKEEPER_TICK_TIME}
72 | ports:
73 | - '2181:2181'
74 | volumes:
75 | - zookeeper-data:/var/lib/zookeeper/data
76 |
77 | kafka:
78 | image: 'confluentinc/cp-kafka:latest'
79 | networks:
80 | - app
81 | depends_on:
82 | - zookeeper
83 | environment:
84 | - KAFKA_BROKER_ID=${KAFKA_BROKER_ID}
85 | - KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT}
86 | - KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS}
87 | - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=${KAFKA_LISTENER_SECURITY_PROTOCOL_MAP}
88 | - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=${KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR}
89 | ports:
90 | - '9092:9092'
91 | volumes:
92 | - kafka-data:/var/lib/kafka/data
93 |
94 | postgres:
95 | image: 'postgres:15.1-alpine'
96 | networks:
97 | - app
98 | env_file:
99 | - .env
100 | ports:
101 | - '5437:5432'
102 | command:
103 | - "postgres"
104 | - "-c"
105 | - "wal_level=logical"
106 | volumes:
107 | - db-data:/var/lib/postgresql/data
108 |
109 | redis:
110 | image: redis:7.2-rc-alpine
111 | networks:
112 | - app
113 | env_file:
114 | - .env
115 | ports:
116 | - '6379:6379'
117 | command: redis-server --save 20 1 --loglevel warning
118 | volumes:
119 | - redis-data:/data
120 |
121 | debezium:
122 | image: debezium/connect:2.4.0.Final
123 | networks:
124 | - app
125 | depends_on:
126 | - kafka
127 | environment:
128 | - BOOTSTRAP_SERVERS=${KAFKA_BOOTSTRAP_SERVERS}
129 | - GROUP_ID=${DEBEZIUM_KAFKA_BROKER_ID}
130 | - CONFIG_STORAGE_TOPIC=my_connect_configs
131 | - OFFSET_STORAGE_TOPIC=my_connect_offsets
132 | - STATUS_STORAGE_TOPIC=my_connect_statuses
133 | user: root
134 | # command:
135 | # - bash
136 | # - -c
137 | # - "cd /on-startup; ls; ./run.sh"
138 | volumes:
139 | - debezium-data:/debezium/data
140 | - ./conf/:/on-startup/
141 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
5 |          https://maven.apache.org/xsd/maven-4.0.0.xsd">
6 |     <modelVersion>4.0.0</modelVersion>
7 |
8 |     <groupId>com.example</groupId>
9 |     <artifactId>data-analyser-microservice</artifactId>
10 |     <version>0.0.1-SNAPSHOT</version>
11 |     <name>data-analyser-microservice</name>
12 |     <description>data-analyser-microservice</description>
13 |
14 |     <parent>
15 |         <groupId>org.springframework.boot</groupId>
16 |         <artifactId>spring-boot-starter-parent</artifactId>
17 |         <version>3.2.5</version>
18 |         <relativePath/>
19 |     </parent>
20 |
21 |     <properties>
22 |         <java.version>17</java.version>
23 |         <lombok.version>1.18.32</lombok.version>
24 |         <spring-kafka.version>3.1.4</spring-kafka.version>
25 |         <xml.version>0.29.0</xml.version>
26 |         <reactor-kafka.version>1.3.22</reactor-kafka.version>
27 |         <gson.version>2.10.1</gson.version>
28 |         <liquibase.version>4.27.0</liquibase.version>
29 |         <postgresql.version>42.7.3</postgresql.version>
30 |     </properties>
31 |
32 |     <dependencies>
33 |         <dependency>
34 |             <groupId>org.springframework.boot</groupId>
35 |             <artifactId>spring-boot-starter-web</artifactId>
36 |         </dependency>
37 |
38 |         <dependency>
39 |             <groupId>org.springframework.boot</groupId>
40 |             <artifactId>spring-boot-starter-data-jpa</artifactId>
41 |         </dependency>
42 |
43 |         <dependency>
44 |             <groupId>org.postgresql</groupId>
45 |             <artifactId>postgresql</artifactId>
46 |             <version>${postgresql.version}</version>
47 |         </dependency>
48 |
49 |         <dependency>
50 |             <groupId>org.liquibase</groupId>
51 |             <artifactId>liquibase-core</artifactId>
52 |             <version>${liquibase.version}</version>
53 |         </dependency>
54 |
55 |         <dependency>
56 |             <groupId>org.projectlombok</groupId>
57 |             <artifactId>lombok</artifactId>
58 |             <optional>true</optional>
59 |             <version>${lombok.version}</version>
60 |         </dependency>
61 |
62 |         <dependency>
63 |             <groupId>org.springframework.kafka</groupId>
64 |             <artifactId>spring-kafka</artifactId>
65 |             <version>${spring-kafka.version}</version>
66 |         </dependency>
67 |
68 |         <dependency>
69 |             <groupId>io.projectreactor.kafka</groupId>
70 |             <artifactId>reactor-kafka</artifactId>
71 |             <version>${reactor-kafka.version}</version>
72 |         </dependency>
73 |
74 |         <dependency>
75 |             <groupId>com.jcabi</groupId>
76 |             <artifactId>jcabi-xml</artifactId>
77 |             <version>${xml.version}</version>
78 |         </dependency>
79 |
80 |         <dependency>
81 |             <groupId>com.google.code.gson</groupId>
82 |             <artifactId>gson</artifactId>
83 |             <version>${gson.version}</version>
84 |         </dependency>
85 |     </dependencies>
86 |
87 |     <build>
88 |         <plugins>
89 |             <plugin>
90 |                 <groupId>org.springframework.boot</groupId>
91 |                 <artifactId>spring-boot-maven-plugin</artifactId>
92 |                 <configuration>
93 |                     <excludes>
94 |                         <exclude>
95 |                             <groupId>org.projectlombok</groupId>
96 |                             <artifactId>lombok</artifactId>
97 |                         </exclude>
98 |                     </excludes>
99 |                 </configuration>
100 |             </plugin>
101 |         </plugins>
102 |     </build>
103 |
104 | </project>
105 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/DataAnalyserMicroserviceApplication.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 |
6 | @SpringBootApplication
7 | public class DataAnalyserMicroserviceApplication {
8 |
9 | public static void main(String[] args) {
10 | SpringApplication.run(DataAnalyserMicroserviceApplication.class, args);
11 | }
12 |
13 | }
14 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/config/BeanConfig.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.config;
2 |
3 | import com.jcabi.xml.XML;
4 | import com.jcabi.xml.XMLDocument;
5 | import lombok.SneakyThrows;
6 | import org.springframework.context.annotation.Bean;
7 | import org.springframework.context.annotation.Configuration;
8 |
9 | @Configuration
10 | public class BeanConfig {
11 |
12 | @SneakyThrows
13 | @Bean
14 | public XML consumerXml() {
15 | return new XMLDocument(
16 | getClass().getResourceAsStream("/kafka/consumer.xml").readAllBytes()
17 | );
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/config/KafkaConfig.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.config;
2 |
3 | import com.jcabi.xml.XML;
4 | import lombok.RequiredArgsConstructor;
5 | import org.apache.kafka.clients.consumer.ConsumerConfig;
6 | import org.springframework.beans.factory.annotation.Value;
7 | import org.springframework.context.annotation.Bean;
8 | import org.springframework.context.annotation.Configuration;
9 | import reactor.kafka.receiver.KafkaReceiver;
10 | import reactor.kafka.receiver.ReceiverOptions;
11 |
12 | import java.util.HashMap;
13 | import java.util.List;
14 | import java.util.Map;
15 |
16 | @Configuration
17 | @RequiredArgsConstructor
18 | public class KafkaConfig {
19 |
20 | @Value("${spring.kafka.bootstrap-servers}")
21 | private String servers;
22 |
23 | @Value("${topics}")
24 | private List<String> topics;
25 |
26 | private final XML settings;
27 |
28 | @Bean
29 | public Map<String, Object> receiverProperties() {
30 | Map<String, Object> props = new HashMap<>();
31 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
32 | props.put(
33 | ConsumerConfig.GROUP_ID_CONFIG,
34 | new TextXpath(
35 | this.settings, "//groupId"
36 | ).toString()
37 | );
38 | props.put(
39 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
40 | new TextXpath(
41 | this.settings, "//keyDeserializer"
42 | ).toString()
43 | );
44 | props.put(
45 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
46 | new TextXpath(
47 | this.settings, "//valueDeserializer"
48 | ).toString()
49 | );
50 | props.put(
51 | "spring.json.trusted.packages",
52 | new TextXpath(
53 | this.settings, "//trustedPackages"
54 | ).toString()
55 | );
56 | return props;
57 | }
58 |
59 | @Bean
60 | public ReceiverOptions<String, Object> receiverOptions() {
61 | ReceiverOptions<String, Object> receiverOptions = ReceiverOptions
62 | .create(receiverProperties());
63 | return receiverOptions.subscription(topics)
64 | .addAssignListener(partitions ->
65 | System.out.println("onPartitionAssigned: "
66 | + partitions))
67 | .addRevokeListener(partitions ->
68 | System.out.println("onPartitionRevoked: "
69 | + partitions));
70 | }
71 |
72 | @Bean
73 | public KafkaReceiver<String, Object> receiver(
74 | ReceiverOptions<String, Object> receiverOptions
75 | ) {
76 | return KafkaReceiver.create(receiverOptions);
77 | }
78 |
79 | }
80 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/config/LocalDateTimeDeserializer.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.config;
2 |
3 | import com.google.gson.JsonArray;
4 | import com.google.gson.JsonDeserializationContext;
5 | import com.google.gson.JsonDeserializer;
6 | import com.google.gson.JsonElement;
7 | import org.springframework.stereotype.Component;
8 |
9 | import java.lang.reflect.Type;
10 | import java.time.LocalDateTime;
11 |
12 | @Component
13 | public class LocalDateTimeDeserializer
14 | implements JsonDeserializer<LocalDateTime> {
15 |
16 | @Override
17 | public LocalDateTime deserialize(
18 | JsonElement json,
19 | Type typeOfT,
20 | JsonDeserializationContext context
21 | ) {
22 | JsonArray jsonArray = json.getAsJsonArray();
23 | int year = jsonArray.get(0).getAsInt();
24 | int month = jsonArray.get(1).getAsInt();
25 | int day = jsonArray.get(2).getAsInt();
26 | int hour = jsonArray.get(3).getAsInt();
27 | int minute = jsonArray.get(4).getAsInt();
28 | int second = jsonArray.get(5).getAsInt();
29 | return LocalDateTime.of(year, month, day, hour, minute, second);
30 | }
31 |
32 | }
33 |
34 |
--------------------------------------------------------------------------------
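The adapter exists because the incoming payload carries `LocalDateTime` as a JSON array of date-time fields rather than an ISO string, which is what `deserialize` unpacks above. A minimal sketch of how Gson uses it (the sample payload and demo class are illustrative):

```java
import com.example.dataanalysermicroservice.config.LocalDateTimeDeserializer;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import java.time.LocalDateTime;

public class TimestampParseDemo {

    public static void main(String[] args) {
        Gson gson = new GsonBuilder()
                .registerTypeAdapter(LocalDateTime.class,
                        new LocalDateTimeDeserializer())
                .create();
        // [year, month, day, hour, minute, second], as deserialize() expects
        LocalDateTime parsed = gson
                .fromJson("[2024, 5, 1, 12, 30, 15]", LocalDateTime.class);
        System.out.println(parsed); // 2024-05-01T12:30:15
    }

}
```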
/src/main/java/com/example/dataanalysermicroservice/config/TextXpath.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.config;
2 |
3 | import com.jcabi.xml.XML;
4 | import lombok.RequiredArgsConstructor;
5 |
6 | @RequiredArgsConstructor
7 | public final class TextXpath {
8 |
9 | private final XML xml;
10 | private final String node;
11 |
12 | @Override
13 | public String toString() {
14 | return this.xml.nodes(this.node)
15 | .get(0)
16 | .xpath("text()")
17 | .get(0);
18 | }
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/model/Data.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.model;
2 |
3 | import jakarta.persistence.Column;
4 | import jakarta.persistence.Entity;
5 | import jakarta.persistence.EnumType;
6 | import jakarta.persistence.Enumerated;
7 | import jakarta.persistence.GeneratedValue;
8 | import jakarta.persistence.GenerationType;
9 | import jakarta.persistence.Id;
10 | import jakarta.persistence.Table;
11 | import lombok.Getter;
12 | import lombok.NoArgsConstructor;
13 | import lombok.Setter;
14 | import lombok.ToString;
15 |
16 | import java.time.LocalDateTime;
17 |
18 | @Entity
19 | @Table(name = "data")
20 | @NoArgsConstructor
21 | @Getter
22 | @Setter
23 | @ToString
24 | public class Data {
25 |
26 | @Id
27 | @GeneratedValue(strategy = GenerationType.IDENTITY)
28 | private Long id;
29 |
30 | private Long sensorId;
31 | private LocalDateTime timestamp;
32 | private double measurement;
33 |
34 | @Column(name = "type")
35 | @Enumerated(value = EnumType.STRING)
36 | private MeasurementType measurementType;
37 |
38 | public enum MeasurementType {
39 |
40 | TEMPERATURE,
41 | VOLTAGE,
42 | POWER
43 |
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/repository/DataRepository.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.repository;
2 |
3 | import com.example.dataanalysermicroservice.model.Data;
4 | import org.springframework.data.jpa.repository.JpaRepository;
5 |
6 | public interface DataRepository extends JpaRepository<Data, Long> {
7 | }
8 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/service/KafkaDataReceiver.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.service;
2 |
3 | public interface KafkaDataReceiver {
4 |
5 | void fetch();
6 |
7 | }
8 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/service/KafkaDataReceiverImpl.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.service;
2 |
3 | import com.example.dataanalysermicroservice.config.LocalDateTimeDeserializer;
4 | import com.example.dataanalysermicroservice.model.Data;
5 | import com.google.gson.Gson;
6 | import com.google.gson.GsonBuilder;
7 | import jakarta.annotation.PostConstruct;
8 | import lombok.RequiredArgsConstructor;
9 | import org.springframework.stereotype.Service;
10 | import reactor.kafka.receiver.KafkaReceiver;
11 |
12 | import java.time.LocalDateTime;
13 |
14 | @Service
15 | @RequiredArgsConstructor
16 | public class KafkaDataReceiverImpl implements KafkaDataReceiver {
17 |
18 | private final KafkaReceiver<String, Object> receiver;
19 | private final LocalDateTimeDeserializer localDateTimeDeserializer;
20 | private final KafkaDataService kafkaDataService;
21 |
22 | @PostConstruct
23 | private void init() {
24 | fetch();
25 | }
26 |
27 | @Override
28 | public void fetch() {
29 | Gson gson = new GsonBuilder()
30 | .registerTypeAdapter(LocalDateTime.class,
31 | localDateTimeDeserializer)
32 | .create();
33 | receiver.receive()
34 | .subscribe(r -> {
35 | Data data = gson
36 | .fromJson(r.value().toString(), Data.class);
37 | kafkaDataService.handle(data);
38 | r.receiverOffset().acknowledge();
39 | });
40 | }
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/service/KafkaDataService.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.service;
2 |
3 | import com.example.dataanalysermicroservice.model.Data;
4 |
5 | public interface KafkaDataService {
6 |
7 | void handle(Data data);
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/src/main/java/com/example/dataanalysermicroservice/service/KafkaDataServiceImpl.java:
--------------------------------------------------------------------------------
1 | package com.example.dataanalysermicroservice.service;
2 |
3 | import com.example.dataanalysermicroservice.model.Data;
4 | import com.example.dataanalysermicroservice.repository.DataRepository;
5 | import lombok.RequiredArgsConstructor;
6 | import lombok.extern.slf4j.Slf4j;
7 | import org.springframework.stereotype.Service;
8 |
9 | @Service
10 | @Slf4j
11 | @RequiredArgsConstructor
12 | public class KafkaDataServiceImpl implements KafkaDataService {
13 |
14 | private final DataRepository dataRepository;
15 |
16 | @Override
17 | public void handle(Data data) {
18 | dataRepository.save(data);
19 | log.info("Data object {} was saved", data);
20 | }
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/resources/application.yaml:
--------------------------------------------------------------------------------
1 | spring:
2 | config:
3 | import: optional:file:.env[.properties]
4 | datasource:
5 | url: jdbc:postgresql://${HOST}/${POSTGRES_DB}
6 | username: ${POSTGRES_USERNAME}
7 | password: ${POSTGRES_PASSWORD}
8 | jpa:
9 | hibernate:
10 | ddl-auto: none
11 | kafka:
12 | bootstrap-servers: ${KAFKA_BOOTSTRAP_SERVERS}
13 | liquibase:
14 | change-log: classpath:liquibase/db.changelog.yaml
15 | enabled: true
16 | server:
17 | port: 8082
18 |
19 | topics: ${KAFKA_SUBSCRIBED_TOPICS}
20 |
--------------------------------------------------------------------------------
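The `topics: ${KAFKA_SUBSCRIBED_TOPICS}` entry exposes the comma-separated topic list as a single property, which Spring's type conversion splits into the `List<String> topics` field in `KafkaConfig`. A rough sketch of the equivalent split in plain Java (illustrative only, not how Spring is wired internally):

```java
import java.util.Arrays;
import java.util.List;

public class TopicsBindingDemo {

    public static void main(String[] args) {
        // Same value as KAFKA_SUBSCRIBED_TOPICS in .env.example
        String raw = "data-temperature,data-power,data-voltage";
        List<String> topics = Arrays.asList(raw.split(","));
        System.out.println(topics); // [data-temperature, data-power, data-voltage]
    }

}
```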
/src/main/resources/kafka/consumer.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <consumer>
3 |     <groupId>1</groupId>
4 |     <keyDeserializer>org.apache.kafka.common.serialization.StringDeserializer</keyDeserializer>
5 |     <valueDeserializer>org.apache.kafka.common.serialization.StringDeserializer</valueDeserializer>
6 |     <trustedPackages>*</trustedPackages>
7 | </consumer>
--------------------------------------------------------------------------------
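`KafkaConfig` reads these values through `TextXpath`, which takes the text of the first node matching an XPath expression. A minimal sketch of the same lookup against an inline document, using jcabi-xml directly (the standalone demo class is illustrative):

```java
import com.jcabi.xml.XML;
import com.jcabi.xml.XMLDocument;

public class ConsumerXmlDemo {

    public static void main(String[] args) {
        XML settings = new XMLDocument(
                "<consumer><groupId>1</groupId></consumer>");
        // Same call chain TextXpath.toString() performs
        String groupId = settings.nodes("//groupId")
                .get(0)
                .xpath("text()")
                .get(0);
        System.out.println(groupId); // 1
    }

}
```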
/src/main/resources/liquibase/changesets/001_create_tables.sql:
--------------------------------------------------------------------------------
1 | create table data
2 | (
3 | id bigserial primary key,
4 | sensor_id bigint not null,
5 | timestamp timestamp not null,
6 | measurement float not null,
7 | type varchar not null
8 | );
--------------------------------------------------------------------------------
/src/main/resources/liquibase/db.changelog.yaml:
--------------------------------------------------------------------------------
1 | databaseChangeLog:
2 | - includeAll:
3 | path: /changesets
4 | relativeToChangelogFile: true
5 |
--------------------------------------------------------------------------------