├── .gitignore
├── README.md
├── kafkaflink
│   ├── pom.xml
│   └── src
│       └── main
│           ├── java
│           │   └── com
│           │       └── aman
│           │           └── kafkalink
│           │               ├── AsyncRegisterApiInvocation.java
│           │               ├── FlinkReadFromKafka.java
│           │               ├── config
│           │               │   ├── FlinkKafkaConsumerConfig.java
│           │               │   └── FlinkKafkaProducerConfig.java
│           │               └── entity
│           │                   ├── MessageType.java
│           │                   ├── RegisterRequest.java
│           │                   ├── RegisterRequestSchema.java
│           │                   ├── RegisterResponse.java
│           │                   └── RegisterResponseSerializer.java
│           └── resources
│               └── log4j.properties
└── wsvertx
    ├── .idea
    │   ├── compiler.xml
    │   ├── encodings.xml
    │   ├── misc.xml
    │   └── uiDesigner.xml
    ├── dependency-reduced-pom.xml
    ├── pom.xml
    └── src
        └── main
            ├── java
            │   └── org
            │       └── aman
            │           └── wsvertx
            │               ├── ClientSocketRequestVerticle.java
            │               ├── EventBusKafkaReceiverVerticle.java
            │               ├── EventBusKafkaSenderVerticle.java
            │               ├── KafkaConsumerVerticle.java
            │               ├── KafkaProducerVerticle.java
            │               ├── MainVerticle.java
            │               ├── ServerSocketEventBusVerticle.java
            │               ├── config
            │               │   ├── KafkaConsumerConfig.java
            │               │   └── KafkaProducerConfig.java
            │               ├── model
            │               │   ├── codec
            │               │   │   └── RegisterRequestCodec.java
            │               │   └── payload
            │               │       └── RegisterRequest.java
            │               └── util
            │                   └── Util.java
            └── resources
                └── log4j.properties
/.gitignore:
--------------------------------------------------------------------------------
 1 | # Built application files
 2 | *.apk
 3 | *.ap_
 4 |
 5 | # Files for the ART/Dalvik VM
 6 | *.dex
 7 |
 8 | # Java class files
 9 | *.class
10 |
11 | # Generated files
12 | bin/
13 | gen/
14 | out/
15 |
16 | # Gradle files
17 | .gradle/
18 | build/
19 |
20 | # Local configuration file (sdk path, etc)
21 | local.properties
22 |
23 | # Proguard folder generated by Eclipse
24 | proguard/
25 |
26 | # Log Files
27 | *.log
28 |
29 | # Android Studio Navigation editor temp files
30 | .navigation/
31 |
32 | # Android Studio captures folder
33 | captures/
34 |
35 | # IntelliJ
36 | *.iml
37 | .idea/workspace.xml
38 | .idea/tasks.xml
39 | .idea/gradle.xml
40 | .idea/assetWizardSettings.xml
41 | .idea/dictionaries
42 | .idea/libraries
43 | .idea/caches
44 |
45 | # Keystore files
46 | # Uncomment the following line if you do not want to check your keystore files in.
47 | #*.jks
48 |
49 | # External native build folder generated in Android Studio 2.2 and later
50 | .externalNativeBuild
51 |
52 | # Google Services (e.g. APIs or Firebase)
53 | google-services.json
54 |
55 | # Freeline
56 | freeline.py
57 | freeline/
58 | freeline_project_description.json
59 |
60 | # fastlane
61 | fastlane/report.xml
62 | fastlane/Preview.html
63 | fastlane/screenshots
64 | fastlane/test_output
65 | fastlane/readme.md
66 | */workspace.xml
67 | *workspace.xml
68 | wsvertx/.idea/
69 | wsvertx/.idea
70 |
71 | kafkaflink/.idea
72 | kafkaflink/.idea/
73 | wsvertx/target/*
74 | wsvertx/target/
75 | kafkaflink/target/*
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Websockets-Vertx-Kafka-Flink
2 | ### A simple request-response cycle using Websockets, Eclipse Vert-x server, Apache Kafka, Apache Flink. ###
3 | ---
4 |
5 | - An incoming request gets routed to a non-blocking Vertx server, which then writes the request to a specific Kafka topic.
6 | - A Flink consumer, implemented as a separate side project, consumes the messages from the given request topic
7 | - (Optional) The Flink job hits a REST API hosted on a Spring Boot server. You can use JAX-RS or even hardcode the response
8 | - Flink writes the API result to another topic.
Every message has a unique sender id; Flink sends the response back tagged with that same sender id.
9 | - Finally, the Vertx Kafka consumer listens for responses on the response topic and sends an event to a websocket handler
10 | - The websocket consumer for a specific id writes the response to its own socket, thus completing the entire async request cycle
11 |
12 |
13 | ![image](https://user-images.githubusercontent.com/12872673/45586212-78acb580-b910-11e8-9d7a-9a3a85f22419.png) ![image](https://user-images.githubusercontent.com/12872673/45586233-ebb62c00-b910-11e8-9fc7-d48a73bcd31d.png)
14 |
15 | ---
16 | ### Prerequisites ###
17 | * Java 1.8
18 | * Apache Kafka 2.0.0
19 | * Apache Zookeeper 3.4.8
20 | * Eclipse Vertx 3.5.3
21 | * Apache Flink 1.6.0
22 | ---
23 |
24 |
25 | ### Setting up Apache Kafka ###
26 | ```
27 | # Start Zookeeper instance
28 | $ zookeeper-server-start.bat ..\..\config\zookeeper.properties
29 |
30 | # Start Kafka server
31 | $ kafka-server-start.bat ..\..\config\server.properties
32 |
33 | # Create a request topic
34 | $ kafka-topics.bat --create --zookeeper localhost:2181 --replication-factor 1 --partitions 3 --topic flink-demo
35 |
36 | # Create a response topic
37 | $ kafka-topics.bat --create --zookeeper localhost:2181 --replication-factor 1 --partitions 3 --topic flink-demo-resp
38 |
39 | # Verify the request topic flink-demo with a console consumer
40 | $ kafka-console-consumer.bat --bootstrap-server localhost:9092 --from-beginning --topic flink-demo
41 |
42 | # Verify the response topic flink-demo-resp with a console consumer
43 | $ kafka-console-consumer.bat --bootstrap-server localhost:9092 --from-beginning --topic flink-demo-resp
44 |
45 | ```
46 | Make sure the following is appended to **config\server.properties**
47 | ```
48 | port = 9092
49 | advertised.host.name = localhost
50 | ```
51 |
52 | Note: Replace the .bat scripts with the corresponding .sh scripts when working in a UNIX environment.
53 |
54 | ---
55 |
56 | ### What you do in the Flink job depends on the use case. Options are ###
57 | * Make an async REST API call
58 | * Interact with a database using an async client
59 | * Return a mock response
60 |
61 | ### Caveats ###
62 | * Here, we are making a request with the AsyncHttpClient to an endpoint hosted on a Spring Boot server
63 | * The REST API server listens on port 9004
64 | * You are free to experiment in this department.
65 | * If you choose to continue using the REST API given in this project, make sure you have an endpoint implementation.
66 |
67 | ### Setting up the project ###
68 | * Run the kafka-flink connector project, which waits for an incoming data stream on the request topic "flink-demo"
69 | * Run the ws-vertx project, which fires an event on the event bus that writes a sample API request to that topic
70 | * Verify that the message is written correctly on the topic "flink-demo"
71 | * The Flink Kafka connector consumes the message, deserializes it, and transforms the data stream into a response stream
72 | * The Flink job then writes the response back to the response topic "flink-demo-resp"
73 |
74 | ### Testing the web socket flow ###
75 | * Included within the vertx flow is a client socket verticle that emulates a single web socket request
76 | * It is fired as soon as the server verticle is deployed. [Optional] Look for the following
77 | ```
78 | # Uncomment the below line for local UI testing: it creates a websocket request to the given server
79 | //vertx.deployVerticle(new ClientSocketRequestVerticle());
80 |
81 | ```
82 |
83 | * You can however choose to send websocket requests from a client manually.
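For instance, a bare-bones standalone client can reuse the same Vert.x websocket API that `ClientSocketRequestVerticle` already uses. This is only a sketch; the class name and payload values are illustrative:
```
import io.vertx.core.Vertx;

public class ManualWsClient {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();
        // Open a websocket to the server verticle and fire a single register request
        vertx.createHttpClient().websocket(9443, "127.0.0.1", "/wsapi/register", ws -> {
            // Print whatever the server writes back to this socket
            ws.handler(buffer -> System.out.println("Server replied: " + buffer));
            ws.writeTextMessage("{\"email\":\"your email\",\"password\":\"your password\",\"registerAsAdmin\":true}");
        });
    }
}
```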
Use the following 84 | ``` 85 | # Use the following websocket URL 86 | ws://127.0.0.1:9443/wsapi/register 87 | 88 | # Once the socket opens, begin sending messages in the correct format 89 | { 90 | "email": "your email", 91 | "password": "your password ", 92 | "registerAsAdmin": true 93 | } 94 | 95 | ``` 96 | 97 | ---- 98 | ### Websockets ### 99 | * Websocket for communication between app & backend 100 | * Async messages, non-blocking communication layer 101 | * Full duplex communication channels over single TCP 102 | 103 | --- 104 | ### Vert-x ### 105 | * A toolkit ecosystem, to build reactive application on JVM 106 | * Vert-x library helps implement non-blocking asynchronous event bus implementation. 107 | * Helps manage Websocket queue 108 | 109 | --- 110 | ### Kafka ### 111 | * Distributed streaming platform. 112 | * Kafka provides a fully integrated Streams API to allow an application to act as a stream processor, consuming an input stream from one or more topics and producing an output stream to one or more output topics, effectively transforming the input streams to output streams. 113 | * Handles out-of-order data. 114 | 115 | --- 116 | ### Flink ### 117 | * Open-source platform for distributed stream and batch data processing. 118 | * Provides data distribution, communication, and fault tolerance for distributed computations over data streams. 119 | * Builds batch processing on top of the streaming engine, overlaying native iteration support, managed memory, and program optimization. 120 | -------------------------------------------------------------------------------- /kafkaflink/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.prudential.kafkaflink 5 | kafkaflink 6 | 0.0.1 7 | 8 | 9 | UTF-8 10 | 1.8 11 | 3.5.1 12 | 2.4.3 13 | 2.21.0 14 | 1.5.0 15 | 16 | 17 | 18 | 19 | 20 | 21 | org.apache.flink 22 | flink-connector-kafka-0.10_2.11 23 | compile 24 | 1.6.0 25 | 26 | 27 | 28 | org.apache.flink 29 | flink-core 30 | compile 31 | 1.6.0 32 | 33 | 34 | 35 | org.apache.flink 36 | flink-streaming-java_2.11 37 | compile 38 | 1.6.0 39 | 40 | 41 | 42 | org.apache.flink 43 | flink-java 44 | compile 45 | 1.6.0 46 | 47 | 48 | 49 | org.apache.flink 50 | flink-streaming-core 51 | compile 52 | 0.9.1 53 | 54 | 55 | org.apache.flink 56 | flink-clients 57 | compile 58 | 0.9.1 59 | 60 | 61 | 62 | org.asynchttpclient 63 | async-http-client 64 | 2.5.2 65 | 66 | 67 | 68 | org.apache.httpcomponents 69 | httpasyncclient 70 | 4.1.2 71 | 72 | 73 | 74 | com.google.code.gson 75 | gson 76 | 2.8.0 77 | 78 | 79 | 80 | io.netty 81 | netty-all 82 | 4.0.27.Final 83 | 84 | 85 | 86 | 87 | log4j 88 | log4j 89 | 1.2.17 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 100 | 101 | org.apache.maven.plugins 102 | maven-shade-plugin 103 | 2.4.3 104 | 105 | 106 | 107 | package 108 | 109 | shade 110 | 111 | 112 | 113 | 114 | 117 | 118 | 124 | 125 | 126 | 127 | 128 | 129 | org.apache.flink:* 130 | 131 | org/apache/flink/shaded/** 132 | web-docs/** 133 | 134 | 135 | 136 | 137 | 138 | 140 | com.dataartisans.Job 141 | 142 | 143 | false 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | org.apache.maven.plugins 152 | maven-jar-plugin 153 | 2.5 154 | 155 | 156 | 157 | com.aman.kafkalink.FlinkReadFromKafka 158 | 159 | 160 | 161 | 162 | 163 | 164 | org.apache.maven.plugins 165 | maven-compiler-plugin 166 | 167 | 8 168 | 8 169 | 170 | 171 | 172 | 173 | 174 | 175 | -------------------------------------------------------------------------------- 
/kafkaflink/src/main/java/com/aman/kafkalink/AsyncRegisterApiInvocation.java:
--------------------------------------------------------------------------------
  1 | package com.aman.kafkalink;
  2 |
  3 | import com.aman.kafkalink.entity.MessageType;
  4 | import com.aman.kafkalink.entity.RegisterRequest;
  5 | import com.aman.kafkalink.entity.RegisterResponse;
  6 | import com.google.gson.Gson;
  7 | import org.apache.flink.configuration.Configuration;
  8 | import org.apache.flink.streaming.api.functions.async.ResultFuture;
  9 | import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
 10 | import org.apache.log4j.Logger;
 11 | import org.asynchttpclient.AsyncCompletionHandler;
 12 | import org.asynchttpclient.AsyncHttpClient;
 13 | import org.asynchttpclient.DefaultAsyncHttpClient;
 14 | import org.asynchttpclient.Request;
 15 | import org.asynchttpclient.Response;
 16 |
 17 | import java.util.Collections;
 18 |
 19 |
 20 | public class AsyncRegisterApiInvocation extends RichAsyncFunction<RegisterRequest, RegisterResponse> {
 21 |
 22 |     private static final long serialVersionUID = 1L;
 23 |     private static final Logger logger = Logger.getLogger(AsyncRegisterApiInvocation.class);
 24 |     private final Integer apiTimeoutMs;
 25 |
 26 |     /**
 27 |      * The asynchronous client that can issue concurrent requests with callbacks
 28 |      */
 29 |     private transient AsyncHttpClient asyncHttpClient = null;
 30 |
 31 |     public AsyncRegisterApiInvocation(Integer apiTimeoutMs) {
 32 |         this.apiTimeoutMs = apiTimeoutMs;
 33 |     }
 34 |
 35 |     @Override
 36 |     public void open(Configuration parameters) {
 37 |         logger.info("Opening connection " + parameters.toString());
 38 |         this.asyncHttpClient = new DefaultAsyncHttpClient();
 39 |     }
 40 |
 41 |     @Override
 42 |     public void close() throws Exception {
 43 |         logger.info("Closing connection");
 44 |         super.close();
 45 |         asyncHttpClient.close();
 46 |     }
 47 |
 48 |     @Override
 49 |     public void timeout(RegisterRequest registerRequest, ResultFuture<RegisterResponse> resultFuture) throws Exception {
 50 |         RegisterResponse registerResponse = new RegisterResponse();
 51 |         registerResponse.setSuccess(false);
 52 |         registerResponse.setSenderId(registerRequest.getSenderId());
 53 |         registerResponse.setError("[TimeoutException Api-Invocation]");
 54 |         registerResponse.setCause("Timeout occurred during registration");
 55 |         resultFuture.complete(Collections.singletonList(registerResponse));
 56 |     }
 57 |
 58 |     @Override
 59 |     public void asyncInvoke(RegisterRequest registerRequest, final ResultFuture<RegisterResponse> resultFuture) throws Exception {
 60 |         // issue the asynchronous request and receive a future for the result
 61 |         Gson g = new Gson();
 62 |         String jsonContent = g.toJson(registerRequest);
 63 |         Request request = asyncHttpClient.preparePost("http://localhost:9004/api/auth/register")
 64 |                 .setHeader("Content-Type", "application/json")
 65 |                 .setHeader("Content-Length", "" + jsonContent.length())
 66 |                 .setBody(jsonContent)
 67 |                 .setRequestTimeout(this.apiTimeoutMs)
 68 |                 .build();
 69 |
 70 |         try {
 71 |             asyncHttpClient.executeRequest(request, new AsyncCompletionHandler<RegisterResponse>() {
 72 |                 @Override
 73 |                 public RegisterResponse onCompleted(Response response) throws Exception {
 74 |                     logger.info("Spring returned " + response.getResponseBody());
 75 |                     Gson g = new Gson();
 76 |                     RegisterResponse responseMessage = g.fromJson(response.getResponseBody(),
 77 |                             RegisterResponse.class);
 78 |
 79 |                     responseMessage.setSenderId(registerRequest.getSenderId());
 80 |                     responseMessage.setSuccess(true);
 81 |                     responseMessage.setData(response.getResponseBody());
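                    // Tag the message type so downstream consumers can tell this payload came from the REST call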
 82 |                     responseMessage.setMessageType(MessageType.REST);
 83 |                     resultFuture.complete(Collections.singletonList(responseMessage));
 84 |                     return responseMessage;
 85 |                 }
 86 |
 87 |                 @Override
 88 |                 public void onThrowable(Throwable t) {
 89 |                     RegisterResponse registerResponse = new RegisterResponse();
 90 |                     registerResponse.setSuccess(false);
 91 |                     registerResponse.setSenderId(registerRequest.getSenderId());
 92 |                     registerResponse.setError(t.getMessage());
 93 |                     registerResponse.setCause(t.getCause() != null ? t.getCause().getMessage() : t.toString());
 94 |                     resultFuture.complete(Collections.singletonList(registerResponse));
 95 |                 }
 96 |             });
 97 |
 98 |         } catch (Exception ex) {
 99 |             logger.error("Exception [HTTP] Client " + ex);
100 |         }
101 |     }
102 |
103 |
104 | }
105 |
--------------------------------------------------------------------------------
/kafkaflink/src/main/java/com/aman/kafkalink/FlinkReadFromKafka.java:
--------------------------------------------------------------------------------
 1 | package com.aman.kafkalink;
 2 |
 3 | import com.aman.kafkalink.config.FlinkKafkaConsumerConfig;
 4 | import com.aman.kafkalink.config.FlinkKafkaProducerConfig;
 5 | import com.aman.kafkalink.entity.RegisterRequest;
 6 | import com.aman.kafkalink.entity.RegisterRequestSchema;
 7 | import com.aman.kafkalink.entity.RegisterResponse;
 8 | import com.aman.kafkalink.entity.RegisterResponseSerializer;
 9 | import org.apache.flink.streaming.api.datastream.AsyncDataStream;
10 | import org.apache.flink.streaming.api.datastream.DataStream;
11 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
12 | import org.apache.flink.streaming.api.functions.ProcessFunction;
13 | import org.apache.flink.streaming.api.functions.async.AsyncFunction;
14 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
15 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010;
16 | import org.apache.flink.util.Collector;
17 | import org.apache.log4j.Logger;
18 |
19 | import java.util.Properties;
20 | import java.util.concurrent.TimeUnit;
21 |
22 | public class FlinkReadFromKafka {
23 |
24 |     private static final Logger logger = Logger.getLogger(FlinkReadFromKafka.class);
25 |
26 |     public static void main(String[] args) throws Exception {
27 |         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
28 |         env.setParallelism(1);
29 |         Properties consumerProp = FlinkKafkaConsumerConfig.getKafkaConsumerConfig();
30 |
31 |         // Create a flink consumer from the topic with a custom deserializer for "RegisterRequest"
32 |         FlinkKafkaConsumer010<RegisterRequest> consumer = new FlinkKafkaConsumer010<>(consumerProp.getProperty("topic"),
33 |                 new RegisterRequestSchema(), consumerProp);
34 |
35 |         // Start reading partitions from the consumer group’s committed offsets in Kafka brokers
36 |         consumer.setStartFromGroupOffsets();
37 |
38 |         // Create a flink data stream from the consumer source, i.e. the Kafka topic
39 |         DataStream<RegisterRequest> messageStream = env.addSource(consumer);
40 |
41 |         logger.info(messageStream.process(new ProcessFunction<RegisterRequest, RegisterRequest>() {
42 |             @Override
43 |             public void processElement(RegisterRequest registerRequest, Context context, Collector<RegisterRequest> collector) throws Exception {
44 |                 logger.info("Processing incoming request " + registerRequest);
45 |             }
46 |         }));
47 |
48 |         // Set the default timeout for the API. Ideally this should be fetched from a config server.
49 |         Integer apiTimeoutMs = 5000;
50 |
51 |         // Function that defines how a datastream object is transformed within flink
52 |         AsyncFunction<RegisterRequest, RegisterResponse> loginRestTransform = new AsyncRegisterApiInvocation(apiTimeoutMs);
53 |
54 |         // Transform the datastream in parallel
55 |         DataStream<RegisterResponse> result = AsyncDataStream
56 |                 .unorderedWait(messageStream, loginRestTransform, apiTimeoutMs, TimeUnit.MILLISECONDS, 1)
57 |                 .setParallelism(1);
58 |
59 |         Properties producerProp = FlinkKafkaProducerConfig.getKafkaProducerConfig();
60 |
61 |         // Write the result back to the Kafka sink, i.e. the response topic
62 |         result.addSink(new FlinkKafkaProducer010<>(producerProp.getProperty("topic"), new RegisterResponseSerializer(),
63 |                 producerProp));
64 |         env.execute();
65 |     }
66 |
67 |
68 | }
69 |
--------------------------------------------------------------------------------
/kafkaflink/src/main/java/com/aman/kafkalink/config/FlinkKafkaConsumerConfig.java:
--------------------------------------------------------------------------------
 1 | package com.aman.kafkalink.config;
 2 |
 3 | import java.util.Properties;
 4 |
 5 | public class FlinkKafkaConsumerConfig {
 6 |
 7 |     /**
 8 |      * Generate the properties for the kafka consumer
 9 |      */
10 |     public static Properties getKafkaConsumerConfig() {
11 |         Properties prop = new Properties();
12 |         prop.setProperty("topic", "flink-demo");
13 |         prop.setProperty("bootstrap.servers", "localhost:9092,localhost:9093,localhost:9094");
14 |         prop.setProperty("zookeeper.connect", "localhost:2181");
15 |         prop.setProperty("group.id", "flink-login-request-consumer-group");
16 |         prop.setProperty("enable.auto.commit", "true");
17 |         return prop;
18 |
19 |     }
20 | }
21 |
--------------------------------------------------------------------------------
/kafkaflink/src/main/java/com/aman/kafkalink/config/FlinkKafkaProducerConfig.java:
--------------------------------------------------------------------------------
 1 | package com.aman.kafkalink.config;
 2 |
 3 | import java.util.Properties;
 4 |
 5 | public class FlinkKafkaProducerConfig {
 6 |
 7 |     /**
 8 |      * Generate the properties for the kafka producer
 9 |      */
10 |     public static Properties getKafkaProducerConfig() {
11 |         Properties prop = new Properties();
12 |         prop.setProperty("topic", "flink-demo-resp");
13 |         prop.setProperty("bootstrap.servers", "localhost:9092,localhost:9093,localhost:9094");
14 |         prop.setProperty("zookeeper.connect", "localhost:2181");
15 |         prop.setProperty("group.id", "flink-login-request-consumer-group");
16 |         prop.setProperty("enable.auto.commit", "true");
17 |         return prop;
18 |
19 |     }
20 | }
21 |
--------------------------------------------------------------------------------
/kafkaflink/src/main/java/com/aman/kafkalink/entity/MessageType.java:
--------------------------------------------------------------------------------
 1 | package com.aman.kafkalink.entity;
 2 |
 3 |
 4 | public enum MessageType {
 5 |
 6 |     /**
 7 |      * A database call
 8 |      */
 9 |     DB,
10 |
11 |
12 |     /**
13 |      * A rest API call
14 |      */
15 |     REST
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/kafkaflink/src/main/java/com/aman/kafkalink/entity/RegisterRequest.java:
--------------------------------------------------------------------------------
 1 | package com.aman.kafkalink.entity;
 2 |
 3 | import java.io.Serializable;
 4 |
 5 | public class RegisterRequest implements Serializable {
 6 |
 7 |     private static final long serialVersionUID = -1533359700996484156L;
 8 |
 9 |     private String
username; 10 | 11 | private String email; 12 | 13 | private String password; 14 | 15 | private Boolean registerAsAdmin; 16 | 17 | private String senderId; 18 | 19 | public RegisterRequest(String username, String email, 20 | String password, Boolean registerAsAdmin, String senderId) { 21 | this.username = username; 22 | this.email = email; 23 | this.password = password; 24 | this.registerAsAdmin = registerAsAdmin; 25 | this.senderId = senderId; 26 | } 27 | 28 | public RegisterRequest() { 29 | } 30 | 31 | public String getUsername() { 32 | return username; 33 | } 34 | 35 | public void setUsername(String username) { 36 | this.username = username; 37 | } 38 | 39 | public String getEmail() { 40 | return email; 41 | } 42 | 43 | public void setEmail(String email) { 44 | this.email = email; 45 | } 46 | 47 | public String getPassword() { 48 | return password; 49 | } 50 | 51 | public void setPassword(String password) { 52 | this.password = password; 53 | } 54 | 55 | public Boolean getRegisterAsAdmin() { 56 | return registerAsAdmin; 57 | } 58 | 59 | public void setRegisterAsAdmin(Boolean registerAsAdmin) { 60 | this.registerAsAdmin = registerAsAdmin; 61 | } 62 | 63 | public String getSenderId() { 64 | return senderId; 65 | } 66 | 67 | public void setSenderId(String senderId) { 68 | this.senderId = senderId; 69 | } 70 | 71 | @Override 72 | public String toString() { 73 | return "RegisterRequest{" + 74 | "username='" + username + '\'' + 75 | ", email='" + email + '\'' + 76 | ", password='" + password + '\'' + 77 | ", registerAsAdmin=" + registerAsAdmin + 78 | ", senderId='" + senderId + '\'' + 79 | '}'; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /kafkaflink/src/main/java/com/aman/kafkalink/entity/RegisterRequestSchema.java: -------------------------------------------------------------------------------- 1 | package com.aman.kafkalink.entity; 2 | 3 | import com.google.gson.Gson; 4 | import org.apache.flink.api.common.serialization.DeserializationSchema; 5 | import org.apache.flink.api.common.serialization.SerializationSchema; 6 | import org.apache.flink.api.common.typeinfo.TypeInformation; 7 | import org.apache.flink.api.java.typeutils.TypeExtractor; 8 | 9 | import java.io.IOException; 10 | 11 | public class RegisterRequestSchema implements DeserializationSchema, 12 | SerializationSchema { 13 | 14 | private static final long serialVersionUID = 6154188370181669711L; 15 | 16 | public TypeInformation getProducedType() { 17 | return TypeExtractor.getForClass(RegisterRequest.class); 18 | } 19 | 20 | 21 | public byte[] serialize(RegisterRequest element) { 22 | Gson g = new Gson(); 23 | String message = g.toJson(element); 24 | return message.getBytes(); 25 | } 26 | 27 | 28 | public RegisterRequest deserialize(byte[] message) throws IOException { 29 | String strMessage = new String(message); 30 | return new Gson().fromJson(strMessage, RegisterRequest.class); 31 | } 32 | 33 | 34 | public boolean isEndOfStream(RegisterRequest nextElement) { 35 | return false; 36 | } 37 | 38 | 39 | } 40 | -------------------------------------------------------------------------------- /kafkaflink/src/main/java/com/aman/kafkalink/entity/RegisterResponse.java: -------------------------------------------------------------------------------- 1 | package com.aman.kafkalink.entity; 2 | 3 | public class RegisterResponse { 4 | 5 | private String data; 6 | 7 | private String error; 8 | 9 | private Boolean success; 10 | 11 | private String cause; 12 | 13 | private MessageType 
messageType; 14 | 15 | private String senderId; 16 | 17 | public RegisterResponse() { 18 | } 19 | 20 | public RegisterResponse(String data, String error, Boolean success, String cause, MessageType messageType, 21 | String senderId) { 22 | this.data = data; 23 | this.error = error; 24 | this.success = success; 25 | this.cause = cause; 26 | this.messageType = messageType; 27 | this.senderId = senderId; 28 | } 29 | 30 | public String getData() { 31 | return data; 32 | } 33 | 34 | public void setData(String data) { 35 | this.data = data; 36 | } 37 | 38 | public String getError() { 39 | return error; 40 | } 41 | 42 | public void setError(String error) { 43 | this.error = error; 44 | } 45 | 46 | public Boolean getSuccess() { 47 | return success; 48 | } 49 | 50 | public void setSuccess(Boolean success) { 51 | this.success = success; 52 | } 53 | 54 | public String getCause() { 55 | return cause; 56 | } 57 | 58 | public void setCause(String cause) { 59 | this.cause = cause; 60 | } 61 | 62 | public MessageType getMessageType() { 63 | return messageType; 64 | } 65 | 66 | public void setMessageType(MessageType messageType) { 67 | this.messageType = messageType; 68 | } 69 | 70 | public String getSenderId() { 71 | return senderId; 72 | } 73 | 74 | public void setSenderId(String senderId) { 75 | this.senderId = senderId; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /kafkaflink/src/main/java/com/aman/kafkalink/entity/RegisterResponseSerializer.java: -------------------------------------------------------------------------------- 1 | package com.aman.kafkalink.entity; 2 | 3 | import com.google.gson.Gson; 4 | import org.apache.flink.api.common.serialization.SerializationSchema; 5 | 6 | public class RegisterResponseSerializer implements SerializationSchema { 7 | 8 | private static final long serialVersionUID = 6154188370181669751L; 9 | 10 | @Override 11 | public byte[] serialize(RegisterResponse registerResponse) { 12 | Gson g = new Gson(); 13 | String message = g.toJson(registerResponse); 14 | return message.getBytes(); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /kafkaflink/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, stdout 3 | # Direct log messages to stdout 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.Target=System.out 6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 7 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n -------------------------------------------------------------------------------- /wsvertx/.idea/compiler.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /wsvertx/.idea/encodings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /wsvertx/.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /wsvertx/.idea/uiDesigner.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | -------------------------------------------------------------------------------- /wsvertx/dependency-reduced-pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | org.aman.cep 5 | ws-vertx 6 | 1.0-SNAPSHOT 7 | 8 | 9 | 10 | maven-compiler-plugin 11 | 12 | 8 13 | 8 14 | 15 | 16 | 17 | maven-shade-plugin 18 | 2.3 19 | 20 | 21 | package 22 | 23 | shade 24 | 25 | 26 | 27 | 28 | org.aman.wsvertx.MainVerticle 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 1.8 39 | UTF-8 40 | 1.8 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /wsvertx/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | org.aman.cep 8 | ws-vertx 9 | 1.0-SNAPSHOT 10 | 11 | 12 | 13 | org.apache.maven.plugins 14 | maven-compiler-plugin 15 | 16 | 8 17 | 8 18 | 19 | 20 | 21 | 22 | 23 | org.apache.maven.plugins 24 | maven-shade-plugin 25 | 2.3 26 | 27 | 28 | 29 | package 30 | 31 | shade 32 | 33 | 34 | 35 | 36 | 37 | org.aman.wsvertx.MainVerticle 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | UTF-8 49 | 1.8 50 | 1.8 51 | 52 | 53 | 54 | 55 | 56 | io.vertx 57 | vertx-core 58 | 3.5.3 59 | 60 | 61 | 62 | 63 | io.vertx 64 | vertx-web 65 | 3.5.3 66 | 67 | 68 | 69 | 70 | log4j 71 | log4j 72 | 1.2.17 73 | 74 | 75 | 76 | 77 | io.vertx 78 | vertx-kafka-client 79 | 3.5.3 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/ClientSocketRequestVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import io.vertx.core.AbstractVerticle; 4 | import io.vertx.core.Future; 5 | import io.vertx.core.http.HttpClient; 6 | import io.vertx.core.http.HttpClientOptions; 7 | import org.aman.wsvertx.model.payload.RegisterRequest; 8 | import org.aman.wsvertx.util.Util; 9 | import org.apache.log4j.Logger; 10 | 11 | public class ClientSocketRequestVerticle extends AbstractVerticle { 12 | 13 | private static final Logger logger = Logger.getLogger(ClientSocketRequestVerticle.class); 14 | private HttpClient httpClient; 15 | 16 | public ClientSocketRequestVerticle() { 17 | this.httpClient = null; 18 | } 19 | 20 | @Override 21 | public void start(Future future) throws Exception { 22 | logger.info("Deployed verticle [" + this.getClass().getName()); 23 | 24 | HttpClientOptions options = new HttpClientOptions() 25 | .setSsl(false) 26 | .setTrustAll(true); 27 | 28 | this.httpClient = vertx.createHttpClient(options).websocket(9443, "127.0.0.1", "/wsapi/register", 29 | webSocket -> { 30 | // Set the handler for processing server response if any 31 | webSocket.handler(dataBuffer -> { 32 | logger.info("Received response from 
server " + dataBuffer); 33 | }); 34 | 35 | // Emulate client side register request 36 | RegisterRequest registerRequest = createClientRegisterRequest(); 37 | Util.getJsonStringFromObject(registerRequest) 38 | .ifPresent(webSocket::writeTextMessage); 39 | 40 | }); 41 | } 42 | 43 | /** 44 | * Creates a dummy client register request 45 | */ 46 | private RegisterRequest createClientRegisterRequest() { 47 | RegisterRequest registerRequest = new RegisterRequest(); 48 | registerRequest.setEmail("amangarg1995sep@gmail.com"); 49 | registerRequest.setPassword("test"); 50 | registerRequest.setRegisterAsAdmin(true); 51 | return registerRequest; 52 | } 53 | 54 | @Override 55 | public void stop(Future future) throws Exception { 56 | if (null != this.httpClient) { 57 | this.httpClient.close(); 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/EventBusKafkaReceiverVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import io.vertx.core.AbstractVerticle; 4 | import io.vertx.core.Future; 5 | import org.apache.log4j.Logger; 6 | 7 | /** 8 | * Receives an event from the event bus. 9 | */ 10 | public class EventBusKafkaReceiverVerticle extends AbstractVerticle { 11 | 12 | private static final Logger logger = Logger.getLogger(EventBusKafkaReceiverVerticle.class); 13 | 14 | public void start(Future startFuture) throws InterruptedException { 15 | logger.info("Deployed verticle [" + this.getClass().getName() + "]"); 16 | 17 | // Deploy the kafka consumer verticle that reads incoming messages on topic fink-demo-resp 18 | vertx.deployVerticle(new KafkaConsumerVerticle("flink-demo-resp"), stringAsyncResult -> { 19 | if (stringAsyncResult.succeeded()) { 20 | logger.info("Kafka consumer verticle executed successfully"); 21 | } 22 | }); 23 | } 24 | } -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/EventBusKafkaSenderVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import io.vertx.core.AbstractVerticle; 4 | import io.vertx.core.Future; 5 | import org.apache.log4j.Logger; 6 | 7 | /** 8 | * Receives an event from the event bus. 
9 | */ 10 | public class EventBusKafkaSenderVerticle extends AbstractVerticle { 11 | 12 | private static final Logger logger = Logger.getLogger(EventBusKafkaSenderVerticle.class); 13 | 14 | public void start(Future startFuture) throws InterruptedException { 15 | logger.info("Deployed verticle [" + this.getClass().getName() + "]"); 16 | 17 | // Deploy the kafka producer verticle that reads events on "kafka.queue.publisher" 18 | vertx.deployVerticle(new KafkaProducerVerticle("flink-demo"), stringAsyncResult -> { 19 | if (stringAsyncResult.succeeded()) { 20 | // Once the kafka producer handler is setup successfully, send periodic requests 21 | logger.info("Kafka producer handler setup successful"); 22 | } 23 | }); 24 | } 25 | 26 | } -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/KafkaConsumerVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import io.vertx.core.AbstractVerticle; 4 | import io.vertx.core.Future; 5 | import io.vertx.core.json.JsonObject; 6 | import io.vertx.kafka.client.consumer.KafkaReadStream; 7 | import org.aman.wsvertx.config.KafkaConsumerConfig; 8 | import org.apache.log4j.Logger; 9 | 10 | import java.util.Collections; 11 | import java.util.Optional; 12 | 13 | public class KafkaConsumerVerticle extends AbstractVerticle { 14 | 15 | private static final Logger logger = Logger.getLogger(KafkaConsumerVerticle.class); 16 | 17 | private String topic; 18 | private KafkaReadStream kafkaConsumer; 19 | 20 | public KafkaConsumerVerticle(String topic) { 21 | this.topic = topic; 22 | this.kafkaConsumer = null; 23 | } 24 | 25 | @Override 26 | public void start(Future startFuture) { 27 | logger.info("Deployed verticle [" + this.getClass().getName() + "] topic[" + this.topic + "]"); 28 | 29 | //Create the kafka consumer 30 | kafkaConsumer = KafkaConsumerConfig.getKafkaConsumerConfig(vertx); 31 | 32 | // Subscribe to the correct kafka topic 33 | kafkaConsumer.subscribe(Collections.singleton(this.topic)); 34 | 35 | //Attach the consumer handler 36 | kafkaConsumer.handler(record -> { 37 | // Extract the message & put it back into event.bus for ws handler to intercept 38 | JsonObject message = record.value(); 39 | logger.info("Consumed Message " + record.value() + "on topic=" + record.topic() + 40 | ", partition=" + record.partition() + 41 | ", offset=" + record.offset()); 42 | 43 | //Extract the sender id from the message 44 | Optional senderId = Optional.ofNullable(message.getValue("senderId")) 45 | .map(String::valueOf); 46 | 47 | //Put the event back to the websocket handler address on the bus 48 | senderId.ifPresent(id -> { 49 | vertx.eventBus().send("ws-handler-" + id, message, messageAsyncResult -> { 50 | if (messageAsyncResult.succeeded()) { 51 | logger.info("Kafka consumer event handled successfully [ws-handler-" + id + "]"); 52 | } else { 53 | logger.info("Failed to send event to [ws-handler-" + id + "]"); 54 | } 55 | }); 56 | }); 57 | }); 58 | 59 | // Handle errors in the kafka consumer 60 | kafkaConsumer.exceptionHandler(exception -> { 61 | logger.error("Error while receiving message from Kafka " + exception.getMessage(), exception); 62 | startFuture.fail(exception); 63 | }); 64 | } 65 | 66 | @Override 67 | public void stop() throws Exception { 68 | if (null != kafkaConsumer) { 69 | kafkaConsumer.unsubscribe(voidAsyncResult -> { 70 | if (voidAsyncResult.succeeded()) { 71 | logger.info("Consumer successfully unsubscribed"); 72 
| } 73 | }); 74 | kafkaConsumer.close(voidAsyncResult -> { 75 | if (voidAsyncResult.succeeded()) { 76 | logger.info("Consumer [" + this.topic + "] closed successfully"); 77 | } else { 78 | logger.info("Consumer [" + this.topic + "] failed to close"); 79 | } 80 | }); 81 | } 82 | } 83 | } -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/KafkaProducerVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import io.vertx.core.AbstractVerticle; 5 | import io.vertx.core.Future; 6 | import io.vertx.core.eventbus.Message; 7 | import io.vertx.core.json.JsonObject; 8 | import io.vertx.kafka.client.producer.KafkaProducerRecord; 9 | import io.vertx.kafka.client.producer.KafkaWriteStream; 10 | import org.aman.wsvertx.config.KafkaProducerConfig; 11 | import org.apache.kafka.clients.producer.ProducerRecord; 12 | import org.apache.kafka.clients.producer.RecordMetadata; 13 | import org.apache.log4j.Logger; 14 | 15 | import java.util.Optional; 16 | 17 | public class KafkaProducerVerticle extends AbstractVerticle { 18 | 19 | private static final Logger logger = Logger.getLogger(KafkaProducerVerticle.class); 20 | 21 | private String topic; 22 | private KafkaWriteStream kafkaProducer; 23 | 24 | public KafkaProducerVerticle(String topic) { 25 | this.topic = topic; 26 | this.kafkaProducer = null; 27 | } 28 | 29 | @Override 30 | public void start(Future startFuture) { 31 | logger.info("Deployed verticle [" + this.getClass().getName() + "] topic[" + this.topic + "]"); 32 | 33 | //Create the producer 34 | this.kafkaProducer = KafkaProducerConfig.getKafkaProducerConfig(vertx); 35 | 36 | // Listen to the events on the bus with the address "kafka.queue.publisher" 37 | vertx.eventBus().consumer("ws.messages.producer.event.bus", message -> { 38 | Optional validJsonRequestOpt = getJsonRequest(message); 39 | Optional> kafkaProducerRecordOpt = 40 | validJsonRequestOpt.map(jsonReq -> KafkaProducerRecord.create(this.topic, jsonReq)) 41 | .map(KafkaProducerRecord::record); 42 | 43 | kafkaProducerRecordOpt.ifPresent(record -> { 44 | kafkaProducer.write(record, done -> { 45 | if (done.succeeded()) { 46 | RecordMetadata recordMetadata = done.result(); 47 | logger.info("Message " + record.value() + " written on topic=" + recordMetadata.topic() + 48 | ", partition=" + recordMetadata.partition() + 49 | ", offset=" + recordMetadata.offset()); 50 | message.reply("Published to Kafka"); 51 | } 52 | }); 53 | }); 54 | }).completionHandler(voidAsyncResult -> { 55 | if (voidAsyncResult.succeeded()){ 56 | logger.info("kafka.queue.publisher handler set up successful"); 57 | // Signal to the caller that handler setup successfully 58 | startFuture.complete(); 59 | } else { 60 | // Signal to the caller that the consumer handler failed 61 | logger.info("kafka.queue.publisher handler set up failed"); 62 | startFuture.fail(voidAsyncResult.cause()); 63 | } 64 | }); 65 | } 66 | 67 | /** 68 | * Generate a json object given the message object 69 | */ 70 | private Optional getJsonRequest(Message object) { 71 | try { 72 | ObjectMapper mapper = new ObjectMapper(); 73 | String jsonString = mapper.writeValueAsString(object.body()); 74 | return Optional.ofNullable(new JsonObject(jsonString)); 75 | } catch (Exception e) { 76 | logger.error("Cannot serialize [" + object + "] into JSON"); 77 | } 78 | return Optional.empty(); 79 | } 80 | 81 | @Override 82 | 
public void stop() throws Exception { 83 | if (null != kafkaProducer) { 84 | kafkaProducer.close(voidAsyncResult -> { 85 | if (voidAsyncResult.succeeded()) { 86 | logger.info("Producer [" + this.topic + "] closed successfully"); 87 | } else { 88 | logger.info("Producer [" + this.topic + "] failed to close"); 89 | } 90 | }); 91 | } 92 | } 93 | } -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/MainVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import io.vertx.core.AbstractVerticle; 4 | import io.vertx.core.Future; 5 | import io.vertx.core.Vertx; 6 | import org.apache.log4j.Logger; 7 | 8 | public class MainVerticle extends AbstractVerticle { 9 | 10 | private static final Logger logger = Logger.getLogger(MainVerticle.class); 11 | 12 | public static void main(String[] args) throws InterruptedException { 13 | Vertx vertx = Vertx.vertx(); 14 | vertx.deployVerticle(new MainVerticle()); 15 | } 16 | 17 | @Override 18 | public void start(Future startFuture) throws Exception { 19 | logger.info("Deployed main module " + startFuture + Thread.currentThread().getName()); 20 | 21 | //Deploy the server verticle that listens to socket reqs with unique id 22 | vertx.deployVerticle(new ServerSocketEventBusVerticle()); 23 | 24 | //Deploy the kafka sender verticle 25 | vertx.deployVerticle(new EventBusKafkaSenderVerticle()); 26 | 27 | //Deploy the kafka receiver verticle 28 | vertx.deployVerticle(new EventBusKafkaReceiverVerticle()); 29 | } 30 | 31 | @Override 32 | public void stop() throws Exception { 33 | logger.info("Successfully stopping verticle."); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/ServerSocketEventBusVerticle.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import io.vertx.core.AbstractVerticle; 5 | import io.vertx.core.Future; 6 | import io.vertx.core.eventbus.DeliveryOptions; 7 | import io.vertx.core.http.HttpServer; 8 | import io.vertx.core.shareddata.LocalMap; 9 | import org.aman.wsvertx.model.codec.RegisterRequestCodec; 10 | import org.aman.wsvertx.model.payload.RegisterRequest; 11 | import org.apache.log4j.Logger; 12 | 13 | import java.io.IOException; 14 | 15 | public class ServerSocketEventBusVerticle extends AbstractVerticle { 16 | 17 | private static final Logger logger = Logger.getLogger(ServerSocketEventBusVerticle.class); 18 | private HttpServer httpServer; 19 | 20 | public ServerSocketEventBusVerticle() { 21 | this.httpServer = null; 22 | } 23 | 24 | @Override 25 | public void start(Future startFuture){ 26 | logger.info("Deployed verticle [" + this.getClass().getName() + "]"); 27 | this.httpServer = vertx.createHttpServer(); 28 | 29 | // Set delivery options to include a custom codec for sending the register request 30 | DeliveryOptions deliveryOptions = new DeliveryOptions(); 31 | deliveryOptions.setCodecName(RegisterRequestCodec.class.getName()); 32 | vertx.eventBus().registerDefaultCodec(RegisterRequest.class, new RegisterRequestCodec()); 33 | 34 | httpServer.websocketHandler(webSocket -> { 35 | LocalMap wsSessions = vertx.sharedData().getLocalMap("ws.sessions"); 36 | 37 | //Filter socket base url 38 | if (webSocket.path().equals("/wsapi/register")) { 39 | logger.info("Request received at socket [" 
+ webSocket.textHandlerID() + "]");
 40 |                 wsSessions.put(webSocket.textHandlerID(), webSocket.textHandlerID());
 41 |
 42 |                 // Receive processed data from the kafka consumer and write it back to the socket
 43 |                 // (registered once per connection, so repeated messages do not pile up duplicate consumers)
 44 |                 vertx.eventBus().consumer("ws-handler-" + webSocket.textHandlerID(), kafkaMessage -> {
 45 |                     logger.info("Received message from Kafka at Vertx: " + kafkaMessage.body());
 46 |                     webSocket.writeTextMessage(kafkaMessage.body().toString());
 47 |                     kafkaMessage.reply("Writing the response to websocket");
 48 |                 });
 49 |
 50 |                 // Set the handler for the incoming text data
 51 |                 webSocket.textMessageHandler(data -> {
 52 |                     logger.info("Received web socket data [" + data + "]");
 53 |                     ObjectMapper mapper = new ObjectMapper();
 54 |                     try {
 55 |                         RegisterRequest registerRequest = mapper.readValue(data, RegisterRequest.class);
 56 |                         registerRequest.setSenderId(webSocket.textHandlerID());
 57 |                         logger.info("Sending to kafka topic: data [" + registerRequest + "]");
 58 |                         // Send the raw socket data to the kafka producer event bus
 59 |                         vertx.eventBus()
 60 |                                 .send("ws.messages.producer.event.bus", registerRequest, deliveryOptions,
 61 |                                         messageAsyncResult -> {
 62 |                                             if (messageAsyncResult.succeeded()) {
 63 |                                                 logger.info("Message status [" + messageAsyncResult.result().body() + "]");
 64 |                                             }
 65 |                                         });
 66 |                     } catch (IOException e) {
 67 |                         logger.error("Error deserializing websocket data [" + data + "] id [" + webSocket.textHandlerID() + "]");
 68 |                         webSocket.writeTextMessage("Error deserializing websocket data [" + data + "] id [" + webSocket.textHandlerID() + "]");
 69 |                     }
 70 |                 });
 71 |             }
 72 |             else {
 73 |                 logger.info("Websocket path [" + webSocket.path() + "] is invalid");
 74 |                 webSocket.reject();
 75 |             }
 76 |
 77 |             // Specify the close handler for the web socket connection
 78 |             webSocket.closeHandler(aVoid -> {
 79 |                 logger.info("Closing socket session : " + webSocket.textHandlerID());
 80 |                 wsSessions.remove(webSocket.textHandlerID());
 81 |             });
 82 |         });
 83 |
 84 |         httpServer.listen(9443, httpServerAsyncResult -> {
 85 |             if (httpServerAsyncResult.succeeded()) {
 86 |                 logger.info("Http server up and running at port [" + httpServer.actualPort() + "]");
 87 |                 // Deploy the client verticle that sends a request to the socket with a unique id
 88 |                 vertx.deployVerticle(new ClientSocketRequestVerticle());
 89 |             }
 90 |         });
 91 |     }
 92 |
 93 |     @Override
 94 |     public void stop() throws Exception {
 95 |         if (null != this.httpServer) {
 96 |             this.httpServer.close(voidAsyncResult -> {
 97 |                 if (voidAsyncResult.succeeded()) {
 98 |                     logger.info("Server [" + httpServer.actualPort() + "] closed successfully");
 99 |                 } else {
100 |                     logger.info("Server [" + httpServer.actualPort() + "] failed to close");
101 |                 }
102 |             });
103 |         }
104 |         super.stop();
105 |     }
106 | }
107 |
--------------------------------------------------------------------------------
/wsvertx/src/main/java/org/aman/wsvertx/config/KafkaConsumerConfig.java:
--------------------------------------------------------------------------------
 1 | package org.aman.wsvertx.config;
 2 |
 3 | import io.vertx.core.Vertx;
 4 | import io.vertx.core.json.JsonObject;
 5 | import io.vertx.kafka.client.consumer.KafkaReadStream;
 6 | import io.vertx.kafka.client.serialization.JsonObjectDeserializer;
 7 | import org.apache.kafka.clients.consumer.ConsumerConfig;
 8 | import org.apache.kafka.common.serialization.StringDeserializer;
 9 |
10 | import java.util.Properties;
11 |
12 | public class KafkaConsumerConfig {
13 |
14 |     public static KafkaReadStream<String, JsonObject> getKafkaConsumerConfig(Vertx vertx) {
15 |         Properties config = new Properties();
16 |         config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094");
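        // Three local brokers are listed here, but only localhost:9092 is required for the single-broker setup in the README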
"localhost:9092,localhost:9093,localhost:9094"); 17 | config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 18 | config.put(ConsumerConfig.GROUP_ID_CONFIG, "flink-resp-vertx-group"); 19 | config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); 20 | config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonObjectDeserializer.class); 21 | config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); 22 | 23 | return KafkaReadStream.create(vertx, config, String.class, JsonObject.class); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/config/KafkaProducerConfig.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx.config; 2 | 3 | import io.vertx.core.Vertx; 4 | import io.vertx.core.json.JsonObject; 5 | import io.vertx.kafka.client.producer.KafkaWriteStream; 6 | import io.vertx.kafka.client.serialization.JsonObjectSerializer; 7 | import org.apache.kafka.clients.producer.ProducerConfig; 8 | import org.apache.kafka.common.serialization.StringDeserializer; 9 | 10 | import java.util.Properties; 11 | 12 | public class KafkaProducerConfig { 13 | 14 | public static KafkaWriteStream getKafkaProducerConfig(Vertx vertx) { 15 | Properties config = new Properties(); 16 | config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094"); 17 | config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringDeserializer.class); 18 | config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonObjectSerializer.class); 19 | config.put(ProducerConfig.ACKS_CONFIG, "1"); 20 | 21 | return KafkaWriteStream.create(vertx, config, String.class, JsonObject.class); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/model/codec/RegisterRequestCodec.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx.model.codec; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.ObjectWriter; 6 | import io.vertx.core.buffer.Buffer; 7 | import io.vertx.core.eventbus.MessageCodec; 8 | import org.aman.wsvertx.model.payload.RegisterRequest; 9 | import org.apache.log4j.Logger; 10 | 11 | import java.io.IOException; 12 | import java.io.StringReader; 13 | 14 | public class RegisterRequestCodec implements MessageCodec { 15 | 16 | private static final Logger logger = Logger.getLogger(RegisterRequestCodec.class); 17 | 18 | @Override 19 | public void encodeToWire(Buffer buffer, RegisterRequest registerRequest) { 20 | ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); 21 | try { 22 | String jsonToStr = ow.writeValueAsString(registerRequest); 23 | int length = jsonToStr.getBytes().length; 24 | buffer.appendInt(length); 25 | buffer.appendString(jsonToStr); 26 | } catch (JsonProcessingException e) { 27 | logger.error("Error encoding [" + registerRequest + "] from " + this.name()); 28 | } 29 | } 30 | 31 | @Override 32 | public RegisterRequest decodeFromWire(int position, Buffer buffer) { 33 | int length = buffer.getInt(position); 34 | // Get JSON string by it`s length 35 | // Jump 4 because getInt() == 4 bytes 36 | String jsonStr = buffer.getString(position += 4, position += length); 37 | ObjectMapper mapper = new ObjectMapper(); 38 | try { 39 | 
return mapper.readValue(new StringReader(jsonStr), RegisterRequest.class); 40 | } catch (IOException e) { 41 | logger.error("Error decoding [" + jsonStr + "] to " + this.name()); 42 | } 43 | return null; 44 | } 45 | 46 | @Override 47 | public RegisterRequest transform(RegisterRequest registerRequest) { 48 | return registerRequest; 49 | } 50 | 51 | @Override 52 | public String name() { 53 | return this.getClass().getName(); 54 | } 55 | 56 | @Override 57 | public byte systemCodecID() { 58 | return -1; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/model/payload/RegisterRequest.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx.model.payload; 2 | 3 | public class RegisterRequest { 4 | 5 | private String username; 6 | 7 | private String email; 8 | 9 | private String password; 10 | 11 | private Boolean registerAsAdmin; 12 | 13 | private String senderId; 14 | 15 | public RegisterRequest(String username, String email, 16 | String password, Boolean registerAsAdmin, String senderId) { 17 | this.username = username; 18 | this.email = email; 19 | this.password = password; 20 | this.registerAsAdmin = registerAsAdmin; 21 | this.senderId = senderId; 22 | } 23 | 24 | public RegisterRequest() { 25 | } 26 | 27 | public String getUsername() { 28 | return username; 29 | } 30 | 31 | public void setUsername(String username) { 32 | this.username = username; 33 | } 34 | 35 | public String getEmail() { 36 | return email; 37 | } 38 | 39 | public void setEmail(String email) { 40 | this.email = email; 41 | } 42 | 43 | public String getPassword() { 44 | return password; 45 | } 46 | 47 | public void setPassword(String password) { 48 | this.password = password; 49 | } 50 | 51 | public Boolean getRegisterAsAdmin() { 52 | return registerAsAdmin; 53 | } 54 | 55 | public void setRegisterAsAdmin(Boolean registerAsAdmin) { 56 | this.registerAsAdmin = registerAsAdmin; 57 | } 58 | 59 | public String getSenderId() { 60 | return senderId; 61 | } 62 | 63 | public void setSenderId(String senderId) { 64 | this.senderId = senderId; 65 | } 66 | 67 | @Override 68 | public String toString() { 69 | return "RegisterRequest{" + 70 | "username='" + username + '\'' + 71 | ", email='" + email + '\'' + 72 | ", password='" + password + '\'' + 73 | ", registerAsAdmin=" + registerAsAdmin + 74 | ", senderId='" + senderId + '\'' + 75 | '}'; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /wsvertx/src/main/java/org/aman/wsvertx/util/Util.java: -------------------------------------------------------------------------------- 1 | package org.aman.wsvertx.util; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | 6 | import java.util.Optional; 7 | import java.util.UUID; 8 | 9 | public class Util { 10 | 11 | /** 12 | * Generates a random string using the default UUID 13 | */ 14 | public static String generateRandomUUID(){ 15 | return UUID.randomUUID().toString(); 16 | } 17 | 18 | public static Optional getJsonStringFromObject(Object object){ 19 | ObjectMapper objectMapper = new ObjectMapper(); 20 | try { 21 | return Optional.ofNullable(objectMapper.writeValueAsString(object)); 22 | } catch (JsonProcessingException e) { 23 | return Optional.empty(); 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- 
/wsvertx/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, stdout 3 | 4 | # Direct log messages to stdout 5 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 6 | log4j.appender.stdout.Target=System.out 7 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 8 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n --------------------------------------------------------------------------------