├── .gitignore ├── Chapter02 └── monedero │ ├── build.gradle │ └── src │ └── main │ └── java │ └── monedero │ ├── Consumer.java │ ├── ProcessingEngine.java │ ├── Producer.java │ ├── Reader.java │ ├── Validator.java │ └── Writer.java ├── Chapter03 └── monedero │ ├── build.gradle │ └── src │ └── main │ └── java │ └── monedero │ ├── Enricher.java │ ├── ProcessingEngine.java │ ├── Producer.java │ └── extractors │ ├── GeoIPService.java │ ├── OpenExchangeService.java │ └── OpenWeatherService.java ├── Chapter04 └── kioto │ ├── build.gradle │ └── src │ └── main │ └── java │ └── kioto │ ├── Constants.java │ ├── HealthCheck.java │ ├── custom │ ├── CustomProcessor.java │ └── CustomProducer.java │ ├── plain │ ├── PlainProcessor.java │ └── PlainProducer.java │ └── serde │ ├── HealthCheckDeserializer.java │ └── HealthCheckSerializer.java ├── Chapter05 └── kioto │ ├── build.gradle │ └── src │ └── main │ ├── java │ └── kioto │ │ ├── Constants.java │ │ ├── HealthCheck.java │ │ └── avro │ │ ├── AvroProcessor.java │ │ └── AvroProducer.java │ └── resources │ └── healthcheck.avsc ├── Chapter06 └── kioto │ ├── build.gradle │ └── src │ └── main │ ├── java │ └── kioto │ │ ├── Constants.java │ │ ├── HealthCheck.java │ │ ├── avro │ │ ├── AvroProducer.java │ │ └── AvroStreamsProcessor.java │ │ ├── custom │ │ ├── CustomProducer.java │ │ └── CustomStreamsProcessor.java │ │ ├── events │ │ ├── EventProcessor.java │ │ └── EventProducer.java │ │ ├── plain │ │ ├── PlainProducer.java │ │ └── PlainStreamsProcessor.java │ │ └── serde │ │ ├── HealthCheckDeserializer.java │ │ └── HealthCheckSerializer.java │ └── resources │ └── healthcheck.avsc ├── Chapter08 └── kioto │ ├── build.gradle │ └── src │ └── main │ └── java │ └── kioto │ ├── Constants.java │ ├── HealthCheck.java │ ├── plain │ └── PlainProducer.java │ └── spark │ └── SparkProcessor.java ├── LICENSE └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Build products 2 | target/ 3 | docs/_build/ 4 | dist/ 5 | 6 | # IntelliJ data 7 | *.iml 8 | *.iws 9 | *.ipr 10 | .idea/ 11 | 12 | # Other 13 | .DS_Store 14 | .dumbjump 15 | *classes 16 | *~ 17 | *# 18 | .#* 19 | .classpath 20 | /.metadata 21 | .project 22 | .settings 23 | .tern-port 24 | ui/ 25 | -------------------------------------------------------------------------------- /Chapter02/monedero/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | mainClassName = 'monedero.ProcessingEngine' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | version = '0.1.0' 14 | 15 | dependencies { 16 | compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.0.0' 17 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 18 | } 19 | 20 | jar { 21 | manifest { 22 | attributes 'Main-Class': mainClassName 23 | } from { 24 | configurations.compile.collect { 25 | it.isDirectory() ? 
it : zipTree(it) 26 | } 27 | } 28 | exclude "META-INF/*.SF" 29 | exclude "META-INF/*.DSA" 30 | exclude "META-INF/*.RSA" 31 | } -------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/Consumer.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import java.util.Properties; 4 | 5 | public interface Consumer { 6 | static Properties createConfig(String servers, String groupId) { 7 | Properties config = new Properties(); 8 | config.put("bootstrap.servers", servers); 9 | config.put("group.id", groupId); 10 | config.put("enable.auto.commit", "true"); 11 | config.put("auto.commit.interval.ms", "1000"); 12 | config.put("auto.offset.reset", "earliest"); 13 | config.put("session.timeout.ms", "30000"); 14 | config.put("key.deserializer", 15 | "org.apache.kafka.common.serialization.StringDeserializer"); 16 | config.put("value.deserializer", 17 | "org.apache.kafka.common.serialization.StringDeserializer"); 18 | return config; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/ProcessingEngine.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | public class ProcessingEngine { 4 | 5 | public static void main(String[] args) { 6 | String servers = args[0]; 7 | String groupId = args[1]; 8 | String sourceTopic = args[2]; 9 | String targetTopic = args[3]; 10 | 11 | Reader reader = new Reader(servers, groupId, sourceTopic); 12 | 13 | Writer writer = new Writer(servers, targetTopic); 14 | reader.run(writer); 15 | } 16 | } 17 | 18 | /* 19 | package monedero; 20 | 21 | public class ProcessingEngine { 22 | 23 | public static void main(String[] args){ 24 | String servers = args[0]; 25 | String groupId = args[1]; 26 | String sourceTopic = args[2]; 27 | String validTopic = args[3]; 28 | String invalidTopic = args[4]; 29 | 30 | Reader reader = new Reader(servers, groupId, sourceTopic); 31 | 32 | //Validator validator = new Validator(servers, validTopic, invalidTopic); 33 | Enricher enricher = new Enricher(servers, validTopic, invalidTopic); 34 | reader.run(enricher); 35 | 36 | } 37 | } 38 | */ -------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/Producer.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import java.util.Properties; 4 | 5 | import org.apache.kafka.clients.producer.KafkaProducer; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | 8 | public interface Producer { 9 | 10 | void process(String message); //1 11 | 12 | static void write(KafkaProducer producer, 13 | String topic, String message) { //2 14 | ProducerRecord pr = new ProducerRecord<>(topic, message); 15 | producer.send(pr); 16 | } 17 | 18 | static Properties createConfig(String servers) { //3 19 | Properties config = new Properties(); 20 | config.put("bootstrap.servers", servers); 21 | config.put("acks", "all"); 22 | config.put("retries", 0); 23 | config.put("batch.size", 1000); 24 | config.put("linger.ms", 1); 25 | config.put("key.serializer", 26 | "org.apache.kafka.common.serialization.StringSerializer"); 27 | config.put("value.serializer", 28 | "org.apache.kafka.common.serialization.StringSerializer"); 29 | return config; 30 | } 31 | } 32 | 
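Aside (not part of the repository): the Producer interface above is the Chapter 2 pipeline's extension point; Reader.run pushes every consumed record through process(String), so adding a processing stage means writing one more Producer implementation, as Writer and Validator below do. A minimal hedged sketch of a third implementation follows, with a hypothetical class name and assuming the same String key/value serializers configured by createConfig:

package monedero;

import org.apache.kafka.clients.producer.KafkaProducer;

// Hypothetical example (not in the repo): forwards only the messages
// that contain a given keyword, silently dropping the rest.
public final class FilteringWriter implements Producer {

  private final KafkaProducer producer;
  private final String topic;
  private final String keyword;

  FilteringWriter(String servers, String topic, String keyword) {
    this.producer = new KafkaProducer<>(Producer.createConfig(servers));
    this.topic = topic;
    this.keyword = keyword;
  }

  @Override
  public void process(String message) {
    if (message.contains(this.keyword)) {
      Producer.write(this.producer, this.topic, message);
    }
  }
}

Swapping it into the engine is one line in ProcessingEngine: reader.run(new FilteringWriter(servers, targetTopic, "event")).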
-------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/Reader.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord; 4 | import org.apache.kafka.clients.consumer.ConsumerRecords; 5 | import org.apache.kafka.clients.consumer.KafkaConsumer; 6 | 7 | import java.time.Duration; 8 | import java.util.Collections; 9 | 10 | class Reader implements Consumer { 11 | 12 | private final KafkaConsumer consumer; //1 13 | private final String topic; 14 | 15 | Reader(String servers, String groupId, String topic) { 16 | this.consumer = 17 | new KafkaConsumer<>(Consumer.createConfig(servers, groupId)); 18 | this.topic = topic; 19 | } 20 | 21 | void run(Producer producer) { 22 | this.consumer.subscribe(Collections.singletonList(this.topic)); //2 23 | while (true) { //3 24 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); //4 25 | for (ConsumerRecord record : records) { 26 | producer.process(record.value()); //5 27 | } 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/Validator.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import org.apache.kafka.clients.producer.KafkaProducer; 6 | 7 | import java.io.IOException; 8 | 9 | public class Validator implements Producer { 10 | 11 | private final KafkaProducer producer; 12 | private final String validMessages; 13 | private final String invalidMessages; 14 | 15 | private static final ObjectMapper MAPPER = new ObjectMapper(); 16 | 17 | public Validator(String servers, String validMessages, String invalidMessages) { //1 18 | this.producer = new KafkaProducer<>(Producer.createConfig(servers)); 19 | this.validMessages = validMessages; 20 | this.invalidMessages = invalidMessages; 21 | } 22 | 23 | @Override 24 | public void process(String message) { 25 | try { 26 | JsonNode root = MAPPER.readTree(message); 27 | String error = ""; 28 | error = error.concat(validate(root, "event")); //2 29 | error = error.concat(validate(root, "customer")); 30 | error = error.concat(validate(root, "currency")); 31 | error = error.concat(validate(root, "timestamp")); 32 | if (error.length() > 0) { 33 | Producer.write(this.producer, this.invalidMessages, //3 34 | "{\"error\": \" " + error + "\"}"); 35 | } else { 36 | Producer.write(this.producer, this.validMessages, //4 37 | MAPPER.writeValueAsString(root)); 38 | } 39 | } catch (IOException e) { 40 | Producer.write(this.producer, this.invalidMessages, "{\"error\": \"" 41 | + e.getClass().getSimpleName() + ": " + e.getMessage() + "\"}"); //5 42 | } 43 | } 44 | 45 | private String validate(JsonNode root, String path) { 46 | if (!root.has(path)) { 47 | return path.concat(" is missing. "); 48 | } 49 | JsonNode node = root.path(path); 50 | if (node.isMissingNode()) { 51 | return path.concat(" is missing. 
"); 52 | } 53 | return ""; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /Chapter02/monedero/src/main/java/monedero/Writer.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import org.apache.kafka.clients.producer.KafkaProducer; 4 | 5 | public class Writer implements Producer { 6 | 7 | private final KafkaProducer producer; 8 | private final String topic; 9 | 10 | Writer(String servers, String topic) { 11 | this.producer = new KafkaProducer<>( 12 | Producer.createConfig(servers)); //1 13 | this.topic = topic; 14 | } 15 | 16 | @Override 17 | public void process(String message) { 18 | Producer.write(this.producer, this.topic, message); //2 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /Chapter03/monedero/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | mainClassName = 'monedero.ProcessingEngine' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | version = '0.1.0' 14 | 15 | dependencies { 16 | compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.0.0' 17 | compile group: 'com.maxmind.geoip', name: 'geoip-api', version: '1.3.1' 18 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 19 | } 20 | 21 | jar { 22 | manifest { 23 | attributes 'Main-Class': mainClassName 24 | } from { 25 | configurations.compile.collect { 26 | it.isDirectory() ? it : zipTree(it) 27 | } 28 | } 29 | exclude "META-INF/*.SF" 30 | exclude "META-INF/*.DSA" 31 | exclude "META-INF/*.RSA" 32 | } -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/Enricher.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.fasterxml.jackson.databind.node.ObjectNode; 6 | import com.maxmind.geoip.Location; 7 | import monedero.extractors.GeoIPService; 8 | import monedero.extractors.OpenExchangeService; 9 | import org.apache.kafka.clients.producer.KafkaProducer; 10 | 11 | import java.io.IOException; 12 | 13 | public final class Enricher implements Producer { 14 | 15 | private final KafkaProducer producer; 16 | private final String validMessages; 17 | private final String invalidMessages; 18 | private static final ObjectMapper MAPPER = new ObjectMapper(); 19 | 20 | public Enricher(String servers, String validMessages, String invalidMessages) { 21 | this.producer = new KafkaProducer<>(Producer.createConfig(servers)); 22 | this.validMessages = validMessages; 23 | this.invalidMessages = invalidMessages; 24 | } 25 | 26 | @Override 27 | public void process(String message) { 28 | try { 29 | final JsonNode root = MAPPER.readTree(message); 30 | final JsonNode ipAddressNode = root.path("customer").path("ipAddress"); 31 | 32 | if (ipAddressNode.isMissingNode()) { //1 33 | Producer.write(this.producer, this.invalidMessages, 34 | "{\"error\": \"customer.ipAddress is missing\"}"); 35 | } else { 36 | final String ipAddress = ipAddressNode.textValue(); 37 | 38 | final Location location = new GeoIPService().getLocation(ipAddress); 39 | ((ObjectNode) root).with("customer").put("country", location.countryName); 40 | ((ObjectNode) 
root).with("customer").put("city", location.city); 41 | 42 | final OpenExchangeService oes = new OpenExchangeService(); //2 43 | ((ObjectNode) root).with("currency").put("rate", oes.getPrice("BTC")); //3 44 | 45 | Producer.write(this.producer, this.validMessages, MAPPER.writeValueAsString(root)); //4 46 | } 47 | } catch (IOException e) { 48 | Producer.write(this.producer, this.invalidMessages, "{\"error\": \"" 49 | + e.getClass().getSimpleName() + ": " + e.getMessage() + "\"}"); 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/ProcessingEngine.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | public class ProcessingEngine { 4 | 5 | public static void main(String[] args){ 6 | String servers = args[0]; 7 | String groupId = args[1]; 8 | String sourceTopic = args[2]; 9 | String validTopic = args[3]; 10 | String invalidTopic = args[4]; 11 | 12 | Reader reader = new Reader(servers, groupId, sourceTopic); 13 | 14 | Enricher enricher = new Enricher(servers, validTopic, invalidTopic); 15 | reader.run(enricher); 16 | 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/Producer.java: -------------------------------------------------------------------------------- 1 | package monedero; 2 | 3 | import java.util.Properties; 4 | 5 | import org.apache.kafka.clients.producer.KafkaProducer; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | 8 | public interface Producer { 9 | 10 | void process(String message); //1 11 | 12 | static void write(KafkaProducer producer, 13 | String topic, String message) { //2 14 | ProducerRecord pr = new ProducerRecord<>(topic, message); 15 | producer.send(pr); 16 | } 17 | 18 | static Properties createConfig(String servers) { //3 19 | Properties config = new Properties(); 20 | config.put("bootstrap.servers", servers); 21 | config.put("acks", "all"); 22 | config.put("retries", 0); 23 | config.put("batch.size", 1000); 24 | config.put("linger.ms", 1); 25 | config.put("key.serializer", 26 | "org.apache.kafka.common.serialization.StringSerializer"); 27 | config.put("value.serializer", 28 | "org.apache.kafka.common.serialization.StringSerializer"); 29 | return config; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/extractors/GeoIPService.java: -------------------------------------------------------------------------------- 1 | package monedero.extractors; 2 | 3 | import com.maxmind.geoip.Location; 4 | import com.maxmind.geoip.LookupService; 5 | import java.io.IOException; 6 | import java.util.logging.Level; 7 | import java.util.logging.Logger; 8 | 9 | public final class GeoIPService { 10 | 11 | private static final String MAXMINDDB = "/path_to_your_GeoLiteCity.dat_file"; 12 | 13 | public Location getLocation(String ipAddress) { 14 | try { 15 | final LookupService maxmind = new LookupService(MAXMINDDB, LookupService.GEOIP_MEMORY_CACHE); 16 | return maxmind.getLocation(ipAddress); 17 | } catch (IOException ex) { 18 | Logger.getLogger(GeoIPService.class.getName()).log(Level.SEVERE, null, ex); 19 | } 20 | return null; 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/extractors/OpenExchangeService.java: 
-------------------------------------------------------------------------------- 1 | package monedero.extractors; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import java.io.IOException; 6 | import java.net.URL; 7 | import java.util.logging.Level; 8 | import java.util.logging.Logger; 9 | 10 | public final class OpenExchangeService { 11 | 12 | private static final String API_KEY = "YOUR_API_KEY_VALUE_HERE"; //1 13 | private static final ObjectMapper MAPPER = new ObjectMapper(); 14 | 15 | public double getPrice(String currency) { 16 | try { 17 | final URL url = new URL("https://openexchangerates.org/api/latest.json?app_id=" + API_KEY); //2 18 | 19 | final JsonNode root = MAPPER.readTree(url); 20 | final JsonNode node = root.path("rates").path(currency); //3 21 | return Double.parseDouble(node.toString()); //4 22 | 23 | } catch (IOException ex) { 24 | Logger.getLogger(OpenExchangeService.class.getName()).log(Level.SEVERE, null, ex); 25 | } 26 | 27 | return 0; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /Chapter03/monedero/src/main/java/monedero/extractors/OpenWeatherService.java: -------------------------------------------------------------------------------- 1 | package monedero.extractors; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import java.io.IOException; 6 | import java.net.URL; 7 | import java.util.logging.Level; 8 | import java.util.logging.Logger; 9 | 10 | public class OpenWeatherService { 11 | 12 | private static final String API_KEY = "YOUR API_KEY_VALUE"; //1 13 | private static final ObjectMapper MAPPER = new ObjectMapper(); 14 | 15 | public double getTemperature(String lat, String lon) { 16 | try { 17 | final URL url = new URL( 18 | "http://api.openweathermap.org/data/2.5/weather?lat=" + lat + "&lon="+ lon + "&units=metric&appid=" + API_KEY); 19 | final JsonNode root = MAPPER.readTree(url); 20 | final JsonNode node = root.path("main").path("temp"); 21 | return Double.parseDouble(node.toString()); 22 | 23 | } catch (IOException ex) { 24 | Logger.getLogger(OpenWeatherService.class.getName()).log(Level.SEVERE, null, ex); 25 | } 26 | return 0; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /Chapter04/kioto/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | mainClassName = 'kioto.ProcessingEngine' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | version = '0.1.0' 14 | 15 | dependencies { 16 | compile group: 'com.github.javafaker', name: 'javafaker', version: '0.15' 17 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 18 | compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.0.0' 19 | } 20 | 21 | jar { 22 | manifest { 23 | attributes 'Main-Class': mainClassName 24 | } from { 25 | configurations.compile.collect { 26 | it.isDirectory() ? 
it : zipTree(it) 27 | } 28 | } 29 | exclude "META-INF/*.SF" 30 | exclude "META-INF/*.DSA" 31 | exclude "META-INF/*.RSA" 32 | } -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/Constants.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import com.fasterxml.jackson.databind.SerializationFeature; 5 | import com.fasterxml.jackson.databind.util.StdDateFormat; 6 | 7 | public final class Constants { 8 | 9 | private static final ObjectMapper jsonMapper; 10 | 11 | static { 12 | ObjectMapper mapper = new ObjectMapper(); 13 | mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); 14 | mapper.setDateFormat(new StdDateFormat()); 15 | jsonMapper = mapper; 16 | } 17 | 18 | public static String getHealthChecksTopic() { 19 | return "healthchecks"; 20 | } 21 | 22 | public static String getHealthChecksAvroTopic() { 23 | return "healthchecks-avro"; 24 | } 25 | 26 | public static String getUptimesTopic() { 27 | return "uptimes"; 28 | } 29 | 30 | public enum machineType { GEOTHERMAL, HYDROELECTRIC, NUCLEAR, WIND, SOLAR } 31 | 32 | public enum machineStatus { STARTING, RUNNING, SHUTTING_DOWN, SHUT_DOWN } 33 | 34 | public static ObjectMapper getJsonMapper() { 35 | return jsonMapper; 36 | } 37 | } -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/HealthCheck.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import java.util.Date; 4 | 5 | public final class HealthCheck { 6 | private String event; 7 | private String factory; 8 | private String serialNumber; 9 | private String type; 10 | private String status; 11 | private Date lastStartedAt; 12 | private float temperature; 13 | private String ipAddress; 14 | 15 | public HealthCheck() { 16 | } 17 | 18 | public HealthCheck(String event, 19 | String factory, 20 | String serialNumber, 21 | String type, 22 | String status, 23 | Date lastStartedAt, 24 | float temperature, 25 | String ipAddress) { 26 | this.event = event; 27 | this.factory = factory; 28 | this.serialNumber = serialNumber; 29 | this.type = type; 30 | this.status = status; 31 | this.lastStartedAt = lastStartedAt; 32 | this.temperature = temperature; 33 | this.ipAddress = ipAddress; 34 | } 35 | 36 | public String getEvent() { 37 | return event; 38 | } 39 | 40 | public void setEvent(String event) { 41 | this.event = event; 42 | } 43 | 44 | public String getFactory() { 45 | return factory; 46 | } 47 | 48 | public void setFactory(String factory) { 49 | this.factory = factory; 50 | } 51 | 52 | public String getSerialNumber() { 53 | return serialNumber; 54 | } 55 | 56 | public void setSerialNumber(String serialNumber) { 57 | this.serialNumber = serialNumber; 58 | } 59 | 60 | public String getType() { 61 | return type; 62 | } 63 | 64 | public void setType(String type) { 65 | this.type = type; 66 | } 67 | 68 | public String getStatus() { 69 | return status; 70 | } 71 | 72 | public void setStatus(String status) { 73 | this.status = status; 74 | } 75 | 76 | public Date getLastStartedAt() { 77 | return lastStartedAt; 78 | } 79 | 80 | public void setLastStartedAt(Date lastStartedAt) { 81 | this.lastStartedAt = lastStartedAt; 82 | } 83 | 84 | public float getTemperature() { 85 | return temperature; 86 | } 87 | 88 | public void setTemperature(float temperature) { 89 | this.temperature = 
temperature; 90 | } 91 | 92 | public String getIpAddress() { 93 | return ipAddress; 94 | } 95 | 96 | public void setIpAddress(String ipAddress) { 97 | this.ipAddress = ipAddress; 98 | } 99 | } -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/custom/CustomProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.custom; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | import kioto.serde.HealthCheckDeserializer; 6 | 7 | import java.time.Duration; 8 | import java.time.LocalDate; 9 | import java.time.Period; 10 | import java.time.ZoneId; 11 | import java.util.Collections; 12 | import java.util.Properties; 13 | import java.util.concurrent.ExecutionException; 14 | import java.util.concurrent.Future; 15 | import org.apache.kafka.clients.consumer.Consumer; 16 | import org.apache.kafka.clients.consumer.ConsumerRecord; 17 | import org.apache.kafka.clients.consumer.ConsumerRecords; 18 | import org.apache.kafka.clients.consumer.KafkaConsumer; 19 | import org.apache.kafka.clients.producer.KafkaProducer; 20 | import org.apache.kafka.clients.producer.Producer; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.apache.kafka.common.serialization.StringDeserializer; 23 | import org.apache.kafka.common.serialization.StringSerializer; 24 | 25 | // $ kafka-topics --zookeeper localhost:2181 --create --topic uptimes --replication-factor 1 --partitions 4 26 | 27 | public final class CustomProcessor { 28 | private Consumer consumer; 29 | private Producer producer; 30 | 31 | public CustomProcessor(String brokers) { 32 | Properties consumerProps = new Properties(); 33 | consumerProps.put("bootstrap.servers", brokers); 34 | consumerProps.put("group.id", "healthcheck-processor"); 35 | consumerProps.put("key.deserializer", StringDeserializer.class); 36 | consumerProps.put("value.deserializer", HealthCheckDeserializer.class); 37 | consumer = new KafkaConsumer<>(consumerProps); 38 | 39 | Properties producerProps = new Properties(); 40 | producerProps.put("bootstrap.servers", brokers); 41 | producerProps.put("key.serializer", StringSerializer.class); 42 | producerProps.put("value.serializer", StringSerializer.class); 43 | producer = new KafkaProducer<>(producerProps); 44 | } 45 | 46 | public final void process() { 47 | consumer.subscribe(Collections.singletonList(Constants.getHealthChecksTopic())); 48 | while(true) { 49 | ConsumerRecords records = consumer.poll(Duration.ofSeconds(1L)); 50 | for(Object record : records) { 51 | ConsumerRecord it = (ConsumerRecord) record; 52 | HealthCheck healthCheck = (HealthCheck) it.value(); 53 | LocalDate startDateLocal = 54 | healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 55 | int uptime = Period.between(startDateLocal, LocalDate.now()).getDays(); 56 | Future future = producer.send( 57 | new ProducerRecord<>(Constants.getUptimesTopic(), 58 | healthCheck.getSerialNumber(), String.valueOf(uptime))); 59 | try { 60 | future.get(); 61 | } catch (InterruptedException | ExecutionException e) { 62 | e.printStackTrace(); 63 | } 64 | } 65 | } 66 | } 67 | 68 | public static void main( String[] args) { 69 | new CustomProcessor("localhost:9092").process(); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/custom/CustomProducer.java: 
-------------------------------------------------------------------------------- 1 | package kioto.custom; 2 | 3 | import com.github.javafaker.Faker; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | import java.util.Properties; 7 | import java.util.concurrent.ExecutionException; 8 | import java.util.concurrent.Future; 9 | import java.util.concurrent.TimeUnit; 10 | 11 | import kioto.serde.HealthCheckSerializer; 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.Producer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | import org.apache.kafka.common.serialization.StringSerializer; 16 | 17 | public final class CustomProducer { 18 | private final Producer producer; 19 | 20 | public CustomProducer(String brokers) { 21 | Properties props = new Properties(); 22 | props.put("bootstrap.servers", brokers); 23 | props.put("key.serializer", StringSerializer.class); 24 | props.put("value.serializer", HealthCheckSerializer.class); 25 | producer = new KafkaProducer<>(props); 26 | } 27 | 28 | public void produce(int ratePerSecond) { 29 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 30 | Faker faker = new Faker(); 31 | 32 | while(true) { 33 | HealthCheck fakeHealthCheck = 34 | new HealthCheck( 35 | "HEALTH_CHECK", 36 | faker.address().city(), 37 | faker.bothify("??##-??##", true), 38 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 39 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 40 | faker.date().past(100, TimeUnit.DAYS), 41 | faker.number().numberBetween(100L, 0L), 42 | faker.internet().ipV4Address()); 43 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksTopic(), fakeHealthCheck)); 44 | try { 45 | Thread.sleep(waitTimeBetweenIterationsMs); 46 | futureResult.get(); 47 | } catch (InterruptedException | ExecutionException e) { 48 | e.printStackTrace(); 49 | } 50 | } 51 | } 52 | 53 | public static void main(String[] args) { 54 | new CustomProducer("localhost:9092").produce(2); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/plain/PlainProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.plain; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | import org.apache.kafka.clients.consumer.Consumer; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | import org.apache.kafka.clients.producer.KafkaProducer; 10 | import org.apache.kafka.clients.producer.Producer; 11 | import org.apache.kafka.clients.producer.ProducerRecord; 12 | import org.apache.kafka.common.serialization.StringDeserializer; 13 | import org.apache.kafka.common.serialization.StringSerializer; 14 | 15 | import java.io.IOException; 16 | import java.time.Duration; 17 | import java.time.LocalDate; 18 | import java.time.Period; 19 | import java.time.ZoneId; 20 | import java.util.Collections; 21 | import java.util.Properties; 22 | import java.util.concurrent.ExecutionException; 23 | import java.util.concurrent.Future; 24 | 25 | // $ kafka-topics --zookeeper localhost:2181 --create --topic uptimes --replication-factor 1 --partitions 4 26 | 27 | public final class PlainProcessor { 28 | private Consumer consumer; 29 | private Producer producer; 30 | 31 | 
public PlainProcessor(String brokers) { 32 | Properties consumerProps = new Properties(); 33 | consumerProps.put("bootstrap.servers", brokers); 34 | consumerProps.put("group.id", "healthcheck-processor"); 35 | consumerProps.put("key.deserializer", StringDeserializer.class); 36 | consumerProps.put("value.deserializer", StringDeserializer.class); 37 | consumer = new KafkaConsumer<>(consumerProps); 38 | 39 | Properties producerProps = new Properties(); 40 | producerProps.put("bootstrap.servers", brokers); 41 | producerProps.put("key.serializer", StringSerializer.class); 42 | producerProps.put("value.serializer", StringSerializer.class); 43 | producer = new KafkaProducer<>(producerProps); 44 | } 45 | 46 | public final void process() { 47 | consumer.subscribe(Collections.singletonList(Constants.getHealthChecksTopic())); 48 | 49 | while (true) { 50 | ConsumerRecords records = consumer.poll(Duration.ofSeconds(1L)); 51 | 52 | for (Object record : records) { 53 | ConsumerRecord it = (ConsumerRecord) record; 54 | String healthCheckJson = (String) it.value(); 55 | HealthCheck healthCheck = null; 56 | try { 57 | healthCheck = Constants.getJsonMapper().readValue(healthCheckJson, HealthCheck.class); 58 | } catch (IOException e) { 59 | e.printStackTrace(); 60 | } 61 | LocalDate startDateLocal = healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 62 | int uptime = Period.between(startDateLocal, LocalDate.now()).getDays(); 63 | Future future = producer.send( 64 | new ProducerRecord<>(Constants.getUptimesTopic(), healthCheck.getSerialNumber(), String.valueOf(uptime))); 65 | try { 66 | future.get(); 67 | } catch (InterruptedException | ExecutionException e) { 68 | e.printStackTrace(); 69 | } 70 | } 71 | } 72 | } 73 | 74 | public static void main(String[] args) { 75 | (new PlainProcessor("localhost:9092")).process(); 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/plain/PlainProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.plain; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.github.javafaker.Faker; 5 | import kioto.Constants; 6 | import kioto.HealthCheck; 7 | import java.util.Properties; 8 | import java.util.concurrent.ExecutionException; 9 | import java.util.concurrent.Future; 10 | import java.util.concurrent.TimeUnit; 11 | 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.Producer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | import org.apache.kafka.common.serialization.StringSerializer; 16 | 17 | // $ kafka-topics --zookeeper localhost:2181 --create --topic healthchecks --replication-factor 1 --partitions 4 18 | 19 | public final class PlainProducer { 20 | private final Producer producer; 21 | 22 | public PlainProducer(String brokers) { 23 | Properties props = new Properties(); 24 | props.put("bootstrap.servers", brokers); 25 | props.put("key.serializer", StringSerializer.class); 26 | props.put("value.serializer", StringSerializer.class); 27 | producer = new KafkaProducer<>(props); 28 | } 29 | 30 | public void produce(int ratePerSecond) { 31 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 32 | Faker faker = new Faker(); 33 | 34 | while(true) { 35 | HealthCheck fakeHealthCheck = 36 | new HealthCheck( 37 | "HEALTH_CHECK", 38 | faker.address().city(), 39 | faker.bothify("??##-??##", true), 
40 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 41 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 42 | faker.date().past(100, TimeUnit.DAYS), 43 | faker.number().numberBetween(100L, 0L), 44 | faker.internet().ipV4Address()); 45 | 46 | String fakeHealthCheckJson = null; 47 | try { 48 | fakeHealthCheckJson = Constants.getJsonMapper().writeValueAsString(fakeHealthCheck); 49 | } catch (JsonProcessingException e) { 50 | e.printStackTrace(); 51 | } 52 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksTopic(), fakeHealthCheckJson)); 53 | try { 54 | Thread.sleep(waitTimeBetweenIterationsMs); 55 | futureResult.get(); 56 | } catch (InterruptedException | ExecutionException e) { 57 | e.printStackTrace(); 58 | } 59 | } 60 | } 61 | 62 | public static void main(String[] args) { 63 | new PlainProducer("localhost:9092").produce(2); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/serde/HealthCheckDeserializer.java: -------------------------------------------------------------------------------- 1 | package kioto.serde; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | import java.io.IOException; 6 | import java.util.Map; 7 | import org.apache.kafka.common.serialization.Deserializer; 8 | 9 | public final class HealthCheckDeserializer implements Deserializer { 10 | 11 | @Override 12 | public HealthCheck deserialize(String topic, byte[] data) { 13 | if (data == null) { 14 | return null; 15 | } 16 | try { 17 | return Constants.getJsonMapper().readValue(data, HealthCheck.class); 18 | } catch (IOException e) { 19 | return null; 20 | } 21 | } 22 | 23 | @Override 24 | public void close() {} 25 | 26 | @Override 27 | public void configure(Map configs, boolean isKey) {} 28 | } 29 | -------------------------------------------------------------------------------- /Chapter04/kioto/src/main/java/kioto/serde/HealthCheckSerializer.java: -------------------------------------------------------------------------------- 1 | package kioto.serde; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import kioto.Constants; 5 | import java.util.Map; 6 | import org.apache.kafka.common.serialization.Serializer; 7 | 8 | public final class HealthCheckSerializer implements Serializer { 9 | 10 | @Override 11 | public byte[] serialize(String topic, Object data) { 12 | if (data == null) { 13 | return null; 14 | } 15 | try { 16 | return Constants.getJsonMapper().writeValueAsBytes(data); 17 | } catch (JsonProcessingException e) { 18 | return null; 19 | } 20 | } 21 | 22 | @Override 23 | public void close() {} 24 | 25 | @Override 26 | public void configure(Map configs, boolean isKey) {} 27 | } 28 | -------------------------------------------------------------------------------- /Chapter05/kioto/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | mainClassName = 'kioto.ProcessingEngine' 8 | 9 | repositories { 10 | mavenCentral() 11 | maven { url 'https://packages.confluent.io/maven/' } 12 | } 13 | 14 | version = '0.1.0' 15 | 16 | dependencies { 17 | compile group: 'com.github.javafaker', name: 'javafaker', version: '0.15' 18 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 19 | compile group: 'org.apache.kafka', name: 'kafka_2.12', 
version: '2.0.0' 20 | compile group: 'io.confluent', name: 'kafka-avro-serializer', version: '5.0.0' 21 | } 22 | 23 | jar { 24 | manifest { 25 | attributes 'Main-Class': mainClassName 26 | } from { 27 | configurations.compile.collect { 28 | it.isDirectory() ? it : zipTree(it) 29 | } 30 | } 31 | exclude "META-INF/*.SF" 32 | exclude "META-INF/*.DSA" 33 | exclude "META-INF/*.RSA" 34 | } -------------------------------------------------------------------------------- /Chapter05/kioto/src/main/java/kioto/Constants.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import com.fasterxml.jackson.databind.SerializationFeature; 5 | import com.fasterxml.jackson.databind.util.StdDateFormat; 6 | 7 | public final class Constants { 8 | 9 | private static final ObjectMapper jsonMapper; 10 | 11 | static { 12 | ObjectMapper mapper = new ObjectMapper(); 13 | mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); 14 | mapper.setDateFormat(new StdDateFormat()); 15 | jsonMapper = mapper; 16 | } 17 | 18 | public static String getHealthChecksTopic() { 19 | return "healthchecks"; 20 | } 21 | 22 | public static String getHealthChecksAvroTopic() { 23 | return "healthchecks-avro"; 24 | } 25 | 26 | public static String getUptimesTopic() { 27 | return "uptimes"; 28 | } 29 | 30 | public enum machineType { GEOTHERMAL, HYDROELECTRIC, NUCLEAR, WIND, SOLAR } 31 | 32 | public enum machineStatus { STARTING, RUNNING, SHUTTING_DOWN, SHUT_DOWN } 33 | 34 | public static ObjectMapper getJsonMapper() { 35 | return jsonMapper; 36 | } 37 | } -------------------------------------------------------------------------------- /Chapter05/kioto/src/main/java/kioto/HealthCheck.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import java.util.Date; 4 | 5 | public final class HealthCheck { 6 | private String event; 7 | private String factory; 8 | private String serialNumber; 9 | private String type; 10 | private String status; 11 | private Date lastStartedAt; 12 | private float temperature; 13 | private String ipAddress; 14 | 15 | public HealthCheck() { 16 | } 17 | 18 | public HealthCheck(String event, 19 | String factory, 20 | String serialNumber, 21 | String type, 22 | String status, 23 | Date lastStartedAt, 24 | float temperature, 25 | String ipAddress) { 26 | this.event = event; 27 | this.factory = factory; 28 | this.serialNumber = serialNumber; 29 | this.type = type; 30 | this.status = status; 31 | this.lastStartedAt = lastStartedAt; 32 | this.temperature = temperature; 33 | this.ipAddress = ipAddress; 34 | } 35 | 36 | public String getEvent() { 37 | return event; 38 | } 39 | 40 | public void setEvent(String event) { 41 | this.event = event; 42 | } 43 | 44 | public String getFactory() { 45 | return factory; 46 | } 47 | 48 | public void setFactory(String factory) { 49 | this.factory = factory; 50 | } 51 | 52 | public String getSerialNumber() { 53 | return serialNumber; 54 | } 55 | 56 | public void setSerialNumber(String serialNumber) { 57 | this.serialNumber = serialNumber; 58 | } 59 | 60 | public String getType() { 61 | return type; 62 | } 63 | 64 | public void setType(String type) { 65 | this.type = type; 66 | } 67 | 68 | public String getStatus() { 69 | return status; 70 | } 71 | 72 | public void setStatus(String status) { 73 | this.status = status; 74 | } 75 | 76 | public Date getLastStartedAt() { 77 | return lastStartedAt; 78 | } 79 | 80 | 
public void setLastStartedAt(Date lastStartedAt) { 81 | this.lastStartedAt = lastStartedAt; 82 | } 83 | 84 | public float getTemperature() { 85 | return temperature; 86 | } 87 | 88 | public void setTemperature(float temperature) { 89 | this.temperature = temperature; 90 | } 91 | 92 | public String getIpAddress() { 93 | return ipAddress; 94 | } 95 | 96 | public void setIpAddress(String ipAddress) { 97 | this.ipAddress = ipAddress; 98 | } 99 | } -------------------------------------------------------------------------------- /Chapter05/kioto/src/main/java/kioto/avro/AvroProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.avro; 2 | 3 | import io.confluent.kafka.serializers.KafkaAvroDeserializer; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | 7 | import java.time.Duration; 8 | import java.time.LocalDate; 9 | import java.time.Period; 10 | import java.time.ZoneId; 11 | import java.util.Collections; 12 | import java.util.Date; 13 | import java.util.Properties; 14 | import java.util.concurrent.ExecutionException; 15 | import java.util.concurrent.Future; 16 | import org.apache.avro.generic.GenericRecord; 17 | import org.apache.kafka.clients.consumer.Consumer; 18 | import org.apache.kafka.clients.consumer.ConsumerRecord; 19 | import org.apache.kafka.clients.consumer.ConsumerRecords; 20 | import org.apache.kafka.clients.consumer.KafkaConsumer; 21 | import org.apache.kafka.clients.producer.KafkaProducer; 22 | import org.apache.kafka.clients.producer.Producer; 23 | import org.apache.kafka.clients.producer.ProducerRecord; 24 | import org.apache.kafka.common.serialization.StringDeserializer; 25 | import org.apache.kafka.common.serialization.StringSerializer; 26 | 27 | 28 | public final class AvroProcessor { 29 | private Consumer consumer; 30 | private Producer producer; 31 | 32 | public AvroProcessor(String brokers, String schemaRegistryUrl) { 33 | Properties consumerProps = new Properties(); 34 | consumerProps.put("bootstrap.servers", brokers); 35 | consumerProps.put("group.id", "healthcheck-processor"); 36 | consumerProps.put("key.deserializer", StringDeserializer.class); 37 | consumerProps.put("value.deserializer", KafkaAvroDeserializer.class); 38 | consumerProps.put("schema.registry.url", schemaRegistryUrl); 39 | consumer = new KafkaConsumer<>(consumerProps); 40 | 41 | Properties producerProps = new Properties(); 42 | producerProps.put("bootstrap.servers", brokers); 43 | producerProps.put("key.serializer", StringSerializer.class); 44 | producerProps.put("value.serializer", StringSerializer.class); 45 | producer = new KafkaProducer<>(producerProps); 46 | } 47 | 48 | public final void process() { 49 | consumer.subscribe(Collections.singletonList(Constants.getHealthChecksAvroTopic())); 50 | 51 | while (true) { 52 | ConsumerRecords records = consumer.poll(Duration.ofSeconds(1L)); 53 | 54 | for (Object record : records) { 55 | ConsumerRecord it = (ConsumerRecord) record; 56 | GenericRecord healthCheckAvro = (GenericRecord) it.value(); 57 | HealthCheck healthCheck = new HealthCheck( 58 | healthCheckAvro.get("event").toString(), 59 | healthCheckAvro.get("factory").toString(), 60 | healthCheckAvro.get("serialNumber").toString(), 61 | healthCheckAvro.get("type").toString(), 62 | healthCheckAvro.get("status").toString(), 63 | new Date((Long) healthCheckAvro.get("lastStartedAt")), 64 | Float.parseFloat(healthCheckAvro.get("temperature").toString()), 65 | healthCheckAvro.get("ipAddress").toString()); 66 | LocalDate startDateLocal = 
healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 67 | int uptime = Period.between(startDateLocal, LocalDate.now()).getDays(); 68 | Future future = producer.send( 69 | new ProducerRecord<>(Constants.getUptimesTopic(), 70 | healthCheck.getSerialNumber(), String.valueOf(uptime))); 71 | try { 72 | future.get(); 73 | } catch (InterruptedException | ExecutionException e) { 74 | e.printStackTrace(); 75 | } 76 | } 77 | } 78 | } 79 | 80 | public static void main(String[] args) { 81 | new AvroProcessor("localhost:9092", "http://localhost:8081").process(); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /Chapter05/kioto/src/main/java/kioto/avro/AvroProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.avro; 2 | 3 | import com.github.javafaker.Faker; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | import io.confluent.kafka.serializers.KafkaAvroSerializer; 7 | import java.io.File; 8 | import java.io.IOException; 9 | import java.util.Properties; 10 | import java.util.concurrent.ExecutionException; 11 | import java.util.concurrent.Future; 12 | import java.util.concurrent.TimeUnit; 13 | 14 | import org.apache.avro.Schema; 15 | import org.apache.avro.Schema.Parser; 16 | import org.apache.avro.generic.GenericRecord; 17 | import org.apache.avro.generic.GenericRecordBuilder; 18 | import org.apache.avro.generic.GenericData.Record; 19 | import org.apache.kafka.clients.producer.KafkaProducer; 20 | import org.apache.kafka.clients.producer.Producer; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.apache.kafka.common.serialization.StringSerializer; 23 | 24 | public final class AvroProducer { 25 | 26 | private final Producer producer; 27 | private Schema schema; 28 | 29 | public AvroProducer(String brokers, String schemaRegistryUrl) { 30 | Properties props = new Properties(); 31 | props.put("bootstrap.servers", brokers); 32 | props.put("key.serializer", StringSerializer.class); 33 | props.put("value.serializer", KafkaAvroSerializer.class); 34 | props.put("schema.registry.url", schemaRegistryUrl); 35 | producer = new KafkaProducer<>(props); 36 | try { 37 | schema = (new Parser()).parse(new File("src/main/resources/healthcheck.avsc")); 38 | } catch (IOException e) { 39 | e.printStackTrace(); 40 | } 41 | } 42 | 43 | public final void produce(int ratePerSecond) { 44 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 45 | Faker faker = new Faker(); 46 | 47 | while(true) { 48 | HealthCheck fakeHealthCheck = 49 | new HealthCheck( 50 | "HEALTH_CHECK", 51 | faker.address().city(), 52 | faker.bothify("??##-??##", true), 53 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 54 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 55 | faker.date().past(100, TimeUnit.DAYS), 56 | faker.number().numberBetween(100L, 0L), 57 | faker.internet().ipV4Address()); 58 | GenericRecordBuilder recordBuilder = new GenericRecordBuilder(schema); 59 | recordBuilder.set("event", fakeHealthCheck.getEvent()); 60 | recordBuilder.set("factory", fakeHealthCheck.getFactory()); 61 | recordBuilder.set("serialNumber", fakeHealthCheck.getSerialNumber()); 62 | recordBuilder.set("type", fakeHealthCheck.getType()); 63 | recordBuilder.set("status", fakeHealthCheck.getStatus()); 64 | recordBuilder.set("lastStartedAt", fakeHealthCheck.getLastStartedAt().getTime()); 65 | 
recordBuilder.set("temperature", fakeHealthCheck.getTemperature()); 66 | recordBuilder.set("ipAddress", fakeHealthCheck.getIpAddress()); 67 | Record avroHealthCheck = recordBuilder.build(); 68 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksAvroTopic(), avroHealthCheck)); 69 | try { 70 | Thread.sleep(waitTimeBetweenIterationsMs); 71 | futureResult.get(); 72 | } catch (InterruptedException | ExecutionException e) { 73 | e.printStackTrace(); 74 | } 75 | } 76 | } 77 | 78 | public static void main( String[] args) { 79 | new AvroProducer("localhost:9092", "http://localhost:8081").produce(2); 80 | } 81 | } -------------------------------------------------------------------------------- /Chapter05/kioto/src/main/resources/healthcheck.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "HealthCheck", 3 | "namespace": "kioto.avro", 4 | "type": "record", 5 | "fields": [ 6 | { "name": "event", "type": "string" }, 7 | { "name": "factory", "type": "string" }, 8 | { "name": "serialNumber", "type": "string" }, 9 | { "name": "type", "type": "string" }, 10 | { "name": "status", "type": "string"}, 11 | { "name": "lastStartedAt", "type": "long", "logicalType": "timestamp-millis"}, 12 | { "name": "temperature", "type": "float" }, 13 | { "name": "ipAddress", "type": "string" } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /Chapter06/kioto/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | mainClassName = 'kioto.ProcessingEngine' 8 | 9 | repositories { 10 | mavenCentral() 11 | maven { url 'https://packages.confluent.io/maven/' } 12 | } 13 | 14 | version = '0.1.0' 15 | 16 | dependencies { 17 | compile group: 'com.github.javafaker', name: 'javafaker', version: '0.15' 18 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 19 | compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.0.0' 20 | compile group: 'io.confluent', name: 'kafka-avro-serializer', version: '5.0.0' 21 | 22 | compile group: 'org.apache.kafka', name: 'kafka-streams', version: '2.0.0' 23 | compile group: 'io.confluent', name: 'kafka-streams-avro-serde', version: '5.0.0' 24 | } 25 | 26 | jar { 27 | manifest { 28 | attributes 'Main-Class': mainClassName 29 | } from { 30 | configurations.compile.collect { 31 | it.isDirectory() ? 
it : zipTree(it) 32 | } 33 | } 34 | exclude "META-INF/*.SF" 35 | exclude "META-INF/*.DSA" 36 | exclude "META-INF/*.RSA" 37 | } -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/Constants.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import com.fasterxml.jackson.databind.SerializationFeature; 5 | import com.fasterxml.jackson.databind.util.StdDateFormat; 6 | 7 | public final class Constants { 8 | 9 | private static final ObjectMapper jsonMapper; 10 | 11 | static { 12 | ObjectMapper mapper = new ObjectMapper(); 13 | mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); 14 | mapper.setDateFormat(new StdDateFormat()); 15 | jsonMapper = mapper; 16 | } 17 | 18 | public static String getHealthChecksTopic() { 19 | return "healthchecks"; 20 | } 21 | 22 | public static String getHealthChecksAvroTopic() { 23 | return "healthchecks-avro"; 24 | } 25 | 26 | public static String getUptimesTopic() { 27 | return "uptimes"; 28 | } 29 | 30 | public enum machineType { GEOTHERMAL, HYDROELECTRIC, NUCLEAR, WIND, SOLAR } 31 | 32 | public enum machineStatus { STARTING, RUNNING, SHUTTING_DOWN, SHUT_DOWN } 33 | 34 | public static ObjectMapper getJsonMapper() { 35 | return jsonMapper; 36 | } 37 | } -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/HealthCheck.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import java.util.Date; 4 | 5 | public final class HealthCheck { 6 | private String event; 7 | private String factory; 8 | private String serialNumber; 9 | private String type; 10 | private String status; 11 | private Date lastStartedAt; 12 | private float temperature; 13 | private String ipAddress; 14 | 15 | public HealthCheck() { 16 | } 17 | 18 | public HealthCheck(String event, 19 | String factory, 20 | String serialNumber, 21 | String type, 22 | String status, 23 | Date lastStartedAt, 24 | float temperature, 25 | String ipAddress) { 26 | this.event = event; 27 | this.factory = factory; 28 | this.serialNumber = serialNumber; 29 | this.type = type; 30 | this.status = status; 31 | this.lastStartedAt = lastStartedAt; 32 | this.temperature = temperature; 33 | this.ipAddress = ipAddress; 34 | } 35 | 36 | public String getEvent() { 37 | return event; 38 | } 39 | 40 | public void setEvent(String event) { 41 | this.event = event; 42 | } 43 | 44 | public String getFactory() { 45 | return factory; 46 | } 47 | 48 | public void setFactory(String factory) { 49 | this.factory = factory; 50 | } 51 | 52 | public String getSerialNumber() { 53 | return serialNumber; 54 | } 55 | 56 | public void setSerialNumber(String serialNumber) { 57 | this.serialNumber = serialNumber; 58 | } 59 | 60 | public String getType() { 61 | return type; 62 | } 63 | 64 | public void setType(String type) { 65 | this.type = type; 66 | } 67 | 68 | public String getStatus() { 69 | return status; 70 | } 71 | 72 | public void setStatus(String status) { 73 | this.status = status; 74 | } 75 | 76 | public Date getLastStartedAt() { 77 | return lastStartedAt; 78 | } 79 | 80 | public void setLastStartedAt(Date lastStartedAt) { 81 | this.lastStartedAt = lastStartedAt; 82 | } 83 | 84 | public float getTemperature() { 85 | return temperature; 86 | } 87 | 88 | public void setTemperature(float temperature) { 89 | this.temperature = 
temperature; 90 | } 91 | 92 | public String getIpAddress() { 93 | return ipAddress; 94 | } 95 | 96 | public void setIpAddress(String ipAddress) { 97 | this.ipAddress = ipAddress; 98 | } 99 | } -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/avro/AvroProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.avro; 2 | 3 | import com.github.javafaker.Faker; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | import io.confluent.kafka.serializers.KafkaAvroSerializer; 7 | import java.io.File; 8 | import java.io.IOException; 9 | import java.util.Properties; 10 | import java.util.concurrent.ExecutionException; 11 | import java.util.concurrent.Future; 12 | import java.util.concurrent.TimeUnit; 13 | 14 | import org.apache.avro.Schema; 15 | import org.apache.avro.Schema.Parser; 16 | import org.apache.avro.generic.GenericRecord; 17 | import org.apache.avro.generic.GenericRecordBuilder; 18 | import org.apache.avro.generic.GenericData.Record; 19 | import org.apache.kafka.clients.producer.KafkaProducer; 20 | import org.apache.kafka.clients.producer.Producer; 21 | import org.apache.kafka.clients.producer.ProducerRecord; 22 | import org.apache.kafka.common.serialization.StringSerializer; 23 | 24 | public final class AvroProducer { 25 | 26 | private final Producer producer; 27 | private Schema schema; 28 | 29 | public AvroProducer(String brokers, String schemaRegistryUrl) { 30 | Properties props = new Properties(); 31 | props.put("bootstrap.servers", brokers); 32 | props.put("key.serializer", StringSerializer.class); 33 | props.put("value.serializer", KafkaAvroSerializer.class); 34 | props.put("schema.registry.url", schemaRegistryUrl); 35 | producer = new KafkaProducer<>(props); 36 | try { 37 | schema = (new Parser()).parse(new File("src/main/resources/healthcheck.avsc")); 38 | } catch (IOException e) { 39 | e.printStackTrace(); 40 | } 41 | } 42 | 43 | public final void produce(int ratePerSecond) { 44 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 45 | Faker faker = new Faker(); 46 | 47 | while(true) { 48 | HealthCheck fakeHealthCheck = 49 | new HealthCheck( 50 | "HEALTH_CHECK", 51 | faker.address().city(), 52 | faker.bothify("??##-??##", true), 53 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 54 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 55 | faker.date().past(100, TimeUnit.DAYS), 56 | faker.number().numberBetween(100L, 0L), 57 | faker.internet().ipV4Address()); 58 | GenericRecordBuilder recordBuilder = new GenericRecordBuilder(schema); 59 | recordBuilder.set("event", fakeHealthCheck.getEvent()); 60 | recordBuilder.set("factory", fakeHealthCheck.getFactory()); 61 | recordBuilder.set("serialNumber", fakeHealthCheck.getSerialNumber()); 62 | recordBuilder.set("type", fakeHealthCheck.getType()); 63 | recordBuilder.set("status", fakeHealthCheck.getStatus()); 64 | recordBuilder.set("lastStartedAt", fakeHealthCheck.getLastStartedAt().getTime()); 65 | recordBuilder.set("temperature", fakeHealthCheck.getTemperature()); 66 | recordBuilder.set("ipAddress", fakeHealthCheck.getIpAddress()); 67 | Record avroHealthCheck = recordBuilder.build(); 68 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksAvroTopic(), avroHealthCheck)); 69 | try { 70 | Thread.sleep(waitTimeBetweenIterationsMs); 71 | futureResult.get(); 72 | } catch (InterruptedException | 
ExecutionException e) { 73 | e.printStackTrace(); 74 | } 75 | } 76 | } 77 | 78 | public static void main( String[] args) { 79 | new AvroProducer("localhost:9092", "http://localhost:8081").produce(2); 80 | } 81 | } -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/avro/AvroStreamsProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.avro; 2 | 3 | import io.confluent.kafka.streams.serdes.avro.GenericAvroSerde; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | 7 | import java.time.LocalDate; 8 | import java.time.Period; 9 | import java.time.ZoneId; 10 | import java.util.Collections; 11 | import java.util.Date; 12 | import java.util.Properties; 13 | import org.apache.avro.generic.GenericRecord; 14 | import org.apache.kafka.common.serialization.Serdes; 15 | import org.apache.kafka.streams.KafkaStreams; 16 | import org.apache.kafka.streams.KeyValue; 17 | import org.apache.kafka.streams.StreamsBuilder; 18 | import org.apache.kafka.streams.Topology; 19 | import org.apache.kafka.streams.kstream.Consumed; 20 | import org.apache.kafka.streams.kstream.KStream; 21 | import org.apache.kafka.streams.kstream.KeyValueMapper; 22 | import org.apache.kafka.streams.kstream.Produced; 23 | 24 | // $ kafka-topics --zookeeper localhost:2181 --create --topic uptimes --replication-factor 1 --partitions 4 25 | 26 | public final class AvroStreamsProcessor { 27 | 28 | private final String brokers; 29 | private final String schemaRegistryUrl; 30 | 31 | public AvroStreamsProcessor(String brokers, String schemaRegistryUrl) { 32 | super(); 33 | this.brokers = brokers; 34 | this.schemaRegistryUrl = schemaRegistryUrl; 35 | } 36 | 37 | public final void process() { 38 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 39 | GenericAvroSerde avroSerde = new GenericAvroSerde(); 40 | 41 | avroSerde.configure( 42 | Collections.singletonMap("schema.registry.url", schemaRegistryUrl), false); 43 | 44 | KStream avroStream = streamsBuilder.stream( 45 | Constants.getHealthChecksAvroTopic(), Consumed.with(Serdes.String(), avroSerde)); 46 | 47 | KStream healthCheckStream = avroStream.mapValues((v -> { 48 | GenericRecord healthCheckAvro = (GenericRecord) v; 49 | HealthCheck healthCheck = new HealthCheck( 50 | healthCheckAvro.get("event").toString(), 51 | healthCheckAvro.get("factory").toString(), 52 | healthCheckAvro.get("serialNumber").toString(), 53 | healthCheckAvro.get("type").toString(), 54 | healthCheckAvro.get("status").toString(), 55 | new Date((Long) healthCheckAvro.get("lastStartedAt")), 56 | Float.parseFloat(healthCheckAvro.get("temperature").toString()), 57 | healthCheckAvro.get("ipAddress").toString()); 58 | return healthCheck; 59 | })); 60 | 61 | KStream uptimeStream = healthCheckStream.map(((KeyValueMapper) (k, v) -> { 62 | HealthCheck healthCheck = (HealthCheck) v; 63 | LocalDate startDateLocal = 64 | healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 65 | int uptime = Period.between(startDateLocal, LocalDate.now()).getDays(); 66 | return new KeyValue<>(healthCheck.getSerialNumber(), String.valueOf(uptime)); 67 | })); 68 | 69 | uptimeStream.to(Constants.getUptimesTopic(), Produced.with(Serdes.String(), Serdes.String())); 70 | Topology topology = streamsBuilder.build(); 71 | Properties props = new Properties(); 72 | props.put("bootstrap.servers", this.brokers); 73 | props.put("application.id", "kioto"); 74 | KafkaStreams streams = new 
KafkaStreams(topology, props); 75 | streams.start(); 76 | } 77 | 78 | public static void main(String[] args) { 79 | (new AvroStreamsProcessor("localhost:9092", "http://localhost:8081")).process(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/custom/CustomProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.custom; 2 | 3 | import com.github.javafaker.Faker; 4 | import kioto.Constants; 5 | import kioto.HealthCheck; 6 | import java.util.Properties; 7 | import java.util.concurrent.ExecutionException; 8 | import java.util.concurrent.Future; 9 | import java.util.concurrent.TimeUnit; 10 | 11 | import kioto.serde.HealthCheckSerializer; 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.Producer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | import org.apache.kafka.common.serialization.StringSerializer; 16 | 17 | public final class CustomProducer { 18 | private final Producer producer; 19 | 20 | public CustomProducer(String brokers) { 21 | Properties props = new Properties(); 22 | props.put("bootstrap.servers", brokers); 23 | props.put("key.serializer", StringSerializer.class); 24 | props.put("value.serializer", HealthCheckSerializer.class); 25 | producer = new KafkaProducer<>(props); 26 | } 27 | 28 | public void produce(int ratePerSecond) { 29 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 30 | Faker faker = new Faker(); 31 | 32 | while(true) { 33 | HealthCheck fakeHealthCheck = 34 | new HealthCheck( 35 | "HEALTH_CHECK", 36 | faker.address().city(), 37 | faker.bothify("??##-??##", true), 38 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 39 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 40 | faker.date().past(100, TimeUnit.DAYS), 41 | faker.number().numberBetween(100L, 0L), 42 | faker.internet().ipV4Address()); 43 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksTopic(), fakeHealthCheck)); 44 | try { 45 | Thread.sleep(waitTimeBetweenIterationsMs); 46 | futureResult.get(); 47 | } catch (InterruptedException | ExecutionException e) { 48 | e.printStackTrace(); 49 | } 50 | } 51 | } 52 | 53 | public static void main(String[] args) { 54 | new CustomProducer("localhost:9092").produce(2); 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/custom/CustomStreamsProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.custom; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | import kioto.serde.HealthCheckDeserializer; 6 | import kioto.serde.HealthCheckSerializer; 7 | 8 | import java.time.LocalDate; 9 | import java.time.Period; 10 | import java.time.ZoneId; 11 | import java.util.Properties; 12 | import org.apache.kafka.common.serialization.Serde; 13 | import org.apache.kafka.common.serialization.Serdes; 14 | import org.apache.kafka.streams.KafkaStreams; 15 | import org.apache.kafka.streams.KeyValue; 16 | import org.apache.kafka.streams.StreamsBuilder; 17 | import org.apache.kafka.streams.Topology; 18 | import org.apache.kafka.streams.kstream.Consumed; 19 | import org.apache.kafka.streams.kstream.KStream; 20 | import org.apache.kafka.streams.kstream.KeyValueMapper; 21 | import 
org.apache.kafka.streams.kstream.Produced; 22 | 23 | // $ kafka-topics --zookeeper localhost:2181 --create --topic uptimes --replication-factor 1 --partitions 4 24 | 25 | public final class CustomStreamsProcessor { 26 | 27 | private final String brokers; 28 | 29 | public CustomStreamsProcessor(String brokers) { 30 | super(); 31 | this.brokers = brokers; 32 | } 33 | 34 | public final void process() { 35 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 36 | 37 | Serde customSerde = Serdes.serdeFrom( 38 | new HealthCheckSerializer(), new HealthCheckDeserializer()); 39 | 40 | KStream healthCheckStream = streamsBuilder.stream( 41 | Constants.getHealthChecksTopic(), Consumed.with(Serdes.String(), customSerde)); 42 | 43 | KStream uptimeStream = healthCheckStream.map(((KeyValueMapper) (k, v) -> { 44 | HealthCheck healthCheck = (HealthCheck) v; 45 | LocalDate startDateLocal = 46 | healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 47 | int uptime = Period.between(startDateLocal, LocalDate.now()).getDays(); 48 | return new KeyValue<>(healthCheck.getSerialNumber(), String.valueOf(uptime)); 49 | })); 50 | 51 | uptimeStream.to(Constants.getUptimesTopic(), Produced.with(Serdes.String(), Serdes.String())); 52 | Topology topology = streamsBuilder.build(); 53 | Properties props = new Properties(); 54 | props.put("bootstrap.servers", this.brokers); 55 | props.put("application.id", "kioto"); 56 | KafkaStreams streams = new KafkaStreams(topology, props); 57 | streams.start(); 58 | } 59 | 60 | public static void main(String[] args) { 61 | (new CustomStreamsProcessor("localhost:9092")).process(); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/events/EventProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.events; 2 | 3 | import org.apache.kafka.common.serialization.Serdes; 4 | import org.apache.kafka.streams.KafkaStreams; 5 | import org.apache.kafka.streams.KeyValue; 6 | import org.apache.kafka.streams.StreamsBuilder; 7 | import org.apache.kafka.streams.Topology; 8 | import org.apache.kafka.streams.kstream.*; 9 | import java.util.Properties; 10 | 11 | // $ kafka-topics --zookeeper localhost:2181 --create --topic aggregates --replication-factor 1 --partitions 4 12 | 13 | public final class EventProcessor { 14 | 15 | private final String brokers; 16 | 17 | private EventProcessor(String brokers) { 18 | this.brokers = brokers; 19 | } 20 | 21 | private void process() { 22 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 23 | KStream stream = streamsBuilder.stream("events", Consumed.with(Serdes.String(), Serdes.String())); 24 | 25 | KTable aggregates = 26 | stream.groupBy( (k, v) -> "foo", Serialized.with(Serdes.String(), Serdes.String())) 27 | .windowedBy(TimeWindows.of(10000L)) 28 | .count(Materialized.with(Serdes.String(), Serdes.Long())); 29 | 30 | aggregates 31 | .toStream() 32 | .map( (ws, i) -> new KeyValue( ""+((Windowed)ws).window().start(), ""+i)) 33 | .to("aggregates", Produced.with(Serdes.String(), Serdes.String())); 34 | 35 | Topology topology = streamsBuilder.build(); 36 | Properties props = new Properties(); 37 | props.put("bootstrap.servers", this.brokers); 38 | props.put("application.id", "kioto"); 39 | props.put("auto.offset.reset", "latest"); 40 | props.put("commit.interval.ms", 0); 41 | KafkaStreams streams = new KafkaStreams(topology, props); 42 | streams.start(); 43 | } 44 | 45 | public static 
void main(String[] args) { 46 | (new EventProcessor("localhost:9092")).process(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/events/EventProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.events; 2 | 3 | import java.util.Properties; 4 | import java.util.Timer; 5 | import java.util.TimerTask; 6 | import java.util.concurrent.ExecutionException; 7 | import java.util.concurrent.Future; 8 | import org.apache.kafka.clients.producer.KafkaProducer; 9 | import org.apache.kafka.clients.producer.Producer; 10 | import org.apache.kafka.clients.producer.ProducerRecord; 11 | import org.apache.kafka.common.serialization.StringSerializer; 12 | 13 | // $ kafka-topics --zookeeper localhost:2181 --create --topic events --replication-factor 1 --partitions 4 14 | 15 | public final class EventProducer { 16 | private final Producer producer; 17 | 18 | private EventProducer(String brokers) { 19 | Properties props = new Properties(); 20 | props.put("bootstrap.servers", brokers); 21 | props.put("key.serializer", StringSerializer.class); 22 | props.put("value.serializer", StringSerializer.class); 23 | producer = new KafkaProducer<>(props); 24 | } 25 | 26 | private void produce() { 27 | long now = System.currentTimeMillis(); 28 | long delay = 1300 - Math.floorMod(now, 1000); 29 | Timer timer = new Timer(); 30 | timer.schedule(new TimerTask() { 31 | public void run() { 32 | long ts = System.currentTimeMillis(); 33 | long second = Math.floorMod(ts / 1000, 60); 34 | if (second != 54) { 35 | EventProducer.this.sendMessage(second, ts, "on time"); 36 | } 37 | if (second == 6) { 38 | EventProducer.this.sendMessage(54, ts - 12000, "late"); 39 | } 40 | } 41 | }, delay, 1000); 42 | } 43 | 44 | private void sendMessage(long id, long ts, String info) { 45 | long window = ts / 10000 * 10000; 46 | String value = "" + window + ',' + id + ',' + info; 47 | Future futureResult = 48 | this.producer.send(new ProducerRecord<>("events", null, ts, String.valueOf(id), value)); 49 | try { 50 | futureResult.get(); 51 | } catch (InterruptedException | ExecutionException e) { 52 | e.printStackTrace(); 53 | } 54 | } 55 | 56 | public static void main(String[] args) { 57 | (new EventProducer("localhost:9092")).produce(); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/plain/PlainProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.plain; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.github.javafaker.Faker; 5 | import kioto.Constants; 6 | import kioto.HealthCheck; 7 | import java.util.Properties; 8 | import java.util.concurrent.ExecutionException; 9 | import java.util.concurrent.Future; 10 | import java.util.concurrent.TimeUnit; 11 | 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.Producer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | import org.apache.kafka.common.serialization.StringSerializer; 16 | 17 | // $ kafka-topics --zookeeper localhost:2181 --create --topic healthchecks --replication-factor 1 --partitions 4 18 | 19 | public final class PlainProducer { 20 | private final Producer producer; 21 | 22 | public PlainProducer(String brokers) { 23 | Properties props = new Properties(); 24 | props.put("bootstrap.servers", brokers); 
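// design note: this plain variant ships each HealthCheck as a JSON String, so String serializers suffice for both key and value; the custom and Avro producers in this chapter differ only in the value serializer configured here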
25 | props.put("key.serializer", StringSerializer.class); 26 | props.put("value.serializer", StringSerializer.class); 27 | producer = new KafkaProducer<>(props); 28 | } 29 | 30 | public void produce(int ratePerSecond) { 31 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 32 | Faker faker = new Faker(); 33 | 34 | while(true) { 35 | HealthCheck fakeHealthCheck = 36 | new HealthCheck( 37 | "HEALTH_CHECK", 38 | faker.address().city(), 39 | faker.bothify("??##-??##", true), 40 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 41 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 42 | faker.date().past(100, TimeUnit.DAYS), 43 | faker.number().numberBetween(100L, 0L), 44 | faker.internet().ipV4Address()); 45 | 46 | String fakeHealthCheckJson = null; 47 | try { 48 | fakeHealthCheckJson = Constants.getJsonMapper().writeValueAsString(fakeHealthCheck); 49 | } catch (JsonProcessingException e) { 50 | e.printStackTrace(); 51 | } 52 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksTopic(), fakeHealthCheckJson)); 53 | try { 54 | Thread.sleep(waitTimeBetweenIterationsMs); 55 | futureResult.get(); 56 | } catch (InterruptedException | ExecutionException e) { 57 | e.printStackTrace(); 58 | } 59 | } 60 | } 61 | 62 | public static void main(String[] args) { 63 | new PlainProducer("localhost:9092").produce(2); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/plain/PlainStreamsProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.plain; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | 6 | import java.io.IOException; 7 | import java.time.LocalDate; 8 | import java.time.Period; 9 | import java.time.ZoneId; 10 | import java.util.Properties; 11 | import org.apache.kafka.common.serialization.Serdes; 12 | import org.apache.kafka.streams.KafkaStreams; 13 | import org.apache.kafka.streams.KeyValue; 14 | import org.apache.kafka.streams.StreamsBuilder; 15 | import org.apache.kafka.streams.Topology; 16 | import org.apache.kafka.streams.kstream.Consumed; 17 | import org.apache.kafka.streams.kstream.KStream; 18 | import org.apache.kafka.streams.kstream.KeyValueMapper; 19 | import org.apache.kafka.streams.kstream.Produced; 20 | 21 | // $ kafka-topics --zookeeper localhost:2181 --create --topic uptimes --replication-factor 1 --partitions 4 22 | 23 | public final class PlainStreamsProcessor { 24 | 25 | private final String brokers; 26 | 27 | public PlainStreamsProcessor(String brokers) { 28 | super(); 29 | this.brokers = brokers; 30 | } 31 | 32 | public final void process() { 33 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 34 | 35 | KStream healthCheckJsonStream = streamsBuilder.stream( 36 | Constants.getHealthChecksTopic(), Consumed.with(Serdes.String(), Serdes.String())); 37 | 38 | KStream healthCheckStream = healthCheckJsonStream.mapValues((v -> { 39 | try { 40 | return Constants.getJsonMapper().readValue((String) v, HealthCheck.class); 41 | } catch (IOException e) { 42 | e.printStackTrace(); 43 | return null; 44 | } 45 | })); 46 | 47 | KStream uptimeStream = healthCheckStream.map(((KeyValueMapper) (k, v) -> { 48 | HealthCheck healthCheck = (HealthCheck) v; 49 | LocalDate startDateLocal = 50 | healthCheck.getLastStartedAt().toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); 51 | int uptime = Period.between(startDateLocal, 
LocalDate.now()).getDays(); 52 | return new KeyValue<>(healthCheck.getSerialNumber(), String.valueOf(uptime)); 53 | })); 54 | 55 | uptimeStream.to(Constants.getUptimesTopic(), Produced.with(Serdes.String(), Serdes.String())); 56 | Topology topology = streamsBuilder.build(); 57 | Properties props = new Properties(); 58 | props.put("bootstrap.servers", this.brokers); 59 | props.put("application.id", "kioto"); 60 | KafkaStreams streams = new KafkaStreams(topology, props); 61 | streams.start(); 62 | } 63 | 64 | public static void main(String[] args) { 65 | (new PlainStreamsProcessor("localhost:9092")).process(); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/serde/HealthCheckDeserializer.java: -------------------------------------------------------------------------------- 1 | package kioto.serde; 2 | 3 | import kioto.Constants; 4 | import kioto.HealthCheck; 5 | import java.io.IOException; 6 | import java.util.Map; 7 | import org.apache.kafka.common.serialization.Deserializer; 8 | 9 | public final class HealthCheckDeserializer implements Deserializer { 10 | 11 | @Override 12 | public HealthCheck deserialize(String topic, byte[] data) { 13 | if (data == null) { 14 | return null; 15 | } 16 | try { 17 | return Constants.getJsonMapper().readValue(data, HealthCheck.class); 18 | } catch (IOException e) { 19 | return null; 20 | } 21 | } 22 | 23 | @Override 24 | public void close() {} 25 | 26 | @Override 27 | public void configure(Map configs, boolean isKey) {} 28 | } 29 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/java/kioto/serde/HealthCheckSerializer.java: -------------------------------------------------------------------------------- 1 | package kioto.serde; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import kioto.Constants; 5 | import java.util.Map; 6 | import org.apache.kafka.common.serialization.Serializer; 7 | 8 | public final class HealthCheckSerializer implements Serializer { 9 | 10 | @Override 11 | public byte[] serialize(String topic, Object data) { 12 | if (data == null) { 13 | return null; 14 | } 15 | try { 16 | return Constants.getJsonMapper().writeValueAsBytes(data); 17 | } catch (JsonProcessingException e) { 18 | return null; 19 | } 20 | } 21 | 22 | @Override 23 | public void close() {} 24 | 25 | @Override 26 | public void configure(Map configs, boolean isKey) {} 27 | } 28 | -------------------------------------------------------------------------------- /Chapter06/kioto/src/main/resources/healthcheck.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "name": "HealthCheck", 3 | "namespace": "kioto.avro", 4 | "type": "record", 5 | "fields": [ 6 | { "name": "event", "type": "string" }, 7 | { "name": "factory", "type": "string" }, 8 | { "name": "serialNumber", "type": "string" }, 9 | { "name": "type", "type": "string" }, 10 | { "name": "status", "type": "string"}, 11 | { "name": "lastStartedAt", "type": "long", "logicalType": "timestamp-millis"}, 12 | { "name": "temperature", "type": "float" }, 13 | { "name": "ipAddress", "type": "string" } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /Chapter08/kioto/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | apply plugin: 'java' 3 | apply plugin: 'application' 4 | 5 | sourceCompatibility = '1.8' 6 | 7 | 
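// note: mainClassName below is what 'gradle run' launches and what the jar block at the end of this script stamps into the fat jar's Main-Class manifest entry (with all compile dependencies bundled via zipTree)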
mainClassName = 'kioto.spark.SparkProcessor' 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | version = '0.1.0' 14 | 15 | dependencies { 16 | compile group: 'com.github.javafaker', name: 'javafaker', version: '0.15' 17 | compile group: 'org.apache.kafka', name: 'kafka_2.12', version: '2.0.0' 18 | compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.9.7' 19 | 20 | compile group: 'org.apache.spark', name: 'spark-sql_2.11', version: '2.2.2' 21 | compile group: 'org.apache.spark', name: 'spark-sql-kafka-0-10_2.11', version: '2.2.2' 22 | } 23 | 24 | jar { 25 | manifest { 26 | attributes 'Main-Class': mainClassName 27 | } from { 28 | configurations.compile.collect { 29 | it.isDirectory() ? it : zipTree(it) 30 | } 31 | } 32 | exclude "META-INF/*.SF" 33 | exclude "META-INF/*.DSA" 34 | exclude "META-INF/*.RSA" 35 | } -------------------------------------------------------------------------------- /Chapter08/kioto/src/main/java/kioto/Constants.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import com.fasterxml.jackson.databind.SerializationFeature; 5 | import com.fasterxml.jackson.databind.util.StdDateFormat; 6 | 7 | public final class Constants { 8 | 9 | private static final ObjectMapper jsonMapper; 10 | 11 | static { 12 | ObjectMapper mapper = new ObjectMapper(); 13 | mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); 14 | mapper.setDateFormat(new StdDateFormat()); 15 | jsonMapper = mapper; 16 | } 17 | 18 | public static String getHealthChecksTopic() { 19 | return "healthchecks"; 20 | } 21 | 22 | public static String getHealthChecksAvroTopic() { 23 | return "healthchecks-avro"; 24 | } 25 | 26 | public static String getUptimesTopic() { 27 | return "uptimes"; 28 | } 29 | 30 | public enum machineType { GEOTHERMAL, HYDROELECTRIC, NUCLEAR, WIND, SOLAR } 31 | 32 | public enum machineStatus { STARTING, RUNNING, SHUTTING_DOWN, SHUT_DOWN } 33 | 34 | public static ObjectMapper getJsonMapper() { 35 | return jsonMapper; 36 | } 37 | } -------------------------------------------------------------------------------- /Chapter08/kioto/src/main/java/kioto/HealthCheck.java: -------------------------------------------------------------------------------- 1 | package kioto; 2 | 3 | import java.util.Date; 4 | 5 | public final class HealthCheck { 6 | private String event; 7 | private String factory; 8 | private String serialNumber; 9 | private String type; 10 | private String status; 11 | private Date lastStartedAt; 12 | private float temperature; 13 | private String ipAddress; 14 | 15 | public HealthCheck() { 16 | } 17 | 18 | public HealthCheck(String event, 19 | String factory, 20 | String serialNumber, 21 | String type, 22 | String status, 23 | Date lastStartedAt, 24 | float temperature, 25 | String ipAddress) { 26 | this.event = event; 27 | this.factory = factory; 28 | this.serialNumber = serialNumber; 29 | this.type = type; 30 | this.status = status; 31 | this.lastStartedAt = lastStartedAt; 32 | this.temperature = temperature; 33 | this.ipAddress = ipAddress; 34 | } 35 | 36 | public String getEvent() { 37 | return event; 38 | } 39 | 40 | public void setEvent(String event) { 41 | this.event = event; 42 | } 43 | 44 | public String getFactory() { 45 | return factory; 46 | } 47 | 48 | public void setFactory(String factory) { 49 | this.factory = factory; 50 | } 51 | 52 | public String getSerialNumber() { 53 | return serialNumber; 54 | } 55 | 56 | public
void setSerialNumber(String serialNumber) { 57 | this.serialNumber = serialNumber; 58 | } 59 | 60 | public String getType() { 61 | return type; 62 | } 63 | 64 | public void setType(String type) { 65 | this.type = type; 66 | } 67 | 68 | public String getStatus() { 69 | return status; 70 | } 71 | 72 | public void setStatus(String status) { 73 | this.status = status; 74 | } 75 | 76 | public Date getLastStartedAt() { 77 | return lastStartedAt; 78 | } 79 | 80 | public void setLastStartedAt(Date lastStartedAt) { 81 | this.lastStartedAt = lastStartedAt; 82 | } 83 | 84 | public float getTemperature() { 85 | return temperature; 86 | } 87 | 88 | public void setTemperature(float temperature) { 89 | this.temperature = temperature; 90 | } 91 | 92 | public String getIpAddress() { 93 | return ipAddress; 94 | } 95 | 96 | public void setIpAddress(String ipAddress) { 97 | this.ipAddress = ipAddress; 98 | } 99 | } -------------------------------------------------------------------------------- /Chapter08/kioto/src/main/java/kioto/plain/PlainProducer.java: -------------------------------------------------------------------------------- 1 | package kioto.plain; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.github.javafaker.Faker; 5 | import kioto.Constants; 6 | import kioto.HealthCheck; 7 | import java.util.Properties; 8 | import java.util.concurrent.ExecutionException; 9 | import java.util.concurrent.Future; 10 | import java.util.concurrent.TimeUnit; 11 | 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.Producer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | import org.apache.kafka.common.serialization.StringSerializer; 16 | 17 | // $ kafka-topics --zookeeper localhost:2181 --create --topic healthchecks --replication-factor 1 --partitions 4 18 | 19 | public final class PlainProducer { 20 | private final Producer producer; 21 | 22 | public PlainProducer(String brokers) { 23 | Properties props = new Properties(); 24 | props.put("bootstrap.servers", brokers); 25 | props.put("key.serializer", StringSerializer.class); 26 | props.put("value.serializer", StringSerializer.class); 27 | producer = new KafkaProducer<>(props); 28 | } 29 | 30 | public void produce(int ratePerSecond) { 31 | long waitTimeBetweenIterationsMs = 1000L / (long)ratePerSecond; 32 | Faker faker = new Faker(); 33 | 34 | while(true) { 35 | HealthCheck fakeHealthCheck = 36 | new HealthCheck( 37 | "HEALTH_CHECK", 38 | faker.address().city(), 39 | faker.bothify("??##-??##", true), 40 | Constants.machineType.values()[faker.number().numberBetween(0,4)].toString(), 41 | Constants.machineStatus.values()[faker.number().numberBetween(0,3)].toString(), 42 | faker.date().past(100, TimeUnit.DAYS), 43 | faker.number().numberBetween(100L, 0L), 44 | faker.internet().ipV4Address()); 45 | 46 | String fakeHealthCheckJson = null; 47 | try { 48 | fakeHealthCheckJson = Constants.getJsonMapper().writeValueAsString(fakeHealthCheck); 49 | } catch (JsonProcessingException e) { 50 | e.printStackTrace(); 51 | } 52 | Future futureResult = producer.send(new ProducerRecord<>(Constants.getHealthChecksTopic(), fakeHealthCheckJson)); 53 | try { 54 | Thread.sleep(waitTimeBetweenIterationsMs); 55 | futureResult.get(); 56 | } catch (InterruptedException | ExecutionException e) { 57 | e.printStackTrace(); 58 | } 59 | } 60 | } 61 | 62 | public static void main(String[] args) { 63 | new PlainProducer("localhost:9092").produce(2); 64 | } 65 | } 66 | 
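The SparkProcessor that follows derives an `uptime` column (days since `lastStartedAt`) for each health check. The same derivation can also be wired as a registered Spark SQL UDF built around the class's `uptimeFunc` helper instead of the built-in `datediff`. A minimal sketch, assuming the Spark 2.2 Java API pulled in by this module's build.gradle; the registered name `uptimeFunc` is illustrative, and `spark`/`healthCheckDs` refer to the variables in the code below:

```java
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.Period;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;

// inside process(), once healthCheckDs (serialNumber, lastStartedAt) is built:
// register a UDF mapping a start timestamp to "days since start"
spark.udf().register("uptimeFunc",
    (UDF1<Timestamp, Integer>) date ->
        Period.between(date.toLocalDateTime().toLocalDate(), LocalDate.now()).getDays(),
    DataTypes.IntegerType);

// apply it to produce the uptime column consumed by resDf further down
Dataset<Row> processedDs = healthCheckDs
    .withColumn("uptime", functions.callUDF("uptimeFunc", new Column("lastStartedAt")));
```

Either wiring yields the same values: `datediff` stays within built-in Catalyst functions, while the UDF keeps the day-counting logic in plain Java if it ever needs to grow beyond a simple date difference.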
-------------------------------------------------------------------------------- /Chapter08/kioto/src/main/java/kioto/spark/SparkProcessor.java: -------------------------------------------------------------------------------- 1 | package kioto.spark; 2 | 3 | import kioto.Constants; 4 | import org.apache.spark.sql.*; 5 | import org.apache.spark.sql.streaming.StreamingQuery; 6 | import org.apache.spark.sql.streaming.StreamingQueryException; 7 | import org.apache.spark.sql.types.DataTypes; 8 | import org.apache.spark.sql.types.StructType; 9 | 10 | import java.sql.Timestamp; 11 | import java.time.LocalDate; 12 | import java.time.Period; 13 | 14 | public class SparkProcessor { 15 | 16 | private String brokers; 17 | 18 | public SparkProcessor(String brokers) { 19 | this.brokers = brokers; 20 | } 21 | 22 | public final void process() { 23 | 24 | SparkSession spark = SparkSession.builder() 25 | .appName("kioto") 26 | .master("local[*]") 27 | .getOrCreate(); 28 | 29 | Dataset inputDataset = spark 30 | .readStream() 31 | .format("kafka") 32 | .option("kafka.bootstrap.servers", brokers) 33 | .option("subscribe", Constants.getHealthChecksTopic()) 34 | .load(); 35 | 36 | Dataset healthCheckJsonDf = inputDataset.selectExpr("CAST(value AS STRING)"); 37 | 38 | StructType struct = new StructType() 39 | .add("event", DataTypes.StringType) 40 | .add("factory", DataTypes.StringType) 41 | .add("serialNumber", DataTypes.StringType) 42 | .add("type", DataTypes.StringType) 43 | .add("status", DataTypes.StringType) 44 | .add("lastStartedAt", DataTypes.StringType) 45 | .add("temperature", DataTypes.FloatType) 46 | .add("ipAddress", DataTypes.StringType); 47 | 48 | Dataset healthCheckNestedDs = healthCheckJsonDf.select( 49 | functions.from_json(new Column( "value" ), struct).as("healthCheck")); 50 | 51 | Dataset healthCheckFlattenedDs = 52 | healthCheckNestedDs.selectExpr("healthCheck.serialNumber", "healthCheck.lastStartedAt"); 53 | 54 | Dataset healthCheckDs = healthCheckFlattenedDs 55 | .withColumn("lastStartedAt", 56 | functions.to_timestamp(new Column ("lastStartedAt"), "yyyy-MM-dd'T'HH:mm:ss.SSSZ")); 57 | 58 | // derive the uptime in days from lastStartedAt (the per-row equivalent is uptimeFunc below) 59 | Dataset processedDs = healthCheckDs 60 | .withColumn("uptime", functions.datediff(functions.current_date(), new Column("lastStartedAt"))); 61 | 62 | Dataset resDf = processedDs.select( 63 | functions.concat(new Column("serialNumber")).as("key"), 64 | processedDs.col("uptime").cast(DataTypes.StringType).as("value")); 65 | 66 | //StreamingQuery consoleOutput = 67 | processedDs.writeStream() 68 | .outputMode("append") 69 | .format("console") 70 | .start(); 71 | 72 | //StreamingQuery kafkaOutput = 73 | resDf.writeStream() 74 | .format("kafka") 75 | .option("kafka.bootstrap.servers", brokers) 76 | .option("topic", "uptimes").option("checkpointLocation", "/tmp/kioto-uptimes-ck") // the Kafka sink requires a checkpoint location; this path is illustrative 77 | .start(); 78 | 79 | try { 80 | spark.streams().awaitAnyTermination(); 81 | } catch (StreamingQueryException e) { 82 | e.printStackTrace(); 83 | } 84 | } 85 | 86 | private final int uptimeFunc(Timestamp date) { 87 | LocalDate localDate = date.toLocalDateTime().toLocalDate(); 88 | return Period.between(localDate, LocalDate.now()).getDays(); 89 | } 90 | 91 | public static void main(String[] args) { 92 | (new SparkProcessor("localhost:9092")).process(); 93 | 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Packt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated
documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # Apache Kafka Quick Start Guide 5 | 6 | 7 | 8 | This is the code repository for [Apache Kafka Quick Start Guide](https://www.packtpub.com/big-data-and-business-intelligence/apache-kafka-quick-start-guide?utm_source=github&utm_medium=repository&utm_campaign=9781788997829), published by Packt. 9 | 10 | **Leverage Apache Kafka 2.0 to simplify real-time data processing for distributed applications** 11 | 12 | ## What is this book about? 13 | Apache Kafka is a great open source platform for handling your real-time data pipeline to ensure high-speed filtering and pattern matching on the fly. In this book, you will learn how to use Apache Kafka for efficient processing of distributed applications and will get familiar with solving everyday problems in fast data and processing pipelines. 14 | 15 | This book covers the following exciting features: 16 | * How to validate data with Kafka 17 | * Add information to existing data flows 18 | * Generate new information through message composition 19 | * Perform data validation and versioning with the Schema Registry 20 | * How to perform message Serialization and Deserialization 21 | * Process data streams with Kafka Streams 22 | * Understand the duality between tables and streams with KSQL 23 | 24 | 25 | If you feel this book is for you, get your [copy](https://www.amazon.com/dp/1788997824) today! 26 | 27 | 29 | 30 | ## Instructions and Navigations 31 | All of the code is organized into folders. For example, Chapter02. 32 | 33 | The code will look like the following: 34 | ``` 35 | { 36 | "event": "CUSTOMER_CONSULTS_ETHPRICE", 37 | "customer": { 38 | "id": "14862768", 39 | "name": "Snowden, Edward", 40 | "ipAddress": "95.31.18.111" 41 | }, 42 | "currency": { 43 | "name": "ethereum", 44 | "price": "RUB" 45 | }, 46 | "timestamp": "2018-09-28T09:09:09Z" 47 | } 48 | ``` 49 | 50 | **Following is what you need for this book:** 51 | This book is for developers who want to quickly master the practical concepts behind Apache Kafka. The audience need not have come across Apache Kafka previously; however, familiarity with Java or any JVM language will be helpful in understanding the code in this book.
52 | 53 | With the following software and hardware list you can run all code files present in the book (Chapters 1-8). 54 | ### Software and Hardware List 55 | | Chapter | Software required | OS required | 56 | | -------- | ------------------------------------ | ----------------------------------- | 57 | | 3, 4, 5, 6, 7, 8 | Apache Kafka 2.0.0, Gradle 4.0 or above, Confluent Platform 5.0.0 | macOS or Linux distribution released after 2014 (Ubuntu/Redhat/Debian) | 58 | 59 | ### Related products 60 | * Building Data Streaming Applications with Apache Kafka [[Packt]](https://www.packtpub.com/big-data-and-business-intelligence/building-data-streaming-applications-apache-kafka?utm_source=github&utm_medium=repository&utm_campaign=9781787283985 ) [[Amazon]](https://www.amazon.com/dp/1787283984) 61 | 62 | * Apache Kafka 1.0 Cookbook [[Packt]](https://www.packtpub.com/big-data-and-business-intelligence/apache-kafka-10-cookbook?utm_source=github&utm_medium=repository&utm_campaign=9781787286849 ) [[Amazon]](https://www.amazon.com/dp/1787286843) 63 | 64 | ## Get to Know the Author 65 | **Raúl Estrada** 66 | has been a programmer since 1996 and a Java developer since 2001. He loves all topics related to computer science. With more than 15 years of experience in high-availability and enterprise software, he has been designing and implementing architectures since 2003. His specialization is in systems integration, and he mainly participates in projects related to the financial sector. He has been an enterprise architect for BEA Systems and Oracle Inc., but he also enjoys web, mobile, and game programming. Raúl is a supporter of free software and enjoys experimenting with new technologies, frameworks, languages, and methods. 67 | 68 | Raúl is the author of other Packt Publishing titles, such as Fast Data Processing Systems with SMACK and Apache Kafka Cookbook. 69 | 70 | 71 | ## Other books by the author 72 | [Fast Data Processing Systems with SMACK Stack](https://www.packtpub.com/big-data-and-business-intelligence/fast-data-processing-systems-smack-stack?utm_source=github&utm_medium=repository&utm_campaign=9781786467201 ) 73 | 74 | [Fast Data Processing Systems with SMACK stack [Video]](https://www.packtpub.com/big-data-and-business-intelligence/fast-data-processing-systems-smack-stack-video?utm_source=github&utm_medium=repository&utm_campaign=9781788298452 ) 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | ### Suggestions and Feedback 83 | [Click here](https://docs.google.com/forms/d/e/1FAIpQLSdy7dATC6QmEL81FIUuymZ0Wy9vH1jHkvpY57OiMeKGqib_Ow/viewform) if you have any feedback or suggestions. 84 | 85 | 86 | ### Download a free PDF 87 | 88 | If you have already purchased a print or Kindle version of this book, you can get a DRM-free PDF version at no cost.
Simply click on the link to claim your free PDF.
89 |

https://packt.link/free-ebook/9781788997829

--------------------------------------------------------------------------------