├── .gitignore
├── .travis.yml
├── README.md
├── pom.xml
└── src
└── main
├── avro
└── life
│ └── jugnu
│ └── learnkafka
│ └── ch03
│ └── User.avsc
├── java
└── life
│ └── jugnu
│ └── learnkafka
│ ├── ch03
│ ├── AvroProducer.java
│ ├── FirstProducer.java
│ ├── MyCallback.java
│ ├── MyPartitioner.java
│ ├── User.java
│ └── UserGenerator.java
│ └── ch04
│ ├── AvroConsumer.java
│ └── FirstConsumer.java
└── scala
└── life
└── jugnu
└── learnkafka
└── ch04
└── MyFirstScalaConsumer.scala
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Linux template
3 | *~
4 |
5 | # KDE directory preferences
6 | .directory
7 |
8 | # Linux trash folder which might appear on any partition or disk
9 | .Trash-*
10 | ### JetBrains template
11 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
12 |
13 | *.iml
14 |
15 | ## Directory-based project format:
16 | .idea/
17 | # if you remove the above rule, at least ignore the following:
18 |
19 | # User-specific stuff:
20 | # .idea/workspace.xml
21 | # .idea/tasks.xml
22 | # .idea/dictionaries
23 |
24 | # Sensitive or high-churn files:
25 | # .idea/dataSources.ids
26 | # .idea/dataSources.xml
27 | # .idea/sqlDataSources.xml
28 | # .idea/dynamic.xml
29 | # .idea/uiDesigner.xml
30 |
31 | # Gradle:
32 | # .idea/gradle.xml
33 | # .idea/libraries
34 |
35 | # Mongo Explorer plugin:
36 | # .idea/mongoSettings.xml
37 |
38 | ## File-based project format:
39 | *.ipr
40 | *.iws
41 |
42 | ## Plugin-specific files:
43 |
44 | # IntelliJ
45 | /out/
46 |
47 | # mpeltonen/sbt-idea plugin
48 | .idea_modules/
49 |
50 | # JIRA plugin
51 | atlassian-ide-plugin.xml
52 |
53 | # Crashlytics plugin (for Android Studio and IntelliJ)
54 | com_crashlytics_export_strings.xml
55 | crashlytics.properties
56 | crashlytics-build.properties
57 | ### Java template
58 | *.class
59 |
60 | # Mobile Tools for Java (J2ME)
61 | .mtj.tmp/
62 |
63 | # Package Files #
64 | *.jar
65 | *.war
66 | *.ear
67 |
68 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
69 | hs_err_pid*
70 | ### MicrosoftOffice template
71 | *.tmp
72 |
73 | # Word temporary
74 | ~$*.doc*
75 |
76 | # Excel temporary
77 | ~$*.xls*
78 |
79 | # Excel Backup File
80 | *.xlk
81 | ### Vagrant template
82 | .vagrant/
83 | ### Vim template
84 | [._]*.s[a-w][a-z]
85 | [._]s[a-w][a-z]
86 | *.un~
87 | Session.vim
88 | .netrwhist
89 | *~
90 | ### Scala template
91 | *.class
92 | *.log
93 |
94 | # sbt specific
95 | .cache
96 | .history
97 | .lib/
98 | dist/*
99 | target/
100 | lib_managed/
101 | src_managed/
102 | project/boot/
103 | project/plugins/project/
104 |
105 | # Scala-IDE specific
106 | .scala_dependencies
107 | .worksheet
108 | ### SBT template
109 | # Simple Build Tool
110 | # http://www.scala-sbt.org/release/docs/Getting-Started/Directories.html#configuring-version-control
111 |
112 | target/
113 | lib_managed/
114 | src_managed/
115 | project/boot/
116 | .history
117 | .cache
118 | ### OSX template
119 | .DS_Store
120 | .AppleDouble
121 | .LSOverride
122 |
123 | # Icon must end with two \r
124 | Icon
125 |
126 | # Thumbnails
127 | ._*
128 |
129 | # Files that might appear in the root of a volume
130 | .DocumentRevisions-V100
131 | .fseventsd
132 | .Spotlight-V100
133 | .TemporaryItems
134 | .Trashes
135 | .VolumeIcon.icns
136 |
137 | # Directories potentially created on remote AFP share
138 | .AppleDB
139 | .AppleDesktop
140 | Network Trash Folder
141 | Temporary Items
142 | .apdisk
143 | ### Eclipse template
144 | *.pydevproject
145 | .metadata
146 | .gradle
147 | bin/
148 | tmp/
149 | *.tmp
150 | *.bak
151 | *.swp
152 | *~.nib
153 | local.properties
154 | .settings/
155 | .loadpath
156 |
157 | # Eclipse Core
158 | .project
159 |
160 | # External tool builders
161 | .externalToolBuilders/
162 |
163 | # Locally stored "Eclipse launch configurations"
164 | *.launch
165 |
166 | # CDT-specific
167 | .cproject
168 |
169 | # JDT-specific (Eclipse Java Development Tools)
170 | .classpath
171 |
172 | # Java annotation processor (APT)
173 | .factorypath
174 |
175 | # PDT-specific
176 | .buildpath
177 |
178 | # sbteclipse plugin
179 | .target
180 |
181 | # TeXlipse plugin
182 | .texlipse
183 | ### Maven template
184 | target/
185 | pom.xml.tag
186 | pom.xml.releaseBackup
187 | pom.xml.versionsBackup
188 | pom.xml.next
189 | release.properties
190 | dependency-reduced-pom.xml
191 | buildNumber.properties
192 | .mvn/timing.properties
193 | ### PlayFramework template
194 | # Ignore Play! working directory #
195 | bin/
196 | /db
197 | .eclipse
198 | /lib/
199 | /logs/
200 | /modules
201 | /project/project
202 | /project/target
203 | /target
204 | tmp/
205 | test-result
206 | server.pid
207 | *.iml
208 | *.eml
209 | /dist/
210 | .cache
211 | ### Redis template
212 | # Ignore redis binary dump (dump.rdb) files
213 |
214 | *.rdb
215 |
216 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: java
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Learning Kafka notes
2 |
 3 | [![Build Status](https://travis-ci.org/jagatsingh/learnkafka.svg?branch=master)](https://travis-ci.org/jagatsingh/learnkafka)
4 |
 5 | This project contains learning notes related to the book Kafka: The Definitive Guide.
6 |
7 | ## Pull requests
8 |
 9 | If you want to add something, please raise a pull request.
10 |
11 | ## Preparation bits
12 |
13 | * Kafka and Confluent
14 |
15 | Download Confluent and extract it to some place.
16 | Call it CONFLUENT_HOME
17 |
18 | `cd $CONFLUENT_HOME`
19 |
20 | Start confluent in separate terminals and leave them running
21 |
22 | `./bin/zookeeper-server-start ./etc/kafka/zookeeper.properties`
23 | `./bin/kafka-server-start ./etc/kafka/server.properties`
24 | `./bin/schema-registry-start ./etc/schema-registry/schema-registry.properties`
25 |
26 | * Source code for this repo
27 |
28 | In another terminal, clone the code; we will call the cloned `learnkafka` directory LEARNKAFKA_HOME
29 |
30 | Compile and make fat jar
31 |
32 | `cd $LEARNKAFKA_HOME`
33 | `git clone https://github.com/jagatsingh/learnkafka.git`
34 | `cd learnkafka`
35 | `mvn clean compile assembly:single`
36 |
37 |
38 | # Chapter 3
39 |
40 | ## Exercise : Create first topic
41 |
42 | Create first topic with single partition and replication
43 |
44 | `cd $CONFLUENT_HOME`
45 | `bin/kafka-topics --zookeeper localhost:2181 --create --topic firsttopic --partitions 1 --replication-factor 1`
46 |
47 | ## Exercise : Send message to topic
48 |
49 | Send messages to our first topic
50 |
51 | `cd $LEARNKAFKA_HOME`
52 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch03.FirstProducer`
53 |
54 | Run it multiple times
55 |
56 | ## Exercise : See messages in topic
57 |
58 | See the messages from command line
59 |
60 | `bin/kafka-console-consumer --zookeeper localhost:2181 --topic firsttopic --from-beginning`
61 |
62 |
63 | ## Exercise : Send Avro messages to topic
64 |
65 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch03.AvroProducer`
66 |
67 |
68 | # Chapter 4
69 |
70 | ## Exercise : Consuming String messages from topic
71 |
72 | In this exercise we will consume messages from the topic named firsttopic, where we had (String, String) as (key, value)
73 |
74 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch04.FirstConsumer`
75 |
76 | In other console start sending messages to Kafka topic using earlier command
77 |
78 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch03.FirstProducer`
79 |
80 | Watch the output of consumer
81 |
82 | ## Exercise : Consume Avro messages from topic
83 |
84 | We will use the built-in console consumer first
85 |
86 | Produce some records using
87 |
88 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch03.AvroProducer`
89 |
90 | Consume them using built in console consumer
91 |
92 | `bin/kafka-console-consumer --zookeeper localhost:2181 --topic avrotopic --from-beginning \
93 | --value-deserializer io.confluent.kafka.serializers.KafkaAvroDeserializer \
94 | --key-deserializer org.apache.kafka.common.serialization.StringDeserializer`
95 |
96 |
97 | ## Exercise : Consume Avro messages from topic via Java
98 |
99 | Produce the messages , run it multiple times
100 |
101 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch03.AvroProducer`
102 |
103 | Consume the messages using custom Java code
104 |
105 | `java -cp ./target/learnkafka-1.0-SNAPSHOT-jar-with-dependencies.jar life.jugnu.learnkafka.ch04.AvroConsumer`
106 |
107 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 4.0.0
6 |
7 | life.jugnu
8 | learnkafka
9 | 1.0-SNAPSHOT
10 | jar
11 |
12 |
13 |
14 | confluent
15 | http://packages.confluent.io/maven/
16 |
17 |
18 |
19 |
20 | 1.7
21 | UTF-8
22 |
23 | 0.9.0.1-cp1
24 | 2.11
25 | 2.0.1
26 | 1.8.0
27 |
28 |
29 | 2.6
30 | 3.3
31 | 3.2.1
32 |
33 |
34 |
35 |
36 | io.confluent
37 | kafka-avro-serializer
38 | ${confluent.version}
39 |
40 |
41 | org.apache.kafka
42 | kafka_${kafka.scala.version}
43 | ${kafka.version}
44 |
45 |
46 | org.apache.avro
47 | avro
48 | ${avro.version}
49 |
50 |
51 |
52 |
53 | src/main/java
54 | src/test/java
55 |
56 |
57 | org.apache.maven.plugins
58 | maven-compiler-plugin
59 | ${maven.compiler.plugin.version}
60 | true
61 |
62 | ${java.version}
63 | ${java.version}
64 |
65 |
66 |
67 | org.apache.avro
68 | avro-maven-plugin
69 | ${avro.version}
70 |
71 |
72 | generate-sources
73 |
74 | schema
75 |
76 |
77 | ${project.basedir}/src/main/avro/
78 | ${project.basedir}/src/main/java/
79 |
80 |
81 |
82 |
83 |
84 | org.apache.maven.plugins
85 | maven-compiler-plugin
86 |
87 | ${java.version}
88 | ${java.version}
89 |
90 |
91 |
92 | maven-assembly-plugin
93 | ${maven.assembly.plugin.version}
94 |
95 |
96 | jar-with-dependencies
97 |
98 |
99 |
100 |
101 | net.alchim31.maven
102 | scala-maven-plugin
103 | ${scala.maven.plugin.version}
104 |
105 |
106 | -unchecked
107 | -deprecation
108 | -explaintypes
109 |
110 |
111 |
112 |
113 |
114 |
--------------------------------------------------------------------------------
/src/main/avro/life/jugnu/learnkafka/ch03/User.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "namespace": "life.jugnu.learnkafka.ch03",
3 | "type": "record",
4 | "name": "User",
5 | "fields": [
6 | {"name": "name", "type": "string"},
7 | {"name": "favorite_number", "type": ["int", "null"]},
8 | {"name": "favorite_color", "type": ["string", "null"]}
9 | ]
10 | }
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/AvroProducer.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch03;
2 |
3 | import java.util.Properties;
4 | import java.util.concurrent.Future;
5 |
6 | import org.apache.kafka.clients.producer.KafkaProducer;
7 | import org.apache.kafka.clients.producer.Producer;
8 | import org.apache.kafka.clients.producer.RecordMetadata;
9 | import org.apache.kafka.clients.producer.ProducerRecord;
10 |
11 | public class AvroProducer {
12 | public static void main(String[] args) {
13 | Properties p = new Properties();
14 |
15 | // Properties are created similarly , note the KafkaAvroSerializer used here instead of StringSerializer
16 | p.put("bootstrap.servers", "localhost:9092");
17 | //p.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
18 | p.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
19 | p.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
20 | p.put("schema.registry.url", "http://localhost:8081");
21 |
22 | Producer pd = new KafkaProducer<>(p);
23 |
24 | User u = UserGenerator.getNext();
25 | ProducerRecord rec = new ProducerRecord("avrotopic", u.getName().toString(), u);
26 | try {
27 | pd.send(rec);
28 | // Capture the Future information and see which all things are reorted by Kafka
29 | Future resultFuture = pd.send(rec);
30 | System.out.println("Avro Message sent to partition " + resultFuture.get().partition());
31 | System.out.println("Offset of message is " + resultFuture.get().offset());
32 | System.out.println("Topic of the message is " + resultFuture.get().topic());
33 | } catch (Exception e) {
34 | System.out.println("Failed to send Avro message");
35 | e.printStackTrace();
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/FirstProducer.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch03;
2 |
3 | import org.apache.kafka.clients.producer.KafkaProducer;
4 | import org.apache.kafka.clients.producer.Producer;
5 | import org.apache.kafka.clients.producer.ProducerRecord;
6 |
7 | import java.util.Properties;
8 | import java.util.concurrent.ExecutionException;
9 |
10 | class FirstProducer {
11 | public static void main(String[] args) {
12 |
13 | Properties p = new Properties();
14 |
15 | // Declare the propeties of cluster and informationa about data key and value
16 | p.put("bootstrap.servers","localhost:9092");
17 | p.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
18 | p.put("value.serializer","org.apache.kafka.common.serialization.StringSerializer");
19 |
20 | // Create producer and send data in format : (topic name , key , value)
21 | Producer pd = new KafkaProducer<>(p);
22 | ProducerRecord rec = new ProducerRecord<>("firsttopic" ,"key","value");
23 |
24 | // Kafka has 3 methods of sending
25 | // 1) Fire and forget
26 | pd.send(rec);
27 |
28 | // 2) Syncronous send , wait for response object
29 | try {
30 | pd.send(rec).get();
31 | } catch (InterruptedException e) {
32 | e.printStackTrace();
33 | } catch (ExecutionException e) {
34 | e.printStackTrace();
35 | }
36 |
37 | // 3 Asynchronous send , give a call back function and track success using call back
38 | pd.send(rec,new MyCallback());
39 | pd.close();
40 | }
41 | }
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/MyCallback.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch03;
2 |
3 | import org.apache.kafka.clients.producer.Callback;
4 | import org.apache.kafka.clients.producer.RecordMetadata;
5 |
6 | class MyCallback implements Callback {
7 |
8 | public void onCompletion(RecordMetadata metadata, Exception exception) {
9 | if ( exception != null)
10 | exception.printStackTrace();
11 | else
12 | System.out.println("Message posted call back success");
13 | }
14 | }
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/MyPartitioner.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch03;
2 |
3 | import org.apache.kafka.clients.producer.Partitioner;
4 | import org.apache.kafka.common.Cluster;
5 |
6 | import java.util.Map;
7 |
8 | class MyPartitioner implements Partitioner {
9 |
10 | @Override
11 | public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
12 | return 0;
13 | }
14 |
15 | @Override
16 | public void close() {
17 |
18 | }
19 |
20 | @Override
21 | public void configure(Map configs) {
22 |
23 | }
24 | }
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/User.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Autogenerated by Avro
3 | *
4 | * DO NOT EDIT DIRECTLY
5 | */
6 | package life.jugnu.learnkafka.ch03;
7 | @SuppressWarnings("all")
8 | @org.apache.avro.specific.AvroGenerated
9 | public class User extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
10 | private static final long serialVersionUID = 3756949726399827899L;
11 | public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"life.jugnu.learnkafka.ch03\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]},{\"name\":\"favorite_color\",\"type\":[\"string\",\"null\"]}]}");
12 | public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
13 | @Deprecated public java.lang.CharSequence name;
14 | @Deprecated public java.lang.Integer favorite_number;
15 | @Deprecated public java.lang.CharSequence favorite_color;
16 |
17 | /**
18 | * Default constructor. Note that this does not initialize fields
19 | * to their default values from the schema. If that is desired then
20 | * one should use newBuilder()
.
21 | */
22 | public User() {}
23 |
24 | /**
25 | * All-args constructor.
26 | */
27 | public User(java.lang.CharSequence name, java.lang.Integer favorite_number, java.lang.CharSequence favorite_color) {
28 | this.name = name;
29 | this.favorite_number = favorite_number;
30 | this.favorite_color = favorite_color;
31 | }
32 |
33 | public org.apache.avro.Schema getSchema() { return SCHEMA$; }
34 | // Used by DatumWriter. Applications should not call.
35 | public java.lang.Object get(int field$) {
36 | switch (field$) {
37 | case 0: return name;
38 | case 1: return favorite_number;
39 | case 2: return favorite_color;
40 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
41 | }
42 | }
43 | // Used by DatumReader. Applications should not call.
44 | @SuppressWarnings(value="unchecked")
45 | public void put(int field$, java.lang.Object value$) {
46 | switch (field$) {
47 | case 0: name = (java.lang.CharSequence)value$; break;
48 | case 1: favorite_number = (java.lang.Integer)value$; break;
49 | case 2: favorite_color = (java.lang.CharSequence)value$; break;
50 | default: throw new org.apache.avro.AvroRuntimeException("Bad index");
51 | }
52 | }
53 |
54 | /**
55 | * Gets the value of the 'name' field.
56 | */
57 | public java.lang.CharSequence getName() {
58 | return name;
59 | }
60 |
61 | /**
62 | * Sets the value of the 'name' field.
63 | * @param value the value to set.
64 | */
65 | public void setName(java.lang.CharSequence value) {
66 | this.name = value;
67 | }
68 |
69 | /**
70 | * Gets the value of the 'favorite_number' field.
71 | */
72 | public java.lang.Integer getFavoriteNumber() {
73 | return favorite_number;
74 | }
75 |
76 | /**
77 | * Sets the value of the 'favorite_number' field.
78 | * @param value the value to set.
79 | */
80 | public void setFavoriteNumber(java.lang.Integer value) {
81 | this.favorite_number = value;
82 | }
83 |
84 | /**
85 | * Gets the value of the 'favorite_color' field.
86 | */
87 | public java.lang.CharSequence getFavoriteColor() {
88 | return favorite_color;
89 | }
90 |
91 | /**
92 | * Sets the value of the 'favorite_color' field.
93 | * @param value the value to set.
94 | */
95 | public void setFavoriteColor(java.lang.CharSequence value) {
96 | this.favorite_color = value;
97 | }
98 |
99 | /**
100 | * Creates a new User RecordBuilder.
101 | * @return A new User RecordBuilder
102 | */
103 | public static life.jugnu.learnkafka.ch03.User.Builder newBuilder() {
104 | return new life.jugnu.learnkafka.ch03.User.Builder();
105 | }
106 |
107 | /**
108 | * Creates a new User RecordBuilder by copying an existing Builder.
109 | * @param other The existing builder to copy.
110 | * @return A new User RecordBuilder
111 | */
112 | public static life.jugnu.learnkafka.ch03.User.Builder newBuilder(life.jugnu.learnkafka.ch03.User.Builder other) {
113 | return new life.jugnu.learnkafka.ch03.User.Builder(other);
114 | }
115 |
116 | /**
117 | * Creates a new User RecordBuilder by copying an existing User instance.
118 | * @param other The existing instance to copy.
119 | * @return A new User RecordBuilder
120 | */
121 | public static life.jugnu.learnkafka.ch03.User.Builder newBuilder(life.jugnu.learnkafka.ch03.User other) {
122 | return new life.jugnu.learnkafka.ch03.User.Builder(other);
123 | }
124 |
125 | /**
126 | * RecordBuilder for User instances.
127 | */
128 | public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase
129 | implements org.apache.avro.data.RecordBuilder {
130 |
131 | private java.lang.CharSequence name;
132 | private java.lang.Integer favorite_number;
133 | private java.lang.CharSequence favorite_color;
134 |
135 | /** Creates a new Builder */
136 | private Builder() {
137 | super(life.jugnu.learnkafka.ch03.User.SCHEMA$);
138 | }
139 |
140 | /**
141 | * Creates a Builder by copying an existing Builder.
142 | * @param other The existing Builder to copy.
143 | */
144 | private Builder(life.jugnu.learnkafka.ch03.User.Builder other) {
145 | super(other);
146 | if (isValidValue(fields()[0], other.name)) {
147 | this.name = data().deepCopy(fields()[0].schema(), other.name);
148 | fieldSetFlags()[0] = true;
149 | }
150 | if (isValidValue(fields()[1], other.favorite_number)) {
151 | this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
152 | fieldSetFlags()[1] = true;
153 | }
154 | if (isValidValue(fields()[2], other.favorite_color)) {
155 | this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color);
156 | fieldSetFlags()[2] = true;
157 | }
158 | }
159 |
160 | /**
161 | * Creates a Builder by copying an existing User instance
162 | * @param other The existing instance to copy.
163 | */
164 | private Builder(life.jugnu.learnkafka.ch03.User other) {
165 | super(life.jugnu.learnkafka.ch03.User.SCHEMA$);
166 | if (isValidValue(fields()[0], other.name)) {
167 | this.name = data().deepCopy(fields()[0].schema(), other.name);
168 | fieldSetFlags()[0] = true;
169 | }
170 | if (isValidValue(fields()[1], other.favorite_number)) {
171 | this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
172 | fieldSetFlags()[1] = true;
173 | }
174 | if (isValidValue(fields()[2], other.favorite_color)) {
175 | this.favorite_color = data().deepCopy(fields()[2].schema(), other.favorite_color);
176 | fieldSetFlags()[2] = true;
177 | }
178 | }
179 |
180 | /**
181 | * Gets the value of the 'name' field.
182 | * @return The value.
183 | */
184 | public java.lang.CharSequence getName() {
185 | return name;
186 | }
187 |
188 | /**
189 | * Sets the value of the 'name' field.
190 | * @param value The value of 'name'.
191 | * @return This builder.
192 | */
193 | public life.jugnu.learnkafka.ch03.User.Builder setName(java.lang.CharSequence value) {
194 | validate(fields()[0], value);
195 | this.name = value;
196 | fieldSetFlags()[0] = true;
197 | return this;
198 | }
199 |
200 | /**
201 | * Checks whether the 'name' field has been set.
202 | * @return True if the 'name' field has been set, false otherwise.
203 | */
204 | public boolean hasName() {
205 | return fieldSetFlags()[0];
206 | }
207 |
208 |
209 | /**
210 | * Clears the value of the 'name' field.
211 | * @return This builder.
212 | */
213 | public life.jugnu.learnkafka.ch03.User.Builder clearName() {
214 | name = null;
215 | fieldSetFlags()[0] = false;
216 | return this;
217 | }
218 |
219 | /**
220 | * Gets the value of the 'favorite_number' field.
221 | * @return The value.
222 | */
223 | public java.lang.Integer getFavoriteNumber() {
224 | return favorite_number;
225 | }
226 |
227 | /**
228 | * Sets the value of the 'favorite_number' field.
229 | * @param value The value of 'favorite_number'.
230 | * @return This builder.
231 | */
232 | public life.jugnu.learnkafka.ch03.User.Builder setFavoriteNumber(java.lang.Integer value) {
233 | validate(fields()[1], value);
234 | this.favorite_number = value;
235 | fieldSetFlags()[1] = true;
236 | return this;
237 | }
238 |
239 | /**
240 | * Checks whether the 'favorite_number' field has been set.
241 | * @return True if the 'favorite_number' field has been set, false otherwise.
242 | */
243 | public boolean hasFavoriteNumber() {
244 | return fieldSetFlags()[1];
245 | }
246 |
247 |
248 | /**
249 | * Clears the value of the 'favorite_number' field.
250 | * @return This builder.
251 | */
252 | public life.jugnu.learnkafka.ch03.User.Builder clearFavoriteNumber() {
253 | favorite_number = null;
254 | fieldSetFlags()[1] = false;
255 | return this;
256 | }
257 |
258 | /**
259 | * Gets the value of the 'favorite_color' field.
260 | * @return The value.
261 | */
262 | public java.lang.CharSequence getFavoriteColor() {
263 | return favorite_color;
264 | }
265 |
266 | /**
267 | * Sets the value of the 'favorite_color' field.
268 | * @param value The value of 'favorite_color'.
269 | * @return This builder.
270 | */
271 | public life.jugnu.learnkafka.ch03.User.Builder setFavoriteColor(java.lang.CharSequence value) {
272 | validate(fields()[2], value);
273 | this.favorite_color = value;
274 | fieldSetFlags()[2] = true;
275 | return this;
276 | }
277 |
278 | /**
279 | * Checks whether the 'favorite_color' field has been set.
280 | * @return True if the 'favorite_color' field has been set, false otherwise.
281 | */
282 | public boolean hasFavoriteColor() {
283 | return fieldSetFlags()[2];
284 | }
285 |
286 |
287 | /**
288 | * Clears the value of the 'favorite_color' field.
289 | * @return This builder.
290 | */
291 | public life.jugnu.learnkafka.ch03.User.Builder clearFavoriteColor() {
292 | favorite_color = null;
293 | fieldSetFlags()[2] = false;
294 | return this;
295 | }
296 |
297 | @Override
298 | public User build() {
299 | try {
300 | User record = new User();
301 | record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]);
302 | record.favorite_number = fieldSetFlags()[1] ? this.favorite_number : (java.lang.Integer) defaultValue(fields()[1]);
303 | record.favorite_color = fieldSetFlags()[2] ? this.favorite_color : (java.lang.CharSequence) defaultValue(fields()[2]);
304 | return record;
305 | } catch (Exception e) {
306 | throw new org.apache.avro.AvroRuntimeException(e);
307 | }
308 | }
309 | }
310 |
311 | private static final org.apache.avro.io.DatumWriter
312 | WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
313 |
314 | @Override public void writeExternal(java.io.ObjectOutput out)
315 | throws java.io.IOException {
316 | WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
317 | }
318 |
319 | private static final org.apache.avro.io.DatumReader
320 | READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
321 |
322 | @Override public void readExternal(java.io.ObjectInput in)
323 | throws java.io.IOException {
324 | READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
325 | }
326 |
327 | }
328 |
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch03/UserGenerator.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch03;
2 |
3 | public class UserGenerator {
4 | public static void main(String[] args) {
5 |
6 | }
7 |
8 | public static User getNext(){
9 | User u = new User();
10 | u.setName("hello");
11 | u.setFavoriteColor("green");
12 | u.setFavoriteNumber(1);
13 | return u;
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch04/AvroConsumer.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch04;
2 |
3 | import java.util.Properties;
4 | import java.util.Collections;
5 |
6 | import org.apache.kafka.clients.consumer.KafkaConsumer;
7 | import org.apache.kafka.clients.consumer.ConsumerRecords;
8 | import org.apache.kafka.clients.consumer.ConsumerRecord;
9 |
10 | import life.jugnu.learnkafka.ch03.User;
11 |
12 | public class AvroConsumer {
13 | public static void main(String[] args) {
14 | Properties p = new Properties();
15 | p.put("bootstrap.servers", "localhost:9092");
16 | p.put("group.id", "AvroConsumer");
17 | p.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
18 | p.put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
19 | p.put("schema.registry.url", "localhost:8081");
20 |
21 | KafkaConsumer c = new KafkaConsumer(p);
22 | c.subscribe(Collections.singletonList("avrotopic"));
23 |
24 | try {
25 | while (true) {
26 | ConsumerRecords rec = c.poll(100);
27 | System.out.println("We got record count " + rec.count());
28 | for (ConsumerRecord r : rec) {
29 | System.out.println(r.value().getName());
30 | }
31 | }
32 | } catch (Exception e) {
33 | e.printStackTrace();
34 | } finally {
35 | c.close();
36 | }
37 |
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/main/java/life/jugnu/learnkafka/ch04/FirstConsumer.java:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch04;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerRecord;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.KafkaConsumer;
6 |
7 | import java.util.Collections;
8 | import java.util.Properties;
9 |
10 | public class FirstConsumer {
11 | public static void main(String[] args) {
12 | Properties p = new Properties();
13 | p.put("bootstrap.servers", "localhost:9092");
14 | p.put("group.id", "FirstConsumer");
15 | p.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
16 | p.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
17 |
18 | KafkaConsumer c = new KafkaConsumer(p);
19 | c.subscribe(Collections.singletonList("firsttopic"));
20 | try {
21 | while (true) {
22 | ConsumerRecords rec = c.poll(100);
23 | System.out.println("We got record count " + rec.count());
24 | for (ConsumerRecord r : rec) {
25 | System.out.println(r.value());
26 | }
27 | }
28 | } catch (Exception e) {
29 | e.printStackTrace();
30 | } finally {
31 | c.close();
32 | }
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/scala/life/jugnu/learnkafka/ch04/MyFirstScalaConsumer.scala:
--------------------------------------------------------------------------------
1 | package life.jugnu.learnkafka.ch04
2 |
3 | import java.util.{Collections, Properties}
4 |
5 | import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}
6 |
7 | class MyFirstScalaConsumer {
8 | def main(args: Array[String]) {
9 | val p: Properties = new Properties
10 | p.put("bootstrap.servers", "")
11 | p.put("group.id", "")
12 | p.put("", "")
13 |
14 | val c: KafkaConsumer[String, String] = new KafkaConsumer[String, String](p)
15 | c.subscribe(Collections.singletonList("topic"))
16 |
17 |
18 | try {
19 | val rec: ConsumerRecords[String, String] = c.poll(100)
20 | import scala.collection.JavaConversions._
21 | for (r <- rec) {
22 | r.value
23 | }
24 | }
25 | catch {
26 | case e: Exception => {
27 | e.printStackTrace
28 | }
29 | } finally {
30 | c.close
31 | }
32 | }
33 | }
--------------------------------------------------------------------------------