├── .gitignore
├── LICENSE
├── README.md
├── demokafka.0.10.1.0
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java
│       │   │   └── com
│       │   │       └── jasongj
│       │   │           └── kafka
│       │   │               ├── connect
│       │   │               │   ├── ConsoleSinkConnect.java
│       │   │               │   ├── ConsoleSinkTask.java
│       │   │               │   ├── ConsoleSourceConnect.java
│       │   │               │   └── ConsoleSourceTask.java
│       │   │               ├── consumer
│       │   │               │   ├── DemoConsumerAssign.java
│       │   │               │   ├── DemoConsumerAutoCommit.java
│       │   │               │   ├── DemoConsumerCommitCallback.java
│       │   │               │   ├── DemoConsumerCommitPartition.java
│       │   │               │   ├── DemoConsumerFlowControl.java
│       │   │               │   ├── DemoConsumerInterceptor.java
│       │   │               │   ├── DemoConsumerManualCommit.java
│       │   │               │   └── DemoConsumerRebalance.java
│       │   │               ├── producer
│       │   │               │   ├── EvenProducerInterceptor.java
│       │   │               │   ├── HashPartitioner.java
│       │   │               │   ├── ProducerDemo.java
│       │   │               │   └── ProducerDemoCallback.java
│       │   │               └── stream
│       │   │                   ├── PurchaseAnalysis.java
│       │   │                   ├── WordCountDSL.java
│       │   │                   ├── WordCountProcessor.java
│       │   │                   ├── WordCountTopology.java
│       │   │                   ├── model
│       │   │                   │   ├── Item.java
│       │   │                   │   ├── Order.java
│       │   │                   │   └── User.java
│       │   │                   ├── producer
│       │   │                   │   ├── ItemProducer.java
│       │   │                   │   ├── OrderProducer.java
│       │   │                   │   └── UserProducer.java
│       │   │                   ├── serdes
│       │   │                   │   ├── GenericDeserializer.java
│       │   │                   │   ├── GenericSerializer.java
│       │   │                   │   └── SerdesFactory.java
│       │   │                   └── timeextractor
│       │   │                       └── OrderTimestampExtractor.java
│       │   └── resources
│       │       ├── items.csv
│       │       ├── log4j.properties
│       │       ├── orders.csv
│       │       └── users.csv
│       └── test
│           └── java
│               └── com
│                   └── jasongj
│                       └── kafka
│                           └── AppTest.java
├── demokafka.0.8.2.2
│   ├── pom.xml
│   └── src
│       ├── main
│       │   ├── java
│       │   │   └── com
│       │   │       └── jasongj
│       │   │           └── kafka
│       │   │               ├── DemoHighLevelConsumer.java
│       │   │               ├── DemoLowLevelConsumer.java
│       │   │               ├── HashPartitioner.java
│       │   │               ├── ProducerDemo.java
│       │   │               └── RoundRobinPartitioner.java
│       │   └── resources
│       │       └── log4j.properties
│       └── test
│           └── java
│               └── com
│                   └── jasongj
│                       └── kafka
│                           └── AppTest.java
└── pom.xml
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | bin
3 | .DS_Store
4 | .classpath
5 | .project
6 | .settings
7 | .idea
8 | *.iml
9 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Kafka usage examples
2 |
3 | ------
4 |
5 | # Kafka 0.8.2.2 examples
6 | - [Producer example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/ProducerDemo.java)
7 | - [HashPartitioner example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/HashPartitioner.java) Implements a hash partitioner so that messages with the same key are always sent to the same partition (see the sketch after this list)
8 | - [RoundRobinPartitioner example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/RoundRobinPartitioner.java) Provides a round-robin message-routing algorithm for load balancing
9 | - [High Level Consumer example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/DemoHighLevelConsumer.java) Uses consumer groups in the high-level API so that a message goes to exactly one consumer within a group (unicast) and to every subscribing group (broadcast)
10 | - [Low Level Consumer example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/DemoLowLevelConsumer.java) Uses the low-level API for precise control over message consumption
11 |
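For orientation, here is a minimal sketch of a Kafka 0.8-style custom partitioner (the class name is illustrative, not this repo's code): implementations of the old `kafka.producer.Partitioner` API route by `(key, numPartitions)` and need a `VerifiableProperties` constructor.

```java
import kafka.producer.Partitioner;
import kafka.utils.VerifiableProperties;

public class SimpleHashPartitioner implements Partitioner {

    // Kafka 0.8 instantiates partitioners reflectively and passes in the producer config
    public SimpleHashPartitioner(VerifiableProperties props) {
    }

    @Override
    public int partition(Object key, int numPartitions) {
        // Mask off the sign bit so the modulo yields a valid partition index;
        // the same key therefore always maps to the same partition
        return (key.hashCode() & 0x7FFFFFFF) % numPartitions;
    }
}
```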
12 | # Kafka 0.10.1.0 examples
13 | - [Producer example](https://github.com/habren/KafkaExample/tree/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer) The producer supports send callbacks
14 | - [Partitioner example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer/HashPartitioner.java) The Partitioner interface differs from older versions and enables richer message-routing/dispatching semantics
15 | - [Consumer example](https://github.com/habren/KafkaExample/tree/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer) The new consumer in Kafka 0.10.* covers, with one API, both the High Level API and the Low Level API of 0.8.* and earlier (see the sketch after this list)
16 | - [Stream Low Level Processor API example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountProcessor.java)
17 | - [Stream Topology example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountTopology.java) Implements word count with the Kafka Streams low-level Processor API
18 | - [Stream DSL example](https://github.com/habren/KafkaExample/blob/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountDSL.java) Implements word count with the Kafka Streams DSL API
19 | - [Purchase Analysis](https://github.com/habren/KafkaExample/blob/master/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/PurchaseAnalysis.java) Shows how to join a KStream with a KTable, how to create your own Serializer/Deserializer and Serde, and how to use Kafka Streams transforms and windows
20 |
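As a quick reference for the unified consumer API mentioned above, a minimal sketch (broker address, topic, and group name are placeholders; the files under `consumer/` show the full variants):

```java
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class QuickStartConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "demo-group");
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        // subscribe() gives group-managed partition assignment (the old "high level" style);
        // consumer.assign(...) would instead give manual partition control (the old "low level" style)
        consumer.subscribe(Arrays.asList("demo-topic"));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            records.forEach(r -> System.out.printf("%s-%d@%d: %s=%s%n",
                    r.topic(), r.partition(), r.offset(), r.key(), r.value()));
        }
    }
}
```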
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 |   <modelVersion>4.0.0</modelVersion>
4 |
5 |   <artifactId>demokafka.0.10.1.0</artifactId>
6 |   <version>0.10.1.0</version>
7 |   <packaging>jar</packaging>
8 |
9 |   <name>demokafka.0.10.1.0</name>
10 |   <url>http://www.jasongj.com</url>
11 |
12 |   <parent>
13 |     <groupId>com.jasongj.kafka</groupId>
14 |     <artifactId>demokafka</artifactId>
15 |     <version>1.0.0</version>
16 |     <relativePath>../pom.xml</relativePath>
17 |   </parent>
18 |
19 |   <dependencies>
20 |     <dependency>
21 |       <groupId>org.apache.kafka</groupId>
22 |       <artifactId>kafka_2.11</artifactId>
23 |       <version>0.10.1.0</version>
24 |       <exclusions>
25 |         <exclusion>
26 |           <groupId>com.101tec</groupId>
27 |           <artifactId>zkclient</artifactId>
28 |         </exclusion>
29 |       </exclusions>
30 |     </dependency>
31 |     <dependency>
32 |       <groupId>org.apache.kafka</groupId>
33 |       <artifactId>kafka-clients</artifactId>
34 |       <version>0.10.1.0</version>
35 |     </dependency>
36 |     <dependency>
37 |       <groupId>org.apache.kafka</groupId>
38 |       <artifactId>kafka-streams</artifactId>
39 |       <version>0.10.1.0</version>
40 |       <exclusions>
41 |         <exclusion>
42 |           <groupId>com.101tec</groupId>
43 |           <artifactId>zkclient</artifactId>
44 |         </exclusion>
45 |       </exclusions>
46 |     </dependency>
47 |     <dependency>
48 |       <groupId>com.101tec</groupId>
49 |       <artifactId>zkclient</artifactId>
50 |       <version>0.10</version>
51 |     </dependency>
52 |     <dependency>
53 |       <groupId>commons-io</groupId>
54 |       <artifactId>commons-io</artifactId>
55 |       <version>2.5</version>
56 |     </dependency>
57 |   </dependencies>
58 | </project>
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/connect/ConsoleSinkConnect.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.connect;
2 |
3 | import java.util.ArrayList;
4 | import java.util.HashMap;
5 | import java.util.List;
6 | import java.util.Map;
7 |
8 | import org.apache.kafka.common.config.ConfigDef;
9 | import org.apache.kafka.common.utils.AppInfoParser;
10 | import org.apache.kafka.connect.connector.Task;
11 | import org.apache.kafka.connect.sink.SinkConnector;
12 |
13 | public class ConsoleSinkConnect extends SinkConnector {
14 |
15 | @Override
16 | public ConfigDef config() {
17 | return null;
18 | }
19 |
20 | @Override
21 | public void start(Map<String, String> config) {
22 | }
23 |
24 | @Override
25 | public void stop() {
26 |
27 | }
28 |
29 | @Override
30 | public Class<? extends Task> taskClass() {
31 | return ConsoleSinkTask.class;
32 | }
33 |
34 | @Override
35 | public List<Map<String, String>> taskConfigs(int maxTasks) {
36 | List<Map<String, String>> tasks = new ArrayList<Map<String, String>>();
37 | Map<String, String> task = new HashMap<String, String>();
38 | tasks.add(task);
39 | return tasks;
40 | }
41 |
42 | @Override
43 | public String version() {
44 | return AppInfoParser.getVersion();
45 | }
46 |
47 |
48 | }
49 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/connect/ConsoleSinkTask.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.connect;
2 |
3 | import java.io.PrintStream;
4 | import java.util.Collection;
5 | import java.util.Map;
6 |
7 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
8 | import org.apache.kafka.common.TopicPartition;
9 | import org.apache.kafka.common.utils.AppInfoParser;
10 | import org.apache.kafka.connect.sink.SinkRecord;
11 | import org.apache.kafka.connect.sink.SinkTask;
12 |
13 | public class ConsoleSinkTask extends SinkTask {
14 |
15 | private PrintStream printStream;
16 |
17 | @Override
18 | public String version() {
19 | return AppInfoParser.getVersion();
20 | }
21 |
22 | @Override
23 | public void flush(Map<TopicPartition, OffsetAndMetadata> offsets) {
24 | if(printStream != null){
25 | printStream.flush();
26 | }
27 | }
28 |
29 | @Override
30 | public void put(Collection<SinkRecord> records) {
31 | records.forEach(printStream::println);
32 | }
33 |
34 | @Override
35 | public void start(Map<String, String> config) {
36 | this.printStream = System.out;
37 | }
38 |
39 | @Override
40 | public void stop() {
41 | if(printStream != null){
42 | printStream.close();
43 | }
44 |
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/connect/ConsoleSourceConnect.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.connect;
2 |
3 | import java.util.ArrayList;
4 | import java.util.HashMap;
5 | import java.util.List;
6 | import java.util.Map;
7 |
8 | import org.apache.commons.lang3.StringUtils;
9 | import org.apache.kafka.common.config.ConfigDef;
10 | import org.apache.kafka.common.config.ConfigDef.Importance;
11 | import org.apache.kafka.common.config.ConfigDef.Type;
12 | import org.apache.kafka.common.utils.AppInfoParser;
13 | import org.apache.kafka.connect.connector.Task;
14 | import org.apache.kafka.connect.errors.ConnectException;
15 | import org.apache.kafka.connect.source.SourceConnector;
16 |
17 | public class ConsoleSourceConnect extends SourceConnector {
18 |
19 | private static final ConfigDef CONFIG_DEF = new ConfigDef().define("topic", Type.STRING, Importance.HIGH,
20 | "Target topic name");
21 | private static final String TOPIC_CONF_NAME = "topic";
22 |
23 | private String topic;
24 |
25 | @Override
26 | public ConfigDef config() {
27 | return CONFIG_DEF;
28 | }
29 |
30 | @Override
31 | public void start(Map<String, String> props) {
32 | topic = props.get(TOPIC_CONF_NAME);
33 | if(StringUtils.isBlank(topic)) {
34 | throw new ConnectException("Topic must be configured");
35 | }
36 | }
37 |
38 | @Override
39 | public void stop() {
40 |
41 | }
42 |
43 | @Override
44 | public Class<? extends Task> taskClass() {
45 | return ConsoleSourceTask.class;
46 | }
47 |
48 | @Override
49 | public List<Map<String, String>> taskConfigs(int maxTasks) {
50 | List<Map<String, String>> tasks = new ArrayList<Map<String, String>>();
51 | Map<String, String> task = new HashMap<String, String>();
52 | task.put(TOPIC_CONF_NAME, topic);
53 | tasks.add(task);
54 | return tasks;
55 | }
56 |
57 | @Override
58 | public String version() {
59 | return AppInfoParser.getVersion();
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/connect/ConsoleSourceTask.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.connect;
2 |
3 | import java.io.BufferedReader;
4 | import java.io.IOException;
5 | import java.io.InputStream;
6 | import java.io.InputStreamReader;
7 | import java.util.ArrayList;
8 | import java.util.Collections;
9 | import java.util.List;
10 | import java.util.Map;
11 |
12 | import org.apache.kafka.connect.data.Schema;
13 | import org.apache.kafka.connect.source.SourceRecord;
14 | import org.apache.kafka.connect.source.SourceTask;
15 |
16 | public class ConsoleSourceTask extends SourceTask {
17 |
18 | private InputStream inputStream;
19 | private String topic;
20 |
21 | @Override
22 | public String version() {
23 | return null;
24 | }
25 |
26 | @Override
27 | public List<SourceRecord> poll() throws InterruptedException {
28 | try{
29 | BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
30 | List<SourceRecord> records = new ArrayList<SourceRecord>();
31 | while(reader.ready()){
32 | String line = reader.readLine();
33 | records.add(new SourceRecord(Collections.singletonMap("SYSTEM_IN", null), Collections.singletonMap("OFFSET", 0), topic, Schema.STRING_SCHEMA, line));
34 | }
35 | return records;
36 | } catch(IOException ex) {
37 | return null;
38 | }
39 | }
40 |
41 | @Override
42 | public void start(Map<String, String> conf) {
43 | inputStream = System.in;
44 | topic = conf.get("topic");
45 | }
46 |
47 | @Override
48 | public void stop() {
49 |
50 | }
51 |
52 |
53 | }
54 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerAssign.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Properties;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerRecords;
7 | import org.apache.kafka.clients.consumer.KafkaConsumer;
8 | import org.apache.kafka.common.TopicPartition;
9 | import org.apache.kafka.common.serialization.StringDeserializer;
10 |
11 | public class DemoConsumerAssign {
12 |
13 | public static void main(String[] args) {
14 | if (args == null || args.length != 4) {
15 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
16 | args = new String[] { "kafka0:9092", "topic1", "group1", "consumer3" };
17 | }
20 | String bootstrap = args[0];
21 | String topic = args[1];
22 | String groupid = args[2];
23 | String clientid = args[3];
24 |
25 | Properties props = new Properties();
26 | props.put("bootstrap.servers", bootstrap);
27 | props.put("group.id", groupid);
28 | props.put("client.id", clientid);
29 | props.put("enable.auto.commit", "true");
30 | props.put("auto.commit.interval.ms", "1000");
31 | props.put("key.deserializer", StringDeserializer.class.getName());
32 | props.put("value.deserializer", StringDeserializer.class.getName());
33 | props.put("auto.offset.reset", "earliest");
34 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
35 | consumer.assign(Arrays.asList(new TopicPartition(topic, 0), new TopicPartition(topic, 1)));
36 | while (true) {
37 | ConsumerRecords<String, String> records = consumer.poll(100);
38 | records.forEach(record -> {
39 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n", clientid, record.topic(),
40 | record.partition(), record.offset(), record.key(), record.value());
41 | });
42 | }
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerAutoCommit.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Properties;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerRecords;
7 | import org.apache.kafka.clients.consumer.KafkaConsumer;
10 | import org.apache.kafka.common.serialization.StringDeserializer;
11 |
12 | public class DemoConsumerAutoCommit {
13 |
14 | public static void main(String[] args) {
15 | if (args == null || args.length != 4) {
16 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
17 | args = new String[] { "kafka0:19092", "words", "group1", "consumer2" };
18 | }
21 | String bootstrap = args[0];
22 | String topic = args[1];
23 | String groupid = args[2];
24 | String clientid = args[3];
25 |
26 | Properties props = new Properties();
27 | props.put("bootstrap.servers", bootstrap);
28 | props.put("group.id", groupid);
29 | props.put("client.id", clientid);
30 | props.put("enable.auto.commit", "true");
31 | props.put("auto.commit.interval.ms", "1000");
32 | props.put("auto.offset.reset", "earliest");
33 | props.put("key.deserializer", StringDeserializer.class.getName());
34 | props.put("value.deserializer", StringDeserializer.class.getName());
35 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
36 | consumer.subscribe(Arrays.asList(topic));
37 | while (true) {
38 | ConsumerRecords<String, String> records = consumer.poll(100);
39 | records.forEach(record -> {
40 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n", clientid, record.topic(),
41 | record.partition(), record.offset(), record.key(), record.value());
42 | });
43 | }
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerCommitCallback.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Map;
5 | import java.util.Properties;
6 | import java.util.concurrent.atomic.AtomicLong;
7 |
8 | import org.apache.kafka.clients.consumer.ConsumerRecords;
9 | import org.apache.kafka.clients.consumer.KafkaConsumer;
10 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
11 | import org.apache.kafka.common.TopicPartition;
12 | import org.apache.kafka.common.serialization.StringDeserializer;
13 |
14 | public class DemoConsumerCommitCallback {
15 |
16 | public static void main(String[] args) throws Exception {
17 | if (args == null || args.length != 4) {
18 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
19 | args = new String[] { "kafka0:9092", "topic1", "group11", "consumer2" };
20 | }
23 | String bootstrap = args[0];
24 | String topic = args[1];
25 | String groupid = args[2];
26 | String clientid = args[3];
27 |
28 | Properties props = new Properties();
29 | props.put("bootstrap.servers", bootstrap);
30 | props.put("group.id", groupid);
31 | props.put("enable.auto.commit", "false");
32 | props.put("key.deserializer", StringDeserializer.class.getName());
33 | props.put("value.deserializer", StringDeserializer.class.getName());
34 | props.put("max.poll.interval.ms", "300000");
35 | props.put("max.poll.records", "500");
36 | props.put("auto.offset.reset", "earliest");
37 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
38 | consumer.subscribe(Arrays.asList(topic));
39 | AtomicLong atomicLong = new AtomicLong();
40 | while (true) {
41 | ConsumerRecords<String, String> records = consumer.poll(100);
42 | records.forEach(record -> {
43 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n",
44 | clientid, record.topic(), record.partition(), record.offset(), record.key(), record.value());
45 | // Commit asynchronously after every tenth record; the callback reports the outcome
46 | if (atomicLong.incrementAndGet() % 10 == 0)
47 | consumer.commitAsync((Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) -> {
48 | if (exception != null) {
49 | exception.printStackTrace();
50 | } else {
51 | offsets.forEach((TopicPartition partition, OffsetAndMetadata offset) ->
52 | System.out.printf("Commit %s-%d-%d %n", partition.topic(), partition.partition(), offset.offset())
53 | );
54 | }
55 | });
54 | });
55 | }
56 | }
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerCommitPartition.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Collections;
5 | import java.util.List;
6 | import java.util.Properties;
8 |
9 | import org.apache.kafka.clients.consumer.ConsumerRecord;
10 | import org.apache.kafka.clients.consumer.ConsumerRecords;
11 | import org.apache.kafka.clients.consumer.KafkaConsumer;
12 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
13 | import org.apache.kafka.common.serialization.StringDeserializer;
14 |
15 | public class DemoConsumerCommitPartition {
16 |
17 | public static void main(String[] args) throws Exception {
18 | if (args == null || args.length != 4) {
19 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
20 | args = new String[] { "kafka0:9092", "topic1", "group2", "consumer2" };
21 | }
24 | String bootstrap = args[0];
25 | String topic = args[1];
26 | String groupid = args[2];
27 | String clientid = args[3];
28 |
29 | Properties props = new Properties();
30 | props.put("bootstrap.servers", bootstrap);
31 | props.put("group.id", groupid);
32 | props.put("enable.auto.commit", "false");
33 | props.put("key.deserializer", StringDeserializer.class.getName());
34 | props.put("value.deserializer", StringDeserializer.class.getName());
35 | props.put("max.poll.interval.ms", "300000");
36 | props.put("max.poll.records", "500");
37 | props.put("auto.offset.reset", "earliest");
38 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
39 | consumer.subscribe(Arrays.asList(topic));
40 | while (true) {
41 | ConsumerRecords<String, String> records = consumer.poll(100);
42 | records.partitions().forEach(topicPartition -> {
43 | List<ConsumerRecord<String, String>> partitionRecords = records.records(topicPartition);
44 | partitionRecords.forEach(record -> {
45 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n", clientid, record.topic(),
46 | record.partition(), record.offset(), record.key(), record.value());
47 | });
48 | long lastOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
49 | consumer.commitSync(Collections.singletonMap(topicPartition, new OffsetAndMetadata(lastOffset + 1)));
50 | });
51 | }
52 | }
53 |
54 | }
55 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerFlowControl.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Collection;
5 | import java.util.Properties;
6 |
7 | import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
8 | import org.apache.kafka.clients.consumer.ConsumerRecords;
9 | import org.apache.kafka.clients.consumer.KafkaConsumer;
10 | import org.apache.kafka.common.TopicPartition;
11 | import org.apache.kafka.common.serialization.StringDeserializer;
12 |
13 | public class DemoConsumerFlowControl {
14 |
15 | public static void main(String[] args) {
16 | if (args == null || args.length != 4) {
17 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
18 | args = new String[] { "kafka0:9092", "topic1", "group239", "consumer2" };
19 | }
22 | String bootstrap = args[0];
23 | String topic = args[1];
24 | String groupid = args[2];
25 | String clientid = args[3];
26 |
27 | Properties props = new Properties();
28 | props.put("bootstrap.servers", bootstrap);
29 | props.put("group.id", groupid);
30 | props.put("client.id", clientid);
31 | props.put("enable.auto.commit", "true");
32 | props.put("auto.commit.interval.ms", "1000");
33 | props.put("key.deserializer", StringDeserializer.class.getName());
34 | props.put("value.deserializer", StringDeserializer.class.getName());
35 | props.put("auto.offset.reset", "earliest");
36 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
37 | consumer.subscribe(Arrays.asList(topic), new ConsumerRebalanceListener(){
38 |
39 | @Override
40 | public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
41 | partitions.forEach(topicPartition -> {
42 | System.out.printf("Revoked partition for client %s : %s-%s %n", clientid, topicPartition.topic(), topicPartition.partition());
43 | });
44 | }
45 |
46 | @Override
47 | public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
48 | partitions.forEach(topicPartition -> {
49 | System.out.printf("Assigned partition for client %s : %s-%s %n", clientid, topicPartition.topic(), topicPartition.partition());
50 | });
51 | }});
52 | while (true) {
53 | ConsumerRecords<String, String> records = consumer.poll(100000000);
54 | // Flow control: pause fetching from partitions 0 and 1 (consumer.resume(...) would re-enable them; this demo never resumes)
55 | consumer.pause(Arrays.asList(new TopicPartition(topic, 0)));
56 | consumer.pause(Arrays.asList(new TopicPartition(topic, 1)));
56 | records.forEach(record -> {
57 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n", clientid, record.topic(),
58 | record.partition(), record.offset(), record.key(), record.value());
59 | });
60 | }
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerInterceptor.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import org.apache.kafka.clients.consumer.ConsumerInterceptor;
4 | import org.apache.kafka.clients.consumer.ConsumerRecords;
5 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
6 | import org.apache.kafka.common.TopicPartition;
7 |
8 | import java.util.Map;
9 |
10 | /**
11 | * Created by juguo on 1/2/17.
12 | */
13 | public class DemoConsumerInterceptor implements ConsumerInterceptor<String, String> {
14 | @Override
15 | public ConsumerRecords<String, String> onConsume(ConsumerRecords<String, String> records) {
16 | // Pass the records through unchanged (returning null would drop the whole batch)
17 | return records;
18 | }
19 |
20 | @Override
21 | public void close() {
22 |
23 | }
24 |
25 | @Override
26 | public void onCommit(Map<TopicPartition, OffsetAndMetadata> offsets) {
27 |
28 | }
29 |
30 | @Override
31 | public void configure(Map<String, ?> configs) {
32 |
33 | }
34 | }
32 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerManualCommit.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Properties;
5 | import java.util.concurrent.atomic.AtomicLong;
6 |
7 | import org.apache.kafka.clients.consumer.ConsumerRecords;
8 | import org.apache.kafka.clients.consumer.KafkaConsumer;
9 | import org.apache.kafka.common.serialization.DoubleDeserializer;
10 | import org.apache.kafka.common.serialization.StringDeserializer;
11 |
12 | public class DemoConsumerManualCommit {
13 |
14 | public static void main(String[] args) throws Exception {
15 | if (args == null || args.length != 4) {
16 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
17 | args = new String[] { "kafka0:19092", "gender-amount", "group4", "consumer2" };
18 | }
21 | String bootstrap = args[0];
22 | String topic = args[1];
23 | String groupid = args[2];
24 | String clientid = args[3];
25 |
26 | Properties props = new Properties();
27 | props.put("bootstrap.servers", bootstrap);
28 | props.put("group.id", groupid);
29 | props.put("enable.auto.commit", "false");
30 | props.put("key.deserializer", StringDeserializer.class.getName());
31 | props.put("value.deserializer", DoubleDeserializer.class.getName());
32 | props.put("max.poll.interval.ms", "300000");
33 | props.put("max.poll.records", "500");
34 | props.put("auto.offset.reset", "earliest");
35 | KafkaConsumer<String, Double> consumer = new KafkaConsumer<>(props);
36 | consumer.subscribe(Arrays.asList(topic));
37 | AtomicLong atomicLong = new AtomicLong();
38 | while (true) {
39 | ConsumerRecords<String, Double> records = consumer.poll(100);
40 | records.forEach(record -> {
41 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n",
42 | clientid, record.topic(), record.partition(), record.offset(), record.key(), record.value());
43 | if (atomicLong.incrementAndGet() % 10 == 0) {
44 | // Manually commit offsets synchronously after every tenth record
45 | consumer.commitSync();
46 | }
46 | });
47 | }
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/consumer/DemoConsumerRebalance.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.consumer;
2 |
3 | import java.util.Arrays;
4 | import java.util.Collection;
5 | import java.util.Properties;
6 |
7 | import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
8 | import org.apache.kafka.clients.consumer.ConsumerRecords;
9 | import org.apache.kafka.clients.consumer.KafkaConsumer;
10 | import org.apache.kafka.common.TopicPartition;
11 | import org.apache.kafka.common.serialization.StringDeserializer;
12 |
13 | public class DemoConsumerRebalance {
14 |
15 | public static void main(String[] args) {
16 | if (args == null || args.length != 4) {
17 | // Fall back to demo defaults; usage: java -jar kafka_consumer.jar ${bootstrap_server} ${topic_name} ${group_name} ${client_id}
18 | args = new String[] { "kafka0:9092", "topic1", "group1", "consumer1" };
19 | }
22 | String bootstrap = args[0];
23 | String topic = args[1];
24 | String groupid = args[2];
25 | String clientid = args[3];
26 |
27 | Properties props = new Properties();
28 | props.put("bootstrap.servers", bootstrap);
29 | props.put("group.id", groupid);
30 | props.put("client.id", clientid);
31 | props.put("enable.auto.commit", "true");
32 | props.put("auto.commit.interval.ms", "1000");
33 | props.put("key.deserializer", StringDeserializer.class.getName());
34 | props.put("value.deserializer", StringDeserializer.class.getName());
35 | KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
36 | consumer.subscribe(Arrays.asList(topic), new ConsumerRebalanceListener(){
37 |
38 | @Override
39 | public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
40 | partitions.forEach(topicPartition -> {
41 | System.out.printf("Revoked partition for client %s : %s-%s %n", clientid, topicPartition.topic(), topicPartition.partition());
42 | });
43 | }
44 |
45 | @Override
46 | public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
47 | partitions.forEach(topicPartition -> {
48 | System.out.printf("Assigned partition for client %s : %s-%s %n", clientid, topicPartition.topic(), topicPartition.partition());
49 | });
50 | }});
51 | while (true) {
52 | ConsumerRecords<String, String> records = consumer.poll(100);
53 | records.forEach(record -> {
54 | System.out.printf("client : %s , topic: %s , partition: %d , offset = %d, key = %s, value = %s%n", clientid, record.topic(),
55 | record.partition(), record.offset(), record.key(), record.value());
56 | });
57 | }
58 | }
59 |
60 | }
61 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer/EvenProducerInterceptor.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.producer;
2 |
3 | import org.apache.kafka.clients.producer.ProducerInterceptor;
4 | import org.apache.kafka.clients.producer.ProducerRecord;
5 | import org.apache.kafka.clients.producer.RecordMetadata;
6 |
7 | import java.util.Map;
8 |
9 | /**
10 | * Created by Jason Guo (jason.guo.vip@gmail.com).
11 | */
12 | public class EvenProducerInterceptor implements ProducerInterceptor<String, String> {
13 | @Override
14 | public ProducerRecord<String, String> onSend(ProducerRecord<String, String> record) {
15 | return record;
16 | }
17 |
18 | @Override
19 | public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
20 |
21 | }
22 |
23 | @Override
24 | public void close() {
25 |
26 | }
27 |
28 | @Override
29 | public void configure(Map<String, ?> configs) {
30 |
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer/HashPartitioner.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.producer;
2 |
3 | import java.util.List;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.clients.producer.Partitioner;
7 | import org.apache.kafka.common.Cluster;
8 | import org.apache.kafka.common.PartitionInfo;
9 |
10 | public class HashPartitioner implements Partitioner {
11 |
12 | @Override
13 | public void configure(Map<String, ?> configs) {
14 | }
15 |
16 | @Override
17 | public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
18 | List<PartitionInfo> partitions = cluster.partitionsForTopic(topic);
19 | int numPartitions = partitions.size();
20 | if (keyBytes != null) {
21 | int hashCode = 0;
22 | if (key instanceof Integer || key instanceof Long) {
23 | hashCode = ((Number) key).intValue(); // (int) key would throw ClassCastException for Long keys
24 | } else {
25 | hashCode = key.hashCode();
26 | }
27 | hashCode = hashCode & 0x7fffffff;
28 | return hashCode % numPartitions;
29 | } else {
30 | return 0;
31 | }
32 | }
33 |
34 | @Override
35 | public void close() {
36 | }
37 |
38 | }
39 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer/ProducerDemo.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.producer;
2 |
3 | import java.util.Properties;
4 |
5 | import org.apache.kafka.clients.producer.KafkaProducer;
6 | import org.apache.kafka.clients.producer.Producer;
7 | import org.apache.kafka.clients.producer.ProducerRecord;
8 | import org.apache.kafka.common.serialization.StringSerializer;
9 |
10 | public class ProducerDemo {
11 |
12 | public static void main(String[] args) throws Exception {
13 | Properties props = new Properties();
14 | props.put("bootstrap.servers", "kafka0:9092");
15 | props.put("acks", "all");
16 | props.put("retries", 3);
17 | props.put("batch.size", 16384);
18 | props.put("linger.ms", 1);
19 | props.put("buffer.memory", 33554432);
20 | props.put("key.serializer", StringSerializer.class.getName());
21 | props.put("value.serializer", StringSerializer.class.getName());
22 | props.put("partitioner.class", HashPartitioner.class.getName());
23 | props.put("interceptor.classes", EvenProducerInterceptor.class.getName());
24 |
25 | Producer<String, String> producer = new KafkaProducer<>(props);
26 | for (int i = 0; i < 10; i++)
27 | producer.send(new ProducerRecord<>("topic1", Integer.toString(i), Integer.toString(i)));
28 | producer.close();
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/producer/ProducerDemoCallback.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.producer;
2 |
3 | import java.util.Properties;
4 |
5 | import org.apache.kafka.clients.producer.KafkaProducer;
6 | import org.apache.kafka.clients.producer.Producer;
7 | import org.apache.kafka.clients.producer.ProducerRecord;
8 | import org.apache.kafka.common.serialization.StringSerializer;
9 |
10 | public class ProducerDemoCallback {
11 |
12 | public static void main(String[] args) throws Exception {
13 | Properties props = new Properties();
14 | props.put("bootstrap.servers", "kafka0:9092");
15 | props.put("acks", "all");
16 | props.put("retries", 3);
17 | props.put("batch.size", 16384);
18 | props.put("linger.ms", 1);
19 | props.put("buffer.memory", 33554432);
20 | props.put("key.serializer", StringSerializer.class.getName());
21 | props.put("value.serializer", StringSerializer.class.getName());
22 | props.put("partitioner.class", HashPartitioner.class.getName());
23 |
24 | Producer<String, String> producer = new KafkaProducer<>(props);
25 | for (int i = 0; i < 10; i++) {
26 | ProducerRecord<String, String> record = new ProducerRecord<>("topic1", Integer.toString(i),
27 | Integer.toString(i));
28 | // producer.send(record);
29 | // producer.send(record, new Callback() {
30 | //
31 | // @Override
32 | // public void onCompletion(RecordMetadata metadata, Exception exception) {
33 | // System.out.printf("Send record partition:%d, offset:%d, keysize:%d, valuesize:%d %n",
34 | // metadata.partition(), metadata.offset(), metadata.serializedKeySize(),
35 | // metadata.serializedValueSize());
36 | // }
37 | //
38 | // });
39 | producer.send(record, (metadata, exception) -> {
40 | if(metadata != null) {
41 | System.out.printf("Send record partition:%d, offset:%d, keysize:%d, valuesize:%d %n",
42 | metadata.partition(), metadata.offset(), metadata.serializedKeySize(),
43 | metadata.serializedValueSize());
44 | }
45 | if(exception != null) {
46 | exception.printStackTrace();
47 | }
48 | });
49 | }
50 | producer.close();
51 | }
52 |
53 | }
54 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/PurchaseAnalysis.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream;
2 |
3 | import java.io.IOException;
4 | import java.util.Properties;
5 |
6 | import org.apache.commons.lang3.StringUtils;
7 | import org.apache.kafka.clients.consumer.ConsumerConfig;
8 | import org.apache.kafka.common.serialization.Serdes;
9 | import org.apache.kafka.streams.KafkaStreams;
10 | import org.apache.kafka.streams.KeyValue;
11 | import org.apache.kafka.streams.StreamsConfig;
12 | import org.apache.kafka.streams.kstream.KStream;
13 | import org.apache.kafka.streams.kstream.KStreamBuilder;
14 | import org.apache.kafka.streams.kstream.KTable;
15 |
16 | import com.jasongj.kafka.stream.model.Item;
17 | import com.jasongj.kafka.stream.model.Order;
18 | import com.jasongj.kafka.stream.model.User;
19 | import com.jasongj.kafka.stream.serdes.SerdesFactory;
20 | import com.jasongj.kafka.stream.timeextractor.OrderTimestampExtractor;
21 |
22 | public class PurchaseAnalysis {
23 |
24 | public static void main(String[] args) throws IOException, InterruptedException {
25 | Properties props = new Properties();
26 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-purchase-analysis2");
27 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka0:19092");
28 | props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "zookeeper0:12181/kafka");
29 | props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
30 | props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
31 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
32 | props.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, OrderTimestampExtractor.class);
33 |
34 | KStreamBuilder streamBuilder = new KStreamBuilder();
35 | KStream<String, Order> orderStream = streamBuilder.stream(Serdes.String(), SerdesFactory.serdFrom(Order.class), "orders");
36 | KTable<String, User> userTable = streamBuilder.table(Serdes.String(), SerdesFactory.serdFrom(User.class), "users", "users-state-store");
37 | KTable<String, Item> itemTable = streamBuilder.table(Serdes.String(), SerdesFactory.serdFrom(Item.class), "items", "items-state-store");
38 | // itemTable.toStream().foreach((String itemName, Item item) -> System.out.printf("Item info %s-%s-%s-%s\n", item.getItemName(), item.getAddress(), item.getType(), item.getPrice()));
39 | KTable<String, Double> kTable = orderStream
40 | .leftJoin(userTable, (Order order, User user) -> OrderUser.fromOrderUser(order, user), Serdes.String(), SerdesFactory.serdFrom(Order.class))
41 | .filter((String userName, OrderUser orderUser) -> orderUser.userAddress != null)
42 | .map((String userName, OrderUser orderUser) -> new KeyValue<String, OrderUser>(orderUser.itemName, orderUser))
43 | // Repartition by item name so the stream is co-partitioned with the items KTable before the second join
44 | .through(Serdes.String(), SerdesFactory.serdFrom(OrderUser.class), (String key, OrderUser orderUser, int numPartitions) -> (orderUser.getItemName().hashCode() & 0x7FFFFFFF) % numPartitions, "orderuser-repartition-by-item")
45 | .leftJoin(itemTable, (OrderUser orderUser, Item item) -> OrderUserItem.fromOrderUser(orderUser, item), Serdes.String(), SerdesFactory.serdFrom(OrderUser.class))
46 | .filter((String item, OrderUserItem orderUserItem) -> StringUtils.compare(orderUserItem.userAddress, orderUserItem.itemAddress) == 0)
47 | // .foreach((String itemName, OrderUserItem orderUserItem) -> System.out.printf("%s-%s-%s-%s\n", itemName, orderUserItem.itemAddress, orderUserItem.userName, orderUserItem.userAddress))
48 | .map((String item, OrderUserItem orderUserItem) -> KeyValue.pair(orderUserItem.gender, (Double) (orderUserItem.quantity * orderUserItem.itemPrice)))
49 | .groupByKey(Serdes.String(), Serdes.Double())
50 | .reduce((Double v1, Double v2) -> v1 + v2, "gender-amount-state-store");
50 | // kTable.foreach((str, dou) -> System.out.printf("%s-%s\n", str, dou));
51 | kTable
52 | .toStream()
53 | .map((String gender, Double total) -> new KeyValue<String, String>(gender, String.valueOf(total)))
54 | .to("gender-amount");
55 |
56 | KafkaStreams kafkaStreams = new KafkaStreams(streamBuilder, props);
57 | kafkaStreams.cleanUp();
58 | kafkaStreams.start();
59 |
60 | System.in.read();
61 | kafkaStreams.close();
62 | kafkaStreams.cleanUp();
63 | }
64 |
65 | public static class OrderUser {
66 | private String userName;
67 | private String itemName;
68 | private long transactionDate;
69 | private int quantity;
70 | private String userAddress;
71 | private String gender;
72 | private int age;
73 |
74 | public String getUserName() {
75 | return userName;
76 | }
77 |
78 | public void setUserName(String userName) {
79 | this.userName = userName;
80 | }
81 |
82 | public String getItemName() {
83 | return itemName;
84 | }
85 |
86 | public void setItemName(String itemName) {
87 | this.itemName = itemName;
88 | }
89 |
90 | public long getTransactionDate() {
91 | return transactionDate;
92 | }
93 |
94 | public void setTransactionDate(long transactionDate) {
95 | this.transactionDate = transactionDate;
96 | }
97 |
98 | public int getQuantity() {
99 | return quantity;
100 | }
101 |
102 | public void setQuantity(int quantity) {
103 | this.quantity = quantity;
104 | }
105 |
106 | public String getUserAddress() {
107 | return userAddress;
108 | }
109 |
110 | public void setUserAddress(String userAddress) {
111 | this.userAddress = userAddress;
112 | }
113 |
114 | public String getGender() {
115 | return gender;
116 | }
117 |
118 | public void setGender(String gender) {
119 | this.gender = gender;
120 | }
121 |
122 | public int getAge() {
123 | return age;
124 | }
125 |
126 | public void setAge(int age) {
127 | this.age = age;
128 | }
129 |
130 | public static OrderUser fromOrder(Order order) {
131 | OrderUser orderUser = new OrderUser();
132 | if(order == null) {
133 | return orderUser;
134 | }
135 | orderUser.userName = order.getUserName();
136 | orderUser.itemName = order.getItemName();
137 | orderUser.transactionDate = order.getTransactionDate();
138 | orderUser.quantity = order.getQuantity();
139 | return orderUser;
140 | }
141 |
142 | public static OrderUser fromOrderUser(Order order, User user) {
143 | OrderUser orderUser = fromOrder(order);
144 | if(user == null) {
145 | return orderUser;
146 | }
147 | orderUser.gender = user.getGender();
148 | orderUser.age = user.getAge();
149 | orderUser.userAddress = user.getAddress();
150 | return orderUser;
151 | }
152 | }
153 |
154 | public static class OrderUserItem {
155 | private String userName;
156 | private String itemName;
157 | private long transactionDate;
158 | private int quantity;
159 | private String userAddress;
160 | private String gender;
161 | private int age;
162 | private String itemAddress;
163 | private String itemType;
164 | private double itemPrice;
165 |
166 | public String getUserName() {
167 | return userName;
168 | }
169 |
170 | public void setUserName(String userName) {
171 | this.userName = userName;
172 | }
173 |
174 | public String getItemName() {
175 | return itemName;
176 | }
177 |
178 | public void setItemName(String itemName) {
179 | this.itemName = itemName;
180 | }
181 |
182 | public long getTransactionDate() {
183 | return transactionDate;
184 | }
185 |
186 | public void setTransactionDate(long transactionDate) {
187 | this.transactionDate = transactionDate;
188 | }
189 |
190 | public int getQuantity() {
191 | return quantity;
192 | }
193 |
194 | public void setQuantity(int quantity) {
195 | this.quantity = quantity;
196 | }
197 |
198 | public String getUserAddress() {
199 | return userAddress;
200 | }
201 |
202 | public void setUserAddress(String userAddress) {
203 | this.userAddress = userAddress;
204 | }
205 |
206 | public String getGender() {
207 | return gender;
208 | }
209 |
210 | public void setGender(String gender) {
211 | this.gender = gender;
212 | }
213 |
214 | public int getAge() {
215 | return age;
216 | }
217 |
218 | public void setAge(int age) {
219 | this.age = age;
220 | }
221 |
222 | public String getItemAddress() {
223 | return itemAddress;
224 | }
225 |
226 | public void setItemAddress(String itemAddress) {
227 | this.itemAddress = itemAddress;
228 | }
229 |
230 | public String getItemType() {
231 | return itemType;
232 | }
233 |
234 | public void setItemType(String itemType) {
235 | this.itemType = itemType;
236 | }
237 |
238 | public double getItemPrice() {
239 | return itemPrice;
240 | }
241 |
242 | public void setItemPrice(double itemPrice) {
243 | this.itemPrice = itemPrice;
244 | }
245 |
246 | public static OrderUserItem fromOrderUser(OrderUser orderUser) {
247 | OrderUserItem orderUserItem = new OrderUserItem();
248 | if(orderUser == null) {
249 | return orderUserItem;
250 | }
251 | orderUserItem.userName = orderUser.userName;
252 | orderUserItem.itemName = orderUser.itemName;
253 | orderUserItem.transactionDate = orderUser.transactionDate;
254 | orderUserItem.quantity = orderUser.quantity;
255 | orderUserItem.userAddress = orderUser.userAddress;
256 | orderUserItem.gender = orderUser.gender;
257 | orderUserItem.age = orderUser.age;
258 | return orderUserItem;
259 | }
260 |
261 | public static OrderUserItem fromOrderUser(OrderUser orderUser, Item item) {
262 | OrderUserItem orderUserItem = fromOrderUser(orderUser);
263 | if(item == null) {
264 | return orderUserItem;
265 | }
266 | orderUserItem.itemAddress = item.getAddress();
267 | orderUserItem.itemType = item.getType();
268 | orderUserItem.itemPrice = item.getPrice();
269 | return orderUserItem;
270 | }
271 | }
272 |
273 | }
274 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountDSL.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream;
2 |
3 | import java.util.Arrays;
4 | import java.util.Properties;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerConfig;
7 | import org.apache.kafka.common.serialization.Serdes;
8 | import org.apache.kafka.streams.KafkaStreams;
9 | import org.apache.kafka.streams.KeyValue;
10 | import org.apache.kafka.streams.StreamsConfig;
11 | import org.apache.kafka.streams.kstream.KStream;
12 | import org.apache.kafka.streams.kstream.KStreamBuilder;
13 | import org.apache.kafka.streams.kstream.TimeWindows;
14 | import org.apache.kafka.streams.kstream.Windowed;
15 |
16 | public class WordCountDSL {
17 |
18 | public static void main(String[] args) throws InterruptedException {
19 | Properties props = new Properties();
20 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount-dsl");
21 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka0:19092");
22 | props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "zookeeper0:12181/kafka");
23 | props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
24 | props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
25 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
26 |
27 | KStreamBuilder builder = new KStreamBuilder();
28 | KStream<String, String> stream = builder.stream("words");
29 | // stream.flatMapValues(values -> Arrays.asList(values.toLowerCase().split(" ")))
30 | // .map((k, v) -> KeyValue.pair(v, v)).groupByKey().aggregate(
31 | // () -> 0L,
32 | // (aggKey, value, aggregate) -> aggregate + 1L,
33 | // TimeWindows.of(5000).advanceBy(1000),
34 | // Serdes.Long(),
35 | // "Counts1")
36 | // .foreach((Windowed window, Long value) -> {
37 | // System.out.printf("key=%s, value=%s, start=%d, end=%d\n",window.key(), value, window.window().start(), window.window().end());
38 | // });
39 |
40 | KStream<String, String> kStream = stream.flatMapValues(values -> Arrays.asList(values.toLowerCase().split(" ")))
41 | .map((k, v) -> KeyValue.pair(v, v)).groupByKey().aggregate(
42 | () -> 0L,
43 | (aggKey, value, aggregate) -> aggregate + 1L,
44 | TimeWindows.of(5000).advanceBy(5000), // tumbling 5-second windows
45 | Serdes.Long(),
46 | "Counts")
47 | .toStream()
48 | .map((Windowed<String> window, Long value) -> {
49 | return new KeyValue<>(window.key(), String.format("key=%s, value=%s, start=%d, end=%d\n", window.key(), value, window.window().start(), window.window().end()));
50 | });
51 | kStream.to(Serdes.String(), Serdes.String(), "count");
52 |
53 | // KTable kTable = stream.flatMapValues(values -> Arrays.asList(values.toLowerCase().split(" ")))
54 | // .map((k, v) -> KeyValue.pair(v, v)).groupByKey().count("Counts");
55 | // kTable.to(Serdes.String(), Serdes.Long(), "stream-dsl-sink");
56 |
57 | KafkaStreams streams = new KafkaStreams(builder, props);
58 | streams.start();
59 | Thread.sleep(100000L);
60 | streams.close();
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
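The commented-out KTable block above is the non-windowed counterpart; spelled out with generics it would look roughly like this (a sketch against the same 0.10.1.0 DSL, with the store and sink topic names assumed):

    KTable<String, Long> kTable = stream
            .flatMapValues(values -> Arrays.asList(values.toLowerCase().split(" ")))
            .map((k, v) -> KeyValue.pair(v, v))
            .groupByKey()
            .count("Counts2");                                // unwindowed running count per word
    kTable.to(Serdes.String(), Serdes.Long(), "stream-dsl-sink");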
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountProcessor.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream;
2 |
3 | import java.util.Optional;
4 | import java.util.stream.Stream;
5 |
6 | import org.apache.kafka.streams.processor.Processor;
7 | import org.apache.kafka.streams.processor.ProcessorContext;
8 | import org.apache.kafka.streams.state.KeyValueIterator;
9 | import org.apache.kafka.streams.state.KeyValueStore;
10 |
11 | public class WordCountProcessor implements Processor<String, String> {
12 |
13 | private ProcessorContext context;
14 | private KeyValueStore<String, Integer> kvStore;
15 |
16 | @SuppressWarnings("unchecked")
17 | @Override
18 | public void init(ProcessorContext context) {
19 | this.context = context;
20 | this.context.schedule(1000); // in 0.10.1.0 punctuate() is scheduled on stream time, not wall-clock time
21 | this.kvStore = (KeyValueStore<String, Integer>) context.getStateStore("Counts");
22 | }
23 |
24 | @Override
25 | public void process(String key, String value) {
26 | Stream.of(value.toLowerCase().split(" ")).forEach((String word) -> {
27 | Optional<Integer> counts = Optional.ofNullable(kvStore.get(word));
28 | int count = counts.map(wordcount -> wordcount + 1).orElse(1);
29 | kvStore.put(word, count);
30 | });
31 | }
32 |
33 | @Override
34 | public void punctuate(long timestamp) {
35 | try (KeyValueIterator<String, Integer> iterator = this.kvStore.all()) {
36 | iterator.forEachRemaining(entry -> {
37 | context.forward(entry.key, entry.value);
38 | this.kvStore.delete(entry.key);
39 | });
40 | }
41 | context.commit();
42 | }
43 |
44 | @Override
45 | public void close() {
46 | this.kvStore.close(); // Kafka Streams also manages the store lifecycle itself; shown here for completeness
47 | }
48 |
49 | }
50 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/WordCountTopology.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream;
2 |
3 | import java.io.IOException;
4 | import java.util.Properties;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerConfig;
7 | import org.apache.kafka.common.serialization.IntegerSerializer;
8 | import org.apache.kafka.common.serialization.Serdes;
9 | import org.apache.kafka.common.serialization.StringDeserializer;
10 | import org.apache.kafka.common.serialization.StringSerializer;
11 | import org.apache.kafka.streams.KafkaStreams;
12 | import org.apache.kafka.streams.StreamsConfig;
13 | import org.apache.kafka.streams.processor.TopologyBuilder;
14 | import org.apache.kafka.streams.state.Stores;
15 |
16 | public class WordCountTopology {
17 |
18 | public static void main(String[] args) throws IOException {
19 | Properties props = new Properties();
20 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount-processor");
21 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka0:19092");
22 | props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "zookeeper0:12181/kafka");
23 | props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
24 | props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
25 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
26 |
27 | TopologyBuilder builder = new TopologyBuilder();
28 | builder.addSource("SOURCE", new StringDeserializer(), new StringDeserializer(), "words")
29 | .addProcessor("WordCountProcessor", WordCountProcessor::new, "SOURCE")
30 | .addStateStore(Stores.create("Counts").withStringKeys().withIntegerValues().inMemory().build(), "WordCountProcessor")
31 | // .connectProcessorAndStateStores("WordCountProcessor", "Counts")
32 | .addSink("SINK", "count", new StringSerializer(), new IntegerSerializer(), "WordCountProcessor");
33 |
34 | KafkaStreams stream = new KafkaStreams(builder, props);
35 | stream.start();
36 | System.in.read();
37 | stream.close();
38 | stream.cleanUp();
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
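In WordCountTopology above, passing "WordCountProcessor" to addStateStore both registers the "Counts" store and connects it to the processor in one call. The commented-out line hints at the explicit two-step alternative, roughly (same 0.10.1.0 TopologyBuilder API):

    builder.addStateStore(Stores.create("Counts").withStringKeys().withIntegerValues().inMemory().build())
           .connectProcessorAndStateStores("WordCountProcessor", "Counts");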
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/model/Item.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.model;
2 |
3 | public class Item {
4 | private String itemName;
5 | private String address;
6 | private String type;
7 | private double price;
8 |
9 | public Item() {}
10 |
11 | public Item(String itemName, String address, String type, double price) {
12 | this.itemName = itemName;
13 | this.address = address;
14 | this.type = type;
15 | this.price = price;
16 | }
17 |
18 | public String getItemName() {
19 | return itemName;
20 | }
21 |
22 | public void setItemName(String itemName) {
23 | this.itemName = itemName;
24 | }
25 |
26 | public String getAddress() {
27 | return address;
28 | }
29 |
30 | public void setAddress(String address) {
31 | this.address = address;
32 | }
33 |
34 | public String getType() {
35 | return type;
36 | }
37 |
38 | public void setType(String type) {
39 | this.type = type;
40 | }
41 |
42 | public double getPrice() {
43 | return price;
44 | }
45 |
46 | public void setPrice(double price) {
47 | this.price = price;
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/model/Order.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.model;
2 |
3 | public class Order {
4 |
5 | private String userName;
6 | private String itemName;
7 | private long transactionDate;
8 | private int quantity;
9 |
10 | public Order() {}
11 |
12 | public Order(String userName, String itemName, long transactionDate, int quantity) {
13 | this.userName = userName;
14 | this.itemName = itemName;
15 | this.transactionDate = transactionDate;
16 | this.quantity = quantity;
17 | }
18 |
19 | public String getUserName() {
20 | return userName;
21 | }
22 |
23 | public void setUserName(String userName) {
24 | this.userName = userName;
25 | }
26 |
27 | public String getItemName() {
28 | return itemName;
29 | }
30 |
31 | public void setItemName(String itemName) {
32 | this.itemName = itemName;
33 | }
34 |
35 | public long getTransactionDate() {
36 | return transactionDate;
37 | }
38 |
39 | public void setTransactionDate(long transactionDate) {
40 | this.transactionDate = transactionDate;
41 | }
42 |
43 | public int getQuantity() {
44 | return quantity;
45 | }
46 |
47 | public void setQuantity(int quantity) {
48 | this.quantity = quantity;
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/model/User.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.model;
2 |
3 | public class User {
4 | private String name;
5 | private String address;
6 | private String gender;
7 | private int age;
8 |
9 | public User() {}
10 |
11 | public User(String name, String address, String gender, int age) {
12 | this.name = name;
13 | this.address = address;
14 | this.gender = gender;
15 | this.age = age;
16 | }
17 |
18 | public String getName() {
19 | return name;
20 | }
21 |
22 | public void setName(String name) {
23 | this.name = name;
24 | }
25 |
26 | public String getAddress() {
27 | return address;
28 | }
29 |
30 | public void setAddress(String address) {
31 | this.address = address;
32 | }
33 |
34 | public String getGender() {
35 | return gender;
36 | }
37 |
38 | public void setGender(String gender) {
39 | this.gender = gender;
40 | }
41 |
42 | public int getAge() {
43 | return age;
44 | }
45 |
46 | public void setAge(int age) {
47 | this.age = age;
48 | }
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/producer/ItemProducer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.producer;
2 |
3 | import java.io.IOException;
4 | import java.nio.charset.Charset;
5 | import java.util.List;
6 | import java.util.Properties;
7 | import java.util.stream.Collectors;
8 |
9 | import org.apache.commons.io.IOUtils;
10 | import org.apache.commons.lang3.StringUtils;
11 | import org.apache.kafka.clients.producer.KafkaProducer;
12 | import org.apache.kafka.clients.producer.Producer;
13 | import org.apache.kafka.clients.producer.ProducerRecord;
14 | import org.apache.kafka.common.serialization.StringSerializer;
15 |
16 | import com.jasongj.kafka.producer.HashPartitioner;
17 | import com.jasongj.kafka.stream.model.Item;
18 | import com.jasongj.kafka.stream.serdes.GenericSerializer;
19 |
20 | public class ItemProducer {
21 |
22 | public static void main(String[] args) throws Exception {
23 | Properties props = new Properties();
24 | props.put("bootstrap.servers", "kafka0:19092");
25 | props.put("acks", "all");
26 | props.put("retries", 3);
27 | props.put("batch.size", 16384);
28 | props.put("linger.ms", 1);
29 | props.put("buffer.memory", 33554432);
30 | props.put("key.serializer", StringSerializer.class.getName());
31 | props.put("value.serializer", GenericSerializer.class.getName());
32 | props.put("value.serializer.type", Item.class.getName());
33 | props.put("partitioner.class", HashPartitioner.class.getName());
34 |
35 | Producer<String, Item> producer = new KafkaProducer<>(props);
36 | List<Item> items = readItem();
37 | items.forEach((Item item) -> producer.send(new ProducerRecord<>("items", item.getItemName(), item)));
38 | producer.close();
39 | }
40 |
41 | public static List<Item> readItem() throws IOException {
42 | List<String> lines = IOUtils.readLines(OrderProducer.class.getResourceAsStream("/items.csv"), Charset.forName("UTF-8"));
43 | List<Item> items = lines.stream()
44 | .filter(StringUtils::isNoneBlank)
45 | .map((String line) -> line.split("\\s*,\\s*"))
46 | .filter((String[] values) -> values.length == 4)
47 | .map((String[] values) -> new Item(values[0], values[1], values[2], Double.parseDouble(values[3])))
48 | .collect(Collectors.toList());
49 | return items;
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
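Note that "value.serializer.type" above is not a Kafka producer setting: KafkaProducer hands its whole config map to the serializer, and GenericSerializer.configure(...) (see the serdes package) reads this key to learn which POJO class Jackson should bind. A minimal sketch of that handshake in isolation:

    Map<String, Object> configs = new HashMap<>();
    configs.put("value.serializer.type", Item.class.getName());

    GenericSerializer<Item> serializer = new GenericSerializer<>();
    serializer.configure(configs, false); // false = configuring the value (not the key) serializer
    byte[] json = serializer.serialize("items", new Item("iphone", "BJ", "phone", 5388.88));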
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/producer/OrderProducer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.producer;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.nio.charset.Charset;
6 | import java.time.LocalDateTime;
7 | import java.time.ZoneOffset;
8 | import java.time.format.DateTimeFormatter;
9 | import java.util.List;
10 | import java.util.Properties;
11 | import java.util.stream.Collectors;
12 |
13 | import org.apache.commons.io.IOUtils;
14 | import org.apache.commons.lang3.StringUtils;
15 | import org.apache.kafka.clients.producer.KafkaProducer;
16 | import org.apache.kafka.clients.producer.Producer;
17 | import org.apache.kafka.clients.producer.ProducerRecord;
18 | import org.apache.kafka.common.serialization.StringSerializer;
19 |
20 | import com.jasongj.kafka.producer.HashPartitioner;
21 | import com.jasongj.kafka.stream.model.Order;
22 | import com.jasongj.kafka.stream.serdes.GenericSerializer;
23 |
24 | public class OrderProducer {
25 |
26 | private static DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
27 |
28 | public static void main(String[] args) throws Exception {
29 | Properties props = new Properties();
30 | props.put("bootstrap.servers", "kafka0:19092");
31 | props.put("acks", "all");
32 | props.put("retries", 3);
33 | props.put("batch.size", 16384);
34 | props.put("linger.ms", 1);
35 | props.put("buffer.memory", 33554432);
36 | props.put("key.serializer", StringSerializer.class.getName());
37 | props.put("value.serializer", GenericSerializer.class.getName());
38 | props.put("value.serializer.type", Order.class.getName());
39 | props.put("partitioner.class", HashPartitioner.class.getName());
40 |
41 | Producer<String, Order> producer = new KafkaProducer<>(props);
42 | List<Order> orders = readOrder();
43 | orders.forEach((Order order) -> producer.send(new ProducerRecord<>("orders", order.getUserName(), order)));
44 | producer.close();
45 | }
46 |
47 | public static List<Order> readOrder() throws IOException {
48 | InputStream inputStream = OrderProducer.class.getResourceAsStream("/orders.csv");
49 | List<String> lines = IOUtils.readLines(inputStream, Charset.forName("UTF-8"));
50 | List<Order> orders = lines.stream()
51 | .filter(StringUtils::isNoneBlank)
52 | .map((String line) -> line.split("\\s*,\\s*"))
53 | .filter((String[] values) -> values.length == 4)
54 | .map((String[] values) -> new Order(values[0], values[1], LocalDateTime.parse(values[2], dateTimeFormatter).toEpochSecond(ZoneOffset.UTC) * 1000, Integer.parseInt(values[3])))
55 | .collect(Collectors.toList());
56 | return orders;
57 | }
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/producer/UserProducer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.producer;
2 |
3 | import java.io.IOException;
4 | import java.nio.charset.Charset;
5 | import java.util.List;
6 | import java.util.Properties;
7 | import java.util.stream.Collectors;
8 |
9 | import org.apache.commons.io.IOUtils;
10 | import org.apache.commons.lang3.StringUtils;
11 | import org.apache.kafka.clients.producer.KafkaProducer;
12 | import org.apache.kafka.clients.producer.Producer;
13 | import org.apache.kafka.clients.producer.ProducerRecord;
14 | import org.apache.kafka.common.serialization.StringSerializer;
15 |
16 | import com.jasongj.kafka.producer.HashPartitioner;
17 | import com.jasongj.kafka.stream.model.User;
18 | import com.jasongj.kafka.stream.serdes.GenericSerializer;
19 |
20 | public class UserProducer {
21 |
22 | public static void main(String[] args) throws Exception {
23 | Properties props = new Properties();
24 | props.put("bootstrap.servers", "kafka0:19092");
25 | props.put("acks", "all");
26 | props.put("retries", 3);
27 | props.put("batch.size", 16384);
28 | props.put("linger.ms", 1);
29 | props.put("buffer.memory", 33554432);
30 | props.put("key.serializer", StringSerializer.class.getName());
31 | props.put("value.serializer", GenericSerializer.class.getName());
32 | props.put("value.serializer.type", User.class.getName());
33 | props.put("partitioner.class", HashPartitioner.class.getName());
34 |
35 | Producer<String, User> producer = new KafkaProducer<>(props);
36 | List<User> users = readUser();
37 | users.forEach((User user) -> producer.send(new ProducerRecord<>("users", user.getName(), user)));
38 | producer.close();
39 | }
40 |
41 | public static List<User> readUser() throws IOException {
42 | List<String> lines = IOUtils.readLines(OrderProducer.class.getResourceAsStream("/users.csv"), Charset.forName("UTF-8"));
43 | List<User> users = lines.stream()
44 | .filter(StringUtils::isNoneBlank)
45 | .map((String line) -> line.split("\\s*,\\s*"))
46 | .filter((String[] values) -> values.length == 4)
47 | .map((String[] values) -> new User(values[0], values[1], values[2], Integer.parseInt(values[3])))
48 | .collect(Collectors.toList());
49 | return users;
50 | }
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/serdes/GenericDeserializer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.serdes;
2 |
3 | import java.io.IOException;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.common.errors.SerializationException;
7 | import org.apache.kafka.common.serialization.Deserializer;
8 |
9 | import com.fasterxml.jackson.databind.ObjectMapper;
10 |
11 | /**
12 | * This deserializer can deserialize any object of POJO class
13 | *
14 | * @author Jason Guo
15 | *
16 | * @param <T>
17 | * POJO class. The class should have a constructor without any
18 | * arguments and have setter and getter for every member variable
19 | *
20 | */
21 |
22 | public class GenericDeserializer<T> implements Deserializer<T> {
23 |
24 | private Class<T> type;
25 | private ObjectMapper objectMapper = new ObjectMapper();
26 |
27 | public GenericDeserializer() {}
28 |
29 | public GenericDeserializer(Class<T> type) {
30 | this.type = type;
31 | }
32 |
33 | @SuppressWarnings("unchecked")
34 | @Override
35 | public void configure(Map<String, ?> configs, boolean isKey) {
36 | if(type != null) {
37 | return;
38 | }
39 |
40 | String typeProp = isKey ? "key.deserializer.type" : "value.deserializer.type";
41 | String typeName = (String)configs.get(typeProp);
42 | try {
43 | type = (Class<T>) Class.forName(typeName);
44 | } catch (Exception ex) {
45 | throw new SerializationException("Failed to initialize GenericDeserializer for " + typeName, ex);
46 | }
47 | }
48 |
49 | @Override
50 | public T deserialize(String topic, byte[] data) {
51 | if (data == null) {
52 | return null;
53 | }
54 | try {
55 | return this.objectMapper.readValue(data, type);
56 | } catch (IOException ex) {
57 | throw new SerializationException(ex);
58 | }
59 | }
60 |
61 | @Override
62 | public void close() {
63 | }
64 |
65 | }
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/serdes/GenericSerializer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.serdes;
2 |
3 | import java.io.IOException;
4 | import java.util.Map;
5 |
6 | import org.apache.kafka.common.errors.SerializationException;
8 | import org.apache.kafka.common.serialization.Serializer;
9 |
10 | import com.fasterxml.jackson.databind.ObjectMapper;
12 |
13 | /**
14 | * This serializer can serialize any object of POJO class
15 | *
16 | * @author Jason Guo
17 | *
18 | * @param <T>
19 | * POJO class. The class should have a constructor without any
20 | * arguments and have setter and getter for every member variable
21 | *
22 | */
23 |
24 | public class GenericSerializer<T> implements Serializer<T> {
25 |
26 | private Class<T> type;
27 | private ObjectMapper objectMapper = new ObjectMapper();
28 |
29 | public GenericSerializer() {}
30 |
31 | public GenericSerializer(Class<T> type) {
32 | this.type = type;
33 | }
34 |
35 | @SuppressWarnings("unchecked")
36 | @Override
37 | public void configure(Map<String, ?> configs, boolean isKey) {
38 | if(type != null) {
39 | return;
40 | }
41 | String typeProp = isKey ? "key.serializer.type" : "value.serializer.type";
42 | String typeName = (String)configs.get(typeProp);
43 | try {
44 | type = (Class<T>) Class.forName(typeName);
45 | } catch (Exception ex) {
46 | throw new SerializationException("Failed to initialize GenericSerializer for " + typeName, ex);
47 | }
48 | }
49 |
50 | @Override
51 | public byte[] serialize(String topic, T object) {
52 | if (object == null) {
53 | return null;
54 | }
55 | try {
56 | return this.objectMapper.writerFor(type).writeValueAsBytes(object);
57 | } catch (IOException ex) {
58 | throw new SerializationException(ex);
59 | }
60 | }
61 |
62 | @Override
63 | public void close() {
64 | }
65 |
66 | }
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/serdes/SerdesFactory.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.serdes;
2 |
3 | import org.apache.kafka.common.serialization.Serde;
4 | import org.apache.kafka.common.serialization.Serdes;
5 |
6 | /**
7 | * This factory can create Serde for any POJO class
8 | * Be careful, the class should have a constructor without any arguments
9 | * and have setter and getter for every member variable
10 | * @author Jason Guo
11 | *
12 | */
13 |
14 | public class SerdesFactory {
15 |
16 | /**
17 | * @param <T> POJO type. The class should have a constructor without any
18 | * arguments and have setter and getter for every member variable
19 | * @param pojoClass POJO class.
20 | * @return Instance of {@link Serde}
21 | */
22 | public static <T> Serde<T> serdFrom(Class<T> pojoClass) {
23 | return Serdes.serdeFrom(new GenericSerializer<T>(pojoClass), new GenericDeserializer<T>(pojoClass));
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
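With the factory above, a topology can obtain a JSON Serde for any model POJO in one line, e.g. (a sketch against the 0.10.1.0 KStreamBuilder API used elsewhere in this module):

    Serde<Order> orderSerde = SerdesFactory.serdFrom(Order.class);
    KStream<String, Order> orderStream = builder.stream(Serdes.String(), orderSerde, "orders");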
/demokafka.0.10.1.0/src/main/java/com/jasongj/kafka/stream/timeextractor/OrderTimestampExtractor.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka.stream.timeextractor;
2 |
3 | import java.time.LocalDateTime;
4 | import java.time.ZoneOffset;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerRecord;
7 | import org.apache.kafka.streams.processor.TimestampExtractor;
8 |
9 | import com.fasterxml.jackson.databind.JsonNode;
10 | import com.jasongj.kafka.stream.model.Item;
11 | import com.jasongj.kafka.stream.model.Order;
12 | import com.jasongj.kafka.stream.model.User;
13 |
14 | public class OrderTimestampExtractor implements TimestampExtractor {
15 |
16 | @Override
17 | public long extract(ConsumerRecord<Object, Object> record) {
18 | Object value = record.value();
19 | if (record.value() instanceof Order) {
20 | Order order = (Order) value;
21 | return order.getTransactionDate();
22 | }
23 | if (value instanceof JsonNode) {
24 | return ((JsonNode) record.value()).get("transactionDate").longValue();
25 | }
26 | if (value instanceof Item) {
27 | return LocalDateTime.of(2015, 12,11,1,0,10).toEpochSecond(ZoneOffset.UTC) * 1000;
28 | }
29 | if (value instanceof User) {
30 | return LocalDateTime.of(2015, 12,11,0,0,10).toEpochSecond(ZoneOffset.UTC) * 1000;
31 | }
32 | return LocalDateTime.of(2015, 11,10,0,0,10).toEpochSecond(ZoneOffset.UTC) * 1000;
33 | // throw new IllegalArgumentException("OrderTimestampExtractor cannot recognize the record value " + record.value());
34 | }
35 |
36 | }
37 |
--------------------------------------------------------------------------------
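For the extractor to take effect it must be registered in the streams configuration; in 0.10.1.0 that is a single property:

    props.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, OrderTimestampExtractor.class.getName());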
/demokafka.0.10.1.0/src/main/resources/items.csv:
--------------------------------------------------------------------------------
1 | iphone, BJ, phone, 5388.88
2 | ipad, SH, pad, 4888.88
3 | iwatch, SZ, watch, 2668.88
4 | ipod, GZ, pod, 1888.88
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=INFO, stdout
2 |
3 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
6 |
7 | #log4j.appender.fileAppender=org.apache.log4j.FileAppender
8 | #log4j.appender.fileAppender.File=kafka-request.log
9 | #log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout
10 | #log4j.appender.fileAppender.layout.ConversionPattern= %-4r [%t] %-5p %c %x - %m%n
11 |
12 |
13 | # Reduce logging noise from Kafka and other libraries
14 | log4j.logger.kafka=WARN
15 | log4j.logger.org=WARN
16 |
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/resources/orders.csv:
--------------------------------------------------------------------------------
1 | Jack, iphone, 2016-11-11 00:00:01, 3
2 | Jack, ipad, 2016-11-11 00:00:02, 4
3 | Jack, iwatch, 2016-11-11 00:00:03, 5
4 | Jack, ipod, 2016-11-11 00:00:04, 4
5 |
6 | Lily, ipad, 2016-11-11 00:00:06, 3
7 | Lily, iwatch, 2016-11-11 00:00:07, 4
8 | Lily, iphone, 2016-11-11 00:00:08, 2
9 | Lily, ipod, 2016-11-11 00:00:09, 3
10 |
11 | Mike, ipad, 2016-11-11 00:00:11, 2
12 | Mike, iwatch, 2016-11-11 00:00:12, 3
13 | Mike, iphone, 2016-11-11 00:00:13, 4
14 | Mike, ipod, 2016-11-11 00:00:14, 3
15 |
16 | Lucy, ipod, 2016-11-11 00:00:16, 3
17 | Lucy, ipad, 2016-11-11 00:00:17, 4
18 | Lucy, iwatch, 2016-11-11 00:00:18, 3
19 | Lucy, iphone, 2016-11-11 00:00:19, 5
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/main/resources/users.csv:
--------------------------------------------------------------------------------
1 | Jack, BJ, male, 23
2 | Lily, SH, female, 21
3 | Mike, SZ, male, 22
4 | Lucy, GZ, female, 20
--------------------------------------------------------------------------------
/demokafka.0.10.1.0/src/test/java/com/jasongj/kafka/AppTest.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import junit.framework.Test;
4 | import junit.framework.TestCase;
5 | import junit.framework.TestSuite;
6 |
7 | /**
8 | * Unit test for simple App.
9 | */
10 | public class AppTest
11 | extends TestCase
12 | {
13 | /**
14 | * Create the test case
15 | *
16 | * @param testName name of the test case
17 | */
18 | public AppTest( String testName )
19 | {
20 | super( testName );
21 | }
22 |
23 | /**
24 | * @return the suite of tests being tested
25 | */
26 | public static Test suite()
27 | {
28 | return new TestSuite( AppTest.class );
29 | }
30 |
31 | /**
32 | * Rigorous Test :-)
33 | */
34 | public void testApp()
35 | {
36 | assertTrue( true );
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 | <modelVersion>4.0.0</modelVersion>
4 |
5 | <artifactId>demokafka.0.8.2.2</artifactId>
6 | <version>0.8.2.2</version>
7 | <packaging>jar</packaging>
8 |
9 | <name>demokafka.0.8.2.2</name>
10 | <url>http://www.jasongj.com</url>
11 |
12 | <parent>
13 | <groupId>com.jasongj.kafka</groupId>
14 | <artifactId>demokafka</artifactId>
15 | <version>1.0.0</version>
16 | <relativePath>../pom.xml</relativePath>
17 | </parent>
18 |
19 | <dependencies>
20 | <dependency>
21 | <groupId>org.apache.kafka</groupId>
22 | <artifactId>kafka_2.11</artifactId>
23 | <version>0.8.2.1</version>
24 | </dependency>
25 | <dependency>
26 | <groupId>org.apache.kafka</groupId>
27 | <artifactId>kafka-clients</artifactId>
28 | <version>0.8.2.2</version>
29 | </dependency>
30 | </dependencies>
31 |
32 | </project>
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/DemoHighLevelConsumer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import java.util.HashMap;
4 | import java.util.List;
5 | import java.util.Map;
6 | import java.util.Properties;
7 |
8 | import kafka.consumer.Consumer;
9 | import kafka.consumer.ConsumerConfig;
10 | import kafka.consumer.ConsumerIterator;
11 | import kafka.consumer.KafkaStream;
12 | import kafka.javaapi.consumer.ConsumerConnector;
13 | import kafka.message.MessageAndMetadata;
14 |
15 | public class DemoHighLevelConsumer {
16 |
17 | public static void main(String[] args) {
18 | args = new String[] { "zookeeper0:2181/kafka", "topic1", "group2", "consumer1" }; // hard-coded demo arguments; remove this line to honor the real command line
19 | if (args == null || args.length != 4) {
20 | System.err.println("Usage:\n\tjava -jar kafka_consumer.jar ${zookeeper_list} ${topic_name} ${group_name} ${consumer_id}");
21 | System.exit(1);
22 | }
23 | String zk = args[0];
24 | String topic = args[1];
25 | String groupid = args[2];
26 | String consumerid = args[3];
27 | Properties props = new Properties();
28 | props.put("zookeeper.connect", zk);
29 | props.put("group.id", groupid);
30 | props.put("client.id", "test");
31 | props.put("consumer.id", consumerid);
32 | props.put("auto.offset.reset", "largest");
33 | props.put("auto.commit.enable", "false");
34 | props.put("auto.commit.interval.ms", "60000");
35 |
36 | ConsumerConfig consumerConfig = new ConsumerConfig(props);
37 | ConsumerConnector consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
38 |
39 | Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
40 | topicCountMap.put(topic, 1);
41 | Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);
42 |
43 | KafkaStream<byte[], byte[]> stream1 = consumerMap.get(topic).get(0);
44 | ConsumerIterator<byte[], byte[]> iterator = stream1.iterator();
45 | while (iterator.hasNext()) {
46 | MessageAndMetadata<byte[], byte[]> messageAndMetadata = iterator.next();
47 | String message = String.format(
48 | "Topic:%s, GroupID:%s, Consumer ID:%s, PartitionID:%s, Offset:%s, Message Key:%s, Message Payload: %s",
49 | messageAndMetadata.topic(), groupid, consumerid, messageAndMetadata.partition(),
50 | messageAndMetadata.offset(), new String(messageAndMetadata.key()),
51 | new String(messageAndMetadata.message()));
52 | System.out.println(message);
53 | consumerConnector.commitOffsets();
54 | }
55 | }
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/DemoLowLevelConsumer.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import java.nio.ByteBuffer;
4 |
5 | import kafka.api.FetchRequest;
6 | import kafka.api.FetchRequestBuilder;
7 | import kafka.javaapi.FetchResponse;
8 | import kafka.javaapi.consumer.SimpleConsumer;
9 | import kafka.javaapi.message.ByteBufferMessageSet;
10 | import kafka.message.MessageAndOffset;
11 |
12 | public class DemoLowLevelConsumer {
13 |
14 | public static void main(String[] args) throws Exception {
15 | final String topic = "topic1";
16 | String clientID = "DemoLowLevelConsumer1";
17 | SimpleConsumer simpleConsumer = new SimpleConsumer("kafka0", 9092, 100000, 64 * 1000000, clientID);
18 | FetchRequest req = new FetchRequestBuilder().clientId(clientID)
19 | .addFetch(topic, 0, 0L, 50).addFetch(topic, 1, 0L, 5000).addFetch(topic, 2, 0L, 1000000).build();
20 | FetchResponse fetchResponse = simpleConsumer.fetch(req);
21 | ByteBufferMessageSet messageSet = (ByteBufferMessageSet) fetchResponse.messageSet(topic, 0);
22 | for (MessageAndOffset messageAndOffset : messageSet) {
23 | ByteBuffer payload = messageAndOffset.message().payload();
24 | long offset = messageAndOffset.offset();
25 | byte[] bytes = new byte[payload.limit()];
26 | payload.get(bytes);
27 | System.out.println("Offset:" + offset + ", Payload:" + new String(bytes, "UTF-8"));
28 | }
29 | }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
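The demo above issues one fetch per partition starting at offset 0; with the SimpleConsumer API the client must track its own position, typically advancing to MessageAndOffset.nextOffset() after each message. A rough sketch of the polling loop for a single partition (same 0.8 API, error and leader handling omitted):

    long offset = 0L;
    while (true) {
        FetchRequest request = new FetchRequestBuilder().clientId(clientID)
                .addFetch(topic, 0, offset, 1000000).build();
        FetchResponse response = simpleConsumer.fetch(request);
        for (MessageAndOffset messageAndOffset : (ByteBufferMessageSet) response.messageSet(topic, 0)) {
            offset = messageAndOffset.nextOffset(); // where the next fetch for this partition should start
        }
    }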
/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/HashPartitioner.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import kafka.producer.Partitioner;
4 | import kafka.utils.VerifiableProperties;
5 |
6 | public class HashPartitioner implements Partitioner {
7 |
8 | public HashPartitioner(VerifiableProperties verifiableProperties) {}
9 |
10 | @Override
11 | public int partition(Object key, int numPartitions) {
12 | if (key instanceof Integer) {
13 | return (((Integer) key).intValue() & Integer.MAX_VALUE) % numPartitions; // mask the sign bit; Math.abs(Integer.MIN_VALUE) stays negative
14 | }
15 | return (key.hashCode() & Integer.MAX_VALUE) % numPartitions;
16 | }
17 | }
18 |
19 |
20 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/ProducerDemo.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import java.util.Properties;
4 |
5 | import kafka.javaapi.producer.Producer;
6 | import kafka.producer.KeyedMessage;
7 | import kafka.producer.ProducerConfig;
8 | import kafka.serializer.StringEncoder;
9 |
10 | public class ProducerDemo {
11 |
12 | static private final String TOPIC = "topic1";
13 | static private final String BROKER_LIST = "kafka0:9092";
14 |
15 |
16 | public static void main(String[] args) throws Exception {
17 | Producer<String, String> producer = initProducer();
18 | sendOne(producer, TOPIC);
19 | }
20 |
21 | private static Producer<String, String> initProducer() {
22 | Properties props = new Properties();
23 | props.put("metadata.broker.list", BROKER_LIST);
24 | // props.put("serializer.class", "kafka.serializer.StringEncoder");
25 | props.put("serializer.class", StringEncoder.class.getName());
26 | props.put("partitioner.class", HashPartitioner.class.getName());
27 | // props.put("compression.codec", "0");
28 | props.put("producer.type", "sync");
29 | props.put("batch.num.messages", "1");
30 | props.put("queue.buffering.max.messages", "1000000");
31 | props.put("queue.enqueue.timeout.ms", "20000000");
32 |
33 |
34 | ProducerConfig config = new ProducerConfig(props);
35 | Producer<String, String> producer = new Producer<String, String>(config);
36 | return producer;
37 | }
38 |
39 | public static void sendOne(Producer<String, String> producer, String topic) throws InterruptedException {
40 | boolean sleepFlag = false;
41 | // send nine keyed messages; keys "0".."8" drive the HashPartitioner configured above
42 | for (int i = 0; i <= 8; i++) {
43 | KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic, String.valueOf(i), "test " + i);
44 | producer.send(message);
45 | if (sleepFlag) Thread.sleep(5000);
46 | }
47 | producer.close();
48 | }
70 |
71 | }
72 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/main/java/com/jasongj/kafka/RoundRobinPartitioner.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import java.util.concurrent.atomic.AtomicLong;
4 |
5 | import kafka.producer.Partitioner;
6 | import kafka.utils.VerifiableProperties;
7 |
8 | public class RoundRobinPartitioner implements Partitioner {
9 |
10 | private static AtomicLong next = new AtomicLong();
11 |
12 | public RoundRobinPartitioner(VerifiableProperties verifiableProperties) {}
13 |
14 | @Override
15 | public int partition(Object key, int numPartitions) {
16 | long nextIndex = next.incrementAndGet();
17 | return (int) (nextIndex % numPartitions); // modulo before the int cast, so the result never goes negative
18 | }
19 | }
20 |
21 |
22 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=INFO, stdout
2 |
3 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
4 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
5 | log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
6 |
7 | #log4j.appender.fileAppender=org.apache.log4j.FileAppender
8 | #log4j.appender.fileAppender.File=kafka-request.log
9 | #log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout
10 | #log4j.appender.fileAppender.layout.ConversionPattern= %-4r [%t] %-5p %c %x - %m%n
11 |
12 |
13 | # Reduce logging noise from Kafka and other libraries
14 | log4j.logger.kafka=WARN
15 | log4j.logger.org=WARN
16 |
--------------------------------------------------------------------------------
/demokafka.0.8.2.2/src/test/java/com/jasongj/kafka/AppTest.java:
--------------------------------------------------------------------------------
1 | package com.jasongj.kafka;
2 |
3 | import junit.framework.Test;
4 | import junit.framework.TestCase;
5 | import junit.framework.TestSuite;
6 |
7 | /**
8 | * Unit test for simple App.
9 | */
10 | public class AppTest
11 | extends TestCase
12 | {
13 | /**
14 | * Create the test case
15 | *
16 | * @param testName name of the test case
17 | */
18 | public AppTest( String testName )
19 | {
20 | super( testName );
21 | }
22 |
23 | /**
24 | * @return the suite of tests being tested
25 | */
26 | public static Test suite()
27 | {
28 | return new TestSuite( AppTest.class );
29 | }
30 |
31 | /**
32 | * Rigorous Test :-)
33 | */
34 | public void testApp()
35 | {
36 | assertTrue( true );
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 | <modelVersion>4.0.0</modelVersion>
4 |
5 | <groupId>com.jasongj.kafka</groupId>
6 | <artifactId>demokafka</artifactId>
7 | <version>1.0.0</version>
8 | <packaging>pom</packaging>
9 |
10 | <name>demokafka</name>
11 | <url>http://www.jasongj.com</url>
12 |
13 | <modules>
14 | <module>demokafka.0.8.2.2</module>
15 | <module>demokafka.0.10.1.0</module>
16 | </modules>
17 |
18 | <properties>
19 | <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
20 | <maven.compiler.source>1.8</maven.compiler.source>
21 | <maven.compiler.target>1.8</maven.compiler.target>
22 | </properties>
23 |
24 | <repositories>
25 | <repository>
26 | <id>Aliyun</id>
27 | <name>Aliyun Repository</name>
28 | <url>https://maven.aliyun.com/nexus/content/groups/public/</url>
29 | <releases>
30 | <enabled>true</enabled>
31 | </releases>
32 | <snapshots>
33 | <enabled>false</enabled>
34 | </snapshots>
35 | </repository>
36 | </repositories>
37 |
38 | <pluginRepositories>
39 | <pluginRepository>
40 | <id>Aliyun</id>
41 | <name>Aliyun Repository</name>
42 | <url>https://maven.aliyun.com/nexus/content/groups/public/</url>
43 | <releases>
44 | <enabled>true</enabled>
45 | </releases>
46 | <snapshots>
47 | <enabled>false</enabled>
48 | </snapshots>
49 | </pluginRepository>
50 | </pluginRepositories>
51 |
52 | <dependencies>
53 | <dependency>
54 | <groupId>junit</groupId>
55 | <artifactId>junit</artifactId>
56 | <version>4.11</version>
57 | <scope>test</scope>
58 | </dependency>
59 | <dependency>
60 | <groupId>com.google.guava</groupId>
61 | <artifactId>guava</artifactId>
62 | <version>14.0-rc1</version>
63 | </dependency>
64 | <dependency>
65 | <groupId>org.scala-lang</groupId>
66 | <artifactId>scala-library</artifactId>
67 | <version>2.11.5</version>
68 | </dependency>
69 | <dependency>
70 | <groupId>org.apache.zookeeper</groupId>
71 | <artifactId>zookeeper</artifactId>
72 | <version>3.4.6</version>
73 | </dependency>
74 | <dependency>
75 | <groupId>org.xerial.snappy</groupId>
76 | <artifactId>snappy-java</artifactId>
77 | <version>1.1.1.6</version>
78 | </dependency>
79 | <dependency>
80 | <groupId>org.apache.commons</groupId>
81 | <artifactId>commons-lang3</artifactId>
82 | <version>3.5</version>
83 | </dependency>
84 | </dependencies>
85 |
86 | <build>
87 | <plugins>
88 | <plugin>
89 | <artifactId>maven-assembly-plugin</artifactId>
90 | <executions>
91 | <execution>
92 | <id>jar-dependencies</id>
93 | <phase>package</phase>
94 | <goals>
95 | <goal>single</goal>
96 | </goals>
97 | <configuration>
98 | <archive>
99 | <manifest>
100 | <addClasspath>true</addClasspath>
101 | <mainClass>com.jasongj.kafka.DemoConsumer</mainClass>
102 | </manifest>
103 | </archive>
104 | <descriptorRefs>
105 | <descriptorRef>jar-with-dependencies</descriptorRef>
106 | </descriptorRefs>
107 | </configuration>
108 | </execution>
109 | </executions>
110 | </plugin>
111 | </plugins>
112 | </build>
113 |
114 | </project>
--------------------------------------------------------------------------------