├── Kafka-Message-Server-Example
├── config
│ ├── Build.scala
│ ├── KafkaProject.scala
│ ├── build.sbt
│ ├── java-mail-consumer-demo.sh
│ ├── java-mail-content-producer.sh
│ ├── java-mail-producer-consumer-demo.sh
│ └── java-mail-producer-demo.sh
├── kafka_2.8.0-0.8.0.jar
├── pom.xml
└── src
│ └── main
│ ├── java
│ └── com
│ │ └── kafka
│ │ └── message
│ │ └── server
│ │ └── example
│ │ ├── core
│ │ ├── KafkaMailConsumer.java
│ │ ├── KafkaMailProducer.java
│ │ └── KafkaMailProperties.java
│ │ ├── launch
│ │ ├── MailConsumerDemo.java
│ │ ├── MailConsumerProducerDemo.java
│ │ └── MailProducerDemo.java
│ │ ├── other
│ │ └── launch
│ │ │ └── CreateFile.java
│ │ └── util
│ │ ├── KafkaExampleCommandLineHandler.java
│ │ ├── KafkaExampleFileUtil.java
│ │ ├── KafkaExampleProperty.java
│ │ └── KafkaExamplePropertyKey.java
│ └── resources
│ └── kafka-message-server-example-properties.prop
├── README.md
├── commons-cli-1.1.jar
└── kafka-message-server-example-0.8.0.jar
/Kafka-Message-Server-Example/config/Build.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | import sbt._
19 | import Keys._
20 | import Process._
21 |
22 | import scala.xml.{Node, Elem}
23 | import scala.xml.transform.{RewriteRule, RuleTransformer}
24 |
// sbt build definition for Kafka 0.8, written in the pre-0.13 "Build trait"
// style (deprecated `<<=` operator, SettingKey/TaskKey DSL).
//
// NOTE(review): this copy is corrupted — the XML literals originally assigned
// to `pomExtra` and to both `ivyXML` settings were stripped by markup removal,
// leaving bare text and blank lines after each `:=`. The file cannot compile
// as-is; recover the XML from the upstream Kafka 0.8 Build.scala.
object KafkaBuild extends Build {
// Build number read from -Dbuild.number; appended to `version` when non-empty.
val buildNumber = SettingKey[String]("build-number", "Build number defaults to $BUILD_NUMBER environment variable")
val releaseName = SettingKey[String]("release-name", "the full name of this release")
// Settings shared by every sub-project (publishing, compiler flags, deps).
val commonSettings = Seq(
organization := "org.apache.kafka",
// NOTE(review): the lines below were the <parent>/<licenses> XML elements of
// `pomExtra` (org.apache parent pom, Apache 2 license) — tags were stripped.
pomExtra :=

org.apache
apache
10



Apache 2
http://www.apache.org/licenses/LICENSE-2.0.txt
repo

,
scalacOptions ++= Seq("-deprecation", "-unchecked", "-g:none"),
crossScalaVersions := Seq("2.8.0","2.8.2", "2.9.1", "2.9.2", "2.10.1"),
// Pick version-specific sources: exclude 2.9+ sources on 2.8 and vice versa.
excludeFilter in unmanagedSources <<= scalaVersion(v => if (v.startsWith("2.8")) "*_2.9+.scala" else "*_2.8.scala"),
scalaVersion := "2.8.0",
version := "0.8.0",
publishTo := Some("Apache Maven Repo" at "https://repository.apache.org/service/local/staging/deploy/maven2"),
credentials += Credentials(Path.userHome / ".m2" / ".credentials"),
buildNumber := System.getProperty("build.number", ""),
version <<= (buildNumber, version) { (build, version) => if (build == "") version else version + "+" + build},
releaseName <<= (name, version, scalaVersion) {(name, version, scalaVersion) => name + "_" + scalaVersion + "-" + version},
javacOptions in compile ++= Seq("-Xlint:unchecked", "-source", "1.5"),
javacOptions in doc ++= Seq("-source", "1.5"),
parallelExecution in Test := false, // Prevent tests from overrunning each other
publishArtifact in Test := true,
libraryDependencies ++= Seq(
"log4j" % "log4j" % "1.2.15" exclude("javax.jms", "jms"),
"net.sf.jopt-simple" % "jopt-simple" % "3.2",
"org.slf4j" % "slf4j-simple" % "1.6.4"
),
// The issue is going from log4j 1.2.14 to 1.2.15, the developers added some features which required
// some dependencies on various sun and javax packages.
// NOTE(review): the `ivyXML` dependency-override XML (log4j/jms exclusions)
// was stripped here — only blank lines remain.
ivyXML :=









,
mappings in packageBin in Compile += file("LICENSE") -> "LICENSE",
mappings in packageBin in Compile += file("NOTICE") -> "NOTICE"
)

// Extra settings for the Hadoop contrib modules (producer/consumer).
val hadoopSettings = Seq(
javacOptions in compile ++= Seq("-Xlint:deprecation"),
libraryDependencies ++= Seq(
"org.apache.avro" % "avro" % "1.4.0",
"org.apache.pig" % "pig" % "0.8.0",
"commons-logging" % "commons-logging" % "1.0.4",
"org.codehaus.jackson" % "jackson-core-asl" % "1.5.5",
"org.codehaus.jackson" % "jackson-mapper-asl" % "1.5.5",
"org.apache.hadoop" % "hadoop-core" % "0.20.2"
),
// NOTE(review): this `ivyXML` literal was also stripped by markup removal.
ivyXML :=














)


// Task that shells out to Apache RAT for license-header auditing.
val runRat = TaskKey[Unit]("run-rat-task", "Runs Apache rat on Kafka")
val runRatTask = runRat := {
"bin/run-rat.sh" !
}

// Assembles target/RELEASE/<releaseName> with the core jar, third-party libs,
// config, and bin scripts (scripts marked executable).
val release = TaskKey[Unit]("release", "Creates a deployable release directory file with dependencies, config, and scripts.")
val releaseTask = release <<= ( packageBin in (core, Compile), dependencyClasspath in (core, Runtime), exportedProducts in Compile,
target, releaseName in core ) map { (packageBin, deps, products, target, releaseName) =>
val jarFiles = deps.files.filter(f => !products.files.contains(f) && f.getName.endsWith(".jar"))
val destination = target / "RELEASE" / releaseName
IO.copyFile(packageBin, destination / packageBin.getName)
IO.copyFile(file("LICENSE"), destination / "LICENSE")
IO.copyFile(file("NOTICE"), destination / "NOTICE")
IO.copy(jarFiles.map { f => (f, destination / "libs" / f.getName) })
IO.copyDirectory(file("config"), destination / "config")
IO.copyDirectory(file("bin"), destination / "bin")
for {file <- (destination / "bin").listFiles} { file.setExecutable(true, true) }
}

// Zips the release directory produced by `release`.
val releaseZip = TaskKey[Unit]("release-zip", "Creates a deployable zip file with dependencies, config, and scripts.")
val releaseZipTask = releaseZip <<= (release, target, releaseName in core) map { (release, target, releaseName) =>
val zipPath = target / "RELEASE" / "%s.zip".format(releaseName)
IO.delete(zipPath)
IO.zip((target/"RELEASE" ** releaseName ***) x relativeTo(target/"RELEASE"), zipPath)
}

// Tars the release directory; requires a native `tar` on the PATH.
val releaseTar = TaskKey[Unit]("release-tar", "Creates a deployable tar.gz file with dependencies, config, and scripts.")
val releaseTarTask = releaseTar <<= ( release, target, releaseName in core) map { (release, target, releaseName) =>
Process(Seq("tar", "czf", "%s.tar.gz".format(releaseName), releaseName), target / "RELEASE").! match {
case 0 => ()
case n => sys.error("Failed to run native tar application!")
}
}

// Root aggregate plus the individual sub-projects; all depend on `core`.
lazy val kafka = Project(id = "Kafka", base = file(".")).aggregate(core, examples, message, contrib, perf).settings((commonSettings ++
runRatTask ++ releaseTask ++ releaseZipTask ++ releaseTarTask): _*)
lazy val core = Project(id = "core", base = file("core")).settings(commonSettings: _*)
lazy val examples = Project(id = "java-examples", base = file("examples")).settings(commonSettings :_*) dependsOn (core)
lazy val message = Project(id = "java-message-server-example", base = file("message-server-example")).settings(commonSettings :_*) dependsOn (core)
lazy val perf = Project(id = "perf", base = file("perf")).settings((Seq(name := "kafka-perf") ++ commonSettings):_*) dependsOn (core)

lazy val contrib = Project(id = "contrib", base = file("contrib")).aggregate(hadoopProducer, hadoopConsumer).settings(commonSettings :_*)
lazy val hadoopProducer = Project(id = "hadoop-producer", base = file("contrib/hadoop-producer")).settings(hadoopSettings ++ commonSettings: _*) dependsOn (core)
lazy val hadoopConsumer = Project(id = "hadoop-consumer", base = file("contrib/hadoop-consumer")).settings(hadoopSettings ++ commonSettings: _*) dependsOn (core)

}
153 |
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/KafkaProject.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | import sbt._
19 | import scala.xml.{Node, Elem}
20 | import scala.xml.transform.{RewriteRule, RuleTransformer}
21 |
// sbt 0.7-era project definition (ProjectInfo / DefaultProject / ParentProject
// API, predating .sbt builds); kept alongside the newer Build.scala.
//
// NOTE(review): this copy is corrupted — every `override def ivyXML =` below
// lost its XML literal to markup stripping (only blank lines follow), so the
// file cannot compile as-is; recover the XML from the upstream source.
class KafkaProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
override def managedStyle = ManagedStyle.Maven
val publishTo = "Maven Repo" at "http://maven/content/repositories/repository.snapshots"
Credentials(Path.userHome / ".m2" / ".credentials", log)

// Sub-projects; all but core/contrib depend on core.
lazy val core = project("core", "core-kafka", new CoreKafkaProject(_))
lazy val examples = project("examples", "java-examples", new KafkaExamplesProject(_), core)
lazy val message = project("message-server-example", "java-message-server-example", new KafkaMessageExampleProject(_), core)
lazy val contrib = project("contrib", "contrib", new ContribProject(_))
lazy val perf = project("perf", "perf", new KafkaPerfProject(_))

lazy val releaseZipTask = core.packageDistTask

val releaseZipDescription = "Compiles every sub project, runs unit tests, creates a deployable release zip file with dependencies, config, and scripts."
// NOTE(review): a comma appears to be missing after `message.messagePackageAction`
// (the next line begins with `contrib.producerPackageAction`) — likely lost in
// editing/extraction; confirm against the original file.
lazy val releaseZip = releaseZipTask dependsOn(core.corePackageAction, core.test, examples.examplesPackageAction, message.messagePackageAction
contrib.producerPackageAction, contrib.consumerPackageAction) describedAs releaseZipDescription

val runRatDescription = "Runs Apache rat on Kafka"
// NOTE(review): Runtime.exec() launches the script and returns immediately;
// this task neither waits for completion nor checks the exit status.
lazy val runRatTask = task {
Runtime.getRuntime().exec("bin/run-rat.sh")
None
} describedAs runRatDescription

val rat = "org.apache.rat" % "apache-rat" % "0.8"

// The main Scala core project: packaging, dist assembly, classpath manifest.
class CoreKafkaProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject with CoreDependencies with TestDependencies with CompressionDependencies {
val corePackageAction = packageAllAction

//The issue is going from log4j 1.2.14 to 1.2.15, the developers added some features which required
// some dependencies on various sun and javax packages.
// NOTE(review): ivyXML literal stripped here (was the log4j exclusion block).
override def ivyXML =












override def organization = "org.apache"
override def filterScalaJars = false

// build the executable jar's classpath.
// (why is it necessary to explicitly remove the target/{classes,resources} paths? hm.)
def dependentJars = {
val jars =
publicClasspath +++ mainDependencies.scalaJars --- mainCompilePath --- mainResourcesOutputPath
if (jars.get.find { jar => jar.name.startsWith("scala-library-") }.isDefined) {
// workaround bug in sbt: if the compiler is explicitly included, don't include 2 versions
// of the library.
jars --- jars.filter { jar =>
jar.absolutePath.contains("/boot/") && jar.name == "scala-library.jar"
}
} else {
jars
}
}

def dependentJarNames = dependentJars.getFiles.map(_.getName).filter(_.endsWith(".jar"))
override def manifestClassPath = Some(dependentJarNames.map { "libs/" + _ }.mkString(" "))

def distName = (artifactID + "-" + projectVersion.value)
def distPath = "dist" / distName ##

def configPath = "config" ##
def configOutputPath = distPath / "config"

def binPath = "bin" ##
def binOutputPath = distPath / "bin"

def distZipName = {
"%s-%s.zip".format(artifactID, projectVersion.value)
}

// Assembles dist/<name> (jar, libs, config, bin) then zips it. The orElse
// chain short-circuits on the first step that returns an error Option.
lazy val packageDistTask = task {
distPath.asFile.mkdirs()
(distPath / "libs").asFile.mkdirs()
binOutputPath.asFile.mkdirs()
configOutputPath.asFile.mkdirs()

FileUtilities.copyFlat(List(jarPath), distPath, log).left.toOption orElse
FileUtilities.copyFlat(dependentJars.get, distPath / "libs", log).left.toOption orElse
FileUtilities.copy((configPath ***).get, configOutputPath, log).left.toOption orElse
FileUtilities.copy((binPath ***).get, binOutputPath, log).left.toOption orElse
FileUtilities.zip((("dist" / distName) ##).get, "dist" / distZipName, true, log)
None
}

val PackageDistDescription = "Creates a deployable zip file with dependencies, config, and scripts."
lazy val packageDist = packageDistTask dependsOn(`package`, `test`) describedAs PackageDistDescription

val cleanDist = cleanTask("dist" ##) describedAs("Erase any packaged distributions.")
override def cleanAction = super.cleanAction dependsOn(cleanDist)

override def javaCompileOptions = super.javaCompileOptions ++
List(JavaCompileOption("-source"), JavaCompileOption("1.5"))

override def packageAction = super.packageAction dependsOn (testCompileAction, packageTestAction)

}

class KafkaPerfProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject
with CoreDependencies {
val perfPackageAction = packageAllAction
val dependsOnCore = core

//The issue is going from log4j 1.2.14 to 1.2.15, the developers added some features which required
// some dependencies on various sun and javax packages.
// NOTE(review): ivyXML literal stripped here.
override def ivyXML =








override def artifactID = "kafka-perf"
override def filterScalaJars = false
override def javaCompileOptions = super.javaCompileOptions ++
List(JavaCompileOption("-Xlint:unchecked"))
}

class KafkaExamplesProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject
with CoreDependencies {
val examplesPackageAction = packageAllAction
val dependsOnCore = core
//The issue is going from log4j 1.2.14 to 1.2.15, the developers added some features which required
// some dependencies on various sun and javax packages.
// NOTE(review): ivyXML literal stripped here.
override def ivyXML =








override def artifactID = "kafka-java-examples"
override def filterScalaJars = false
override def javaCompileOptions = super.javaCompileOptions ++
List(JavaCompileOption("-Xlint:unchecked"))
}

class KafkaMessageExampleProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject
with CoreDependencies {
val messagePackageAction = packageAllAction
val dependsOnCore = core
//The issue is going from log4j 1.2.14 to 1.2.15, the developers added some features which required
// some dependencies on various sun and javax packages.
// NOTE(review): ivyXML literal stripped here.
override def ivyXML =








override def artifactID = "kafka-message-server-example"
override def filterScalaJars = false
override def javaCompileOptions = super.javaCompileOptions ++
List(JavaCompileOption("-Xlint:unchecked"))
}

// Parent project for the Hadoop producer/consumer contrib modules.
class ContribProject(info: ProjectInfo) extends ParentProject(info) with IdeaProject {
lazy val hadoopProducer = project("hadoop-producer", "hadoop producer",
new HadoopProducerProject(_), core)
lazy val hadoopConsumer = project("hadoop-consumer", "hadoop consumer",
new HadoopConsumerProject(_), core)

val producerPackageAction = hadoopProducer.producerPackageAction
val consumerPackageAction = hadoopConsumer.consumerPackageAction

class HadoopProducerProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject
with CoreDependencies with HadoopDependencies {
val producerPackageAction = packageAllAction
// NOTE(review): ivyXML literal stripped here.
override def ivyXML =















}

class HadoopConsumerProject(info: ProjectInfo) extends DefaultProject(info)
with IdeaProject
with CoreDependencies {
val consumerPackageAction = packageAllAction
// NOTE(review): ivyXML literal stripped here.
override def ivyXML =
















val jodaTime = "joda-time" % "joda-time" % "1.6"
}
}

// Dependency mix-in traits (sbt 0.7 style: each val is a managed dependency).
trait TestDependencies {
val easymock = "org.easymock" % "easymock" % "3.0" % "test"
val junit = "junit" % "junit" % "4.1" % "test"
val scalaTest = "org.scalatest" % "scalatest" % "1.2" % "test"
}

trait CoreDependencies {
val log4j = "log4j" % "log4j" % "1.2.15"
val jopt = "net.sf.jopt-simple" % "jopt-simple" % "3.2"
val slf4jSimple = "org.slf4j" % "slf4j-simple" % "1.6.4"
}

trait HadoopDependencies {
val avro = "org.apache.avro" % "avro" % "1.4.0"
val commonsLogging = "commons-logging" % "commons-logging" % "1.0.4"
val jacksonCore = "org.codehaus.jackson" % "jackson-core-asl" % "1.5.5"
val jacksonMapper = "org.codehaus.jackson" % "jackson-mapper-asl" % "1.5.5"
val hadoop = "org.apache.hadoop" % "hadoop-core" % "0.20.2"
}

trait CompressionDependencies {
val snappy = "org.xerial.snappy" % "snappy-java" % "1.0.4.1"
}
}
275 |
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/build.sbt:
--------------------------------------------------------------------------------
import sbt._
import Keys._
import AssemblyKeys._

// Module build for the Kafka message-server example, packaged as a fat jar
// via the sbt-assembly plugin.
name := "kafka-message-server-example"

// Java-only module: publish plain artifact names without a Scala-version suffix.
crossPaths := false

libraryDependencies ++= Seq(
  "commons-cli" % "commons-cli" % "1.1"
)

// NOTE(review): removed the original no-op self-assignment
// `libraryDependencies <<= (libraryDependencies)` — it rebound the setting to
// its own value and had no effect.

// Bring in sbt-assembly's default settings so the `assembly` task is available.
assemblySettings
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/java-mail-consumer-demo.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launches the example mail consumer (MailConsumerDemo) via kafka-run-class.sh,
# forwarding all command-line arguments.
export KAFKA_HEAP_OPTS="-Xmx512M"
# Quote the script directory and use "$@" so install paths containing spaces
# and multi-word arguments survive word splitting.
exec "$(dirname "$0")"/kafka-run-class.sh com.kafka.message.server.example.launch.MailConsumerDemo "$@"
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/java-mail-content-producer.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Builds a classpath from the example's jars and runs CreateFile, which
# generates sample mail-content files for the producer to pick up.
base_dir=$(dirname "$0")/..

# Add the kafka example jars to the classpath.
for file in "$base_dir"/kafka*.jar;
do
  CLASSPATH=$CLASSPATH:$file
done

# classpath addition for release
for file in "$base_dir"/libs/commons*.jar;
do
  CLASSPATH=$CLASSPATH:$file
done

echo "$CLASSPATH"

# Which java to use
if [ -z "$JAVA_HOME" ]; then
  JAVA="java"
else
  JAVA="$JAVA_HOME/bin/java"
fi

# Quote the classpath and forward arguments with their original word
# boundaries ("$@" instead of unquoted $@).
"$JAVA" -cp "$CLASSPATH" com.kafka.message.server.example.other.launch.CreateFile "$@"
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/java-mail-producer-consumer-demo.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launches the combined producer/consumer demo (MailConsumerProducerDemo) via
# kafka-run-class.sh, forwarding all command-line arguments.
export KAFKA_HEAP_OPTS="-Xmx512M"
# Quote the script directory and use "$@" so install paths containing spaces
# and multi-word arguments survive word splitting.
exec "$(dirname "$0")"/kafka-run-class.sh com.kafka.message.server.example.launch.MailConsumerProducerDemo "$@"
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/config/java-mail-producer-demo.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launches the example mail producer (MailProducerDemo) via kafka-run-class.sh,
# forwarding all command-line arguments.
export KAFKA_HEAP_OPTS="-Xmx512M"
# Quote the script directory and use "$@" so install paths containing spaces
# and multi-word arguments survive word splitting.
exec "$(dirname "$0")"/kafka-run-class.sh com.kafka.message.server.example.launch.MailProducerDemo "$@"
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/kafka_2.8.0-0.8.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhioncbr/Kafka-Message-Server/5af32dc503828e856f0581a54a44b3955d45e994/Kafka-Message-Server-Example/kafka_2.8.0-0.8.0.jar
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!-- NOTE(review): all element tags were stripped from this copy of pom.xml by
     markup removal; the structure below is reconstructed from the surviving
     values (modelVersion 4.0.0, coordinates, compiler plugin 3.1 at 1.6,
     scala-library 2.8.0, commons-cli 1.1). Verify against the original file,
     in particular which of the duplicated "Kafka-Message-Server-Example"
     values was the groupId vs. the artifactId/name. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>Kafka-Message-Server-Example</groupId>
  <artifactId>Kafka-Message-Server-Example</artifactId>
  <version>0.0.1</version>
  <build>
    <plugins>
      <plugin>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.1</version>
        <configuration>
          <source>1.6</source>
          <target>1.6</target>
        </configuration>
      </plugin>
    </plugins>
  </build>
  <dependencies>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>2.8.0</version>
    </dependency>
    <dependency>
      <groupId>commons-cli</groupId>
      <artifactId>commons-cli</artifactId>
      <version>1.1</version>
    </dependency>
  </dependencies>
</project>
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/src/main/java/com/kafka/message/server/example/core/KafkaMailConsumer.java:
--------------------------------------------------------------------------------
1 | package com.kafka.message.server.example.core;
2 |
3 | import java.util.HashMap;
4 | import java.util.List;
5 | import java.util.Map;
6 | import java.util.Properties;
7 |
8 | import kafka.consumer.ConsumerConfig;
9 | import kafka.consumer.ConsumerIterator;
10 | import kafka.consumer.KafkaStream;
11 | import kafka.javaapi.consumer.ConsumerConnector;
12 |
13 | /**
14 | * The Class KafkaMailConsumer.
15 | *
16 | * @author Abhishek Sharma
17 | */
18 | public class KafkaMailConsumer extends Thread {
19 | private final ConsumerConnector consumer;
20 | private final String topic;
21 |
22 | public KafkaMailConsumer(String topic) {
23 | consumer = kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
24 | this.topic = topic;
25 | }
26 |
27 | /**
28 | * Creates the consumer config.
29 | *
30 | * @return the consumer config
31 | */
32 | private static ConsumerConfig createConsumerConfig() {
33 | Properties props = new Properties();
34 | props.put("zookeeper.connect", KafkaMailProperties.zkConnect);
35 | props.put("group.id", KafkaMailProperties.groupId);
36 | props.put("zookeeper.session.timeout.ms", "400");
37 | props.put("zookeeper.sync.time.ms", "200");
38 | props.put("auto.commit.interval.ms", "1000");
39 |
40 | return new ConsumerConfig(props);
41 |
42 | }
43 |
44 | public void run() {
45 | Map topicCountMap = new HashMap();
46 | topicCountMap.put(topic, new Integer(1));
47 | Map>> consumerMap = consumer.createMessageStreams(topicCountMap);
48 | KafkaStream stream = consumerMap.get(topic).get(0);
49 | ConsumerIterator it = stream.iterator();
50 | while (it.hasNext())
51 | System.out.println(new String(it.next().message()));
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/src/main/java/com/kafka/message/server/example/core/KafkaMailProducer.java:
--------------------------------------------------------------------------------
1 | package com.kafka.message.server.example.core;
2 |
3 | import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
4 | import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
5 | import static java.nio.file.StandardWatchEventKinds.OVERFLOW;
6 |
7 | import java.io.File;
8 | import java.io.FileFilter;
9 | import java.io.IOException;
10 | import java.io.RandomAccessFile;
11 | import java.nio.MappedByteBuffer;
12 | import java.nio.channels.FileChannel;
13 | import java.nio.file.FileSystems;
14 | import java.nio.file.Files;
15 | import java.nio.file.Path;
16 | import java.nio.file.Paths;
17 | import java.nio.file.WatchEvent;
18 | import java.nio.file.WatchEvent.Kind;
19 | import java.nio.file.WatchKey;
20 | import java.nio.file.WatchService;
21 | import java.util.Arrays;
22 | import java.util.List;
23 | import java.util.Properties;
24 |
25 | import kafka.producer.KeyedMessage;
26 | import kafka.producer.ProducerConfig;
27 |
/**
 * The Class MailProducer.
 *
 * Kafka producer that watches a directory and publishes the content of every
 * file dropped into it to a topic, deleting each file after sending. On start
 * it also drains files already present in the directory.
 *
 * NOTE(review): this copy is corrupted — generic type arguments were stripped
 * by markup removal (e.g. "WatchEvent>", raw "List", raw Producer/KeyedMessage);
 * it will not compile as-is. Recover the generics from the upstream source.
 *
 * @author Abhishek Sharma
 */
public class KafkaMailProducer extends Thread {
private final kafka.javaapi.producer.Producer producer;
private final String topic;
private final String directoryPath;
private final Properties props = new Properties();

/**
 * Instantiates a new kafka producer.
 *
 * Configures a String-encoding producer against a hard-coded local broker
 * (localhost:9092).
 *
 * @param topic the topic
 * @param directoryPath the directory path
 */
public KafkaMailProducer(String topic, String directoryPath) {
props.put("serializer.class", "kafka.serializer.StringEncoder");
props.put("metadata.broker.list", "localhost:9092");
producer = new kafka.javaapi.producer.Producer(new ProducerConfig(props));
this.topic = topic;
this.directoryPath = directoryPath;
}

// Starts the directory watcher thread, then drains pre-existing files.
// NOTE(review): ReadDir overrides Thread.start() with a plain method (see
// below), so the second call runs synchronously on THIS thread, not a new one.
public void run() {
Path dir = Paths.get(directoryPath);
try {

new WatchDir(dir).start();

new ReadDir(dir).start();
} catch (IOException e) {
e.printStackTrace();
}
}

/**
 * The Class WatchDir.
 *
 * Thread that registers a WatchService for ENTRY_CREATE events on the
 * directory and sends each newly created regular file's content to Kafka.
 */
private class WatchDir extends Thread{
private WatchKey key;
private final Path directory;
private WatchService watcher;


// NOTE(review): originally a generic helper (WatchEvent<T> cast(WatchEvent<?>))
// to silence the unchecked cast — type parameters stripped in this copy.
@SuppressWarnings("unchecked")
WatchEvent cast(WatchEvent> event) {
return (WatchEvent)event;
}


/**
 * Creates a WatchService and registers the given directory
 */
WatchDir(Path directory) {
this.directory = directory;
try{
this.watcher = FileSystems.getDefault().newWatchService();
this.key = directory.register(watcher, ENTRY_CREATE);
} catch (IOException ex) {
ex.printStackTrace();
}
}

/**
 * Process all events for keys queued to the watcher
 *
 * NOTE(review): this loop calls key.pollEvents() directly without blocking on
 * watcher.take()/poll(), so it busy-spins between events.
 */
public void run() {
for (;;) {
for (WatchEvent> event: key.pollEvents()) {
Kind> kind = event.kind();

// TBD - provide example of how OVERFLOW event is handled
if (kind == OVERFLOW) {
continue;
}

// Context for directory entry event is the file name of entry
WatchEvent ev = cast(event);
Path name = ev.context();
Path child = directory.resolve(name);

if (kind == ENTRY_CREATE) {
try {
// Only publish regular files (symlinks not followed, dirs skipped).
if (!Files.isDirectory(child, NOFOLLOW_LINKS)) {
readFileContent(child.toFile());
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}

// A key that can no longer be reset means the directory is inaccessible.
boolean valid = key.reset();
if (!valid)
break;
}
}

}

/**
 * The Class ReadDir.
 *
 * Publishes the files already present in the directory when the producer
 * starts (the WatchService only reports files created afterwards).
 */
class ReadDir extends Thread{
private final Path directory;

ReadDir(Path directory) throws IOException {
this.directory = directory;
}

// NOTE(review): overriding start() (instead of run()) means no new thread is
// ever spawned — the directory scan executes on the calling thread.
public void start() {
File file = directory.toFile();

// Visible, regular files only; hidden files and subdirectories are skipped.
List dirMessages = Arrays.asList(file.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
if(pathname.isFile() && !pathname.isHidden()) return true;
return false;
}
}));

for(File temp :dirMessages){
try {
readFileContent(temp);
} catch (IOException ex) {
ex.printStackTrace();
}
}
}

}

/**
 * Read file content.
 *
 * Memory-maps the file, copies its bytes into a String (one char per byte,
 * i.e. effectively Latin-1), sends it to the topic, and deletes the file.
 *
 * NOTE(review): the channel/file are not closed in a finally block, so an
 * exception during mapping or reading leaks both handles.
 *
 * @param file the file
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void readFileContent(File file) throws IOException{

RandomAccessFile aFile = new RandomAccessFile(file, "r");
FileChannel inChannel = aFile.getChannel();
MappedByteBuffer buffer = inChannel.map(FileChannel.MapMode.READ_ONLY, 0, inChannel.size());
buffer.load();
StringBuilder strBuilder = new StringBuilder();
for (int i = 0; i < buffer.limit(); i++){
strBuilder.append((char) buffer.get());
}
buffer.clear(); // do something with the data and clear/compact it.
inChannel.close();
aFile.close();


producer.send(new KeyedMessage(topic, strBuilder.toString()));

System.out.println(file.getAbsolutePath() + " - content consumed.");

file.delete();
}

}
191 |
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/src/main/java/com/kafka/message/server/example/core/KafkaMailProperties.java:
--------------------------------------------------------------------------------
1 | package com.kafka.message.server.example.core;
2 |
/**
 * Shared configuration constants for the Kafka mail example: topic names,
 * consumer group, ZooKeeper/broker endpoints, and client tuning values.
 * Implemented by the demo launchers so the constants are in scope unqualified.
 *
 * @author Abhishek Sharma
 */
public interface KafkaMailProperties {
    // Interface fields are implicitly public static final, so the explicit
    // "final static" modifiers used in the original were dropped.

    /** Default topic used by the producer and consumer demos. */
    String topic = "topic1";
    /** Consumer group id for the high-level consumer. */
    String groupId = "group1";
    /** ZooKeeper connect string (host:port). */
    String zkConnect = "127.0.0.1:2181";
    /** Kafka broker host. */
    String kafkaServerURL = "localhost";

    /** Kafka broker port. */
    int kafkaServerPort = 9092;
    /** Producer socket buffer size in bytes (64 KiB). */
    int kafkaProducerBufferSize = 64 * 1024;
    /** Connection timeout in milliseconds. */
    int connectionTimeOut = 100000;
    /** Reconnect interval in milliseconds. */
    int reconnectInterval = 10000;

    /** Additional demo topics. */
    String topic2 = "topic2";
    String topic3 = "topic3";
    /** Client id reported by the consumer demo. */
    String clientId = "MailConsumerDemoClient";

}
24 |
--------------------------------------------------------------------------------
/Kafka-Message-Server-Example/src/main/java/com/kafka/message/server/example/launch/MailConsumerDemo.java:
--------------------------------------------------------------------------------
1 | package com.kafka.message.server.example.launch;
2 |
3 | import java.util.ArrayList;
4 | import java.util.List;
5 |
6 | import org.apache.commons.cli.Option;
7 | import org.apache.commons.cli.ParseException;
8 |
9 | import com.kafka.message.server.example.core.KafkaMailConsumer;
10 | import com.kafka.message.server.example.core.KafkaMailProperties;
11 | import com.kafka.message.server.example.util.KafkaExampleCommandLineHandler;
12 |
13 | /**
14 | * The Class MailConsumerDemo.
15 | *
16 | * @author Abhishek Sharma
17 | */
18 | public class MailConsumerDemo implements KafkaMailProperties {
19 |
20 | private static final String TOPIC_NAME = "topic";
21 |
22 |
23 | /**
24 | * The main method.
25 | *
26 | * @param args the arguments
27 | */
28 | public static void main(String[] args) {
29 | try {
30 | KafkaExampleCommandLineHandler commandLine = new KafkaExampleCommandLineHandler(getProducerOptions(), args);
31 |
32 | String topic = commandLine.getOption(TOPIC_NAME);
33 |
34 | //start consumer thread
35 | KafkaMailConsumer consumerThread = new KafkaMailConsumer(topic!=null? topic : KafkaMailProperties.topic);
36 | consumerThread.start();
37 | } catch (ParseException e) {
38 | e.printStackTrace();
39 | }
40 | }
41 |
42 | /**
43 | * Gets the producer options.
44 | *
45 | * @return the producer options
46 | */
47 | private static List