├── JENKINS ├── jenkins.js ├── jenkins.php ├── debian ├── compat ├── source │ ├── format │ ├── options │ └── include-binaries ├── kafka.dirs ├── gbp.conf ├── patches │ ├── series │ ├── kafka_data_dirs_fixes.patch │ ├── remove_scala_annotations2.8.patch │ ├── logging_to_var_log.patch │ └── our-own-build-system.patch ├── kafka.links ├── kafka.postrm ├── kafka.install ├── kafka.postinst ├── changelog ├── kafka.default ├── control ├── kafka.kafka-mirror.default ├── README.Debian ├── rules ├── copyright ├── kafka.init ├── kafka.kafka-mirror.init └── bin │ └── kafka ├── jenkins-testfile.py ├── ext ├── kfs-0.3.jar ├── pig-0.8.0.jar ├── avro-1.4.0.jar ├── easymock-3.0.jar ├── zkclient-0.3.jar ├── jsp-2.1-6.1.14.jar ├── scalatest-1.2.jar ├── jopt-simple-3.2.jar ├── paranamer-ant-2.2.jar ├── hadoop-core-0.20.2.jar ├── jsp-api-2.1-6.1.14.jar ├── kafka-ganglia-1.0.0.jar ├── metrics-core-2.2.0.jar ├── jasper-compiler-5.5.12.jar ├── jasper-runtime-5.5.12.jar ├── metrics-ganglia-2.2.0.jar ├── metrics-graphite-2.2.0.jar ├── metrics-annotation-2.2.0.jar └── paranamer-generator-2.2.jar └── .gitreview /JENKINS: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /jenkins.js: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /jenkins.php: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /debian/compat: -------------------------------------------------------------------------------- 1 | 9 2 | -------------------------------------------------------------------------------- /jenkins-testfile.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (quilt) 2 | -------------------------------------------------------------------------------- /debian/kafka.dirs: -------------------------------------------------------------------------------- 1 | var/log/kafka 2 | var/spool/kafka 3 | -------------------------------------------------------------------------------- /debian/source/options: -------------------------------------------------------------------------------- 1 | extend-diff-ignore = '^\.gitreview$' 2 | -------------------------------------------------------------------------------- /ext/kfs-0.3.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/kfs-0.3.jar -------------------------------------------------------------------------------- /ext/pig-0.8.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/pig-0.8.0.jar -------------------------------------------------------------------------------- /ext/avro-1.4.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/avro-1.4.0.jar -------------------------------------------------------------------------------- /ext/easymock-3.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/easymock-3.0.jar -------------------------------------------------------------------------------- /ext/zkclient-0.3.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/zkclient-0.3.jar -------------------------------------------------------------------------------- /ext/jsp-2.1-6.1.14.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/jsp-2.1-6.1.14.jar -------------------------------------------------------------------------------- /ext/scalatest-1.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/scalatest-1.2.jar -------------------------------------------------------------------------------- /ext/jopt-simple-3.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/jopt-simple-3.2.jar -------------------------------------------------------------------------------- /ext/paranamer-ant-2.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/paranamer-ant-2.2.jar -------------------------------------------------------------------------------- /ext/hadoop-core-0.20.2.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/hadoop-core-0.20.2.jar -------------------------------------------------------------------------------- /ext/jsp-api-2.1-6.1.14.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/jsp-api-2.1-6.1.14.jar -------------------------------------------------------------------------------- /ext/kafka-ganglia-1.0.0.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/kafka-ganglia-1.0.0.jar -------------------------------------------------------------------------------- /ext/metrics-core-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/metrics-core-2.2.0.jar -------------------------------------------------------------------------------- /ext/jasper-compiler-5.5.12.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/jasper-compiler-5.5.12.jar -------------------------------------------------------------------------------- /ext/jasper-runtime-5.5.12.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/jasper-runtime-5.5.12.jar -------------------------------------------------------------------------------- /ext/metrics-ganglia-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/metrics-ganglia-2.2.0.jar -------------------------------------------------------------------------------- /ext/metrics-graphite-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/metrics-graphite-2.2.0.jar -------------------------------------------------------------------------------- /ext/metrics-annotation-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/metrics-annotation-2.2.0.jar -------------------------------------------------------------------------------- /ext/paranamer-generator-2.2.jar: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/fede1024/operations-debs-kafka/debian/ext/paranamer-generator-2.2.jar -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=gerrit.wikimedia.org 3 | port=29418 4 | project=operations/debs/kafka 5 | defaultbranch=debian 6 | defaultrebase=0 7 | -------------------------------------------------------------------------------- /debian/gbp.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | upstream-tree=tag 3 | upstream-tag=%(version)s 4 | debian-tag=debian-%(version)s 5 | debian-branch=debian-%(version)s 6 | -------------------------------------------------------------------------------- /debian/patches/series: -------------------------------------------------------------------------------- 1 | our-own-build-system.patch 2 | kafka_data_dirs_fixes.patch 3 | logging_to_var_log.patch 4 | remove_scala_annotations2.8.patch 5 | -------------------------------------------------------------------------------- /debian/kafka.links: -------------------------------------------------------------------------------- 1 | usr/share/kafka/kafka-0.8.jar usr/share/kafka/kafka.jar 2 | usr/share/kafka/kafka-perf-0.8.jar usr/share/kafka/kafka-perf.jar 3 | usr/share/kafka/kafka-kafka-examples-0.8.jar usr/share/kafka/kafka-kafka-examples.jar 4 | usr/share/kafka/hadoop-consumer-0.8.jar usr/share/kafka/hadoop-consumer.jar 5 | usr/share/kafka/hadoop-producer-0.8.jar usr/share/kafka/hadoop-producer.jar 6 | 7 | -------------------------------------------------------------------------------- /debian/source/include-binaries: -------------------------------------------------------------------------------- 1 | ext/jasper-runtime-5.5.12.jar 2 | ext/jsp-api-2.1-6.1.14.jar 3 | ext/avro-1.4.0.jar 4 | 
ext/kfs-0.3.jar 5 | ext/jsp-2.1-6.1.14.jar 6 | ext/scalatest-1.2.jar 7 | ext/paranamer-ant-2.2.jar 8 | ext/paranamer-generator-2.2.jar 9 | ext/pig-0.8.0.jar 10 | ext/zkclient-0.3.jar 11 | ext/easymock-3.0.jar 12 | ext/jasper-compiler-5.5.12.jar 13 | ext/hadoop-core-0.20.2.jar 14 | ext/jopt-simple-3.2.jar 15 | ext/metrics-annotation-2.2.0.jar 16 | ext/metrics-core-2.2.0.jar 17 | ext/metrics-ganglia-2.2.0.jar 18 | ext/metrics-graphite-2.2.0.jar 19 | ext/kafka-ganglia-1.0.0.jar 20 | -------------------------------------------------------------------------------- /debian/patches/kafka_data_dirs_fixes.patch: -------------------------------------------------------------------------------- 1 | diff -r -u a/config/server.properties b/config/server.properties 2 | --- a/config/server.properties 2013-06-12 10:09:28.531748537 +0000 3 | +++ b/config/server.properties 2013-06-12 10:50:27.995707650 +0000 4 | @@ -55,7 +55,7 @@ 5 | ############################# Log Basics ############################# 6 | 7 | # A comma seperated list of directories under which to store log files 8 | -log.dirs=/tmp/kafka-logs 9 | +log.dirs=/var/spool/kafka 10 | 11 | # The default number of log partitions per topic. 
More partitions allow greater 12 | # parallelism for consumption, but this will also result in more files across 13 | 14 | -------------------------------------------------------------------------------- /debian/kafka.postrm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # postrm script for kafka 4 | # 5 | # see: dh_installdeb(1) 6 | 7 | set -e 8 | 9 | case "$1" in 10 | purge) 11 | deluser --quiet --system kafka >/dev/null || true 12 | deluser --quiet --group --system --only-if-empty --remove-home kafka || true 13 | rm -rf /var/log/kafka 14 | ;; 15 | remove|upgrade|failed-upgrade|abort-install|abort-upgrade|disappear) 16 | ;; 17 | *) 18 | echo "postrm called with unknown argument \`$1'" >&2 19 | exit 1 20 | ;; 21 | esac 22 | 23 | # dh_installdeb will replace this with shell code automatically 24 | # generated by other debhelper scripts. 25 | 26 | #DEBHELPER# 27 | 28 | exit 0 29 | -------------------------------------------------------------------------------- /debian/kafka.install: -------------------------------------------------------------------------------- 1 | ext/avro-1.4.0.jar usr/share/java 2 | ext/easymock-3.0.jar usr/share/java 3 | ext/hadoop-core-0.20.2.jar usr/share/java 4 | ext/jasper-compiler-5.5.12.jar usr/share/java 5 | ext/jasper-runtime-5.5.12.jar usr/share/java 6 | ext/jopt-simple-3.2.jar usr/share/java 7 | ext/jsp-2.1-6.1.14.jar usr/share/java 8 | ext/jsp-api-2.1-6.1.14.jar usr/share/java 9 | ext/kfs-0.3.jar usr/share/java 10 | ext/paranamer-ant-2.2.jar usr/share/java 11 | ext/paranamer-generator-2.2.jar usr/share/java 12 | ext/pig-0.8.0.jar usr/share/java 13 | ext/scalatest-1.2.jar usr/share/java 14 | ext/zkclient-0.3.jar usr/share/java 15 | ext/metrics-annotation-2.2.0.jar usr/share/java 16 | ext/metrics-core-2.2.0.jar usr/share/java 17 | ext/metrics-ganglia-2.2.0.jar usr/share/java 18 | ext/metrics-graphite-2.2.0.jar usr/share/java 19 | ext/kafka-ganglia-1.0.0.jar usr/share/kafka 20 | 
debian/bin/kafka usr/sbin 21 | -------------------------------------------------------------------------------- /debian/kafka.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # postinst script for kafka 4 | # 5 | # see: dh_installdeb(1) 6 | 7 | set -e 8 | 9 | case "$1" in 10 | configure|reconfigure) 11 | if ! getent passwd kafka >/dev/null; then 12 | # Adding system user: kafka. 13 | adduser \ 14 | --system \ 15 | --group \ 16 | --no-create-home \ 17 | --home /nonexistent \ 18 | --gecos "Apache Kafka" \ 19 | --shell /bin/false \ 20 | kafka >/dev/null 2>/dev/null || : 21 | fi 22 | 23 | # chown kafka directories 24 | chown kafka:kafka /var/log/kafka 25 | chown kafka:kafka /var/spool/kafka 26 | ;; 27 | 28 | abort-upgrade|abort-remove|abort-deconfigure) 29 | ;; 30 | 31 | *) 32 | echo "postinst called with unknown argument \`$1'" >&2 33 | exit 1 34 | ;; 35 | esac 36 | 37 | # dh_installdeb will replace this with shell code automatically 38 | # generated by other debhelper scripts. 39 | 40 | #DEBHELPER# 41 | 42 | exit 0 43 | -------------------------------------------------------------------------------- /debian/changelog: -------------------------------------------------------------------------------- 1 | kafka (0.8.1.1-1) precise; urgency=low 2 | 3 | * Build new version 0.8.1.1 4 | 5 | -- Alexandros Kosiaris Wed, 16 Jul 2014 18:22:45 +0000 6 | 7 | kafka (0.8.0-2) precise; urgency=low 8 | 9 | * Adding support for setting ulimit open files. 10 | 11 | -- Andrew Otto (WMF) Wed, 05 Feb 2014 15:49:05 +0000 12 | 13 | kafka (0.8.0-1) precise; urgency=low 14 | 15 | * Stable release of 0.8.0 tag. 16 | 17 | -- Andrew Otto (WMF) Wed, 04 Dec 2013 15:01:05 +0000 18 | 19 | kafka (0.8~20130913-1) precise; urgency=low 20 | 21 | * Bumping version to reflect latest commit (aebf7461) in 0.8 branch. 
22 | 23 | -- Alexandros Kosiaris Wed, 09 Sep 2013 16:44:14 -0700 24 | 25 | kafka (0.8~20130827-1) precise; urgency=low 26 | 27 | * Bumping version to reflect latest commit in 0.8 branch. 28 | 29 | -- Alexandros Kosiaris Wed, 28 Aug 2013 16:50:14 +0000 30 | 31 | kafka (0.8~20130610-1) unstable; urgency=low 32 | 33 | * Initial release. 34 | 35 | -- Alexandros Kosiaris Wed, 29 May 2013 23:45:30 +0000 36 | -------------------------------------------------------------------------------- /debian/kafka.default: -------------------------------------------------------------------------------- 1 | # whether to allow init.d script to start a kafka broker ("yes", "no") 2 | KAFKA_START=no 3 | 4 | # User and group to run as 5 | KAFKA_USER=kafka 6 | KAFKA_GROUP=kafka 7 | KAFKA_CONFIG=/etc/kafka 8 | 9 | # ulimit open files limit for kafka user. 10 | KAFKA_NOFILES_ULIMIT=8192 11 | 12 | # The default JMX_PORT for Kafka Brokers is 9999. 13 | # Set JMX_PORT to something else to override this. 14 | #JMX_PORT=9999 15 | #KAFKA_JMX_OPTS=${KAFKA_JMX_OPTS:="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"} 16 | 17 | # Memory sizes, and logging configuration 18 | #KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_CONFIG/log4j.properties" 19 | 20 | # Default GC settings. 21 | #KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseCompressedOops -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:+DisableExplicitGC" 22 | # LinkedIn recommended GC settings. See: https://kafka.apache.org/081/ops.html 23 | # You need Java 7 u51 or greater to use the G1 GC. 
24 | #KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:PermSize=48m -XX:MaxPermSize=48m -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35" 25 | 26 | #KAFKA_HEAP_OPTS="-Xmx1G -Xms1G" 27 | #KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${KAFKA_CONFIG}/log4j.properties" 28 | #KAFKA_OPTS="" 29 | -------------------------------------------------------------------------------- /debian/patches/remove_scala_annotations2.8.patch: -------------------------------------------------------------------------------- 1 | diff -r -u a/core/src/main/scala/kafka/utils/Annotations_2.8.scala /dev/null 2 | --- a/core/src/main/scala/kafka/utils/Annotations_2.8.scala 3 | +++ /dev/null 4 | @@ -1,36 +0,0 @@ 5 | -/** 6 | - * Licensed to the Apache Software Foundation (ASF) under one or more 7 | - * contributor license agreements. See the NOTICE file distributed with 8 | - * this work for additional information regarding copyright ownership. 9 | - * The ASF licenses this file to You under the Apache License, Version 2.0 10 | - * (the "License"); you may not use this file except in compliance with 11 | - * the License. You may obtain a copy of the License at 12 | - * 13 | - * http://www.apache.org/licenses/LICENSE-2.0 14 | - * 15 | - * Unless required by applicable law or agreed to in writing, software 16 | - * distributed under the License is distributed on an "AS IS" BASIS, 17 | - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 18 | - * See the License for the specific language governing permissions and 19 | - * limitations under the License. 20 | - */ 21 | - 22 | -package kafka.utils 23 | - 24 | -/* Some helpful annotations */ 25 | - 26 | -/** 27 | - * Indicates that the annotated class is meant to be threadsafe. 
For an abstract class it is an part of the interface that an implementation 28 | - * must respect 29 | - */ 30 | -class threadsafe extends StaticAnnotation 31 | - 32 | -/** 33 | - * Indicates that the annotated class is not threadsafe 34 | - */ 35 | -class nonthreadsafe extends StaticAnnotation 36 | - 37 | -/** 38 | - * Indicates that the annotated class is immutable 39 | - */ 40 | -class immutable extends StaticAnnotation 41 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: kafka 2 | Section: misc 3 | Priority: optional 4 | Maintainer: Alexandros Kosiaris 5 | Build-Depends: debhelper (>= 9), openjdk-7-jdk | java7-jdk, javahelper (>= 0.40), 6 | scala, 7 | junit4, 8 | velocity, 9 | libasm3-java, 10 | libcglib-java, 11 | libcommons-cli-java, 12 | libcommons-codec-java, 13 | libcommons-collections-java, 14 | libcommons-compress-java, 15 | libcommons-el-java, 16 | libcommons-httpclient-java, 17 | libcommons-io-java, 18 | libcommons-lang-java, 19 | libcommons-logging-java, 20 | libcommons-net1-java, 21 | libhsqldb-java, 22 | libjackson-json-java, 23 | libjets3t-java, 24 | libjetty-java, 25 | libjoda-time-java, 26 | liblog4j1.2-java, 27 | libobjenesis-java, 28 | liboro-java, 29 | libparanamer-java, 30 | libqdox-java, 31 | libservlet2.5-java, 32 | libsnappy-java, 33 | libslf4j-java, 34 | libxmlenc-java, 35 | libzookeeper-java 36 | Standards-Version: 3.9.3 37 | Homepage: http://kafka.apache.org 38 | 39 | Package: kafka 40 | Architecture: all 41 | Depends: openjdk-7-jre | java7-runtime, scala-library, adduser, ${java:Depends}, ${misc:Depends} 42 | Recommends: ${java:Recommends} 43 | Description: Apache Kafka is a distributed publish-subscribe messaging system 44 | Apache Kafka is designed to support the following 45 | * Persistent messaging with O(1) disk structures 46 | that provide constant time performance even with many TB of 
stored messages. 47 | * High-throughput: even with very modest hardware Kafka can support hundreds 48 | of thousands of messages per second. 49 | * Explicit support for partitioning messages over Kafka servers and 50 | distributing consumption over a cluster of consumer machines while 51 | maintaining per-partition ordering semantics. 52 | * Support for parallel data load into Hadoop. 53 | 54 | -------------------------------------------------------------------------------- /debian/kafka.kafka-mirror.default: -------------------------------------------------------------------------------- 1 | # whether to allow init.d script to start a Kafka MirrorMaker daemon ("yes", "no") 2 | KAFKA_MIRROR_START=no 3 | 4 | # User and group to run as 5 | KAFKA_USER=kafka 6 | KAFKA_GROUP=kafka 7 | KAFKA_CONFIG=/etc/kafka 8 | 9 | # The default JMX_PORT for Kafka MirrorMaker is 9993. 10 | # Set JMX_PORT to something else to override this. 11 | #JMX_PORT=9993 12 | 13 | #KAFKA_JMX_OPTS=${KAFKA_JMX_OPTS:="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"} 14 | 15 | # Memory sizes, and logging configuration 16 | #KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_CONFIG/log4j.properties" 17 | 18 | # Default GC settings. 19 | #KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseCompressedOops -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:+DisableExplicitGC" 20 | # LinkedIn recommended GC settings. See: https://kafka.apache.org/081/ops.html 21 | # You need Java 7 u51 or greater to use the G1 GC. 
22 | #KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:PermSize=48m -XX:MaxPermSize=48m -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35" 23 | 24 | #KAFKA_HEAP_OPTS="-Xmx512M -Xms512M" 25 | #KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${KAFKA_CONFIG}/log4j.properties" 26 | #KAFKA_OPTS="" 27 | 28 | # 29 | # MirrorMaker options: 30 | # 31 | 32 | # Set this to a space separated list of consumer.properties files. 33 | # By default, /etc/init.d/kafka-mirror will use any files that match 34 | # /etc/kafka/mirror/consumer*.properties 35 | #KAFKA_MIRROR_CONSUMER_CONFIGS= 36 | 37 | #KAFKA_MIRROR_PRODUCER_CONFIG=/etc/kafka/mirror/producer.properties 38 | 39 | # Only set one of the following. 40 | #KAFKA_MIRROR_WHITELIST='.*' 41 | #KAFKA_MIRROR_BLACKLIST='' 42 | 43 | #KAFKA_MIRROR_NUM_STREAMS=1 44 | #KAFKA_MIRROR_NUM_PRODUCERS=1 45 | #KAFKA_MIRROR_QUEUE_SIZE=10000 46 | -------------------------------------------------------------------------------- /debian/README.Debian: -------------------------------------------------------------------------------- 1 | kafka for Debian 2 | ---------------- 3 | This package is created using a custom build system based on Makefiles 4 | instead of the standard sbt build system used by upstream. The reason 5 | for this is to satisfy debian policy that no internet connection should 6 | be required during package building as well as security concers about 7 | the downloaded JAR files by sbt 8 | 9 | How to build this package 10 | ------------------------- 11 | 12 | This repository is be used to build the kafka package using 13 | git-buildpackage. 
Below we illustrate one of the ways this can work for 14 | you: 15 | 16 | * Install some mandatory packages 17 | $ apt-get install git-buildpackage 18 | 19 | * And some not so mandatory but nice to have packages 20 | $ apt-get install pbuilder cowbuilder 21 | 22 | Note that pbuilder/cowbuilder is not strictly required but it will save 23 | you from installing the dependencies manually 24 | 25 | * Create a cowbuilder environment (we assume you are building against 26 | precise pangolin here) 27 | 28 | $ sudo DIST=precise git-pbuilder create --components "main universe" 29 | 30 | * Clone the repo 31 | $ git clone http://gerrit.wikimedia.org/r/operations/debs/kafka 32 | $ cd kafka 33 | 34 | * Should the current version in the repo not be enough, set upstream and 35 | fetch 36 | 37 | $ git remote add upstream http://git-wip-us.apache.org/repos/asf/kafka.git 38 | $ git fetch upstream 39 | 40 | * Checkout the version you want to build against 41 | $ git checkout debian- 42 | 43 | If the current version is not enough 44 | $ git checkout -b debian- 45 | 46 | should be a valid git tag. 
You can get them via git tag 47 | 48 | * Merge the debian branch on you new branch 49 | $ git merge debian 50 | 51 | * Build 52 | $ DIST=precise git-buildpackage -us -uc --git-builder="git-pbuilder" \ 53 | --git-cleaner="fakeroot debian/rules clean" \ 54 | --git-debian-branch=debian- 55 | 56 | Or assuming you don't change the debian/gbp.conf already provided 57 | $ DIST=precise git-buildpackage -us -uc --git-debian-branch=debian- 58 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | export JAVA_HOME?=/usr/lib/jvm/java-1.7.0-openjdk-amd64 4 | export PREFIX=/usr 5 | export SYSCONFDIR=/etc/kafka 6 | export VERSION=0.8 7 | 8 | # Put depended upon jars in here 9 | export CLASSPATH=/usr/share/java/asm3.jar:/usr/share/java/cglib.jar:/usr/share/java/commons-cli.jar:/usr/share/java/commons-codec.jar:/usr/share/java/commons-collections.jar:/usr/share/java/commons-compress.jar:/usr/share/java/commons-el.jar:/usr/share/java/commons-httpclient.jar:/usr/share/java/commons-io.jar:/usr/share/java/commons-lang.jar:/usr/share/java/commons-logging.jar:/usr/share/java/commons-net.jar:/usr/share/java/hsqldb.jar:/usr/share/java/jackson-core.jar:/usr/share/java/jackson-mapper.jar:/usr/share/java/jets3t.jar:/usr/share/java/jetty-util.jar:/usr/share/java/jetty.jar:/usr/share/java/joda-time.jar:/usr/share/java/jsp-api-2.1.jar:/usr/share/java/junit4.jar:/usr/share/java/log4j-1.2.jar:/usr/share/java/objenesis.jar:/usr/share/java/oro.jar:/usr/share/java/paranamer.jar:/usr/share/java/qdox.jar:/usr/share/java/scala-library.jar:/usr/share/java/servlet-api-2.5.jar:/usr/share/java/slf4j-api.jar:/usr/share/java/slf4j-api.jar:/usr/share/java/snappy-java.jar:/usr/share/java/velocity-1.7.jar:/usr/share/java/xmlenc.jar:/usr/share/java/zookeeper.jar:$(CURDIR)/ext/avro-1.4.0.jar:$(CURDIR)/ext/easymock-3.0.jar:$(CURDIR)/ext/hadoop-core-0.20.2.jar:$(CU
RDIR)/ext/jasper-compiler-5.5.12.jar:$(CURDIR)/ext/jasper-runtime-5.5.12.jar:$(CURDIR)/ext/jopt-simple-3.2.jar:$(CURDIR)/ext/jsp-2.1-6.1.14.jar:$(CURDIR)/ext/jsp-api-2.1-6.1.14.jar:$(CURDIR)/ext/kfs-0.3.jar:$(CURDIR)/ext/metrics-annotation-2.2.0.jar:$(CURDIR)/ext/metrics-core-2.2.0.jar:$(CURDIR)/ext/metrics-ganglia-2.2.0.jar:$(CURDIR)/ext/metrics-graphite-2.2.0.jar:$(CURDIR)/ext/kafka-ganglia-1.0.0.jar:$(CURDIR)/ext/paranamer-ant-2.2.jar:$(CURDIR)/ext/paranamer-generator-2.2.jar:$(CURDIR)/ext/pig-0.8.0.jar:$(CURDIR)/ext/scalatest-1.2.jar:$(CURDIR)/ext/zkclient-0.3.jar:
+log4j.appender.fileAppender=org.apache.log4j.RollingFileAppender 21 | +log4j.appender.fileAppender.File=/var/log/kafka/kafka.log 22 | +log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout 23 | +log4j.appender.fileAppender.layout.ConversionPattern=[%d] %-4r [%t] %-5p %c %x - %m%n 24 | +log4j.appender.fileAppender.MaxFileSize=100MB 25 | +log4j.appender.fileAppender.MaxBackupIndex=2 26 | +log4j.appender.fileAppender.Threshold=INFO 27 | + 28 | +# Turn on all our debugging info 29 | +#log4j.logger.kafka=INFO 30 | 31 | log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender 32 | log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH 33 | @@ -57,22 +63,21 @@ 34 | #log4j.logger.kafka.perf=DEBUG, kafkaAppender 35 | #log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender 36 | #log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG 37 | -log4j.logger.kafka=INFO, kafkaAppender 38 | 39 | -log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender 40 | -log4j.additivity.kafka.network.RequestChannel$=false 41 | +#log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender 42 | +#log4j.additivity.kafka.network.RequestChannel$=false 43 | 44 | #log4j.logger.kafka.network.Processor=TRACE, requestAppender 45 | #log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender 46 | #log4j.additivity.kafka.server.KafkaApis=false 47 | -log4j.logger.kafka.request.logger=WARN, requestAppender 48 | -log4j.additivity.kafka.request.logger=false 49 | - 50 | -log4j.logger.kafka.controller=TRACE, controllerAppender 51 | -log4j.additivity.kafka.controller=false 52 | - 53 | -log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender 54 | -log4j.additivity.kafka.log.LogCleaner=false 55 | - 56 | -log4j.logger.state.change.logger=TRACE, stateChangeAppender 57 | -log4j.additivity.state.change.logger=false 58 | +#log4j.logger.kafka.request.logger=WARN, requestAppender 59 | +#log4j.additivity.kafka.request.logger=false 60 | +# 61 | 
+#log4j.logger.kafka.controller=TRACE, controllerAppender 62 | +#log4j.additivity.kafka.controller=false 63 | +# 64 | +#log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender 65 | +#log4j.additivity.kafka.log.LogCleaner=false 66 | +# 67 | +#log4j.logger.state.change.logger=TRACE, stateChangeAppender 68 | +#log4j.additivity.state.change.logger=false 69 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: kafka 3 | Source: http://kafka.apache.org/ 4 | 5 | Files: * 6 | Copyright: 2012 The Apache Software Foundation. 7 | License: Apache-2 8 | 9 | Files: lib/sbt-lauch.jar 10 | Copyright: 2008, 2009, 2010 Mark Harrah, Jason Zaugg 11 | License: BSD-3-clause 12 | 13 | Files: debian/* 14 | Copyright: 2013 Alexandros Kosiaris 15 | License: GPL-2+ 16 | 17 | Files: ext/* 18 | Copyright: FIXME 19 | License: FIXME 20 | 21 | License: Apache-2 22 | Licensed under the Apache License, Version 2.0 (the "License"); you may not 23 | use this file except in compliance with the License. You may obtain a copy of 24 | the License at 25 | . 26 | http://www.apache.org/licenses/LICENSE-2.0 27 | . 28 | Unless required by applicable law or agreed to in writing, software 29 | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 30 | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 31 | License for the specific language overning permissions and limitations under 32 | the License. 33 | . 34 | On Debian systems, the Apache 2.0 license can be found in 35 | /usr/share/common-licenses/Apache-2.0. 36 | 37 | License: BSD-3-clause 38 | Redistribution and use in source and binary forms, with or without 39 | modification, are permitted provided that the following conditions 40 | are met: 41 | 1. 
Redistributions of source code must retain the above copyright 42 | notice, this list of conditions and the following disclaimer. 43 | 2. Redistributions in binary form must reproduce the above copyright 44 | notice, this list of conditions and the following disclaimer in the 45 | documentation and/or other materials provided with the distribution. 46 | 3. The name of the author may not be used to endorse or promote products 47 | derived from this software without specific prior written permission. 48 | . 49 | THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 50 | IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 51 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 52 | IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 53 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 54 | NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 55 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 56 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 57 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 58 | THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 59 | 60 | License: GPL-2+ 61 | This program is free software; you can redistribute it and/or modify 62 | it under the terms of the GNU General Public License as published by 63 | the Free Software Foundation; either version 2 of the License, or 64 | (at your option) any later version. 65 | . 66 | This program is distributed in the hope that it will be useful, 67 | but WITHOUT ANY WARRANTY; without even the implied warranty of 68 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 69 | GNU General Public License for more details. 70 | . 
71 | You should have received a copy of the GNU General Public License along 72 | with this program; if not, write to the Free Software Foundation, Inc., 73 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 74 | . 75 | On Debian systems, the full text of the GNU General Public 76 | License version 2 can be found in the file 77 | `/usr/share/common-licenses/GPL-2'. 78 | -------------------------------------------------------------------------------- /debian/kafka.init: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # /etc/init.d/kafka -- startup script for the kafka distributed publish-subscribe messaging system 4 | # 5 | # Written by Alexandros Kosiaris 6 | # 7 | ### BEGIN INIT INFO 8 | # Provides: kafka 9 | # Required-Start: $local_fs $remote_fs $network 10 | # Required-Stop: $local_fs $remote_fs $network 11 | # Should-Start: $named 12 | # Should-Stop: $named 13 | # Default-Start: 2 3 4 5 14 | # Default-Stop: 0 1 6 15 | # Short-Description: Start kafka 16 | # Description: Start the kafka distributed publish-subscribe messaging system 17 | ### END INIT INFO 18 | 19 | set -e 20 | 21 | PATH=/bin:/usr/bin:/sbin:/usr/sbin 22 | NAME="$(basename "$0" | sed 's/^[KS][0-9]\{2\}//')" 23 | DESC="broker" 24 | DEFAULT=/etc/default/$NAME 25 | 26 | if [ `id -u` -ne 0 ]; then 27 | echo "You need root privileges to run this script" 28 | exit 1 29 | fi 30 | 31 | # Make sure kafka is started with system locale 32 | if [ -r /etc/default/locale ]; then 33 | . /etc/default/locale 34 | export LANG 35 | fi 36 | 37 | . /lib/lsb/init-functions 38 | 39 | if [ -r /etc/default/rcS ]; then 40 | . 
/etc/default/rcS 41 | fi 42 | 43 | # # The following variables can be overwritten in $DEFAULT 44 | # 45 | # Run kafka as this user ID and group ID 46 | KAFKA_USER=kafka 47 | KAFKA_GROUP=kafka 48 | 49 | KAFKA_CONFIG=/etc/kafka 50 | KAFKA_START=yes 51 | 52 | JDK_DIRS="/usr/lib/jvm/java-7-openjdk-amd64 /usr/lib/jvm/default-java /usr/lib/jvm/java-6-sun /usr/lib/jvm/java-1.5.0-sun /usr/lib/j2sdk1.5-sun /usr/lib/j2sdk1.5-ibm" 53 | 54 | # Look for the right JVM to use 55 | for jdir in $JDK_DIRS; do 56 | if [ -r "$jdir/bin/java" -a -z "${JAVA_HOME}" ]; then 57 | JAVA_HOME="$jdir" 58 | fi 59 | done 60 | export JAVA_HOME 61 | 62 | # Default Java options 63 | # Set java.awt.headless=true if JAVA_OPTS is not set so the 64 | # Xalan XSL transformer can work without X11 display on JDK 1.4+ 65 | if [ -z "$JAVA_OPTS" ]; then 66 | JAVA_OPTS="-Djava.awt.headless=true" 67 | fi 68 | 69 | # End of variables that can be overwritten in $DEFAULT 70 | 71 | # overwrite settings from default file 72 | if [ -f "$DEFAULT" ]; then 73 | . "$DEFAULT" 74 | fi 75 | 76 | # If ulimit nofiles was set in $DEFAULT, 77 | # then change the limit for this process now. 78 | # This will not set nofiles higher than 79 | # system limit. 
80 | if [ -n "$KAFKA_NOFILES_ULIMIT" ]; then 81 | ulimit -n "$KAFKA_NOFILES_ULIMIT" 82 | fi 83 | 84 | # Define other required variables 85 | KAFKA_PID="/var/run/$NAME.pid" 86 | DAEMON="${JAVA_HOME}/bin/java" 87 | 88 | kafka_sh() { 89 | # Escape any double quotes in the value of JAVA_OPTS 90 | JAVA_OPTS="$(echo $JAVA_OPTS | sed 's/\"/\\\"/g')" 91 | KAFKA_OPTS="${KAFKA_OPTS} ${JAVA_OPTS}" 92 | 93 | # Export Kafka environment variables from $DEFAULT file so 94 | # /usr/sbin/kafka will recognize them 95 | export KAFKA_CONFIG JMX_PORT KAFKA_JMX_OPTS SCALA_VERSION KAFKA_JVM_PERFORMANCE_OPTS KAFKA_HEAP_OPTS KAFKA_LOG4J_OPTS KAFKA_OPTS 96 | 97 | # Run as a daemon 98 | set +e 99 | 100 | start-stop-daemon --start -b -u "$KAFKA_USER" -g "$KAFKA_GROUP" \ 101 | -c "$KAFKA_USER" -m -p "$KAFKA_PID" \ 102 | -x "/usr/sbin/kafka" -- "server-start" "${KAFKA_CONFIG}/server.properties" 103 | status="$?" 104 | set +a -e 105 | return $status 106 | } 107 | 108 | case "$1" in 109 | start) 110 | if [ -n "$KAFKA_START" -a "$KAFKA_START" != "yes" ]; then 111 | log_failure_msg "KAFKA_START not set to 'yes' in $DEFAULT, not starting" 112 | exit 0 113 | fi 114 | 115 | if [ -z "$JAVA_HOME" ]; then 116 | log_failure_msg "no JDK found - please set JAVA_HOME" 117 | exit 1 118 | fi 119 | 120 | log_daemon_msg "Starting $DESC" "$NAME" 121 | if start-stop-daemon --test --start --pidfile "$KAFKA_PID" \ 122 | --user "$KAFKA_USER" --exec "${JAVA_HOME}/bin/java" \ 123 | >/dev/null; then 124 | 125 | kafka_sh start 126 | sleep 5 127 | if start-stop-daemon --test --start --pidfile "$KAFKA_PID" \ 128 | --user "$KAFKA_USER" --exec "${JAVA_HOME}/bin/java" \ 129 | >/dev/null; then 130 | if [ -f "$KAFKA_PID" ]; then 131 | rm -f "$KAFKA_PID" 132 | fi 133 | log_end_msg 1 134 | else 135 | log_end_msg 0 136 | fi 137 | else 138 | log_progress_msg "(already running)" 139 | log_end_msg 0 140 | fi 141 | ;; 142 | stop) 143 | log_daemon_msg "Stopping $DESC" "$NAME" 144 | 145 | set +e 146 | if [ -f "$KAFKA_PID" ]; then 147 | 
start-stop-daemon --stop --pidfile "$KAFKA_PID" \ 148 | --user "$KAFKA_USER" \ 149 | --retry=TERM/20/KILL/5 >/dev/null 150 | if [ $? -eq 1 ]; then 151 | log_progress_msg "$DESC is not running but pid file exists, cleaning up" 152 | elif [ $? -eq 3 ]; then 153 | PID="`cat $KAFKA_PID`" 154 | log_failure_msg "Failed to stop $NAME (pid $PID)" 155 | exit 1 156 | fi 157 | rm -f "$KAFKA_PID" 158 | else 159 | log_progress_msg "(not running)" 160 | fi 161 | log_end_msg 0 162 | set -e 163 | ;; 164 | status) 165 | set +e 166 | start-stop-daemon --test --start --pidfile "$KAFKA_PID" \ 167 | --user "$KAFKA_USER" --exec "${JAVA_HOME}/bin/java" \ 168 | >/dev/null 2>&1 169 | if [ "$?" = "0" ]; then 170 | 171 | if [ -f "$KAFKA_PID" ]; then 172 | log_success_msg "$DESC is not running, but pid file exists." 173 | exit 1 174 | else 175 | log_success_msg "$DESC is not running." 176 | exit 3 177 | fi 178 | else 179 | log_success_msg "$DESC is running with pid `cat $KAFKA_PID`" 180 | fi 181 | set -e 182 | ;; 183 | restart|force-reload) 184 | if [ -f "$KAFKA_PID" ]; then 185 | $0 stop 186 | sleep 1 187 | fi 188 | $0 start 189 | ;; 190 | try-restart) 191 | if start-stop-daemon --test --start --pidfile "$KAFKA_PID" \ 192 | --user "$KAFKA_USER" --exec "${JAVA_HOME}/bin/java" \ 193 | >/dev/null; then 194 | $0 start 195 | fi 196 | ;; 197 | *) 198 | log_success_msg "Usage: $0 {start|stop|restart|try-restart|force-reload|status}" 199 | exit 1 200 | ;; 201 | esac 202 | 203 | exit 0 204 | -------------------------------------------------------------------------------- /debian/kafka.kafka-mirror.init: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # /etc/init.d/kafka-mirror -- startup script for the Kafka MirrorMaker. 
4 | # See: https://cwiki.apache.org/confluence/display/KAFKA/Kafka+mirroring+%28MirrorMaker%29 5 | # 6 | # Written by Alexandros Kosiaris and Andrew Otto 7 | # 8 | ### BEGIN INIT INFO 9 | # Provides: kafka-mirror 10 | # Required-Start: $local_fs $remote_fs $network 11 | # Required-Stop: $local_fs $remote_fs $network 12 | # Should-Start: $named 13 | # Should-Stop: $named 14 | # Default-Start: 2 3 4 5 15 | # Default-Stop: 0 1 6 16 | # Short-Description: Start kafka MirrorMaker 17 | # Description: Start the Kafka MirrorMaker tool used to consume from multiple kafka clusters and produce into another. 18 | ### END INIT INFO 19 | 20 | set -e 21 | 22 | PATH=/bin:/usr/bin:/sbin:/usr/sbin 23 | NAME="$(basename "$0" | sed 's/^[KS][0-9]\{2\}//')" 24 | DESC="MirrorMaker" 25 | DEFAULT=/etc/default/$NAME 26 | 27 | if [ `id -u` -ne 0 ]; then 28 | echo "You need root privileges to run this script" 29 | exit 1 30 | fi 31 | 32 | # Make sure kafka is started with system locale 33 | if [ -r /etc/default/locale ]; then 34 | . /etc/default/locale 35 | export LANG 36 | fi 37 | 38 | . /lib/lsb/init-functions 39 | 40 | if [ -r /etc/default/rcS ]; then 41 | . /etc/default/rcS 42 | fi 43 | 44 | # The following variables can be overwritten in $DEFAULT 45 | 46 | # Run kafka as this user ID and group ID 47 | KAFKA_USER=kafka 48 | KAFKA_GROUP=kafka 49 | KAFKA_MIRROR_START=yes 50 | KAFKA_CONFIG=/etc/kafka 51 | 52 | # Read in consumer config files from /etc/kafka/mirror. 53 | KAFKA_MIRROR_CONSUMER_CONFIGS=$(ls $KAFKA_CONFIG/mirror/consumer*.properties || true) # Don't error out yet if files don't exist. 
54 | KAFKA_MIRROR_PRODUCER_CONFIG=$KAFKA_CONFIG/mirror/producer.properties 55 | 56 | KAFKA_MIRROR_NUM_STREAMS=1 57 | KAFKA_MIRROR_NUM_PRODUCERS=1 58 | KAFKA_MIRROR_QUEUE_SIZE=10000 59 | KAFKA_MIRROR_WHITELIST='.*' 60 | KAFKA_MIRROR_BLACKLIST='' 61 | 62 | JDK_DIRS="/usr/lib/jvm/java-7-openjdk-amd64 /usr/lib/jvm/default-java /usr/lib/jvm/java-6-sun /usr/lib/jvm/java-1.5.0-sun /usr/lib/j2sdk1.5-sun /usr/lib/j2sdk1.5-ibm" 63 | 64 | # Look for the right JVM to use. 65 | for jdir in $JDK_DIRS; do 66 | if [ -r "$jdir/bin/java" -a -z "${JAVA_HOME}" ]; then 67 | JAVA_HOME="$jdir" 68 | fi 69 | done 70 | export JAVA_HOME 71 | 72 | # Default Java options 73 | # Set java.awt.headless=true if JAVA_OPTS is not set so the 74 | # Xalan XSL transformer can work without X11 display on JDK 1.4+ 75 | if [ -z "$JAVA_OPTS" ]; then 76 | JAVA_OPTS="-Djava.awt.headless=true" 77 | fi 78 | 79 | # End of variables that can be overwritten in $DEFAULT 80 | 81 | # overwrite settings from default file 82 | if [ -f "$DEFAULT" ]; then 83 | . "$DEFAULT" 84 | fi 85 | 86 | # Define other required variables 87 | KAFKA_MIRROR_PID="/var/run/$NAME.pid" 88 | 89 | 90 | kafka_mirror_sh() { 91 | # Escape any double quotes in the value of JAVA_OPTS 92 | JAVA_OPTS="$(echo $JAVA_OPTS | sed 's/\"/\\\"/g')" 93 | KAFKA_OPTS="${KAFKA_OPTS} ${JAVA_OPTS}" 94 | 95 | # Export Kafka environment variables from $DEFAULT file so 96 | # /usr/sbin/kafka will recognize them 97 | export KAFKA_CONFIG JMX_PORT KAFKA_JMX_OPTS SCALA_VERSION KAFKA_JVM_PERFORMANCE_OPTS KAFKA_HEAP_OPTS KAFKA_LOG4J_OPTS KAFKA_OPTS 98 | 99 | 100 | # Define the command to run kafka as a daemon 101 | # set -a tells sh to export assigned variables to spawned shells. 
102 | KAFKA_MIRROR_ARGS=" 103 | --num.streams $KAFKA_MIRROR_NUM_STREAMS \ 104 | --num.producers $KAFKA_MIRROR_NUM_PRODUCERS \ 105 | --queue.size $KAFKA_MIRROR_QUEUE_SIZE \ 106 | --producer.config $KAFKA_MIRROR_PRODUCER_CONFIG " 107 | 108 | if [ -n "$KAFKA_MIRROR_WHITELIST" ]; then 109 | KAFKA_MIRROR_ARGS="$KAFKA_MIRROR_ARGS --whitelist '$KAFKA_MIRROR_WHITELIST' " 110 | else 111 | KAFKA_MIRROR_ARGS="$KAFKA_MIRROR_ARGS --blacklist '$KAFKA_MIRROR_BLACKLIST' " 112 | fi 113 | 114 | # Add all consumer config files to KAFKA_MIRROR_ARGS 115 | for CONSUMER_CONFIG in $KAFKA_MIRROR_CONSUMER_CONFIGS; do 116 | KAFKA_MIRROR_ARGS="$KAFKA_MIRROR_ARGS --consumer.config $CONSUMER_CONFIG" 117 | done 118 | 119 | # Run as a daemon 120 | set +e 121 | 122 | start-stop-daemon --start -b -u "$KAFKA_USER" -g "$KAFKA_GROUP" \ 123 | -c "$KAFKA_USER" -m -p "$KAFKA_MIRROR_PID" \ 124 | -x "/usr/sbin/kafka" -- "mirror-maker" "${KAFKA_MIRROR_ARGS}" 125 | status="$?" 126 | set +a -e 127 | return $status 128 | } 129 | 130 | case "$1" in 131 | start) 132 | if [ -n "$KAFKA_MIRROR_START" -a "$KAFKA_MIRROR_START" != "yes" ]; then 133 | log_failure_msg "KAFKA_MIRROR_START not set to 'yes' in $DEFAULT, not starting" 134 | exit 0 135 | fi 136 | 137 | if [ -z "$JAVA_HOME" ]; then 138 | log_failure_msg "no JDK found - please set JAVA_HOME" 139 | exit 1 140 | fi 141 | 142 | # 143 | # MirrorMaker argument checking: 144 | # 145 | 146 | if [ -z "$KAFKA_MIRROR_CONSUMER_CONFIGS" ]; then 147 | echo "No consumer config files provided." 148 | exit 1 149 | fi 150 | if [ -z "$KAFKA_MIRROR_PRODUCER_CONFIG" ]; then 151 | echo "No producer config file provided." 152 | exit 1 153 | fi 154 | 155 | if [ -n "$KAFKA_MIRROR_WHITELIST" -a -n "$KAFKA_MIRROR_BLACKLIST" ]; then 156 | echo "Must set exactly one of KAFKA_MIRROR_WHITELIST and KAFKA_MIRROR_BLACKLIST, not both." 
157 | exit 1 158 | elif [ -z "$KAFKA_MIRROR_WHITELIST" -a -z "$KAFKA_MIRROR_BLACKLIST"]; then 159 | echo "Must set exactly one of KAFKA_MIRROR_WHITELIST or KAFKA_MIRROR_BLACKLIST." 160 | exit 1 161 | fi 162 | 163 | if [ ! -r "$KAFKA_MIRROR_PRODUCER_CONFIG" ]; then 164 | echo "Cannot read producer config file '$KAFKA_MIRROR_PRODUCER_CONFIG'." 165 | exit 1 166 | fi 167 | 168 | log_daemon_msg "Starting $DESC" "$NAME" 169 | if start-stop-daemon --test --start --pidfile "$KAFKA_MIRROR_PID" \ 170 | --user $KAFKA_USER --exec "$JAVA_HOME/bin/java" \ 171 | >/dev/null; then 172 | 173 | kafka_mirror_sh start 174 | sleep 5 175 | if start-stop-daemon --test --start --pidfile "$KAFKA_MIRROR_PID" \ 176 | --user $KAFKA_USER --exec "$JAVA_HOME/bin/java" \ 177 | >/dev/null; then 178 | if [ -f "$KAFKA_MIRROR_PID" ]; then 179 | rm -f "$KAFKA_MIRROR_PID" 180 | fi 181 | log_end_msg 1 182 | else 183 | log_end_msg 0 184 | fi 185 | else 186 | log_progress_msg "(already running)" 187 | log_end_msg 0 188 | fi 189 | ;; 190 | stop) 191 | log_daemon_msg "Stopping $DESC" "$NAME" 192 | 193 | set +e 194 | if [ -f "$KAFKA_MIRROR_PID" ]; then 195 | start-stop-daemon --stop --pidfile "$KAFKA_MIRROR_PID" \ 196 | --user "$KAFKA_USER" \ 197 | --retry=TERM/20/KILL/5 >/dev/null 198 | if [ $? -eq 1 ]; then 199 | log_progress_msg "$DESC is not running but pid file exists, cleaning up" 200 | elif [ $? -eq 3 ]; then 201 | PID="`cat $KAFKA_MIRROR_PID`" 202 | log_failure_msg "Failed to stop $NAME (pid $PID)" 203 | exit 1 204 | fi 205 | rm -f "$KAFKA_MIRROR_PID" 206 | else 207 | log_progress_msg "(not running)" 208 | fi 209 | log_end_msg 0 210 | set -e 211 | ;; 212 | status) 213 | set +e 214 | start-stop-daemon --test --start --pidfile "$KAFKA_MIRROR_PID" \ 215 | --user $KAFKA_USER --exec "$JAVA_HOME/bin/java" \ 216 | >/dev/null 2>&1 217 | if [ "$?" = "0" ]; then 218 | 219 | if [ -f "$KAFKA_MIRROR_PID" ]; then 220 | log_success_msg "$DESC is not running, but pid file exists." 
221 | exit 1 222 | else 223 | log_success_msg "$DESC is not running." 224 | exit 3 225 | fi 226 | else 227 | log_success_msg "$DESC is running with pid `cat $KAFKA_MIRROR_PID`" 228 | fi 229 | set -e 230 | ;; 231 | restart|force-reload) 232 | if [ -f "$KAFKA_MIRROR_PID" ]; then 233 | $0 stop 234 | sleep 1 235 | fi 236 | $0 start 237 | ;; 238 | try-restart) 239 | if start-stop-daemon --test --start --pidfile "$KAFKA_MIRROR_PID" \ 240 | --user $KAFKA_USER --exec "$JAVA_HOME/bin/java" \ 241 | >/dev/null; then 242 | $0 start 243 | fi 244 | ;; 245 | *) 246 | log_success_msg "Usage: $0 {start|stop|restart|try-restart|force-reload|status}" 247 | exit 1 248 | ;; 249 | esac 250 | 251 | exit 0 252 | -------------------------------------------------------------------------------- /debian/patches/our-own-build-system.patch: -------------------------------------------------------------------------------- 1 | From: Alexandros Kosiaris 2 | Date: Tue, 28 May 2013 17:46:15 +0000 3 | Subject: Introducing our own build system 4 | 5 | Introducing our own build system for the kafka project based on 6 | calling scalac and a set of simple Makefiles. Will compile per 7 | directory {core, perf, examples, contrib}, install configuration 8 | files and depend on either system provided JAR libraries or the 9 | ones shipped in ext/. Install targets by default go to 10 | /usr/local/share/java, packaging system will probably override that. 11 | It supports PREFIX, DESTDIR, SYSCONFDIR. 12 | CLASSPATH variable is meant to be overriden more by the caller. Some 13 | sane defaults are discovered but they should not be relied on too much. 14 | The version discovery system is rather naive and will fail in many 15 | cases to correctly identify the version. Since this is not trivial to 16 | fix we allow the caller to override version. 
17 | --- 18 | Makefile | 66 ++++++++++++++++++++++++++++++++++++++ 19 | config/Makefile | 9 ++++++ 20 | contrib/Makefile | 23 +++++++++++++ 21 | contrib/hadoop-consumer/Makefile | 20 ++++++++++++ 22 | contrib/hadoop-producer/Makefile | 20 ++++++++++++ 23 | core/Makefile | 20 ++++++++++++ 24 | examples/Makefile | 19 +++++++++++ 25 | perf/Makefile | 20 ++++++++++++ 26 | 8 files changed, 197 insertions(+) 27 | create mode 100644 Makefile 28 | create mode 100644 config/Makefile 29 | create mode 100644 contrib/Makefile 30 | create mode 100644 contrib/hadoop-consumer/Makefile 31 | create mode 100644 contrib/hadoop-producer/Makefile 32 | create mode 100644 core/Makefile 33 | create mode 100644 examples/Makefile 34 | create mode 100644 perf/Makefile 35 | 36 | diff --git a/Makefile b/Makefile 37 | new file mode 100644 38 | index 0000000..b2a9961 39 | --- /dev/null 40 | +++ b/Makefile 41 | @@ -0,0 +1,66 @@ 42 | +export SCALAC=scalac 43 | +export JAVAC=javac 44 | +export MKDIR_P=mkdir -p 45 | +export RM_RF=rm -rf 46 | +export JAR=jar 47 | +export INSTALL=install -D -m 644 48 | + 49 | +# Figure this out as well as other dependencies 50 | +export SCALA_LIBRARY=/usr/share/java/scala-library-2.9.1.jar 51 | + 52 | +# DESTDIR 53 | +ifndef PREFIX 54 | +export PREFIX=/usr/local 55 | +endif 56 | +ifndef SYSCONFDIR 57 | +export SYSCONFDIR=/usr/local/etc/kafka 58 | +endif 59 | +export LIBPATH=$(PREFIX)/share/kafka 60 | +export DESTDIR 61 | + 62 | +# Release specifics 63 | +tag=$(shell git describe --abbrev=0) 64 | +ver=$(shell echo $(tag) | sed -e 's/kafka-//' -e 's/-incubating-candidate-[0-9]//') 65 | +ifndef VERSION 66 | +export VERSION=$(ver) 67 | +endif 68 | + 69 | +# This is a fallback in case CLASSPATH is not defined by the caller 70 | +# It will find and add to the CLASSPATH variable all jars in $(PREFIX)/usr/share 71 | +# Hopefully CLASSPATH will be defined and this will be unused 72 | +SYS_CLASSDIR:=$(PREFIX)/share/java 73 | +SYS_CLASSJARS:=$(shell find $(SYS_CLASSDIR) -name 
"*.jar") 74 | +export SYS_CLASSPATH:=$(shell echo $(SYS_CLASSJARS) | sed -e 's/ /:/g') 75 | + 76 | +# Same here, just this is done for the shipped jar dependencies. 77 | +EXT_CLASSDIR:=$(CURDIR)/ext 78 | +EXT_CLASSJARS:=$(shell find $(EXT_CLASSDIR) -name "*.jar") 79 | +export EXT_CLASSPATH:=$(shell echo $(EXT_CLASSJARS) | sed -e 's/ /:/g') 80 | + 81 | +ifndef CLASSPATH 82 | +export CLASSPATH=$(SYS_CLASSPATH):$(EXT_CLASSPATH) 83 | +endif 84 | +export CFLAGS=-usejavacp 85 | + 86 | +SUBDIRS=core examples contrib perf config 87 | + 88 | +BUILDDIRS=$(SUBDIRS:%=build-%) 89 | +CLEANDIRS=$(SUBDIRS:%=clean-%) 90 | +INSTALLDIRS=$(SUBDIRS:%=install-%) 91 | + 92 | +all: $(BUILDDIRS) 93 | + 94 | +install: all $(INSTALLDIRS) 95 | + 96 | +clean: $(CLEANDIRS) 97 | + 98 | +$(BUILDDIRS): $(TARGET_PATH) 99 | + $(MAKE) -C $(@:build-%=%) all 100 | + 101 | +$(CLEANDIRS): $(TARGET_PATH) 102 | + $(MAKE) -C $(@:clean-%=%) clean 103 | + 104 | +$(INSTALLDIRS): $(TARGET_PATH) 105 | + $(MAKE) -C $(@:install-%=%) install 106 | + 107 | +.PHONY: clean install all 108 | diff --git a/config/Makefile b/config/Makefile 109 | new file mode 100644 110 | index 0000000..211f667 111 | --- /dev/null 112 | +++ b/config/Makefile 113 | @@ -0,0 +1,15 @@ 114 | +CONFFILES=log4j.properties tools-log4j.properties server.properties zookeeper.properties 115 | +MIRROR_CONFFILES=consumer.properties producer.properties 116 | +MIRROR_CONFFILES_EXAMPLES=$(MIRROR_CONFFILES:.properties=.properties.example) 117 | + 118 | +all: 119 | + 120 | +clean: 121 | + 122 | +install: all $(MIRROR_CONFFILES_EXAMPLES) 123 | + $(MKDIR_P) $(DESTDIR)/$(SYSCONFDIR) 124 | + $(INSTALL) -o root -g root $(CONFFILES) $(DESTDIR)/$(SYSCONFDIR) 125 | + $(MKDIR_P) $(DESTDIR)/$(SYSCONFDIR)/mirror 126 | +%.properties.example: %.properties 127 | + $(MKDIR_P) $(DESTDIR)/$(SYSCONFDIR)/mirror 128 | + $(INSTALL) -T -o root -g root $< $(DESTDIR)/$(SYSCONFDIR)/mirror/$@ 129 | diff --git a/contrib/Makefile b/contrib/Makefile 130 | new file mode 100644 131 | 
index 0000000..c214ca3 132 | --- /dev/null 133 | +++ b/contrib/Makefile 134 | @@ -0,0 +1,23 @@ 135 | +SUBDIRS=hadoop-consumer hadoop-producer 136 | +BUILDDIRS=$(SUBDIRS:%=build-%) 137 | +CLEANDIRS=$(SUBDIRS:%=clean-%) 138 | +INSTALLDIRS=$(SUBDIRS:%=install-%) 139 | + 140 | +export CORE_TARGET_PATH=$(dir $(CURDIR))core/target 141 | + 142 | +.PHONY: all install clean 143 | + 144 | +all: $(BUILDDIRS) 145 | +install: $(INSTALLDIRS) all 146 | +clean: $(CLEANDIRS) 147 | + 148 | + 149 | +$(BUILDDIRS): $(TARGET_PATH) 150 | + $(MAKE) -C $(@:build-%=%) all 151 | + 152 | +$(CLEANDIRS): $(TARGET_PATH) 153 | + $(MAKE) -C $(@:clean-%=%) clean 154 | + 155 | +$(INSTALLDIRS): $(TARGET_PATH) 156 | + $(MAKE) -C $(@:install-%=%) install 157 | + 158 | diff --git a/contrib/hadoop-consumer/Makefile b/contrib/hadoop-consumer/Makefile 159 | new file mode 100644 160 | index 0000000..b13c8c7 161 | --- /dev/null 162 | +++ b/contrib/hadoop-consumer/Makefile 163 | @@ -0,0 +1,20 @@ 164 | +SOURCES=$(shell find -name "*.java") 165 | + 166 | +LIB_CLASSJARS:=$(shell find lib -name "*.jar") 167 | +TARGET_PATH=$(CURDIR)/target 168 | +CLASSPATH:=$(CLASSPATH):$(SCALA_LIBRARY):$(LIB_CLASSJARS):$(CORE_TARGET_PATH) 169 | + 170 | +$(TARGET_PATH): 171 | + $(MKDIR_P) $(TARGET_PATH) 172 | + 173 | +all: $(SOURCES) $(TARGET_PATH) 174 | + $(JAVAC) -d $(TARGET_PATH) $(SOURCES) 175 | + 176 | +clean: 177 | + $(RM_RF) $(TARGET_PATH) 178 | + $(RM_RF) hadoop-consumer-$(VERSION).jar 179 | + 180 | +install: all 181 | + $(JAR) cfv hadoop-consumer-$(VERSION).jar -C target . 
182 | + $(INSTALL) -o root -g root hadoop-consumer-$(VERSION).jar $(DESTDIR)$(LIBPATH)/hadoop-consumer-$(VERSION).jar 183 | + 184 | diff --git a/contrib/hadoop-producer/Makefile b/contrib/hadoop-producer/Makefile 185 | new file mode 100644 186 | index 0000000..0e9b2ae 187 | --- /dev/null 188 | +++ b/contrib/hadoop-producer/Makefile 189 | @@ -0,0 +1,20 @@ 190 | +SOURCES=$(shell find -name "*.java") 191 | + 192 | +LIB_CLASSJARS:=$(shell find lib -name "*.jar") 193 | +TARGET_PATH=$(CURDIR)/target 194 | +CLASSPATH:=$(CLASSPATH):$(SCALA_LIBRARY):$(LIB_CLASSJARS):$(CORE_TARGET_PATH) 195 | + 196 | +$(TARGET_PATH): 197 | + $(MKDIR_P) $(TARGET_PATH) 198 | + 199 | +all: $(SOURCES) $(TARGET_PATH) 200 | + $(JAVAC) -d $(TARGET_PATH) $(SOURCES) 201 | + 202 | +clean: 203 | + $(RM_RF) $(TARGET_PATH) 204 | + $(RM_RF) hadoop-producer-$(VERSION).jar 205 | + 206 | +install: all 207 | + $(JAR) cfve hadoop-producer-$(VERSION).jar kafka.bridge.examples.TextPublisher -C target . 208 | + $(INSTALL) -o root -g root hadoop-producer-$(VERSION).jar $(DESTDIR)$(LIBPATH)/hadoop-producer-$(VERSION).jar 209 | + 210 | diff --git a/core/Makefile b/core/Makefile 211 | new file mode 100644 212 | index 0000000..7441725 213 | --- /dev/null 214 | +++ b/core/Makefile 215 | @@ -0,0 +1,20 @@ 216 | +SOURCES=$(shell find src/main -name "*.scala" -or -name "*.java") 217 | +JAVA_SOURCES=$(shell find src/main -name "*.java") 218 | + 219 | +TARGET_PATH=$(CURDIR)/target 220 | +CLASSPATH:=$(CLASSPATH):$(TARGET_PATH) 221 | + 222 | +$(TARGET_PATH): 223 | + $(MKDIR_P) $(TARGET_PATH) 224 | + 225 | +all: $(SOURCES) $(TARGET_PATH) 226 | + $(SCALAC) $(CFLAGS) -d $(TARGET_PATH) $(SOURCES) 227 | + $(JAVAC) -d $(TARGET_PATH) $(JAVA_SOURCES) 228 | + 229 | +clean: 230 | + $(RM_RF) $(TARGET_PATH) 231 | + $(RM_RF) kafka-$(VERSION).jar 232 | + 233 | +install: 234 | + $(JAR) cfv kafka-$(VERSION).jar -C target . 
235 | + $(INSTALL) -o root -g root kafka-$(VERSION).jar $(DESTDIR)$(LIBPATH)/kafka-$(VERSION).jar 236 | diff --git a/examples/Makefile b/examples/Makefile 237 | new file mode 100644 238 | index 0000000..8d5e751 239 | --- /dev/null 240 | +++ b/examples/Makefile 241 | @@ -0,0 +1,19 @@ 242 | +SOURCES=$(shell find -name "*.java") 243 | + 244 | +TARGET_PATH=$(CURDIR)/target 245 | +CORE_TARGET_PATH=$(dir $(CURDIR))core/target 246 | +CLASSPATH:=$(CLASSPATH):$(SCALA_LIBRARY):$(CORE_TARGET_PATH) 247 | + 248 | +$(TARGET_PATH): 249 | + $(MKDIR_P) $(TARGET_PATH) 250 | + 251 | +all: $(SOURCES) $(TARGET_PATH) 252 | + $(JAVAC) -d $(TARGET_PATH) $(SOURCES) 253 | + 254 | +clean: 255 | + $(RM_RF) $(TARGET_PATH) 256 | + $(RM_RF) kafka-java-examples-$(VERSION).jar 257 | + 258 | +install: all 259 | + $(JAR) cfv kafka-java-examples-$(VERSION).jar -C target . 260 | + $(INSTALL) -o root -g root kafka-java-examples-$(VERSION).jar $(DESTDIR)$(LIBPATH)/kafka-java-examples-$(VERSION).jar 261 | diff --git a/perf/Makefile b/perf/Makefile 262 | new file mode 100644 263 | index 0000000..daeea0d 264 | --- /dev/null 265 | +++ b/perf/Makefile 266 | @@ -0,0 +1,20 @@ 267 | +SOURCES=$(shell find -name "*.scala" -or -name "*.java") 268 | + 269 | +TARGET_PATH=$(CURDIR)/target 270 | + 271 | +CORE_TARGET_PATH=$(dir $(CURDIR))core/target 272 | +CLASSPATH:=$(CLASSPATH):$(CORE_TARGET_PATH) 273 | + 274 | +$(TARGET_PATH): 275 | + $(MKDIR_P) $(TARGET_PATH) 276 | + 277 | +all: $(SOURCES) $(TARGET_PATH) 278 | + $(SCALAC) $(CFLAGS) -d $(TARGET_PATH) $(SOURCES) 279 | + 280 | +clean: 281 | + $(RM_RF) $(TARGET_PATH) 282 | + $(RM_RF) kafka-perf-$(VERSION).jar 283 | + 284 | +install: all 285 | + $(JAR) cfv kafka-perf-$(VERSION).jar -C target . 
286 | + $(INSTALL) -o root -g root kafka-perf-$(VERSION).jar $(DESTDIR)$(LIBPATH)/kafka-perf-$(VERSION).jar 287 | -- 288 | -------------------------------------------------------------------------------- /debian/bin/kafka: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # kafka 4 | # 5 | # Wrapper script for Kafka executable classes. 6 | # This script is a single replacement for the 7 | # shell scripts shipped with Kafka source bin/ 8 | # directory. 9 | # 10 | 11 | SCRIPT_NAME=$(basename "$0") 12 | KAFKA_CONFIG=${KAFKA_CONFIG:-/etc/kafka} 13 | LOG_DIR=${LOG_DIR:-/var/log/kafka} 14 | 15 | # Set ZOOKEEPER_OPT if ZOOKEEPER_URL is set and --zookeeper has not 16 | # also been passed in as a CLI arg. This will be included 17 | # in command functions that take a --zookeeper argument. 18 | if [ -n "${ZOOKEEPER_URL}" -a -z "$(echo $@ | grep -- --zookeeper)" ]; then 19 | ZOOKEEPER_OPT="--zookeeper ${ZOOKEEPER_URL}" 20 | fi 21 | 22 | 23 | USAGE="Usage: 24 | 25 | $SCRIPT_NAME [opts] 26 | Run $SCRIPT_NAME with zero arguments/options to see command usage. 
27 | 28 | Commands: 29 | $SCRIPT_NAME topic [opts] 30 | 31 | $SCRIPT_NAME console-producer [opts] 32 | $SCRIPT_NAME console-consumer [opts] 33 | $SCRIPT_NAME simple-consumer-shell [opts] 34 | $SCRIPT_NAME replay-log-producer [opts] 35 | 36 | $SCRIPT_NAME mirror-maker [opts] 37 | $SCRIPT_NAME consumer-offset-checker [opts] 38 | 39 | $SCRIPT_NAME add-partitions [opts] 40 | $SCRIPT_NAME reassign-partitions [opts] 41 | $SCRIPT_NAME check-reassignment-status [opts] 42 | $SCRIPT_NAME preferred-replica-election [opts] 43 | $SCRIPT_NAME controlled-shutdown [opts] 44 | 45 | $SCRIPT_NAME producer-perf-test [opts] 46 | $SCRIPT_NAME consumer-perf-test [opts] 47 | $SCRIPT_NAME simple-consumer-perf-test [opts] 48 | 49 | $SCRIPT_NAME server-start [-daemon] (Default: $KAFKA_CONFIG/server.properties) 50 | $SCRIPT_NAME server-stop 51 | 52 | $SCRIPT_NAME zookeeper-start [-daemon] (Default: $KAFKA_CONFIG/zookeeper.properties) 53 | $SCRIPT_NAME zookeeper-stop 54 | $SCRIPT_NAME zookeeper-shell [opts] 55 | 56 | Environment Variables: 57 | ZOOKEEPER_URL - If this is set, any commands that take a --zookeeper flag will be passed with this value. 58 | KAFKA_CONFIG - location of Kafka config files. Default: /etc/kafka 59 | JMX_PORT - Set this to expose JMX. This is set by default for brokers and producers. 60 | KAFKA_JVM_PERFORMANCE_OPTS - Any special JVM perfomance options. This is set by default. 61 | KAFKA_HEAP_OPTS - Any special JVM memory heap options. This is set by default. 62 | KAFKA_LOG4J_OPTS - Any log4j options. Especially -Dlog4j.configuration. This is set by default. 63 | KAFKA_OPTS - Any extra options you want to pass. 
64 | " 65 | 66 | usage() { echo "${USAGE}"; } 67 | 68 | 69 | # Builds CLASSPATH and execs java with appropriate main class 70 | kafka_run_class() { 71 | if [ $# -lt 1 ]; 72 | then 73 | echo "USAGE: $0 [-daemon] [-name servicename] [-loggc] classname [opts]" 74 | exit 1 75 | fi 76 | 77 | if [ -z "$SCALA_VERSION" ]; then 78 | SCALA_VERSION=2.8.0 79 | fi 80 | 81 | # if these were not yet set (by user or defaults file), 82 | # then go ahead and set them. 83 | if [ -z "$KAFKA_JMX_OPTS" ]; then 84 | KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false " 85 | fi 86 | if [ -n "$JMX_PORT" ]; then 87 | KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Dcom.sun.management.jmxremote.port=$JMX_PORT " 88 | fi 89 | 90 | # Log4j settings 91 | if [ -z "$KAFKA_LOG4J_OPTS" ]; then 92 | KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_CONFIG/tools-log4j.properties" 93 | fi 94 | 95 | if [ -z "$KAFKA_OPTS" ]; then 96 | KAFKA_OPTS="" 97 | fi 98 | 99 | if [ -z "$JAVA_HOME" ]; then 100 | JAVA="java" 101 | else 102 | JAVA="$JAVA_HOME/bin/java" 103 | fi 104 | 105 | # Memory options 106 | if [ -z "$KAFKA_HEAP_OPTS" ]; then 107 | KAFKA_HEAP_OPTS="-Xmx256M" 108 | fi 109 | 110 | # JVM performance options 111 | if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then 112 | KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseCompressedOops -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:+DisableExplicitGC -Djava.awt.headless=true" 113 | fi 114 | 115 | while [ $# -gt 0 ]; do 116 | COMMAND=$1 117 | case $COMMAND in 118 | -name) 119 | DAEMON_NAME=$2 120 | CONSOLE_OUTPUT_FILE=$LOG_DIR/$DAEMON_NAME.out 121 | shift 2 122 | ;; 123 | -loggc) 124 | if [ -z "$KAFKA_GC_LOG_OPTS"] ; then 125 | GC_LOG_ENABLED="true" 126 | fi 127 | shift 128 | ;; 129 | -daemon) 130 | DAEMON_MODE="true" 131 | shift 132 | ;; 133 | *) 134 | break 135 | ;; 136 | esac 137 | done 138 | 139 | # GC options 140 | 
GC_FILE_SUFFIX='-gc.log' 141 | GC_LOG_FILE_NAME='' 142 | if [ "x$GC_LOG_ENABLED" = "xtrue" ]; then 143 | GC_LOG_FILE_NAME=$DAEMON_NAME$GC_FILE_SUFFIX 144 | KAFKA_GC_LOG_OPTS="-Xloggc:$LOG_DIR/$GC_LOG_FILE_NAME -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps " 145 | fi 146 | 147 | # Setting the classpath to all the needed dependencies. 148 | CLASSPATH=$CLASSPATH:/usr/share/java/scala-library.jar:/usr/share/java/jetty.jar:/usr/share/java/jetty-util.jar:/usr/share/java/zookeeper.jar:/usr/share/java/velocity-1.7.jar:/usr/share/java/slf4j-api.jar:/usr/share/java/slf4j-api.jar:/usr/share/java/servlet-api-2.5.jar:/usr/share/java/jsp-api-2.1.jar:/usr/share/java/qdox.jar:/usr/share/java/paranamer.jar:/usr/share/java/oro.jar:/usr/share/java/objenesis.jar:/usr/share/java/log4j-1.2-1.2.16.jar:/usr/share/java/junit4.jar:/usr/share/java/joda-time.jar:/usr/share/java/jets3t.jar:/usr/share/java/jackson-core.jar:/usr/share/java/jackson-mapper.jar:/usr/share/java/hsqldb.jar:/usr/share/java/commons-cli.jar:/usr/share/java/commons-codec.jar:/usr/share/java/commons-collections.jar:/usr/share/java/commons-compress.jar:/usr/share/java/commons-el.jar:/usr/share/java/commons-httpclient.jar:/usr/share/java/commons-io.jar:/usr/share/java/commons-lang.jar:/usr/share/java/commons-logging.jar:/usr/share/java/commons-net.jar:/usr/share/java/cglib.jar:/usr/share/java/asm3.jar:/usr/share/java/snappy-java-1.0.4.1.jar:/usr/share/java/jasper-runtime-5.5.12.jar:/usr/share/java/jsp-api-2.1-6.1.14.jar:/usr/share/java/avro-1.4.0.jar:/usr/share/java/kfs-0.3.jar:/usr/share/java/jsp-2.1-6.1.14.jar:/usr/share/java/scalatest-1.2.jar:/usr/share/java/paranamer-ant-2.2.jar:/usr/share/java/paranamer-generator-2.2.jar:/usr/share/java/pig-0.8.0.jar:/usr/share/java/zkclient-0.3.jar:/usr/share/java/easymock-3.0.jar:/usr/share/java/jasper-compiler-5.5.12.jar:/usr/share/java/hadoop-core-0.20.2.jar:/usr/share/java/xmlenc-0.52.jar:/usr/share/java/jopt-simple-3.2.jar:/usr/share/kafka/kafka.jar
:/usr/share/kafka/hadoop-consumer.jar:/usr/share/kafka/hadoop-producer.jar:/usr/share/kafka/kafka-kafka-examples.jar:/usr/share/kafka/kafka-perf.jar:/usr/share/java/metrics-annotation-2.2.0.jar:/usr/share/java/metrics-core-2.2.0.jar:/usr/share/java/metrics-ganglia-2.2.0.jar:/usr/share/java/metrics-graphite-2.2.0.jar:/usr/share/kafka/kafka-ganglia-1.0.0.jar 149 | 150 | # Launch mode 151 | if [ "x$DAEMON_MODE" = "xtrue" ]; then 152 | nohup $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" > "$CONSOLE_OUTPUT_FILE" 2>&1 < /dev/null & 153 | else 154 | exec $JAVA $KAFKA_HEAP_OPTS $KAFKA_JVM_PERFORMANCE_OPTS $KAFKA_GC_LOG_OPTS $KAFKA_JMX_OPTS $KAFKA_LOG4J_OPTS -cp $CLASSPATH $KAFKA_OPTS "$@" 155 | fi 156 | } 157 | 158 | cmd_topic() { 159 | kafka_run_class kafka.admin.TopicCommand $ZOOKEEPER_OPT "$@" 160 | } 161 | 162 | cmd_console_producer() { 163 | export JMX_PORT="${JMX_PORT:-9990}" 164 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx512M}" 165 | kafka_run_class kafka.producer.ConsoleProducer "$@" 166 | } 167 | 168 | cmd_console_consumer() { 169 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx512M}" 170 | kafka_run_class kafka.consumer.ConsoleConsumer $ZOOKEEPER_OPT "$@" 171 | } 172 | 173 | cmd_consumer_shell() { 174 | kafka_run_class kafka.tools.ConsumerShell $ZOOKEEPER_OPT "$@" 175 | } 176 | 177 | cmd_producer_shell() { 178 | export JMX_PORT="${JMX_PORT:-9991}" 179 | kafka_run_class kafka.tools.ProducerShell "$@" 180 | } 181 | 182 | cmd_simple_consumer_shell() { 183 | kafka_run_class kafka.tools.SimpleConsumerShell $ZOOKEEPER_OPT "$@" 184 | } 185 | 186 | cmd_replay_log_producer() { 187 | export JMX_PORT="${JMX_PORT:-9992}" 188 | kafka_run_class kafka.tools.ReplayLogProducer $ZOOKEEPER_OPT "$@" 189 | } 190 | 191 | cmd_mirror_maker() { 192 | export JMX_PORT="${JMX_PORT:-9993}" 193 | export 
KAFKA_LOG4J_OPTS="${KAFKA_LOG4J_OPTS:--Dlog4j.configuration=file:${KAFKA_CONFIG}/log4j.properties}" 194 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx1G -Xms1G}" 195 | kafka_run_class daemon kafkaMirror kafka.tools.MirrorMaker "$@" 196 | } 197 | 198 | cmd_consumer_offset_checker() { 199 | kafka_run_class kafka.tools.ConsumerOffsetChecker $ZOOKEEPER_OPT "$@" 200 | } 201 | 202 | cmd_add_partitions() { 203 | kafka_run_class kafka.admin.AddPartitionsCommand $ZOOKEEPER_OPT "$@" 204 | } 205 | 206 | cmd_reassign_partitions() { 207 | kafka_run_class kafka.admin.ReassignPartitionsCommand $ZOOKEEPER_OPT "$@" 208 | } 209 | 210 | cmd_check_reassignment_status() { 211 | kafka_run_class kafka.admin.CheckReassignmentStatus $ZOOKEEPER_OPT "$@" 212 | } 213 | 214 | cmd_preferred_replica_election() { 215 | kafka_run_class kafka.admin.PreferredReplicaLeaderElectionCommand $ZOOKEEPER_OPT "$@" 216 | } 217 | 218 | cmd_controlled_shutdown() { 219 | kafka_run_class kafka.admin.ShutdownBroker $ZOOKEEPER_OPT "$@" 220 | } 221 | 222 | 223 | cmd_producer_perf_test() { 224 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:-"-Xmx512M"}" 225 | kafka_run_class kafka.perf.ProducerPerformance "$@" 226 | } 227 | cmd_consumer_perf_test() { 228 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:-"-Xmx512M"}" 229 | kafka_run_class kafka.perf.ConsumerPerformance $ZOOKEEPER_OPT "$@" 230 | } 231 | 232 | cmd_simple_consumer_perf_test() { 233 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:-"-Xmx512M"}" 234 | kafka_run_class kafka.tools.SimpleConsumerPerformance $ZOOKEEPER_OPT "$@" 235 | } 236 | 237 | cmd_server_start() { 238 | EXTRA_ARGS="-name kafkaServer -loggc" 239 | 240 | COMMAND=$1 241 | case $COMMAND in 242 | -daemon) 243 | EXTRA_ARGS="-daemon "$EXTRA_ARGS 244 | shift 245 | ;; 246 | *) 247 | ;; 248 | esac 249 | 250 | # default server.properties to $KAFKA_CONFIG/server.properties 251 | server_properties=${1:-"${KAFKA_CONFIG}/server.properties"} 252 | 253 | export JMX_PORT="${JMX_PORT:-9999}" 254 | export 
KAFKA_LOG4J_OPTS="${KAFKA_LOG4J_OPTS:--Dlog4j.configuration=file:${KAFKA_CONFIG}/log4j.properties}" 255 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx1G -Xms1G}" 256 | 257 | kafka_run_class "${EXTRA_ARGS}" kafka.Kafka "${server_properties}" 258 | } 259 | 260 | cmd_server_stop() { 261 | # kafka-server-stop.sh in Kafka repository uses SIGINT instead of SIGTERM 262 | # here, but in testing SIGINT did not kill zookeeper. 263 | ps ax | grep -i 'kafka\.Kafka' | grep java | grep -v grep | awk '{print $1}' | xargs kill -SIGTERM 264 | } 265 | 266 | cmd_zookeeper_start() { 267 | EXTRA_ARGS="-name zookeeper -loggc" 268 | 269 | COMMAND=$1 270 | case "${COMMAND}" in 271 | -daemon) 272 | EXTRA_ARGS="-daemon ${EXTRA_ARGS}" 273 | shift 274 | ;; 275 | *) 276 | ;; 277 | esac 278 | 279 | # default zookeeper.properties to $KAFKA_CONFIG/zookeeper.properties 280 | zookeeper_properties=${1:-"${KAFKA_CONFIG}/zookeeper.properties"} 281 | 282 | export JMX_PORT="${JMX_PORT:-9998}" 283 | export KAFKA_LOG4J_OPTS="${KAFKA_LOG4J_OPTS:--Dlog4j.configuration=file:${KAFKA_CONFIG}/log4j.properties}" 284 | export KAFKA_HEAP_OPTS="${KAFKA_HEAP_OPTS:--Xmx512M -Xms512M}" 285 | 286 | kafka_run_class "${EXTRA_ARGS}" org.apache.zookeeper.server.quorum.QuorumPeerMain "${zookeeper_properties}" 287 | } 288 | 289 | cmd_zookeeper_stop() { 290 | # zookeeper-server-stop.sh in Kafka repository uses SIGINT instead of SIGTERM 291 | # here, but in testing SIGINT did not kill zookeeper. 
292 | ps ax | grep -i 'zookeeper' | grep -v grep | awk '{print $1}' | xargs kill -SIGTERM 293 | } 294 | 295 | cmd_zookeeper_shell() { 296 | if [ -z "${ZOOKEEPER_URL}" -a $# -eq 0 ]; 297 | then 298 | echo "USAGE: ${SCRIPT_NAME} zookeeper-shell zookeeper_host:port[/path] [args...]" 299 | exit 1 300 | fi 301 | 302 | # Prefer the passed in $1 over ZOOKEEPER_URL for -server arg 303 | if [ -n "$1" ]; 304 | then 305 | ARGS="-server $@" 306 | else 307 | ARGS="-server ${ZOOKEEPER_URL} $@" 308 | fi 309 | 310 | echo kafka_run_class org.apache.zookeeper.ZooKeeperMain $ARGS 311 | } 312 | 313 | 314 | 315 | 316 | # if no commands, print usage. 317 | if [ $# -lt 1 ]; then 318 | usage && exit 0 319 | fi 320 | 321 | # parse cli args 322 | while test $# != 0 323 | do 324 | case "$1" in 325 | topic | \ 326 | console-producer | \ 327 | console-consumer | \ 328 | simple-consumer-shell | \ 329 | replay-log-producer | \ 330 | mirror-maker | \ 331 | consumer-offset-checker | \ 332 | add-partitions | \ 333 | reassign-partitions | \ 334 | check-reassignment-status | \ 335 | preferred-replica-election | \ 336 | controlled-shutdown | \ 337 | producer-perf-test | \ 338 | consumer-perf-test | \ 339 | simple-consumer-perf-test | \ 340 | server-start | \ 341 | server-stop | \ 342 | zookeeper-start | \ 343 | zookeeper-stop | \ 344 | zookeeper-shell ) 345 | command="$1"; shift; break; 346 | shift; 347 | break; 348 | ;; 349 | -h|--help|help) 350 | usage 351 | exit 0 352 | ;; 353 | *) 354 | echo "Invalid command : '$1'. Aborting." >&2 355 | exit 1 356 | ;; 357 | esac 358 | done 359 | 360 | # convert dashes to underscores 361 | function_name=$(echo "${command}" | tr - _) 362 | # call the command function 363 | "cmd_$function_name" "$@" 364 | --------------------------------------------------------------------------------