├── .gitignore
├── LICENSE
├── README.md
├── build.gradle
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── kafka-graphite-clients
└── src
│ ├── main
│ └── java
│ │ └── org
│ │ └── apache
│ │ └── kafka
│ │ └── common
│ │ └── metrics
│ │ └── GraphiteReporter.java
│ └── test
│ └── java
│ └── org
│ └── apache
│ └── kafka
│ └── common
│ └── metrics
│ └── GraphiteReporterTest.java
├── kafka-graphite
└── src
│ ├── main
│ └── scala
│ │ └── kafka
│ │ └── metrics
│ │ ├── KafkaGraphiteMetricsConfig.scala
│ │ └── KafkaGraphiteMetricsReporter.scala
│ └── test
│ ├── resources
│ └── log4j.properties
│ └── scala
│ └── kafka
│ └── metrics
│ └── KafkaGraphiteMetricsReporterTest.scala
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Java template
3 | *.class
4 |
5 | # Mobile Tools for Java (J2ME)
6 | .mtj.tmp/
7 |
8 | # Package Files #
9 | *.jar
10 | *.war
11 | *.ear
12 |
13 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
14 | hs_err_pid*
15 |
16 |
17 | ### Gradle template
18 | .gradle
19 | build/
20 | gradle.properties
21 |
22 | # Ignore Gradle GUI config
23 | gradle-app.setting
24 |
25 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
26 | !gradle-wrapper.jar
27 |
28 |
29 | ### JetBrains template
30 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion
31 |
32 | *.iml
33 |
34 | ## Directory-based project format:
35 | .idea/
36 | # if you remove the above rule, at least ignore the following:
37 |
38 | # User-specific stuff:
39 | # .idea/workspace.xml
40 | # .idea/tasks.xml
41 | # .idea/dictionaries
42 |
43 | # Sensitive or high-churn files:
44 | # .idea/dataSources.ids
45 | # .idea/dataSources.xml
46 | # .idea/sqlDataSources.xml
47 | # .idea/dynamic.xml
48 | # .idea/uiDesigner.xml
49 |
50 | # Gradle:
51 | # .idea/gradle.xml
52 | # .idea/libraries
53 |
54 | # Mongo Explorer plugin:
55 | # .idea/mongoSettings.xml
56 |
57 | ## File-based project format:
58 | *.ipr
59 | *.iws
60 |
61 | ## Plugin-specific files:
62 |
63 | # IntelliJ
64 | /out/
65 |
66 | # mpeltonen/sbt-idea plugin
67 | .idea_modules/
68 |
69 | # JIRA plugin
70 | atlassian-ide-plugin.xml
71 |
72 | # Crashlytics plugin (for Android Studio and IntelliJ)
73 | com_crashlytics_export_strings.xml
74 | crashlytics.properties
75 | crashlytics-build.properties
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2017 Alexander Pakulov
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Kafka Graphite Metrics Reporter
2 | ===============================
3 | This plugin allows you to export metrics from a Kafka instance directly to Graphite. Supports Kafka 0.10.2+.
4 |
5 | Plugin supports multiple interfaces:
6 | - Broker (kafka.metrics.KafkaMetricsReporter)
7 | - Clients (org.apache.kafka.common.metrics.MetricsReporter)
8 |
9 | Install
10 | -------
11 | The clients plugin can be integrated by adding the following dependency.
12 |
13 | Maven
14 |
15 | ``` xml
16 | <dependency>
17 |     <groupId>com.pakulov.kafka</groupId>
18 |     <artifactId>kafka-graphite-clients</artifactId>
19 |     <version>0.10.2</version>
20 | </dependency>
21 | ```
22 |
23 | Gradle
24 |
25 | ``` groovy
26 | compile 'com.pakulov.kafka:kafka-graphite-clients:0.10.2'
27 | ```
28 |
29 | Build
30 | -----
31 | The broker plugin can be compiled against different Scala versions; use the *scalaVersion* Gradle property to select the version
32 |
33 | ```
34 | ./gradlew -PscalaVersion=2.10.5 build
35 | ```
36 |
37 | There is also a way to build a deb package
38 |
39 | ```
40 | ./gradlew buildDeb
41 | ```
42 |
43 | Usage
44 | -----
45 | At first you have to configure kafka reporters *server.properties* file
46 |
47 | For **KafkaMetricsReporter** use:
48 | * `kafka.metrics.reporters=kafka.metrics.KafkaGraphiteMetricsReporter`
49 | * `kafka.graphite.metrics.jvm.enabled`: Controls JVM metrics output (default: true)
50 |
51 | For **MetricsReporter** use:
52 | * `metric.reporters=org.apache.kafka.common.metrics.GraphiteReporter`
53 |
54 | Plugin has a set of possible parameters:
55 | * `kafka.metrics.polling.interval.secs=60`: Polling interval that will be used for all Kafka metrics
56 | * `kafka.graphite.metrics.reporter.enabled`: Enables actual plugin (default: false)
57 | * `kafka.graphite.metrics.host`: The graphite host to connect to (default: localhost)
58 | * `kafka.graphite.metrics.port`: The port to connect to (default: 2003)
59 | * `kafka.graphite.metrics.prefix`: The metric prefix that's sent with metric names (default: kafka)
60 | * `kafka.graphite.metrics.include`: A regular expression allowing you to explicitly include certain metrics (default: null)
61 | * `kafka.graphite.metrics.exclude`: A regular expression allowing you to exclude certain metrics (default: null)
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | repositories {
3 | mavenCentral()
4 | jcenter()
5 | }
6 |
7 | dependencies {
8 | classpath 'org.gradle.api.plugins:gradle-nexus-plugin:0.7.1'
9 | classpath 'nl.javadude.gradle.plugins:license-gradle-plugin:0.9.0'
10 | classpath 'com.netflix.nebula:gradle-ospackage-plugin:2.2.6'
11 | }
12 | }
13 |
14 | subprojects {
15 | group 'com.pakulov.kafka'
16 | version '0.10.2'
17 |
18 | ext {
19 | kafkaVersion = "0.10.2.1"
20 | }
21 |
22 | repositories {
23 | mavenCentral()
24 | }
25 |
26 | apply plugin: 'license'
27 | apply plugin: 'maven-publish'
28 | apply plugin: 'nexus'
29 | apply plugin: 'java'
30 |
31 | sourceCompatibility = 1.7
32 | targetCompatibility = 1.7
33 |
34 | modifyPom {
35 | project {
36 | name 'Kafka Graphite Metrics Reporter'
37 | description 'Graphite integration for Kafka Metrics Reporter'
38 | url 'https://github.com/apakulov/kafka-graphite'
39 | inceptionYear '2015'
40 |
41 | scm {
42 | url 'https://github.com/apakulov/kafka-graphite'
43 | connection 'scm:git:git@github.com:apakulov/kafka-graphite.git'
44 | developerConnection 'scm:git:ssh@github.com:apakulov/kafka-graphite.git'
45 | }
46 |
47 | licenses {
48 | license {
49 | name 'The Apache Software License, Version 2.0'
50 | url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
51 | distribution 'repo'
52 | }
53 | }
54 |
55 | developers {
56 | developer {
57 | id 'apakulov'
58 | name 'Alexander Pakulov'
59 | email 'a.pakulov@gmail.com'
60 | }
61 | }
62 | }
63 | }
64 |
65 | publishing {
66 | publications {
67 | maven(MavenPublication) {
68 | artifact jar
69 | }
70 | }
71 | }
72 |
73 | license {
74 | header rootProject.file('LICENSE')
75 | }
76 | }
77 |
78 | project(':kafka-graphite') {
79 | apply plugin: 'os-package'
80 | apply plugin: 'scala'
81 |
82 | ext {
83 | ext.baseScalaVersion = '2.11'
84 | if (project.hasProperty('scalaVersion')) {
85 | String scalaVersion = project.property('scalaVersion');
86 | ext.baseScalaVersion = scalaVersion.substring(0, scalaVersion.lastIndexOf('.'))
87 | }
88 | }
89 |
90 | dependencies {
91 | compile "org.apache.kafka:kafka_$baseScalaVersion:$kafkaVersion"
92 | compile "com.yammer.metrics:metrics-graphite:2.2.0"
93 |
94 | testCompile "org.scalatest:scalatest_$baseScalaVersion:3.0.3"
95 | }
96 |
97 | project.afterEvaluate {
98 | project.tasks.each {
99 | task -> if (task.name.toLowerCase().endsWith('jar'))
100 | task.baseName = "kafka_${baseScalaVersion}-graphite"
101 | }
102 | }
103 |
104 | ospackage {
105 | release = '1'
106 | os = 'LINUX'
107 |
108 | into '/opt/kafka'
109 |
110 | from(jar.outputs.files) {
111 | into 'libs'
112 | }
113 |
114 | from(configurations.compile) {
115 | include 'metrics-graphite*'
116 | into 'libs'
117 | }
118 | }
119 | }
120 |
121 | project(':kafka-graphite-clients') {
122 | dependencies {
123 | compile "org.apache.kafka:kafka-clients:$kafkaVersion"
124 | testCompile "org.hamcrest:hamcrest-junit:2.0.0.0",
125 | 'org.powermock:powermock-core:1.6.1',
126 | 'org.powermock:powermock-module-junit4:1.6.1',
127 | 'org.powermock:powermock-api-mockito:1.6.1'
128 | }
129 | }
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apakulov/kafka-graphite/1b2bf9e92bbbeaba10e028891fb457e862125d75/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Jan 23 12:51:42 PST 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-bin.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn ( ) {
37 | echo "$*"
38 | }
39 |
40 | die ( ) {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save ( ) {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/kafka-graphite-clients/src/main/java/org/apache/kafka/common/metrics/GraphiteReporter.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 Alexander Pakulov
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.apache.kafka.common.metrics;
17 |
18 | import org.apache.kafka.common.MetricName;
19 | import org.apache.kafka.common.config.AbstractConfig;
20 | import org.apache.kafka.common.config.ConfigDef;
21 | import org.slf4j.Logger;
22 | import org.slf4j.LoggerFactory;
23 |
24 | import java.io.BufferedWriter;
25 | import java.io.IOException;
26 | import java.io.OutputStreamWriter;
27 | import java.io.Writer;
28 | import java.net.Socket;
29 | import java.util.ArrayList;
30 | import java.util.Collections;
31 | import java.util.List;
32 | import java.util.Locale;
33 | import java.util.Map;
34 | import java.util.concurrent.ScheduledExecutorService;
35 | import java.util.concurrent.ScheduledThreadPoolExecutor;
36 | import java.util.concurrent.TimeUnit;
37 | import java.util.regex.Pattern;
38 |
39 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.GRAPHITE_HOST;
40 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.GRAPHITE_PORT;
41 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.INCLUDE;
42 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.EXCLUDE;
43 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.PREFIX;
44 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.REPORTER_ENABLED;
45 | import static org.apache.kafka.common.metrics.GraphiteReporter.GraphiteConfig.INTERVAL;
46 |
47 | public class GraphiteReporter implements MetricsReporter, Runnable {
48 | private static final Logger log = LoggerFactory.getLogger(GraphiteReporter.class);
49 |
50 | private List metricList = Collections.synchronizedList(new ArrayList());
51 | private final ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1);
52 | private GraphiteConfig config;
53 |
54 | private String prefix;
55 | private String hostname;
56 | private int port;
57 | private Pattern include;
58 | private Pattern exclude;
59 |
60 | @Override
61 | public void configure(final Map configs) {
62 | this.config = new GraphiteConfig(configs);
63 | }
64 |
65 | @Override
66 | public void init(List metrics) {
67 | this.hostname = config.getString(GRAPHITE_HOST);
68 | this.port = config.getInt(GRAPHITE_PORT);
69 | this.prefix = config.getString(PREFIX);
70 |
71 | final String includeRegex = config.getString(INCLUDE);
72 | final String excludeRegex = config.getString(EXCLUDE);
73 | this.include = includeRegex != null && !includeRegex.isEmpty() ? Pattern.compile(includeRegex) : null;
74 | this.exclude = excludeRegex != null && !excludeRegex.isEmpty() ? Pattern.compile(excludeRegex) : null;
75 |
76 | if (config.getBoolean(REPORTER_ENABLED)) {
77 | final int interval = config.getInt(INTERVAL);
78 |
79 | for (final KafkaMetric metric : metrics) {
80 | metricList.add(metric);
81 | }
82 | log.info("Configuring Kafka Graphite Reporter with host={}, port={}, prefix={} and include={}, exclude={}",
83 | hostname, port, prefix, includeRegex, excludeRegex);
84 | executor.scheduleAtFixedRate(this, interval, interval, TimeUnit.SECONDS);
85 | }
86 | }
87 |
88 | @Override
89 | public void metricChange(final KafkaMetric metric) {
90 | metricList.add(metric);
91 | }
92 |
93 | @Override
94 | public void metricRemoval(KafkaMetric metric) {
95 | metricList.remove(metric);
96 | }
97 |
98 | @Override
99 | public void close() {
100 | if (config.getBoolean(REPORTER_ENABLED)) {
101 | executor.submit(this);
102 | }
103 | executor.shutdown();
104 | try {
105 | // A 20 second timeout should be enough to finish the remaining tasks.
106 | if (executor.awaitTermination(20, TimeUnit.SECONDS)) {
107 | log.debug("Executor was shut down successfully.");
108 | } else {
109 | log.error("Timed out before executor was shut down! It's possible some metrics data were not sent out!");
110 | }
111 | } catch (InterruptedException e) {
112 | log.error("Unable to shutdown executor gracefully", e);
113 | }
114 | }
115 |
116 | /** This run method can be called for two purposes:
117 | * - As a scheduled task, see scheduleAtFixedRate
118 | * - As a final task when close() is called
119 | * However, since the size of the ScheduledExecutorService is 1, there's no need to synchronize it.
120 | */
121 | @Override
122 | public void run() {
123 | Socket socket = null;
124 | Writer writer = null;
125 | try {
126 | socket = new Socket(hostname, port);
127 | writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream()));
128 |
129 | final long timestamp = System.currentTimeMillis() / 1000;
130 |
131 | for (KafkaMetric metric : metricList) {
132 | double value = metric.value();
133 | // DO NOT send an invalid value to graphite
134 | if (Double.NEGATIVE_INFINITY == value || Double.isNaN(value)) {
135 | continue;
136 | }
137 | final String name = sanitizeName(metric.metricName());
138 | if (null != include && !include.matcher(name).matches()) {
139 | continue;
140 | }
141 | if (null != exclude && exclude.matcher(name).matches()) {
142 | continue;
143 | }
144 |
145 | if (prefix != null && !prefix.isEmpty()) {
146 | writer.write(config.getString(PREFIX));
147 | writer.write('.');
148 | }
149 | writer.write("kafka.");
150 | writer.write(name);
151 | writer.write(' ');
152 | writer.write(String.format(Locale.US, "%2.2f", value));
153 | writer.write(' ');
154 | writer.write(Long.toString(timestamp));
155 | writer.write('\n');
156 | writer.flush();
157 | }
158 | } catch (Exception e) {
159 | log.warn("Error writing to Graphite", e);
160 | if (writer != null) {
161 | try {
162 | writer.flush();
163 | } catch (IOException e1) {
164 | log.error("Error while flushing writer:", e1);
165 | }
166 | }
167 | } finally {
168 | if (socket != null) {
169 | try {
170 | socket.close();
171 | } catch (IOException e) {
172 | log.error("Error while closing socket:", e);
173 | }
174 | }
175 | }
176 | }
177 |
178 | String sanitizeName(MetricName name) {
179 | StringBuilder result = new StringBuilder().append(name.group()).append('.');
180 | for (Map.Entry tag : name.tags().entrySet()) {
181 | result.append(tag.getValue().replace(".", "_")).append('.');
182 | }
183 | return result.append(name.name()).toString().replace(' ', '_');
184 | }
185 |
186 | public static class GraphiteConfig extends AbstractConfig {
187 | public static final String REPORTER_ENABLED = "kafka.graphite.metrics.reporter.enabled";
188 | public static final String GRAPHITE_HOST = "kafka.graphite.metrics.host";
189 | public static final String GRAPHITE_PORT = "kafka.graphite.metrics.port";
190 | public static final String PREFIX = "kafka.graphite.metrics.prefix";
191 | public static final String INCLUDE = "kafka.graphite.metrics.include";
192 | public static final String EXCLUDE = "kafka.graphite.metrics.exclude";
193 | public static final String INTERVAL = "kafka.metrics.polling.interval.secs";
194 |
195 | private static final ConfigDef configDefinition = new ConfigDef()
196 | .define(REPORTER_ENABLED, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW, "Enables actual plugin")
197 | .define(GRAPHITE_HOST, ConfigDef.Type.STRING, "localhost", ConfigDef.Importance.HIGH, "The graphite host to connect")
198 | .define(GRAPHITE_PORT, ConfigDef.Type.INT, 2003, ConfigDef.Importance.HIGH, "The port to connect")
199 | .define(PREFIX, ConfigDef.Type.STRING, "kafka", ConfigDef.Importance.MEDIUM, "The metric prefix that's sent with metric names")
200 | .define(INCLUDE, ConfigDef.Type.STRING, "", ConfigDef.Importance.LOW, "A regular expression allowing explicitly include certain metrics")
201 | .define(EXCLUDE, ConfigDef.Type.STRING, "", ConfigDef.Importance.LOW, "A regular expression allowing you to exclude certain metrics")
202 | .define(INTERVAL, ConfigDef.Type.INT, "60", ConfigDef.Importance.MEDIUM, "Polling interval that will be used for all Kafka metrics");
203 |
204 | private GraphiteConfig(Map, ?> originals) {
205 | super(configDefinition, originals);
206 | }
207 | }
208 | }
--------------------------------------------------------------------------------
/kafka-graphite-clients/src/test/java/org/apache/kafka/common/metrics/GraphiteReporterTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 Alexander Pakulov
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package org.apache.kafka.common.metrics;
17 |
18 | import org.apache.kafka.common.MetricName;
19 | import org.apache.kafka.common.metrics.stats.Count;
20 | import org.apache.kafka.common.utils.SystemTime;
21 | import org.junit.After;
22 | import org.junit.Before;
23 | import org.junit.Test;
24 | import org.junit.runner.RunWith;
25 | import org.powermock.core.classloader.annotations.PrepareForTest;
26 | import org.powermock.modules.junit4.PowerMockRunner;
27 | import org.powermock.reflect.Whitebox;
28 |
29 | import java.io.BufferedReader;
30 | import java.io.IOException;
31 | import java.io.InputStreamReader;
32 | import java.net.ServerSocket;
33 | import java.net.Socket;
34 | import java.util.*;
35 | import java.util.concurrent.Future;
36 | import java.util.concurrent.ScheduledExecutorService;
37 | import java.util.concurrent.TimeUnit;
38 |
39 | import static org.hamcrest.MatcherAssert.assertThat;
40 | import static org.hamcrest.Matchers.*;
41 | import static org.mockito.Mockito.*;
42 |
43 | @RunWith(PowerMockRunner.class)
44 | @PrepareForTest({GraphiteReporter.class})
45 | public class GraphiteReporterTest {
46 | private GraphiteMockServer graphiteServer;
47 | private GraphiteReporter graphiteReporter;
48 |
49 | @Before
50 | public void setUp() {
51 | graphiteServer = new GraphiteMockServer();
52 | graphiteServer.start();
53 | graphiteReporter = new GraphiteReporter();
54 | }
55 |
56 | @After
57 | public void tearDown() {
58 | graphiteServer.close();
59 | }
60 |
61 | @Test
62 | public void testCounterIncrement() throws Exception {
63 | Map configs = initializeConfigWithReporter();
64 | configs.put("kafka.graphite.metrics.jvm.enabled", "false");
65 | graphiteReporter.configure(configs);
66 |
67 | List metrics = new ArrayList<>();
68 | final KafkaMetric metric = createMetric("test");
69 | final Count counter = (Count) metric.measurable();
70 | final MetricConfig config = metric.config();
71 | metrics.add(metric);
72 | graphiteReporter.init(metrics);
73 |
74 |
75 | counter.record(config, 1, System.currentTimeMillis());
76 | Thread.sleep(2000);
77 | counter.record(config, 2, System.currentTimeMillis());
78 | Thread.sleep(2000);
79 |
80 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.test 1.00")));
81 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.test 2.00")));
82 | graphiteReporter.close();
83 | }
84 |
85 | @Test
86 | public void testExcludeData() throws Exception {
87 | Map configs = initializeConfigWithReporter();
88 | configs.put("kafka.graphite.metrics.exclude", ".*test.*");
89 | configs.put("kafka.graphite.metrics.jvm.enabled", "false");
90 | graphiteReporter.configure(configs);
91 |
92 | List metrics = new ArrayList<>();
93 | metrics.add(createMetric("valid"));
94 | metrics.add(createMetric("test"));
95 | graphiteReporter.init(metrics);
96 |
97 | Thread.sleep(2000);
98 |
99 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.valid")));
100 | assertThat(graphiteServer.content, not(hasItem(containsString("group.topic.test"))));
101 | graphiteReporter.close();
102 | }
103 |
104 | @Test
105 | public void textIncludeData() throws Exception {
106 | Map configs = initializeConfigWithReporter();
107 | configs.put("kafka.graphite.metrics.include", ".*test.*");
108 | graphiteReporter.configure(configs);
109 |
110 | List metrics = new ArrayList<>();
111 | metrics.add(createMetric("valid"));
112 | metrics.add(createMetric("test"));
113 | graphiteReporter.init(metrics);
114 |
115 | Thread.sleep(2000);
116 |
117 | assertThat(graphiteServer.content, not(hasItem(containsString("group.topic.invalid"))));
118 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.test")));
119 | graphiteReporter.close();
120 | }
121 |
122 | @Test
123 | public void testExcludeIncludeData() throws Exception {
124 | Map configs = initializeConfigWithReporter();
125 | configs.put("kafka.graphite.metrics.include", ".*valid.*");
126 | configs.put("kafka.graphite.metrics.exclude", ".*invalid.*");
127 | graphiteReporter.configure(configs);
128 |
129 | List metrics = new ArrayList<>();
130 | metrics.add(createMetric("valid"));
131 | metrics.add(createMetric("invalid"));
132 | metrics.add(createMetric("test"));
133 | graphiteReporter.init(metrics);
134 |
135 | Thread.sleep(2000);
136 |
137 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.valid")));
138 | assertThat(graphiteServer.content, not(hasItem(containsString("group.topic.test"))));
139 | assertThat(graphiteServer.content, not(hasItem(containsString("group.topic.invalid"))));
140 | graphiteReporter.close();
141 | }
142 |
143 | @Test
144 | public void testRemoveMetric() throws Exception {
145 | Map configs = initializeConfigWithReporter();
146 | graphiteReporter.configure(configs);
147 |
148 | final KafkaMetric metricToRemove = createMetric("valid-to-remove");
149 | List metrics = Arrays.asList(createMetric("valid"), metricToRemove);
150 | graphiteReporter.init(metrics);
151 |
152 | Thread.sleep(2000);
153 |
154 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.valid")));
155 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.valid-to-remove")));
156 |
157 | graphiteReporter.metricRemoval(metricToRemove);
158 | graphiteServer.content.clear();
159 | Thread.sleep(2000);
160 |
161 | assertThat(graphiteServer.content, hasItem(containsString("group.topic.valid")));
162 | assertThat(graphiteServer.content, not(hasItem(containsString("group.topic.valid-to-remove"))));
163 | graphiteReporter.close();
164 | }
165 |
166 | @Test
167 | public void testInitFailure() {
168 | final Map configs = new HashMap<>();
169 | configs.put("metric.reporters", "org.apache.kafka.common.metrics.GraphiteReporter");
170 | configs.put("kafka.metrics.polling.interval.secs", "1");
171 | configs.put("kafka.graphite.metrics.reporter.enabled", "true");
172 | configs.put("kafka.graphite.metrics.host", "localhost");
173 | configs.put("kafka.graphite.metrics.port", "0");
174 |
175 | graphiteReporter.configure(configs);
176 | graphiteReporter.init(Collections.emptyList());
177 | graphiteReporter.close();
178 | }
179 |
/**
 * Verifies the shutdown sequence of close() by swapping the reporter's
 * internal executor for a mock and checking the calls made on it.
 */
@Test
public void testClose() throws InterruptedException {
    Map configs = initializeConfigWithReporter();
    graphiteReporter.configure(configs);

    // Mock the scheduler so close() can be verified without a real executor.
    ScheduledExecutorService mockExecutor = mock(ScheduledExecutorService.class);
    Future mockFuture = mock(Future.class);
    when(mockExecutor.submit(graphiteReporter)).thenReturn(mockFuture);
    when(mockExecutor.awaitTermination(20, TimeUnit.SECONDS)).thenReturn(true);

    // Inject the mock into the reporter's private "executor" field via reflection.
    Whitebox.setInternalState(graphiteReporter, "executor", mockExecutor);
    graphiteReporter.close();

    // close() is expected to stop the scheduler, submit one final report run,
    // and wait up to 20 seconds for termination.
    verify(mockExecutor).shutdown();
    verify(mockExecutor).submit(graphiteReporter);
    verify(mockExecutor).awaitTermination(20, TimeUnit.SECONDS);
}
197 |
198 | private KafkaMetric createMetric(final String topicName) {
199 | final Map tags = new HashMap<>();
200 | tags.put("client-id", "topic");
201 | final MetricName group = new Metrics().metricName(topicName, "group", tags);
202 | return new KafkaMetric(new Object(), group, new Count(), new MetricConfig(), new SystemTime());
203 | }
204 |
205 | private Map initializeConfigWithReporter() {
206 | final Map configs = new HashMap<>();
207 | configs.put("metric.reporters", "org.apache.kafka.common.metrics.GraphiteReporter");
208 | configs.put("kafka.metrics.polling.interval.secs", "1");
209 | configs.put("kafka.graphite.metrics.reporter.enabled", "true");
210 | configs.put("kafka.graphite.metrics.host", "localhost");
211 | configs.put("kafka.graphite.metrics.port", String.valueOf(graphiteServer.port));
212 | return configs;
213 | }
214 |
215 | private static class GraphiteMockServer extends Thread {
216 | private List content = new ArrayList<>();
217 | private Socket socket;
218 | private ServerSocket server;
219 | private Integer port;
220 |
221 | public GraphiteMockServer() {
222 | try {
223 | this.server = new ServerSocket(new Random().nextInt(65000));
224 | } catch (IOException e) {
225 | throw new RuntimeException("Unable to start ServerSocket", e);
226 | }
227 | this.port = server.getLocalPort();
228 | }
229 |
230 | @Override
231 | public void run() {
232 | while (!server.isClosed()) {
233 | try {
234 | socket = server.accept();
235 | final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(socket.getInputStream()));
236 | String str;
237 | while ((str = bufferedReader.readLine()) != null) {
238 | content.add(str);
239 | }
240 | } catch (IOException e) {
241 | // Bye-bye, I'm dying
242 | }
243 | }
244 | }
245 |
246 | public void close() {
247 | try {
248 | server.close();
249 | } catch (IOException e) {
250 | throw new RuntimeException(e);
251 | }
252 | }
253 | }
254 | }
--------------------------------------------------------------------------------
/kafka-graphite/src/main/scala/kafka/metrics/KafkaGraphiteMetricsConfig.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 Alexander Pakulov
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package kafka.metrics
17 |
18 | import java.util.regex.Pattern
19 |
20 | import kafka.utils.VerifiableProperties
21 |
/**
 * Graphite-specific reporter configuration read from broker properties.
 * All settings are immutable once parsed (previously most were `var` while
 * `host` was `val`, for no apparent reason).
 */
class KafkaGraphiteMetricsConfig(props: VerifiableProperties) extends KafkaMetricsConfig(props) {

  /** Graphite host to connect to. */
  val host = props.getString("kafka.graphite.metrics.host", "localhost")

  /** Graphite plaintext-protocol port. */
  val port = props.getInt("kafka.graphite.metrics.port", 2003)

  /** Prefix prepended to every published metric name. */
  val prefix = props.getString("kafka.graphite.metrics.prefix", "kafka")

  /** Whether the reporter is enabled at all. */
  val enabled = props.getBoolean("kafka.graphite.metrics.reporter.enabled", default = false)

  /** Optional whitelist regex; null when the property is absent. */
  val include = getPattern("kafka.graphite.metrics.include", null)

  /** Optional blacklist regex; null when the property is absent. */
  val exclude = getPattern("kafka.graphite.metrics.exclude", null)

  /** Whether JVM metrics are published alongside Kafka metrics. */
  val jvm = props.getBoolean("kafka.graphite.metrics.jvm.enabled", default = true)

  // Compiles the property value into a Pattern, or returns `default` when absent.
  private def getPattern(key: String, default: Pattern): Pattern = {
    if (!props.containsKey(key)) default
    else Pattern.compile(props.getProperty(key))
  }
}
43 |
--------------------------------------------------------------------------------
/kafka-graphite/src/main/scala/kafka/metrics/KafkaGraphiteMetricsReporter.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 Alexander Pakulov
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package kafka.metrics
17 |
18 | import java.util.concurrent.TimeUnit
19 |
20 | import com.yammer.metrics.Metrics
21 | import com.yammer.metrics.core.{Clock, Metric, MetricName, MetricPredicate}
22 | import com.yammer.metrics.reporting.GraphiteReporter
23 | import com.yammer.metrics.reporting.GraphiteReporter.DefaultSocketProvider
24 | import kafka.utils.{VerifiableProperties, Logging}
25 | import scala.collection.JavaConversions._
26 |
/** JMX MBean interface for the Graphite reporter; adds no operations beyond those inherited from KafkaMetricsReporterMBean. */
trait KafkaGraphiteMetricsReporterMBean extends KafkaMetricsReporterMBean
28 |
/**
 * KafkaMetricsReporter that publishes the yammer metrics registry to Graphite.
 *
 * Supports include/exclude regular-expression filtering, matched against the
 * flattened name "group.type[.scope].name", and optional JVM metrics.
 */
class KafkaGraphiteMetricsReporter extends KafkaMetricsReporter
                                   with KafkaGraphiteMetricsReporterMBean
                                   with Logging {

  private var underlying: GraphiteReporter = _
  private var running = false
  private var initialized = false

  override def getMBeanName: String = "kafka:type=kafka.metrics.KafkaGraphiteMetricsReporter"

  override def init(props: VerifiableProperties) {
    synchronized {
      if (!initialized) {
        val metricsConfig = new KafkaGraphiteMetricsConfig(props)
        val socketProvider = new DefaultSocketProvider(metricsConfig.host, metricsConfig.port)

        // Accepts a metric when it matches the include pattern (if configured)
        // and does not match the exclude pattern (if configured).
        val metricPredicate = new MetricPredicate {
          val include = Option(metricsConfig.include)
          val exclude = Option(metricsConfig.exclude)

          override def matches(name: MetricName, metric: Metric): Boolean = {
            val flatName = groupMetricName(name)
            include.forall(_.matcher(flatName).matches()) &&
              !exclude.exists(_.matcher(flatName).matches())
          }

          // Flattens a MetricName into "group.type[.scope].name", replacing
          // spaces with underscores.
          private def groupMetricName(name: MetricName): String = {
            val result = new StringBuilder().append(name.getGroup).append('.').append(name.getType).append('.')
            if (name.hasScope) {
              result.append(name.getScope).append('.')
            }
            result.append(name.getName).toString().replace(' ', '_')
          }
        }

        info("Configuring Kafka Graphite Reporter with host=%s, port=%d, prefix=%s and include=%s, exclude=%s, jvm=%s".format(
          metricsConfig.host, metricsConfig.port, metricsConfig.prefix, metricsConfig.include, metricsConfig.exclude, metricsConfig.jvm))
        underlying = new GraphiteReporter(Metrics.defaultRegistry, metricsConfig.prefix, metricPredicate,
                                          socketProvider, Clock.defaultClock) {
          override def printRegularMetrics(epoch: java.lang.Long) = {
            val metrics = getMetricsRegistry.groupedMetrics(predicate).toMap.values.flatten
            metrics.foreach { case (name, metric) =>
              // Skip null metrics explicitly: the previous guarded partial
              // function had no fallback case, so a null entry threw a
              // scala.MatchError instead of being ignored.
              if (metric != null) {
                try {
                  metric.processWith(this, name, epoch)
                } catch {
                  case e: Exception => error("Error printing regular metrics=" + name, e)
                }
              }
            }
          }
        }
        // Controls JVM metrics output
        underlying.printVMMetrics = metricsConfig.jvm
        if (metricsConfig.enabled) {
          initialized = true
          startReporter(metricsConfig.pollingIntervalSecs)
        }
      }
    }
  }

  override def startReporter(pollingPeriodSecs: Long) {
    synchronized {
      if (initialized && !running) {
        underlying.start(pollingPeriodSecs, TimeUnit.SECONDS)
        running = true
        info("Started Kafka Graphite metrics reporter with polling period %d seconds".format(pollingPeriodSecs))
      }
    }
  }

  override def stopReporter() {
    synchronized {
      if (initialized && running) {
        underlying.shutdown()
        running = false
        info("Stopped Kafka Graphite metrics reporter")
        // A shut-down GraphiteReporter cannot be restarted, so drop the
        // initialized flag as well: previously only `underlying` was nulled
        // while initialized stayed true, making a subsequent startReporter()
        // fail with a NullPointerException.
        initialized = false
        underlying = null
      }
    }
  }
}
114 |
--------------------------------------------------------------------------------
/kafka-graphite/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2017 Alexander Pakulov
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
18 | log4j.appender.stdout.Target=System.out
19 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
20 | log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p (%r) (%C{1}.%M:%L) - %m%n
21 |
22 | log4j.rootLogger=INFO, stdout
--------------------------------------------------------------------------------
/kafka-graphite/src/test/scala/kafka/metrics/KafkaGraphiteMetricsReporterTest.scala:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2017 Alexander Pakulov
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package kafka.metrics
17 |
18 | import java.net._
19 | import java.util.Properties
20 |
21 | import com.yammer.metrics.Metrics
22 | import com.yammer.metrics.core.MetricName
23 | import kafka.utils.VerifiableProperties
24 | import org.scalatest.{BeforeAndAfter, Matchers, FlatSpec}
25 |
26 | import scala.collection.mutable.ListBuffer
27 | import scala.io.Source
28 | import scala.util.Random
29 |
/**
 * Minimal in-process Graphite endpoint: records every line received over TCP
 * so tests can assert on what the reporter published.
 */
class GraphiteMockServer extends Thread {
  var socket: Socket = null
  var content: ListBuffer[String] = new ListBuffer[String]
  // Port 0 lets the OS pick a free ephemeral port; the previous random pick
  // (Random.nextInt(65000)) could land on a privileged or already-bound port
  // and make the tests flaky.
  val server: ServerSocket = new ServerSocket(0)
  val port: Int = server.getLocalPort

  override def run(): Unit = {
    // Accept connections until close() shuts the server socket down.
    while (!server.isClosed) {
      try {
        socket = server.accept()
        val lines = Source.fromInputStream(socket.getInputStream).getLines()
        while (lines.hasNext) {
          content += lines.next()
        }
      } catch {
        case _: SocketException => // expected when close() interrupts accept()
      }
    }
  }

  def close(): Unit = {
    server.close()
  }
}
54 |
/**
 * Integration-style tests: boot a real KafkaGraphiteMetricsReporter against
 * an in-process mock Graphite server and assert on the lines it receives.
 *
 * NOTE(review): all tests share the global Metrics.defaultRegistry() and
 * never remove metrics, so counters registered by earlier tests remain
 * visible to later ones — keep that in mind when adding assertions.
 */
class KafkaGraphiteMetricsReporterTest extends FlatSpec with Matchers with BeforeAndAfter {
  var graphiteServer: GraphiteMockServer = null

  before {
    graphiteServer = new GraphiteMockServer
    graphiteServer.start()
  }

  after {
    graphiteServer.close()
  }

  // Base reporter configuration pointing at the mock server; `extra` entries
  // are applied on top of (and may override) the defaults.
  private def reporterProps(extra: (String, String)*): Properties = {
    val props = new Properties
    props.put("kafka.metrics.reporters", "kafka.metrics.KafkaGraphiteMetricsReporter")
    props.put("kafka.metrics.polling.interval.secs", "1")
    props.put("kafka.graphite.metrics.reporter.enabled", "true")
    props.put("kafka.graphite.metrics.port", graphiteServer.port.toString)
    extra.foreach { case (key, value) => props.put(key, value) }
    props
  }

  // Creates and initializes a reporter from the given properties.
  private def newReporter(props: Properties): KafkaGraphiteMetricsReporter = {
    val reporter = new KafkaGraphiteMetricsReporter()
    reporter.init(new VerifiableProperties(props))
    reporter
  }

  // True when any line received by the mock server contains `fragment`.
  private def received(fragment: String): Boolean =
    graphiteServer.content.toList.exists(_.contains(fragment))

  it should "reportCounters" in {
    val reporter = newReporter(reporterProps("kafka.graphite.metrics.jvm.enabled" -> "false"))

    val counter = Metrics.defaultRegistry().newCounter(new MetricName("test", "type", "counter"))
    counter.inc()
    Thread.sleep(2000)
    counter.inc()
    Thread.sleep(2000)

    assert(received("type.counter.count 1"))
    assert(received("type.counter.count 2"))

    reporter.stopReporter()
  }

  it should "exclude data" in {
    val reporter = newReporter(reporterProps("kafka.graphite.metrics.exclude" -> ".*test.*"))

    Metrics.defaultRegistry().newCounter(new MetricName("valid", "type", "counter")).inc()
    Metrics.defaultRegistry().newCounter(new MetricName("test", "type", "counter")).inc()

    Thread.sleep(2000)

    assert(received("valid.type"))
    assert(!received("test.type"))

    reporter.stopReporter()
  }

  it should "include data" in {
    val reporter = newReporter(reporterProps("kafka.graphite.metrics.include" -> ".*test.*"))

    Metrics.defaultRegistry().newCounter(new MetricName("invalid", "type", "counter")).inc()
    Metrics.defaultRegistry().newCounter(new MetricName("test", "type", "counter")).inc()

    Thread.sleep(2000)

    assert(!received("invalid.type"))
    assert(received("test.type"))

    reporter.stopReporter()
  }

  it should "include and exclude data in the same time" in {
    val reporter = newReporter(reporterProps(
      "kafka.graphite.metrics.include" -> ".*valid.*",
      "kafka.graphite.metrics.exclude" -> ".*invalid.*"))

    Metrics.defaultRegistry().newCounter(new MetricName("valid", "type", "counter")).inc()
    Metrics.defaultRegistry().newCounter(new MetricName("invalid", "type", "counter")).inc()
    Metrics.defaultRegistry().newCounter(new MetricName("test", "type", "counter")).inc()

    Thread.sleep(2000)

    assert(received("valid.type"))
    assert(!received("test.type"))
    assert(!received("invalid.type"))

    reporter.stopReporter()
  }

  it should "include jvm metrics by default" in {
    val reporter = newReporter(reporterProps())

    Thread.sleep(2000)

    assert(received("jvm"))

    reporter.stopReporter()
  }

  it should "allow to disable jvm metrics" in {
    val reporter = newReporter(reporterProps("kafka.graphite.metrics.jvm.enabled" -> "false"))

    Thread.sleep(2000)

    assert(!received("jvm"))

    reporter.stopReporter()
  }
}
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Root project name for the multi-module Gradle build.
rootProject.name = 'kafka-graphite-reporter'

// Sub-modules: the broker-side (yammer-metrics/Scala) reporter and the
// clients-API (org.apache.kafka.common.metrics) reporter.
include ':kafka-graphite'
include ':kafka-graphite-clients'
--------------------------------------------------------------------------------