├── .gitignore ├── .travis.yml ├── README.adoc ├── docs ├── JMXCollector.adoc ├── definitions.adoc ├── index.html ├── index.pdf ├── overview.adoc └── paths.adoc ├── mvnw ├── mvnw.cmd ├── pics └── ShowApi.png ├── pom.xml └── src ├── docs ├── asciidoc │ ├── index.adoc │ ├── manual_content1.adoc │ └── manual_content2.adoc └── swagger │ └── swagger.json ├── main ├── docker │ └── Dockerfile ├── java │ └── org │ │ └── gnuhpc │ │ └── bigdata │ │ ├── KafkaRestSpringbootApplication.java │ │ ├── componet │ │ ├── OffsetStorage.java │ │ └── RestLogAspect.java │ │ ├── config │ │ ├── JMXConfig.java │ │ ├── KafkaConfig.java │ │ ├── SwaggerConfig.java │ │ ├── WebSecurityConfig.java │ │ └── ZookeeperConfig.java │ │ ├── constant │ │ ├── ConsumerGroupState.java │ │ ├── ConsumerState.java │ │ ├── ConsumerType.java │ │ ├── GeneralResponseState.java │ │ ├── ReassignmentState.java │ │ ├── ZkServerCommand.java │ │ └── ZkServerMode.java │ │ ├── controller │ │ ├── CollectorController.java │ │ ├── DebugController.java │ │ ├── KafkaController.java │ │ ├── SchemaRegistryController.java │ │ ├── SwaggerController.java │ │ ├── UserController.java │ │ └── ZookeeperController.java │ │ ├── exception │ │ ├── CollectorException.java │ │ ├── ExceptionLogAspect.java │ │ ├── GlobalExceptionHandler.java │ │ ├── KafkaErrorCode.java │ │ ├── KafkaExceptionHandler.java │ │ ├── RestErrorResponse.java │ │ └── ServiceNotAvailableException.java │ │ ├── model │ │ ├── AddPartition.java │ │ ├── BrokerInfo.java │ │ ├── ClusterInfo.java │ │ ├── ConsumerGroupDesc.java │ │ ├── ConsumerGroupDescFactory.java │ │ ├── ConsumerGroupMeta.java │ │ ├── CustomConfigEntry.java │ │ ├── CustomTopicPartitionInfo.java │ │ ├── GeneralResponse.java │ │ ├── HealthCheckResult.java │ │ ├── JMXAttribute.java │ │ ├── JMXClient.java │ │ ├── JMXComplexAttribute.java │ │ ├── JMXConfiguration.java │ │ ├── JMXFilter.java │ │ ├── JMXMetricData.java │ │ ├── JMXMetricDataV1.java │ │ ├── JMXQuery.java │ │ ├── JMXSimpleAttribute.java │ │ ├── 
JMXTabularAttribute.java │ │ ├── MemberDescription.java │ │ ├── PartitionAssignmentState.java │ │ ├── ReassignJsonWrapper.java │ │ ├── ReassignModel.java │ │ ├── ReassignStatus.java │ │ ├── ReassignWrapper.java │ │ ├── Record.java │ │ ├── SchemaRegistryMetadata.java │ │ ├── TopicBrief.java │ │ ├── TopicDetail.java │ │ ├── TopicMeta.java │ │ ├── TopicPartition.java │ │ ├── TopicPartitionReplicaAssignment.java │ │ ├── TwoTuple.java │ │ ├── User.java │ │ ├── ZkServerClient.java │ │ ├── ZkServerEnvironment.java │ │ └── ZkServerStat.java │ │ ├── security │ │ ├── BasicAuthenticationPoint.java │ │ └── UserDetailsServiceImp.java │ │ ├── serializer │ │ └── JsonJodaDateTimeSerializer.java │ │ ├── service │ │ ├── CollectorService.java │ │ ├── ConfluentSchemaService.java │ │ ├── KafkaAdminService.java │ │ ├── KafkaConsumerService.java │ │ ├── KafkaProducerService.java │ │ ├── UserService.java │ │ └── ZookeeperService.java │ │ ├── task │ │ └── FetchOffSetFromZkResult.java │ │ ├── utils │ │ ├── ByteUtils.java │ │ ├── CommonUtils.java │ │ ├── KafkaUtils.java │ │ ├── TimestampDeserializer.java │ │ └── ZookeeperUtils.java │ │ └── validator │ │ ├── ConsumerGroupExistConstraint.java │ │ ├── ConsumerGroupExistValidator.java │ │ ├── TopicExistConstraint.java │ │ ├── TopicExistValidator.java │ │ ├── ZkNodePathExistConstraint.java │ │ └── ZkNodePathExistValidator.java ├── resources │ ├── application-dev.yml │ ├── application-home.yml │ ├── application-tina.yml │ ├── application.yml │ ├── jmxtemplates │ │ ├── KafkaBrokerFilter.yml │ │ ├── KafkaConsumerFilter.yml │ │ ├── KafkaProducerFilter.yml │ │ └── KafkaTopicFilter.yml │ ├── kafka-zk-springboot-distribution.xml │ ├── log4j2.properties │ ├── security.yml │ └── start.sh └── scala │ └── org.gnuhpc.bigdata │ └── utils │ └── CollectionConvertor.scala └── test └── java └── org └── gnuhpc └── bigdata ├── Swagger2MarkupTest.java ├── service ├── KafkaAdminServiceTest.java └── avro │ ├── User.java │ └── user.avsc └── utils ├── 
KafkaStarterUtils.java └── ZkStarter.java /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Example user template template 3 | ### Example user template 4 | 5 | # IntelliJ project files 6 | .idea 7 | *.iml 8 | out 9 | gen### Java template 10 | # Compiled class file 11 | *.class 12 | 13 | # Log file 14 | *.log 15 | 16 | # BlueJ files 17 | *.ctxt 18 | 19 | # Mobile Tools for Java (J2ME) 20 | .mtj.tmp/ 21 | 22 | # Package Files # 23 | *.jar 24 | *.war 25 | *.ear 26 | *.zip 27 | *.tar.gz 28 | *.rar 29 | 30 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 31 | hs_err_pid* 32 | ### Maven template 33 | target/ 34 | pom.xml.tag 35 | pom.xml.releaseBackup 36 | pom.xml.versionsBackup 37 | pom.xml.next 38 | release.properties 39 | dependency-reduced-pom.xml 40 | buildNumber.properties 41 | .mvn/timing.properties 42 | logs/ 43 | 44 | # Avoid ignoring Maven wrapper jar file (.jar files are usually ignored) 45 | !/.mvn/wrapper/maven-wrapper.jar 46 | 47 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | jdk: openjdk8 3 | dist: trusty 4 | install: 5 | - mvn -N io.takari:maven:wrapper 6 | - ./mvnw install -DskipTests=true -Dmaven.javadoc.skip=true -B -V 7 | script: 8 | - echo "skipping tests" 9 | before_install: 10 | - chmod +x mvnw 11 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = Kafka REST API 2 | 3 | image:https://travis-ci.org/gnuhpc/Kafka-zk-restapi.svg?branch=master["Build Status", link="https://travis-ci.org/gnuhpc/Kafka-zk-restapi"] 4 | 5 | [[_overview]] 6 | == Overview 7 | Kafka/ZK REST API is to provide the production-ready endpoints to perform 
some administration/metric task for Kafka and Zookeeper. 8 | 9 | // tag::base-t[] 10 | .Following features are provided: 11 | * cluster/nodes/controller info describe 12 | * Broker List 13 | * Broker config get/update, dynamic config get/update/delete 14 | * Log dirs describe, filtered by brokers/topic/partition/logdirs 15 | * Topic create/delete/describe/list 16 | * Topic config create/update/list 17 | * Topic partition add and reassign, replicas movement between log directories is supported 18 | * Topic Partition leadership transfer back to preferred replica 19 | * Consumer group list/describe/delete 20 | * Consumer Group Lag check 21 | * Offset reset by earliest/latest/timestamp 22 | * Contents view of message by different decoder, avro is also supported 23 | * Schema management on Schema Registry is supported 24 | * Collect JMX metrics from brokers that expose JMX metrics + 25 | More details refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification] 26 | * Secure the REST API with Spring Security 27 | // end::base-t[] 28 | 29 | image::https://raw.githubusercontent.com/gnuhpc/Kafka-zk-restapi/master/pics/ShowApi.png[API] 30 | 31 | 32 | We have also opensourced a simple but useful UI for these APIs. Please check https://github.com/gnuhpc/Kafka-zk-restapi-UI for more details. 33 | 34 | === Installation and run 35 | First of all, Java 8 jre/jdk needed. 36 | 37 | You can download the release package at 38 | https://github.com/gnuhpc/Kafka-zk-restapi/releases 39 | 40 | You can get it running by unzip/untaring the distribution package, perform some configurations needed and run bin/start.sh 41 | 42 | === How to build 43 | 44 | You can build this restapi server using maven. 45 | 46 | . `$ git clone \https://github.com/gnuhpc/Kafka-zk-restapi.git` 47 | . `$ cd Kafka-zk-restapi/` 48 | . 
`$ mvn clean package -Dmaven.test.skip=true` 49 | 50 | After maven building, you will find zip/tar under directory: Kafka-zk-restapi/target. 51 | 52 | **New**: 53 | Now we support build docker image using spotify docker-maven-plugin. 54 | A docker image named kafka-rest-springboot:latest is created after building. If you want 55 | to build your own image, modify application property files in src/main/resources first. 56 | Details refer to <<_How to config,'How to config'>> 57 | 58 | === How to config 59 | You can get it running by unzip/untaring the distribution package and change the corresponding configuration in config directory: 60 | 61 | Change the following settings of application-ACTIVEENV.yml: 62 | 63 | * kafka.brokers: kafka broker addresses. 64 | * kafka.schemaregistry: Schema Registry address. If not exist, just provide a random URL. 65 | * kafka.healthcheck.topic: Topic name for health check. 66 | * kafka.sasl.enable/kafka.sasl.protocol/kafka.sasl.mechanism: Set kafka.sasl.enable to true if connect to a kafka cluster that adds SASL authentication. 67 | Remember to assign JAAS config file location to JVM property java.security.auth.login.config before starting this application. 68 | * zookeeper.uris: zookeeper addresses. 69 | * jmx.kafka.jmxurl/jmx.zookeeper.jmxurl: kafka/zookeeper jmx urls. If you want to use JMX Query Filter function, you can add your own custom filter files to JMXFilterTemplate directory in project root folder. 70 | More details refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification] 71 | * server.security.check/server.security.checkInitDelay/server.security.checkSecurityInterval: Security related parameters. 72 | See below: <<_security,'How to config security'>> 73 | 74 | === How to run 75 | ==== A. Pulling from Docker hub 76 | Step 1 : Prepare a directory that contains spring application property files as a volume to mount into the container. 
77 | Custom your own parameters such as Kafka/zk addresses, schema registry url, security params, etc. 78 | 79 | Step 2 : Run the image. /XX/XX/config is the directory that noticed in Step 1. 80 | 81 | `$ docker run -p 8121:8121 -v /XX/XX/config:/app/config --env JAVA_OPTS='-Xms1g -Xmx1g' tinawenqiao/kafka-rest-springboot` 82 | 83 | Details refer to https://hub.docker.com/r/tinawenqiao/kafka-rest-springboot 84 | 85 | Note: Make sure advertised.listeners(kafka server property) is set correctly if you access kafka from docker image. 86 | 87 | ==== B. Run your own docker image 88 | 89 | `$ docker run -p 8121:8121 --env JAVA_OPTS='-Xms1g -Xmx1g' tinawenqiao/kafka-rest-springboot:latest` 90 | 91 | Note: Make sure advertised.listeners(kafka server property) is set correctly if you access kafka from docker image. 92 | 93 | See <<_How to build,'How to build'>> 94 | 95 | ==== C. Use the built tar/zip package 96 | . `$ tar -xvf kafka-zk-api-1.1.x-release-dist.tar` 97 | or 98 | `$ unzip kafka-zk-api-1.1.x-release-dist.zip` 99 | . `$ cd kafka-zk-api-1.1.x-release-dist` 100 | . `$ bin/start.sh` 101 | 102 | Note: If connect to a kafka cluster that adds SASL authentication, add -Djava.security.auth.login.config=jaas.conf to JVM property. 103 | 104 | [[_security]] 105 | === Security 106 | Public REST services without access control make the sensitive data under risk.Then we provide a simple authentication mechanism using Spring Security. 107 | In order to make the project lighter, we use yml file to store user information, not using database. 108 | 109 | Follow the steps to enable security feature: 110 | 111 | Step 1:Modify the application config file and set server.security.check to true. + 112 | 113 | * server.security.check: 114 | ** True: Add security for the API. Clients can access the API with valid username and password stored in security.yml, or the Swagger UI(http://127.0.0.1:8121/api) is only 115 | allowed to access. 
116 | ** False: All the endpoints can be accessed without authentication. 117 | * server.security.checkInitDelay: The number of seconds of init delay for the timing thread to check the security file. 118 | * server.security.checkSecurityInterval: The number of seconds of check interval for the timing thread to check the security file. 119 | 120 | Step 2: Make sure security/security.yml exist in application root folder. 121 | 122 | Step 3: Use user controller API to add user to security file security/security.yml. + 123 | **Notice**: 124 | 125 | * The first user should be added manually. Password need to be encoded using bcrypt before saving to the yml file.For convenience, we provide CommonUtils to encode the password. 126 | * No need to restart server after adding new user or update user info. Timing thread introduced in Step 1 will refresh the user list according to your settings. 127 | 128 | === Support Kafka Version Information 129 | Currently, this rest api (master branch) supports Kafka 1.1.1 brokers. The master branch is the most active branch. 
130 | 131 | *For other version of kafka rather than 1.1.1, please checkout the coresponding branch by calling the command:* 132 | 133 | git clone -b BRANCHNAME https://github.com/gnuhpc/Kafka-zk-restapi.git 134 | 135 | === URI scheme 136 | [%hardbreaks] 137 | __Host__ : localhost:8121 138 | __BasePath__ : / 139 | 140 | You can access Swagger-UI by accessing http://127.0.0.1:8121/api 141 | 142 | 143 | === https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/paths.adoc[API LIST for 1.1.1] 144 | 145 | 146 | * kafka-controller : Kafka Api 147 | * zookeeper-controller : Zookeeper Api 148 | * collector-controller : JMX Metric Collector Api 149 | * user-controller : User management Api 150 | 151 | 152 | === https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/definitions.adoc[Data Model Definitions for 1.1.1] 153 | 154 | 155 | === Version information 156 | [%hardbreaks] 157 | __Version__ : 1.1.1 158 | 159 | 160 | === Contact information 161 | [%hardbreaks] 162 | __Contact__ : gnuhpc 163 | __Contact Email__ : gnuhpc@gmail.com 164 | __Github__ : https://github.com/gnuhpc 165 | __Github__ : https://github.com/tinawenqiao 166 | 167 | 168 | -------------------------------------------------------------------------------- /docs/index.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gnuhpc/Kafka-zk-restapi/1b49ac85d84207bbe4c331ba7f31661d17bd377d/docs/index.pdf -------------------------------------------------------------------------------- /docs/overview.adoc: -------------------------------------------------------------------------------- 1 | = Kafka REST API SwaggerUI 2 | 3 | 4 | [[_overview]] 5 | == Overview 6 | Kafka REST API SwaggerUI 7 | 8 | 9 | === Version information 10 | [%hardbreaks] 11 | __Version__ : 0.1.0 12 | 13 | 14 | === Contact information 15 | [%hardbreaks] 16 | __Contact__ : gnuhpc 17 | __Contact Email__ : gnuhpc@gmail.com 18 | 19 | 20 | === URI scheme 21 | [%hardbreaks] 22 | 
__Host__ : localhost:8080 23 | __BasePath__ : / 24 | 25 | 26 | === Tags 27 | 28 | * collector-controller : Rest API for Collecting JMX Metric Data 29 | * kafka-controller : Kafka Controller 30 | * schema-registry-controller : Schema Registry Controller 31 | * user-controller : Security User Management Controller. 32 | * zookeeper-controller : Zookeeper Controller 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /mvnw: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ---------------------------------------------------------------------------- 3 | # Licensed to the Apache Software Foundation (ASF) under one 4 | # or more contributor license agreements. See the NOTICE file 5 | # distributed with this work for additional information 6 | # regarding copyright ownership. The ASF licenses this file 7 | # to you under the Apache License, Version 2.0 (the 8 | # "License"); you may not use this file except in compliance 9 | # with the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, 14 | # software distributed under the License is distributed on an 15 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | # KIND, either express or implied. See the License for the 17 | # specific language governing permissions and limitations 18 | # under the License. 
19 | # ---------------------------------------------------------------------------- 20 | 21 | # ---------------------------------------------------------------------------- 22 | # Maven2 Start Up Batch script 23 | # 24 | # Required ENV vars: 25 | # ------------------ 26 | # JAVA_HOME - location of a JDK home dir 27 | # 28 | # Optional ENV vars 29 | # ----------------- 30 | # M2_HOME - location of maven2's installed home dir 31 | # MAVEN_OPTS - parameters passed to the Java VM when running Maven 32 | # e.g. to debug Maven itself, use 33 | # set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 34 | # MAVEN_SKIP_RC - flag to disable loading of mavenrc files 35 | # ---------------------------------------------------------------------------- 36 | 37 | if [ -z "$MAVEN_SKIP_RC" ] ; then 38 | 39 | if [ -f /etc/mavenrc ] ; then 40 | . /etc/mavenrc 41 | fi 42 | 43 | if [ -f "$HOME/.mavenrc" ] ; then 44 | . "$HOME/.mavenrc" 45 | fi 46 | 47 | fi 48 | 49 | # OS specific support. $var _must_ be set to either true or false. 
50 | cygwin=false; 51 | darwin=false; 52 | mingw=false 53 | case "`uname`" in 54 | CYGWIN*) cygwin=true ;; 55 | MINGW*) mingw=true;; 56 | Darwin*) darwin=true 57 | # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home 58 | # See https://developer.apple.com/library/mac/qa/qa1170/_index.html 59 | if [ -z "$JAVA_HOME" ]; then 60 | if [ -x "/usr/libexec/java_home" ]; then 61 | export JAVA_HOME="`/usr/libexec/java_home`" 62 | else 63 | export JAVA_HOME="/Library/Java/Home" 64 | fi 65 | fi 66 | ;; 67 | esac 68 | 69 | if [ -z "$JAVA_HOME" ] ; then 70 | if [ -r /etc/gentoo-release ] ; then 71 | JAVA_HOME=`java-config --jre-home` 72 | fi 73 | fi 74 | 75 | if [ -z "$M2_HOME" ] ; then 76 | ## resolve links - $0 may be a link to maven's home 77 | PRG="$0" 78 | 79 | # need this for relative symlinks 80 | while [ -h "$PRG" ] ; do 81 | ls=`ls -ld "$PRG"` 82 | link=`expr "$ls" : '.*-> \(.*\)$'` 83 | if expr "$link" : '/.*' > /dev/null; then 84 | PRG="$link" 85 | else 86 | PRG="`dirname "$PRG"`/$link" 87 | fi 88 | done 89 | 90 | saveddir=`pwd` 91 | 92 | M2_HOME=`dirname "$PRG"`/.. 93 | 94 | # make it fully qualified 95 | M2_HOME=`cd "$M2_HOME" && pwd` 96 | 97 | cd "$saveddir" 98 | # echo Using m2 at $M2_HOME 99 | fi 100 | 101 | # For Cygwin, ensure paths are in UNIX format before anything is touched 102 | if $cygwin ; then 103 | [ -n "$M2_HOME" ] && 104 | M2_HOME=`cygpath --unix "$M2_HOME"` 105 | [ -n "$JAVA_HOME" ] && 106 | JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 107 | [ -n "$CLASSPATH" ] && 108 | CLASSPATH=`cygpath --path --unix "$CLASSPATH"` 109 | fi 110 | 111 | # For Migwn, ensure paths are in UNIX format before anything is touched 112 | if $mingw ; then 113 | [ -n "$M2_HOME" ] && 114 | M2_HOME="`(cd "$M2_HOME"; pwd)`" 115 | [ -n "$JAVA_HOME" ] && 116 | JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" 117 | # TODO classpath? 118 | fi 119 | 120 | if [ -z "$JAVA_HOME" ]; then 121 | javaExecutable="`which javac`" 122 | if [ -n "$javaExecutable" ] && ! 
[ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then 123 | # readlink(1) is not available as standard on Solaris 10. 124 | readLink=`which readlink` 125 | if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then 126 | if $darwin ; then 127 | javaHome="`dirname \"$javaExecutable\"`" 128 | javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" 129 | else 130 | javaExecutable="`readlink -f \"$javaExecutable\"`" 131 | fi 132 | javaHome="`dirname \"$javaExecutable\"`" 133 | javaHome=`expr "$javaHome" : '\(.*\)/bin'` 134 | JAVA_HOME="$javaHome" 135 | export JAVA_HOME 136 | fi 137 | fi 138 | fi 139 | 140 | if [ -z "$JAVACMD" ] ; then 141 | if [ -n "$JAVA_HOME" ] ; then 142 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 143 | # IBM's JDK on AIX uses strange locations for the executables 144 | JAVACMD="$JAVA_HOME/jre/sh/java" 145 | else 146 | JAVACMD="$JAVA_HOME/bin/java" 147 | fi 148 | else 149 | JAVACMD="`which java`" 150 | fi 151 | fi 152 | 153 | if [ ! -x "$JAVACMD" ] ; then 154 | echo "Error: JAVA_HOME is not defined correctly." >&2 155 | echo " We cannot execute $JAVACMD" >&2 156 | exit 1 157 | fi 158 | 159 | if [ -z "$JAVA_HOME" ] ; then 160 | echo "Warning: JAVA_HOME environment variable is not set." 
161 | fi 162 | 163 | CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher 164 | 165 | # traverses directory structure from process work directory to filesystem root 166 | # first directory with .mvn subdirectory is considered project base directory 167 | find_maven_basedir() { 168 | 169 | if [ -z "$1" ] 170 | then 171 | echo "Path not specified to find_maven_basedir" 172 | return 1 173 | fi 174 | 175 | basedir="$1" 176 | wdir="$1" 177 | while [ "$wdir" != '/' ] ; do 178 | if [ -d "$wdir"/.mvn ] ; then 179 | basedir=$wdir 180 | break 181 | fi 182 | # workaround for JBEAP-8937 (on Solaris 10/Sparc) 183 | if [ -d "${wdir}" ]; then 184 | wdir=`cd "$wdir/.."; pwd` 185 | fi 186 | # end of workaround 187 | done 188 | echo "${basedir}" 189 | } 190 | 191 | # concatenates all lines of a file 192 | concat_lines() { 193 | if [ -f "$1" ]; then 194 | echo "$(tr -s '\n' ' ' < "$1")" 195 | fi 196 | } 197 | 198 | BASE_DIR=`find_maven_basedir "$(pwd)"` 199 | if [ -z "$BASE_DIR" ]; then 200 | exit 1; 201 | fi 202 | 203 | export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} 204 | echo $MAVEN_PROJECTBASEDIR 205 | MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" 206 | 207 | # For Cygwin, switch paths to Windows format before running java 208 | if $cygwin; then 209 | [ -n "$M2_HOME" ] && 210 | M2_HOME=`cygpath --path --windows "$M2_HOME"` 211 | [ -n "$JAVA_HOME" ] && 212 | JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` 213 | [ -n "$CLASSPATH" ] && 214 | CLASSPATH=`cygpath --path --windows "$CLASSPATH"` 215 | [ -n "$MAVEN_PROJECTBASEDIR" ] && 216 | MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` 217 | fi 218 | 219 | WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 220 | 221 | exec "$JAVACMD" \ 222 | $MAVEN_OPTS \ 223 | -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ 224 | "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ 225 | 
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" 226 | -------------------------------------------------------------------------------- /mvnw.cmd: -------------------------------------------------------------------------------- 1 | @REM ---------------------------------------------------------------------------- 2 | @REM Licensed to the Apache Software Foundation (ASF) under one 3 | @REM or more contributor license agreements. See the NOTICE file 4 | @REM distributed with this work for additional information 5 | @REM regarding copyright ownership. The ASF licenses this file 6 | @REM to you under the Apache License, Version 2.0 (the 7 | @REM "License"); you may not use this file except in compliance 8 | @REM with the License. You may obtain a copy of the License at 9 | @REM 10 | @REM http://www.apache.org/licenses/LICENSE-2.0 11 | @REM 12 | @REM Unless required by applicable law or agreed to in writing, 13 | @REM software distributed under the License is distributed on an 14 | @REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | @REM KIND, either express or implied. See the License for the 16 | @REM specific language governing permissions and limitations 17 | @REM under the License. 18 | @REM ---------------------------------------------------------------------------- 19 | 20 | @REM ---------------------------------------------------------------------------- 21 | @REM Maven2 Start Up Batch script 22 | @REM 23 | @REM Required ENV vars: 24 | @REM JAVA_HOME - location of a JDK home dir 25 | @REM 26 | @REM Optional ENV vars 27 | @REM M2_HOME - location of maven2's installed home dir 28 | @REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands 29 | @REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending 30 | @REM MAVEN_OPTS - parameters passed to the Java VM when running Maven 31 | @REM e.g. 
to debug Maven itself, use 32 | @REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 33 | @REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files 34 | @REM ---------------------------------------------------------------------------- 35 | 36 | @REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' 37 | @echo off 38 | @REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' 39 | @if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% 40 | 41 | @REM set %HOME% to equivalent of $HOME 42 | if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") 43 | 44 | @REM Execute a user defined script before this one 45 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre 46 | @REM check for pre script, once with legacy .bat ending and once with .cmd ending 47 | if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" 48 | if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" 49 | :skipRcPre 50 | 51 | @setlocal 52 | 53 | set ERROR_CODE=0 54 | 55 | @REM To isolate internal variables from possible post scripts, we use another setlocal 56 | @setlocal 57 | 58 | @REM ==== START VALIDATION ==== 59 | if not "%JAVA_HOME%" == "" goto OkJHome 60 | 61 | echo. 62 | echo Error: JAVA_HOME not found in your environment. >&2 63 | echo Please set the JAVA_HOME variable in your environment to match the >&2 64 | echo location of your Java installation. >&2 65 | echo. 66 | goto error 67 | 68 | :OkJHome 69 | if exist "%JAVA_HOME%\bin\java.exe" goto init 70 | 71 | echo. 72 | echo Error: JAVA_HOME is set to an invalid directory. >&2 73 | echo JAVA_HOME = "%JAVA_HOME%" >&2 74 | echo Please set the JAVA_HOME variable in your environment to match the >&2 75 | echo location of your Java installation. >&2 76 | echo. 77 | goto error 78 | 79 | @REM ==== END VALIDATION ==== 80 | 81 | :init 82 | 83 | @REM Find the project base dir, i.e. the directory that contains the folder ".mvn". 84 | @REM Fallback to current working directory if not found. 
85 | 86 | set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% 87 | IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir 88 | 89 | set EXEC_DIR=%CD% 90 | set WDIR=%EXEC_DIR% 91 | :findBaseDir 92 | IF EXIST "%WDIR%"\.mvn goto baseDirFound 93 | cd .. 94 | IF "%WDIR%"=="%CD%" goto baseDirNotFound 95 | set WDIR=%CD% 96 | goto findBaseDir 97 | 98 | :baseDirFound 99 | set MAVEN_PROJECTBASEDIR=%WDIR% 100 | cd "%EXEC_DIR%" 101 | goto endDetectBaseDir 102 | 103 | :baseDirNotFound 104 | set MAVEN_PROJECTBASEDIR=%EXEC_DIR% 105 | cd "%EXEC_DIR%" 106 | 107 | :endDetectBaseDir 108 | 109 | IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig 110 | 111 | @setlocal EnableExtensions EnableDelayedExpansion 112 | for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a 113 | @endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% 114 | 115 | :endReadAdditionalConfig 116 | 117 | SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" 118 | 119 | set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" 120 | set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain 121 | 122 | %MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* 123 | if ERRORLEVEL 1 goto error 124 | goto end 125 | 126 | :error 127 | set ERROR_CODE=1 128 | 129 | :end 130 | @endlocal & set ERROR_CODE=%ERROR_CODE% 131 | 132 | if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost 133 | @REM check for post script, once with legacy .bat ending and once with .cmd ending 134 | if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" 135 | if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" 136 | :skipRcPost 137 | 138 | @REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' 139 | if "%MAVEN_BATCH_PAUSE%" == "on" pause 140 | 141 | if 
"%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% 142 | 143 | exit /B %ERROR_CODE% 144 | -------------------------------------------------------------------------------- /pics/ShowApi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gnuhpc/Kafka-zk-restapi/1b49ac85d84207bbe4c331ba7f31661d17bd377d/pics/ShowApi.png -------------------------------------------------------------------------------- /src/docs/asciidoc/index.adoc: -------------------------------------------------------------------------------- 1 | include::{generated}/overview.adoc[] 2 | include::manual_content1.adoc[] 3 | include::manual_content2.adoc[] 4 | include::{generated}/paths.adoc[] 5 | include::{generated}/security.adoc[] 6 | include::{generated}/definitions.adoc[] -------------------------------------------------------------------------------- /src/docs/asciidoc/manual_content1.adoc: -------------------------------------------------------------------------------- 1 | == Chapter of manual content 1 2 | 3 | This is some dummy text 4 | 5 | === Sub chapter 6 | 7 | Dummy text of sub chapter 8 | 9 | 10 | -------------------------------------------------------------------------------- /src/docs/asciidoc/manual_content2.adoc: -------------------------------------------------------------------------------- 1 | == Chapter of manual content 2 2 | 3 | This is some dummy text 4 | 5 | -------------------------------------------------------------------------------- /src/main/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openjdk:8-jdk-alpine 2 | VOLUME /tmp 3 | COPY kafka-rest-springboot*.jar /app/lib/ 4 | #COPY kafka-rest-springboot-1.1.x-release-executable.jar /app/lib/kafka-rest-executable.jar 5 | COPY classes/config/* /app/config/ 6 | COPY classes/jmxtemplates/* /app/config/jmxtemplates/ 7 | COPY classes/security.yml /app/config/security.yml 8 | ENTRYPOINT ["java","-cp", 
/**
 * Boot entry point for the Kafka/ZK REST API server.
 *
 * <p>{@code @SpringBootApplication} enables component scanning and auto-configuration;
 * {@code @EnableScheduling} activates the timing threads used elsewhere in this project
 * (e.g. the periodic security-file check); {@code @Log4j2} generates the {@code log} field.
 */
@SpringBootApplication
@EnableScheduling
@Log4j2
public class KafkaRestSpringbootApplication {

  /**
   * Starts the embedded Spring application context.
   *
   * @param args command-line arguments forwarded unchanged to Spring Boot
   */
  public static void main(String[] args) {
    log.info("+++++++++Kafka-zk Rest Application starting++++++++++");
    SpringApplication.run(KafkaRestSpringbootApplication.class, args);
  }
}
} 30 | 31 | public Map get(String consumerGroup) { 32 | return consumerOffsets.get(consumerGroup); 33 | } 34 | 35 | public void remove(String consumerGroup) { 36 | consumerOffsets.remove(consumerGroup); 37 | } 38 | 39 | @Override 40 | public String toString() { 41 | return consumerOffsets.toString(); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/componet/RestLogAspect.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.componet; 2 | 3 | import java.util.Arrays; 4 | import javax.servlet.http.HttpServletRequest; 5 | import lombok.extern.log4j.Log4j2; 6 | import org.aspectj.lang.JoinPoint; 7 | import org.aspectj.lang.annotation.AfterReturning; 8 | import org.aspectj.lang.annotation.Aspect; 9 | import org.aspectj.lang.annotation.Before; 10 | import org.aspectj.lang.annotation.Pointcut; 11 | import org.springframework.stereotype.Component; 12 | import org.springframework.web.context.request.RequestContextHolder; 13 | import org.springframework.web.context.request.ServletRequestAttributes; 14 | 15 | @Aspect 16 | @Component 17 | @Log4j2 18 | public class RestLogAspect { 19 | 20 | ThreadLocal startTime = new ThreadLocal<>(); 21 | 22 | @Pointcut("execution(public * org.gnuhpc.bigdata.controller..*.*(..))") 23 | public void restServiceLog() {} 24 | 25 | @Before("restServiceLog()") 26 | public void doBefore(JoinPoint joinPoint) throws Throwable { 27 | startTime.set(System.currentTimeMillis()); 28 | // 接收到请求,记录请求内容 29 | ServletRequestAttributes attributes = 30 | (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); 31 | HttpServletRequest request = attributes.getRequest(); 32 | // 记录下请求内容 33 | log.info("===================== Controller Request ===================="); 34 | log.info("URL : " + request.getRequestURL().toString()); 35 | log.info("HTTP_METHOD : " + request.getMethod()); 36 | log.info("IP : " + 
request.getRemoteAddr()); 37 | log.info( 38 | "CLASS_METHOD : " 39 | + joinPoint.getSignature().getDeclaringTypeName() 40 | + "." 41 | + joinPoint.getSignature().getName()); 42 | log.info("ARGS : " + Arrays.toString(joinPoint.getArgs())); 43 | } 44 | 45 | @AfterReturning(returning = "ret", pointcut = "restServiceLog()") 46 | public void doAfterReturning(Object ret) throws Throwable { 47 | // 处理完请求,返回内容 48 | log.info("===================== Controller Response ================="); 49 | log.info("SPEND TIME : " + (System.currentTimeMillis() - startTime.get())); 50 | log.info("RESPONSE : " + ret); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/config/JMXConfig.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.config; 2 | 3 | public class JMXConfig { 4 | 5 | public static final String JMX_CONNECT_TIMEOUT = "attribute.remote.x.request.waiting.timeout"; 6 | public static final String JMX_PROTOCOL = "service:jmx:rmi:///jndi/rmi://"; 7 | public static final String JMX_TEMPLATES_DIR = "jmxtemplates"; 8 | } 9 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/config/KafkaConfig.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.config; 2 | 3 | import com.fasterxml.jackson.annotation.JsonAutoDetect; 4 | import com.fasterxml.jackson.annotation.PropertyAccessor; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import com.fasterxml.jackson.databind.SerializationFeature; 7 | import java.nio.ByteBuffer; 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | import lombok.Data; 11 | import lombok.Getter; 12 | import lombok.extern.log4j.Log4j2; 13 | import org.apache.kafka.clients.consumer.ConsumerConfig; 14 | import org.apache.kafka.common.serialization.ByteBufferDeserializer; 15 | 
import org.gnuhpc.bigdata.componet.OffsetStorage; 16 | import org.gnuhpc.bigdata.service.KafkaConsumerService; 17 | import org.gnuhpc.bigdata.utils.KafkaUtils; 18 | import org.springframework.beans.factory.annotation.Value; 19 | import org.springframework.context.annotation.Bean; 20 | import org.springframework.context.annotation.Configuration; 21 | import org.springframework.context.annotation.Lazy; 22 | import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; 23 | import org.springframework.kafka.annotation.EnableKafka; 24 | import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; 25 | import org.springframework.kafka.config.KafkaListenerContainerFactory; 26 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory; 27 | import org.springframework.kafka.listener.AbstractMessageListenerContainer; 28 | import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; 29 | 30 | /** Created by gnuhpc on 2017/7/12. */ 31 | @Log4j2 32 | @Data 33 | @EnableKafka 34 | //@Lazy 35 | @Configuration 36 | @Getter 37 | public class KafkaConfig { 38 | 39 | @Value("${kafka.brokers}") 40 | private String brokers; 41 | 42 | @Value("${kafka.schemaregistry}") 43 | private String schemaregistry; 44 | 45 | @Value("${kafka.offset.topic}") 46 | private String internalTopic; 47 | 48 | @Value("${kafka.offset.partitions}") 49 | private int internalTopicPartitions; 50 | 51 | @Value("${spring.kafka.consumer.group-id}") 52 | private String groupId; 53 | 54 | @Value("${kafka.healthcheck.topic}") 55 | private String healthCheckTopic; 56 | 57 | @Value("${kafka.sasl.enable}") 58 | private boolean kafkaSaslEnabled; 59 | 60 | @Value("${kafka.sasl.security.protocol}") 61 | private String saslSecurityProtocol; 62 | 63 | @Value("${kafka.sasl.mechanism}") 64 | private String saslMechianism; 65 | 66 | @Lazy 67 | @Bean(initMethod = "init", destroyMethod = "destroy") 68 | public KafkaUtils kafkaUtils() { 69 | return new KafkaUtils(); 70 | } 
71 | 72 | @Bean 73 | public OffsetStorage offsetStorage() { 74 | return new OffsetStorage(); 75 | } 76 | 77 | @Bean 78 | public KafkaConsumerService kafkaConsumerService() { 79 | return new KafkaConsumerService(internalTopicPartitions); 80 | } 81 | 82 | @Bean 83 | public KafkaListenerContainerFactory> 84 | kafkaListenerContainerFactory() { 85 | ConcurrentKafkaListenerContainerFactory factory = 86 | new ConcurrentKafkaListenerContainerFactory<>(); 87 | factory.setConsumerFactory(consumerFactory()); 88 | factory.getContainerProperties().setAckMode(ConcurrentMessageListenerContainer.AckMode.MANUAL); 89 | return factory; 90 | } 91 | 92 | @Bean 93 | public DefaultKafkaConsumerFactory consumerFactory() { 94 | return new DefaultKafkaConsumerFactory<>(consumerConfigs()); 95 | } 96 | 97 | @Bean 98 | public Map consumerConfigs() { 99 | Map props = new HashMap<>(); 100 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers); 101 | props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId); 102 | props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); 103 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); 104 | props.put(ConsumerConfig.EXCLUDE_INTERNAL_TOPICS_CONFIG, "false"); 105 | props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class); 106 | props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class); 107 | return props; 108 | } 109 | 110 | @Bean 111 | public Jackson2ObjectMapperBuilder objectMapperBuilder() { 112 | return new Jackson2ObjectMapperBuilder() { 113 | @Override 114 | public void configure(ObjectMapper objectMapper) { 115 | super.configure(objectMapper); 116 | objectMapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY); 117 | objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); 118 | } 119 | }; 120 | } 121 | } 122 | -------------------------------------------------------------------------------- 
/src/main/java/org/gnuhpc/bigdata/config/SwaggerConfig.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.config;

import com.google.common.base.Predicates;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

/**
 * Swagger UI configuration: document every handler except Spring Boot's own.
 *
 * <p>Created by gnuhpc on 2017/7/16.
 */
@Configuration
@EnableSwagger2
public class SwaggerConfig {

  @Bean
  public Docket api() {
    return new Docket(DocumentationType.SWAGGER_2)
        .apiInfo(apiInfo())
        .select()
        .apis(RequestHandlerSelectors.any())
        .apis(Predicates.not(RequestHandlerSelectors.basePackage("org.springframework.boot")))
        .paths(PathSelectors.any())
        .build();
  }

  private ApiInfo apiInfo() {
    return new ApiInfoBuilder()
        .title("Kafka REST API SwaggerUI")
        .description("Kafka REST API SwaggerUI")
        .contact(new Contact("gnuhpc", "https://github.com/gnuhpc", "gnuhpc@gmail.com"))
        .version("0.1.0")
        .build();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/config/WebSecurityConfig.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.config;

import org.gnuhpc.bigdata.security.BasicAuthenticationPoint;
import org.gnuhpc.bigdata.security.UserDetailsServiceImp;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.http.HttpMethod;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;

/**
 * HTTP security: when {@code server.security.check} is true, requires basic
 * auth for mutating requests while leaving Swagger, GETs and API docs open;
 * otherwise everything is permitted. CSRF is always disabled (stateless API).
 */
@Configuration
@EnableWebSecurity
@Lazy
public class WebSecurityConfig extends WebSecurityConfigurerAdapter {

  public static final String SECURITY_FILE_PATH = "security.yml";

  @Autowired private BasicAuthenticationPoint basicAuthenticationPoint;

  @Value("${server.security.check}")
  private boolean securityCheck;

  @Value("${server.security.checkInitDelay}")
  private int checkInitDelay;

  @Value("${server.security.checkSecurityInterval}")
  private int checkSecurityInterval;

  @Bean
  public UserDetailsService userDetailsService() {
    return new UserDetailsServiceImp(securityCheck, checkInitDelay, checkSecurityInterval);
  }

  @Bean
  public BCryptPasswordEncoder passwordEncoder() {
    return new BCryptPasswordEncoder();
  }

  @Override
  protected void configure(HttpSecurity http) throws Exception {
    http.csrf().disable();
    if (securityCheck) {
      http.authorizeRequests()
          .antMatchers("/api", "/swagger-ui.html", "/webjars/**", "/swagger-resources/**", "/v2/**")
          .permitAll()
          .antMatchers(HttpMethod.GET, "/**")
          .permitAll()
          .anyRequest()
          .authenticated();
      http.httpBasic().authenticationEntryPoint(basicAuthenticationPoint);
      http.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS);
    } else {
      http.authorizeRequests().antMatchers("/**").permitAll().anyRequest().authenticated();
    }
  }

  @Autowired
  public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
    auth.userDetailsService(userDetailsService()).passwordEncoder(passwordEncoder());
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/config/ZookeeperConfig.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.config;

import com.google.common.net.HostAndPort;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.log4j.Log4j2;
import org.gnuhpc.bigdata.utils.ZookeeperUtils;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;

/**
 * Binds the {@code zookeeper.uris} property (comma-separated host:port list)
 * and exposes the ZookeeperUtils lifecycle bean.
 *
 * <p>Created by gnuhpc on 2017/7/16.
 */
@Log4j2
@Setter
@Getter
@ConfigurationProperties(prefix = "zookeeper")
@Lazy
@Component
@Configuration
public class ZookeeperConfig {

  private String uris;

  @Bean(initMethod = "init", destroyMethod = "destroy")
  public ZookeeperUtils zookeeperUtils() {
    return new ZookeeperUtils();
  }

  /** Parses {@code uris} into HostAndPort entries (generic parameter restored). */
  public List<HostAndPort> getHostAndPort() {
    return Arrays.stream(uris.split(",")).map(HostAndPort::fromString).collect(Collectors.toList());
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ConsumerGroupState.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

import java.util.HashMap;

/** Consumer group states as reported by the broker, with name-based parsing. */
public enum ConsumerGroupState {
  UNKNOWN("Unknown"),
  PREPARING_REBALANCE("PreparingRebalance"),
  COMPLETING_REBALANCE("CompletingRebalance"),
  STABLE("Stable"),
  DEAD("Dead"),
  EMPTY("Empty");

  // Lookup by display name (generic parameters restored).
  private static final HashMap<String, ConsumerGroupState> NAME_TO_ENUM;

  static {
    NAME_TO_ENUM = new HashMap<>();
    for (ConsumerGroupState state : ConsumerGroupState.values()) {
      NAME_TO_ENUM.put(state.name, state);
    }
  }

  private final String name;

  ConsumerGroupState(String name) {
    this.name = name;
  }

  /** Parse a string into a consumer group state; unrecognized names map to UNKNOWN. */
  public static ConsumerGroupState parse(String name) {
    ConsumerGroupState state = NAME_TO_ENUM.get(name);
    return state == null ? UNKNOWN : state;
  }

  @Override
  public String toString() {
    return name;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ConsumerState.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

public enum ConsumerState {
  RUNNING,
  PENDING
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ConsumerType.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

public enum ConsumerType {
  NEW,
  OLD,
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/GeneralResponseState.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

public enum GeneralResponseState {
  success,
  failure
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ReassignmentState.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

/** Partition reassignment status codes with their human-readable messages. */
public enum ReassignmentState {
  ReassignmentFailed(-1, "Reassignment Failed"),
  ReassignmentInProgress(0, "Reassignment In Progress"),
  ReassignmentCompleted(1, "Reassignment Completed");

  private int status;
  private String msg;

  private ReassignmentState(int status, String msg) {
    this.status = status;
    this.msg = msg;
  }

  /** Maps a numeric status code back to its enum constant. */
  public static ReassignmentState valueOf(int status) {
    ReassignmentState[] reassignmentStatusList = values();
    for (int i = 0; i < reassignmentStatusList.length; i++) {
      ReassignmentState reassignmentStatus = reassignmentStatusList[i];
      if (reassignmentStatus.status == status) {
        return reassignmentStatus;
      }
    }

    throw new IllegalArgumentException("No matching constant for [" + status + "]");
  }

  public int code() {
    return this.status;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ZkServerCommand.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

/** ZooKeeper four-letter admin commands supported by the service. */
public enum ZkServerCommand {
  stat,
  envi,
  ruok,
  reqs,
  dump
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/constant/ZkServerMode.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.constant;

public enum ZkServerMode {
  Leader,
  Follower,
  Observer,
  Standalone,
  Down,
  Unknow
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/CollectorController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import java.util.HashMap;
import java.util.List;
import javax.validation.constraints.Pattern;
import lombok.extern.log4j.Log4j2;
import org.gnuhpc.bigdata.model.JMXMetricData;
import org.gnuhpc.bigdata.model.JMXMetricDataV1;
import org.gnuhpc.bigdata.model.JMXQuery;
import org.gnuhpc.bigdata.service.CollectorService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@Log4j2
@RestController
@Validated
@Api(value = "/jmx", description = "Rest API for Collecting JMX Metric Data")
public class CollectorController {

  // Comma-separated IPv4:port list, or the literal "default".
  private static final String IP_AND_PORT_LIST_REGEX =
      "(([0-9]+(?:\\.[0-9]+){3}:[0-9]+,)*([0-9]+(?:\\.[0-9]+){3}:[0-9]+)+)|(default)";
  @Autowired private CollectorService collectorService;

  @Value("${jmx.kafka.jmxurl}")
  private String jmxKafkaURL;

  /** v1: fetch all JMX metric data from the given endpoints (element type restored). */
  @GetMapping("/jmx/v1")
  @ApiOperation(value = "Fetch all JMX metric data")
  public List<JMXMetricDataV1> collectJMXMetric(
      @Pattern(regexp = IP_AND_PORT_LIST_REGEX)
          @RequestParam
          @ApiParam(value = "Parameter jmxurl should be a comma-separated list of {IP:Port} or set"
              + " to \'default\'")
          String jmxurl) {
    if (jmxurl.equals("default")) {
      jmxurl = jmxKafkaURL;
    }

    log.debug("Collect JMX Metric Data Started.");
    return collectorService.collectJMXData(jmxurl);
  }

  /** v2: fetch metric data filtered by the posted JMXQuery (element type restored). */
  @PostMapping("/jmx/v2")
  @ApiOperation(value = "Fetch JMX metric data with query filter. You can get the query filter "
      + "template through the API /jmx/v2/filters.")
  public List<JMXMetricData> collectJMXMetric(
      @Pattern(regexp = IP_AND_PORT_LIST_REGEX)
          @RequestParam @ApiParam(value = "Parameter jmxurl should be a comma-separated list of "
              + "{IP:Port} or set to \'default\'")
          String jmxurl,
      @RequestBody JMXQuery jmxQuery) {
    if (jmxurl.equals("default")) {
      jmxurl = jmxKafkaURL;
    }

    log.debug("Collect JMX Metric Data Started.");

    return collectorService.collectJMXData(jmxurl, jmxQuery);
  }

  // NOTE(review): value type of the returned map reconstructed as Object —
  // confirm against CollectorService.listJMXFilterTemplate.
  @GetMapping("/jmx/v2/filters")
  @ApiOperation(value = "List the query filter templates with the filterKey. If filterKey is set "
      + "to empty, it will return all the templates.")
  public HashMap<String, Object> listJMXFilterTemplate(@RequestParam String filterKey) {
    return collectorService.listJMXFilterTemplate(filterKey);
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/DebugController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import kafka.common.OffsetAndMetadata;
import kafka.coordinator.group.GroupTopicPartition;
import org.gnuhpc.bigdata.componet.OffsetStorage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import springfox.documentation.annotations.ApiIgnore;

/**
 * Hidden debug endpoints: bean-name search and a dump of the offset storage.
 *
 * <p>Created by gnuhpc on 2017/7/16.
 */
@RequestMapping("/debug")
@RestController
@ApiIgnore
public class DebugController {

  @Autowired ApplicationContext appContext;

  @Autowired OffsetStorage offsetStorage;

  /** Lists bean names, optionally filtered by a case-insensitive substring {@code q}. */
  @RequestMapping("/beans")
  public Map<String, Object> beans(@RequestParam(required = false) String q) {
    Map<String, Object> retMap = new HashMap<>();

    String[] retArray =
        Arrays.stream(appContext.getBeanDefinitionNames())
            .filter(
                beanName ->
                    (q == null || q.length() == 0)
                        || beanName.toLowerCase().contains(q.trim().toLowerCase()))
            .toArray(String[]::new);

    retMap.put("beans", retArray);
    return retMap;
  }

  /** Dumps the cached consumer offsets (types match OffsetStorage.getMap). */
  @RequestMapping("/offsets")
  public Map<String, Map<GroupTopicPartition, OffsetAndMetadata>> offsets() {
    return offsetStorage.getMap();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/SchemaRegistryController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.util.List;
import org.gnuhpc.bigdata.model.SchemaRegistryMetadata;
import org.gnuhpc.bigdata.service.ConfluentSchemaService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/schemaregistry")
@Api(value = "Control schema registry with Rest API")
public class SchemaRegistryController {

  @Lazy
  @Autowired
  private ConfluentSchemaService confluentSchemaService;

  @GetMapping("/schemas/ids/{schemaId}")
  @ApiOperation(value = "Get schema by id")
  public SchemaRegistryMetadata getSchemaById(@PathVariable int schemaId) {
    return confluentSchemaService.getSchemaById(schemaId);
  }

  // NOTE(review): element type reconstructed as SchemaRegistryMetadata —
  // confirm against ConfluentSchemaService.getAllSubjects().
  @GetMapping("/subjects")
  @ApiOperation(value = "List all subjects")
  public List<SchemaRegistryMetadata> lsAllSubjects() {
    return confluentSchemaService.getAllSubjects();
  }

  @GetMapping("/subjects/{subject}")
  @ApiOperation(value = "Get latest schema by subject")
  public SchemaMetadata getSchemaBySubject(@PathVariable String subject) {
    return confluentSchemaService.getSchemaBySubject(subject);
  }

  /** Registers a schema under the subject and returns its registry id. */
  @PostMapping("/subjects/{subject}/versions")
  @ApiOperation(value = "Register schema by subject")
  public int registerSchema(@PathVariable String subject, @RequestParam String schemaStr) {
    return confluentSchemaService.registerSchema(subject, schemaStr);
  }

  @PostMapping("/subjects/{subject}")
  @ApiOperation(value = "Check if a schema has already been registered under the specified subject")
  public SchemaRegistryMetadata checkSchemaExist(@PathVariable String subject,
      @RequestParam String schemaStr) {
    return confluentSchemaService.checkSchemaExist(subject, schemaStr);
  }

  // NOTE(review): element type reconstructed as Integer (deleted version ids,
  // per the Confluent registry API) — confirm against the service signature.
  @DeleteMapping("/subjects/{subject}")
  @ApiOperation(value = "Delete the specified subject and its associated compatibility level if "
      + "registered.")
  public List<Integer> deleteSubject(@PathVariable String subject) {
    return confluentSchemaService.deleteSubject(subject);
  }

  @GetMapping("/subjects/{subject}/versions/{versionId}")
  @ApiOperation(value = "Get schema by subject and version")
  public SchemaMetadata getSchemaBySubjectAndVersion(@PathVariable String subject,
      @PathVariable int versionId) {
    return confluentSchemaService.getSchemaBySubjectAndVersion(subject, versionId);
  }

  // NOTE(review): element type reconstructed as Integer (version numbers) —
  // confirm against ConfluentSchemaService.getAllVersions.
  @GetMapping("/subjects/{subject}/versions")
  @ApiOperation(value = "Get all versions for the specified subject")
  public List<Integer> getAllVersionsBySubject(@PathVariable String subject) {
    return confluentSchemaService.getAllVersions(subject);
  }


}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/SwaggerController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import springfox.documentation.annotations.ApiIgnore;

/** Redirects the bare /api path to the Swagger UI page. */
@Controller
@ApiIgnore
public class SwaggerController {

  @GetMapping(value = "/api")
  public String swagger() {
    return "redirect:swagger-ui.html";
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/UserController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.util.List;
import javax.validation.Valid;
import lombok.extern.log4j.Log4j2;
import org.gnuhpc.bigdata.constant.GeneralResponseState;
import org.gnuhpc.bigdata.model.GeneralResponse;
import org.gnuhpc.bigdata.model.User;
import org.gnuhpc.bigdata.service.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

@Log4j2
@RestController
@Api(value = "/users", description = "Security User Management Controller.")
public class UserController {

  @Autowired private UserService userService;

  @GetMapping("/users")
  @ApiOperation(value = "Get user list.")
  public List<User> listUser() {
    return userService.listUser();
  }

  /** Adds a user; validation failures are reported as a failure response, not an error. */
  @PostMapping("/users")
  @ApiOperation(value = "Add user.")
  public GeneralResponse addUser(@RequestBody @Valid User user, BindingResult results) {
    if (results.hasErrors()) {
      return GeneralResponse.builder()
          .state(GeneralResponseState.failure)
          .msg(results.getFieldError().getDefaultMessage())
          .build();
    }
    log.info("Receive add user request: username:" + user.getUsername());
    return userService.addUser(user);
  }

  /** Updates an existing user; same validation handling as addUser. */
  @PutMapping("/users")
  @ApiOperation(value = "Modify user information.")
  public GeneralResponse modifyUser(@RequestBody @Valid User user, BindingResult results) {
    if (results.hasErrors()) {
      return GeneralResponse.builder()
          .state(GeneralResponseState.failure)
          .msg(results.getFieldError().getDefaultMessage())
          .build();
    }
    log.info("Receive modify user request: username:" + user.getUsername());
    return userService.modifyUser(user);
  }

  @DeleteMapping("/users/{username}")
  @ApiOperation(value = "Delete user.")
  public GeneralResponse delUser(@PathVariable String username) {
    log.info("Receive delete user request: username:" + username);
    return userService.delUser(username);
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/controller/ZookeeperController.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.controller;

import com.google.common.net.HostAndPort;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import java.util.List;
import java.util.Map;
import org.gnuhpc.bigdata.model.ZkServerEnvironment;
import org.gnuhpc.bigdata.model.ZkServerStat;
import org.gnuhpc.bigdata.service.ZookeeperService;
import org.gnuhpc.bigdata.utils.ZookeeperUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * Read-only ZooKeeper endpoints: path listing, node data, connection state,
 * per-server stat and environment info.
 *
 * <p>Created by gnuhpc on 2017/7/16.
 */
@RestController
@RequestMapping("/zk")
@Api(value = "Control Zookeeper with Rest API")
public class ZookeeperController {

  @Lazy
  @Autowired private ZookeeperUtils zookeeperUtils;

  @Autowired private ZookeeperService zookeeperService;

  @GetMapping("/ls/path")
  @ApiOperation(value = "List a zookeeper path")
  public List<String> ls(@RequestParam String path) {
    return zookeeperUtils.lsPath(path);
  }

  @GetMapping("/get/path")
  @ApiOperation(value = "Get data of a zookeeper path")
  public String get(@RequestParam String path) {
    return zookeeperUtils.getNodeData(path);
  }

  @GetMapping("/connstate")
  @ApiOperation(value = "Get the connection state of zookeeper")
  public String zkConnState() {
    return zookeeperUtils.getState();
  }

  // Key type reconstructed as HostAndPort (matches the imports and the
  // per-server semantics of the stat/envi commands).
  @GetMapping("/stat")
  @ApiOperation(value = "Get the service state of zookeeper")
  public Map<HostAndPort, ZkServerStat> getStat() {
    return zookeeperService.stat();
  }

  @GetMapping("/env")
  @ApiOperation(value = "Get the environment information of zookeeper")
  public Map<HostAndPort, ZkServerEnvironment> getEnv() {
    return zookeeperService.environment();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/exception/CollectorException.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.exception;

/** Checked exception raised by the JMX collector layer. */
public class CollectorException extends Exception {

  public CollectorException(String message, Throwable cause) {
    super(message, cause);
  }

  public CollectorException(String message) {
    super(message);
  }

  public CollectorException(Throwable cause) {
    super(cause);
  }

  public CollectorException() {
    super();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/exception/ExceptionLogAspect.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.exception;

import lombok.extern.log4j.Log4j2;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;

/** Aspect that logs invocations of the exception package, making error handling traceable. */
@Aspect
@Component
@Log4j2
public class ExceptionLogAspect {

  /** Matches every public method in org.gnuhpc.bigdata.exception and its sub-packages. */
  @Pointcut("execution(public * org.gnuhpc.bigdata.exception..*.*(..))")
  public void exceptionLog() {}

  /**
   * Logs the Throwable (if any) passed to the advised method.
   *
   * <p>The original advice unconditionally evaluated {@code (Exception) joinPoint.getArgs()[0]},
   * which throws ArrayIndexOutOfBoundsException or ClassCastException for advised methods that
   * take no arguments or whose first argument is not a Throwable (the pointcut matches builder
   * setters and handler methods in this package too). Guard before casting.
   *
   * @param joinPoint the intercepted invocation
   */
  @Before("exceptionLog()")
  public void doBefore(JoinPoint joinPoint) throws Throwable {
    // A call was intercepted; record its content.
    Object[] args = joinPoint.getArgs();
    if (args.length > 0 && args[0] instanceof Throwable) {
      log.info("Exception happened!", (Throwable) args[0]);
    } else {
      log.info("Exception happened! {}", joinPoint.getSignature());
    }
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/exception/KafkaErrorCode.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.exception;

/**
 * Kafka-protocol-style error codes. The ordinal of each constant is its numeric wire code,
 * so constants must not be reordered.
 */
public enum KafkaErrorCode {
  NO_ERROR,
  OFFSET_OUT_OF_RANGE,
  INVALID_MESSAGE,
  UNKNOWN_TOPIC_OR_PARTITION,
  INVALID_FETCH_SIZE,
  LEADER_NOT_AVAILABLE,
  NOT_LEADER_FOR_PARTITION,
  REQUEST_TIMED_OUT,
  BROKER_NOT_AVAILABLE,
  REPLICA_NOT_AVAILABLE,
  MESSAGE_SIZE_TOO_LARGE,
  STALE_CONTROLLER_EPOCH,
  OFFSET_METADATA_TOO_LARGE,
  OFFSETS_LOAD_IN_PROGRESS,
  CONSUMER_COORDINATOR_NOT_AVAILABLE,
  NOT_COORDINATOR_FOR_CONSUMER,
  SERVICE_DOWN,
  UNKNOWN;

  /**
   * Maps a numeric error code to its enum constant.
   *
   * @param errorCode numeric code; values outside {@code [0, UNKNOWN.ordinal())} map to UNKNOWN
   * @return the matching constant, or {@link #UNKNOWN} for out-of-range codes
   */
  public static KafkaErrorCode getError(int errorCode) {
    if (errorCode < 0 || errorCode >= UNKNOWN.ordinal()) {
      return UNKNOWN;
    } else {
      return values()[errorCode];
    }
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/exception/KafkaExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.exception; 2 | 3 | import lombok.extern.log4j.Log4j2; 4 | import org.apache.kafka.common.errors.ApiException; 5 | import org.springframework.http.HttpStatus; 6 | import org.springframework.web.bind.annotation.ExceptionHandler; 7 | import org.springframework.web.bind.annotation.RestControllerAdvice; 8 | 9 | @Log4j2 10 | @RestControllerAdvice 11 | public class KafkaExceptionHandler { 12 | 13 | @ExceptionHandler(ApiException.class) 14 | public RestErrorResponse kafkaApiException(ApiException ex) { 15 | RestErrorResponse.Builder responseBuilder = new RestErrorResponse.Builder(); 16 | return responseBuilder 17 | .setStatus(HttpStatus.INTERNAL_SERVER_ERROR) 18 | .setCode(KafkaErrorCode.UNKNOWN.ordinal()) 19 | .setMessage("Api Exception happened!") 20 | .setDeveloperMessage(ex.getMessage()) 21 | .build(); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/exception/RestErrorResponse.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.exception; 2 | 3 | import com.fasterxml.jackson.annotation.JsonFormat; 4 | import java.time.LocalDateTime; 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | import java.util.Set; 8 | import javax.validation.ConstraintViolation; 9 | import lombok.AllArgsConstructor; 10 | import lombok.Data; 11 | import lombok.EqualsAndHashCode; 12 | import org.hibernate.validator.internal.engine.path.PathImpl; 13 | import org.springframework.http.HttpStatus; 14 | import org.springframework.util.ObjectUtils; 15 | import org.springframework.validation.FieldError; 16 | import org.springframework.validation.ObjectError; 17 | 18 | @Data 19 | public class RestErrorResponse { 20 | 21 | private HttpStatus status; 22 | 23 | 
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") 24 | private LocalDateTime timestamp; 25 | 26 | private int code; 27 | private String message; 28 | private String developerMessage; 29 | private String moreInfoUrl; 30 | private List subErrorList; 31 | 32 | public RestErrorResponse() { 33 | // this.timestamp = new Date(); 34 | this.timestamp = LocalDateTime.now(); 35 | } 36 | 37 | public RestErrorResponse(HttpStatus status, String message, Throwable ex) { 38 | this(); 39 | this.status = status; 40 | this.code = status.value(); 41 | this.message = message; 42 | this.developerMessage = ex.getLocalizedMessage(); 43 | } 44 | 45 | public RestErrorResponse(HttpStatus status, String message, String moreInfoUrl, Throwable ex) { 46 | this(); 47 | this.status = status; 48 | this.code = status.value(); 49 | this.message = message; 50 | this.developerMessage = ex.getLocalizedMessage(); 51 | this.moreInfoUrl = moreInfoUrl; 52 | } 53 | 54 | public RestErrorResponse( 55 | HttpStatus status, int code, String message, String developerMessage, String moreInfoUrl) { 56 | this(); 57 | if (status == null) { 58 | throw new NullPointerException("HttpStatus argument cannot be null."); 59 | } 60 | this.status = status; 61 | this.code = code; 62 | this.message = message; 63 | this.developerMessage = developerMessage; 64 | this.moreInfoUrl = moreInfoUrl; 65 | } 66 | 67 | @Override 68 | public boolean equals(Object o) { 69 | if (this == o) { 70 | return true; 71 | } 72 | if (o instanceof RestErrorResponse) { 73 | RestErrorResponse re = (RestErrorResponse) o; 74 | return ObjectUtils.nullSafeEquals(getStatus(), re.getStatus()) 75 | && getCode() == re.getCode() 76 | && ObjectUtils.nullSafeEquals(getMessage(), re.getMessage()) 77 | && ObjectUtils.nullSafeEquals(getDeveloperMessage(), re.getDeveloperMessage()) 78 | && ObjectUtils.nullSafeEquals(getMoreInfoUrl(), re.getMoreInfoUrl()); 79 | } 80 | 81 | return false; 82 | } 83 | 84 | @Override 85 | public int hashCode() { 
86 | //noinspection ThrowableResultOfMethodCallIgnored 87 | return ObjectUtils.nullSafeHashCode( 88 | new Object[] { 89 | getStatus(), getCode(), getMessage(), getDeveloperMessage(), getMoreInfoUrl() 90 | }); 91 | } 92 | 93 | public String toString() { 94 | //noinspection StringBufferReplaceableByString 95 | return new StringBuilder() 96 | .append(getStatus().value()) 97 | .append(" (") 98 | .append(getStatus().getReasonPhrase()) 99 | .append(" )") 100 | .toString(); 101 | } 102 | 103 | private void addSubError(RestSubError subError) { 104 | if (subErrorList == null) { 105 | subErrorList = new ArrayList<>(); 106 | } 107 | subErrorList.add(subError); 108 | } 109 | 110 | private void addValidationError( 111 | String object, String field, Object rejectedValue, String message) { 112 | addSubError(new RestValidationError(object, field, rejectedValue, message)); 113 | } 114 | 115 | private void addValidationError(String object, String message) { 116 | addSubError(new RestValidationError(object, message)); 117 | } 118 | 119 | private void addValidationError(FieldError fieldError) { 120 | this.addValidationError( 121 | fieldError.getObjectName(), 122 | fieldError.getField(), 123 | fieldError.getRejectedValue(), 124 | fieldError.getDefaultMessage()); 125 | } 126 | 127 | private void addValidationError(ObjectError objectError) { 128 | this.addValidationError(objectError.getObjectName(), objectError.getDefaultMessage()); 129 | } 130 | 131 | void addValidationError(List globalErrors) { 132 | globalErrors.forEach(this::addValidationError); 133 | } 134 | 135 | /** 136 | * Utility method for adding error of ConstraintViolation. Usually when a @Validated validation 137 | * fails. 
138 | * 139 | * @param cv the ConstraintViolation 140 | */ 141 | private void addValidationError(ConstraintViolation cv) { 142 | this.addValidationError( 143 | cv.getRootBeanClass().getSimpleName(), 144 | ((PathImpl) cv.getPropertyPath()).getLeafNode().asString(), 145 | cv.getInvalidValue(), 146 | cv.getMessage()); 147 | } 148 | 149 | void addValidationErrors(List fieldErrors) { 150 | fieldErrors.forEach(this::addValidationError); 151 | } 152 | 153 | void addValidationErrors(Set> constraintViolations) { 154 | constraintViolations.forEach(this::addValidationError); 155 | } 156 | 157 | public static class Builder { 158 | 159 | private HttpStatus status; 160 | private int code; 161 | private String message; 162 | private String developerMessage; 163 | private String moreInfoUrl; 164 | 165 | public Builder() {} 166 | 167 | public Builder setStatus(int statusCode) { 168 | this.status = HttpStatus.valueOf(statusCode); 169 | return this; 170 | } 171 | 172 | public Builder setStatus(HttpStatus status) { 173 | this.status = status; 174 | return this; 175 | } 176 | 177 | public Builder setCode(int code) { 178 | this.code = code; 179 | return this; 180 | } 181 | 182 | public Builder setMessage(String message) { 183 | this.message = message; 184 | return this; 185 | } 186 | 187 | public Builder setDeveloperMessage(String developerMessage) { 188 | this.developerMessage = developerMessage; 189 | return this; 190 | } 191 | 192 | public Builder setMoreInfoUrl(String moreInfoUrl) { 193 | this.moreInfoUrl = moreInfoUrl; 194 | return this; 195 | } 196 | 197 | public RestErrorResponse build() { 198 | if (this.status == null) { 199 | this.status = HttpStatus.INTERNAL_SERVER_ERROR; 200 | } 201 | return new RestErrorResponse( 202 | this.status, this.code, this.message, this.developerMessage, this.moreInfoUrl); 203 | } 204 | } 205 | 206 | abstract class RestSubError {} 207 | 208 | @Data 209 | @EqualsAndHashCode(callSuper = false) 210 | @AllArgsConstructor 211 | class RestValidationError 
extends RestSubError { 212 | 213 | private String object; 214 | private String field; 215 | private Object rejectedValue; 216 | private String message; 217 | 218 | RestValidationError(String object, String message) { 219 | this.object = object; 220 | this.message = message; 221 | } 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/exception/ServiceNotAvailableException.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.exception; 2 | 3 | import lombok.Data; 4 | import lombok.EqualsAndHashCode; 5 | import org.gnuhpc.bigdata.constant.ZkServerMode; 6 | 7 | @Data 8 | @EqualsAndHashCode 9 | public class ServiceNotAvailableException extends RuntimeException { 10 | 11 | private String serviceType; 12 | private ZkServerMode serviceState; 13 | 14 | public ServiceNotAvailableException(String serviceType, ZkServerMode serviceState, String message) { 15 | super(message); 16 | this.serviceType = serviceType; 17 | this.serviceState = serviceState; 18 | } 19 | 20 | @Override 21 | public String toString() { 22 | return "ServiceNotAvailableException{" 23 | + "serviceType='" 24 | + serviceType 25 | + '\'' 26 | + ", serviceState='" 27 | + serviceState 28 | + '\'' 29 | + '}'; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/AddPartition.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | 5 | import lombok.*; 6 | import lombok.extern.log4j.Log4j2; 7 | 8 | /** Created by gnuhpc on 2017/7/23. 
*/
@Getter
@Setter
@Log4j2
@ToString
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class AddPartition {

  String topic;
  int numPartitionsAdded;
  // Outer index: new partition; inner list: broker ids. Generic parameters reconstructed —
  // the source dump stripped them (raw "List>"). TODO confirm against the reassignment caller.
  List<List<Integer>> replicaAssignment;
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/BrokerInfo.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.model;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import java.util.List;
import java.util.Map;

import lombok.*;
import org.gnuhpc.bigdata.serializer.JsonJodaDateTimeSerializer;
import org.gnuhpc.bigdata.utils.TimestampDeserializer;
import org.joda.time.DateTime;

/**
 * Broker registration data as stored in Zookeeper, with explicit accessors that remap the
 * ZK JSON field names (snake_case) to the REST API's names (camelCase).
 */
@Getter
@Setter
@AllArgsConstructor
@ToString
@NoArgsConstructor
public class BrokerInfo {
  // listener name -> security protocol; value type assumed String — TODO confirm vs ZK payload.
  private Map<String, String> listener_security_protocol_map;

  private List<String> endpoints;

  private int jmxport;

  private String host;

  // Left public for backward compatibility; JSON mapping goes through the accessors below.
  public DateTime starttime;

  private int port;
  private int version;
  private String rack = "";
  private int id = -1;

  @JsonProperty("jmxPort")
  public int getJmxport() {
    return jmxport;
  }

  @JsonProperty("jmx_port")
  public void setJmxPort(int jmxport) {
    this.jmxport = jmxport;
  }

  @JsonProperty("securityProtocol")
  public Map<String, String> getListener_security_protocol_map() {
    return listener_security_protocol_map;
  }

  @JsonProperty("listener_security_protocol_map")
  public void setSecurityProtocol(Map<String, String> listener_security_protocol_map) {
    this.listener_security_protocol_map = listener_security_protocol_map;
  }

  @JsonProperty("startTime")
  @JsonSerialize(using = JsonJodaDateTimeSerializer.class)
  public DateTime getStarttime() {
    return starttime;
  }

  // ZK stores the registration time as epoch millis under "timestamp".
  @JsonProperty("timestamp")
  @JsonDeserialize(using = TimestampDeserializer.class)
  public void setStartTime(DateTime timestamp) {
    this.starttime = timestamp;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/ClusterInfo.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.model;

import java.util.Collection;
import lombok.Getter;
import lombok.Setter;
import org.apache.kafka.common.Node;

/** Snapshot of the Kafka cluster: its nodes, current controller, and cluster id. */
@Getter
@Setter
public class ClusterInfo {
  private Collection<Node> nodes;
  private Node controller;
  private String clusterId;
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/ConsumerGroupDesc.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.model;

import lombok.*;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.common.Node;
import org.gnuhpc.bigdata.constant.ConsumerGroupState;
import org.gnuhpc.bigdata.constant.ConsumerType;

/**
 * Per-partition view of a consumer group's position, ordered by (topic, partitionId).
 *
 * <p>Created by gnuhpc on 2017/7/27.
 */
@Setter
@Getter
@Log4j2
@ToString
@EqualsAndHashCode
@Builder(builderClassName = "ConsumerGroupDescBuilder")
public class ConsumerGroupDesc implements Comparable<ConsumerGroupDesc> {

  private String groupName;
  private ConsumerGroupState state;
  private String assignmentStrategy;
  private Node coordinator;
  private String topic;
  private int partitionId;
  private long currentOffset;
  private long logEndOffset;

  // Derived consumer lag; no public setter so it cannot drift from the offsets.
  @Setter(AccessLevel.NONE)
  private long lag;

  private String consumerId;
  private String clientId;
  private String host = "-";
  private ConsumerType type;

  /** Orders by topic, then partition; Integer.compare avoids the subtraction idiom. */
  @Override
  public int compareTo(ConsumerGroupDesc o) {
    int byTopic = this.topic.compareTo(o.topic);
    if (byTopic != 0) {
      return byTopic;
    }
    return Integer.compare(this.partitionId, o.partitionId);
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/ConsumerGroupDescFactory.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.model;

import java.util.Map;
import kafka.admin.AdminClient;
import kafka.common.OffsetAndMetadata;
import kafka.coordinator.group.GroupTopicPartition;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.gnuhpc.bigdata.constant.ConsumerGroupState;
import org.gnuhpc.bigdata.constant.ConsumerType;
import org.gnuhpc.bigdata.utils.KafkaUtils;

/**
 * Builds {@link ConsumerGroupDesc} rows for old (ZK-based) and new (broker-based) consumers.
 *
 * <p>Generic parameter types on the methods were reconstructed from usage; the source dump
 * stripped them (e.g. raw {@code Map.Entry op}).
 */
@AllArgsConstructor
@NoArgsConstructor
public class ConsumerGroupDescFactory {

  private KafkaUtils kafkaUtils;

  /**
   * Describes one partition of an old (Zookeeper-based) consumer group.
   *
   * @param op partition id -> owner consumer id ("none" when unowned)
   * @param fetchOffSetFromZkResultList partition id -> committed offset read from ZK
   * @param topic topic name
   * @param consumerGroup group name
   * @param topicMeta end offsets per partition
   */
  public ConsumerGroupDesc makeOldConsumerGroupDesc(
      Map.Entry<Integer, String> op,
      Map<Integer, Long> fetchOffSetFromZkResultList,
      String topic,
      String consumerGroup,
      TopicMeta topicMeta) {

    ConsumerGroupDesc.ConsumerGroupDescBuilder cgdBuilder =
        ConsumerGroupDesc.builder()
            .groupName(consumerGroup)
            .topic(topic)
            .partitionId(op.getKey())
            .currentOffset(fetchOffSetFromZkResultList.get(op.getKey()))
            .logEndOffset(
                topicMeta
                    .getTopicPartitionInfos()
                    .stream()
                    .filter(tpi -> tpi.getTopicPartitionInfo().partition() == op.getKey())
                    .findFirst()
                    .get()
                    .getEndOffset());

    if (op.getValue().equals("none")) {
      // Unowned partition: no consumer, group counts as EMPTY for this partition.
      cgdBuilder.consumerId("-");
      cgdBuilder.host("-");
      cgdBuilder.state(ConsumerGroupState.EMPTY);
    } else {
      cgdBuilder.consumerId(op.getValue());
      // Old consumer ids are "<group>_<host>-..."; strip the group prefix to get the host.
      cgdBuilder.host(op.getValue().replace(consumerGroup + "_", ""));
      cgdBuilder.state(ConsumerGroupState.STABLE);
    }
    cgdBuilder.type(ConsumerType.OLD);
    return cgdBuilder.build();
  }

  /**
   * Describes one partition of a running new-style consumer group member.
   *
   * @param tp topic-partition being described
   * @param consumerGroup group name
   * @param partitionEndOffsetMap partition id -> log end offset (null value: no leader)
   * @param cs the member owning the partition
   */
  public ConsumerGroupDesc makeNewRunningConsumerGroupDesc(
      TopicPartition tp,
      String consumerGroup,
      Map<Integer, Long> partitionEndOffsetMap,
      AdminClient.ConsumerSummary cs) {
    KafkaConsumer consumer = kafkaUtils.createNewConsumer(consumerGroup);
    try {
      ConsumerGroupDesc.ConsumerGroupDescBuilder cgdBuilder =
          ConsumerGroupDesc.builder()
              .groupName(consumerGroup)
              .topic(tp.topic())
              .partitionId(tp.partition())
              .consumerId(cs.clientId())
              .host(cs.host())
              .state(ConsumerGroupState.STABLE)
              .type(ConsumerType.NEW);

      long currentOffset = -1L;

      org.apache.kafka.clients.consumer.OffsetAndMetadata offset =
          consumer.committed(new TopicPartition(tp.topic(), tp.partition()));
      if (offset != null) {
        currentOffset = offset.offset();
      }
      cgdBuilder.currentOffset(currentOffset);

      Long endOffset = partitionEndOffsetMap.get(tp.partition());
      if (endOffset == null) {
        // if endOffset is null, the partition of this topic has no leader replication
        cgdBuilder.logEndOffset(-1L);
      } else {
        cgdBuilder.logEndOffset(endOffset);
      }
      return cgdBuilder.build();
    } finally {
      // Close even when committed() throws; the original leaked the consumer on errors.
      consumer.close();
    }
  }

  /**
   * Describes one partition of a new-style group with committed offsets but no live member.
   *
   * @param consumerGroup group name
   * @param partitionEndOffsetMap partition id -> log end offset (null value: no leader)
   * @param topicStorage committed offset entry for one group topic-partition
   * @param topic topic name
   */
  public ConsumerGroupDesc makeNewPendingConsumerGroupDesc(
      String consumerGroup,
      Map<Integer, Long> partitionEndOffsetMap,
      Map.Entry<GroupTopicPartition, OffsetAndMetadata> topicStorage,
      String topic) {
    Long partitionCurrentOffset =
        (topicStorage.getValue() == null) ? -1L : topicStorage.getValue().offset();

    int partitionId = topicStorage.getKey().topicPartition().partition();
    ConsumerGroupDesc.ConsumerGroupDescBuilder cgdBuilder =
        ConsumerGroupDesc.builder()
            .groupName(consumerGroup)
            .topic(topic)
            .consumerId("-")
            .partitionId(partitionId)
            .currentOffset(partitionCurrentOffset)
            .host("-")
            .state(ConsumerGroupState.EMPTY)
            .type(ConsumerType.NEW);

    Long endOffset = partitionEndOffsetMap.get(partitionId);

    if (endOffset == null) {
      // if endOffset is null, the partition of this topic has no leader replication
      cgdBuilder.logEndOffset(-1L);
    } else {
      cgdBuilder.logEndOffset(endOffset);
    }

    return cgdBuilder.build();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/ConsumerGroupMeta.java:
--------------------------------------------------------------------------------
package org.gnuhpc.bigdata.model;

import java.util.List;
import lombok.Builder;
import lombok.Data;
import org.apache.kafka.common.Node;
import org.gnuhpc.bigdata.constant.ConsumerGroupState;

/** Group-level metadata: state, strategy, coordinator node, and member list. */
@Data
@Builder
public class ConsumerGroupMeta {

  private String groupId;
  private ConsumerGroupState state;
  private String assignmentStrategy;
  private Node coordinator;
  // Element type reconstructed from the sibling MemberDescription model class.
  private List<MemberDescription> members;
}
--------------------------------------------------------------------------------
/src/main/java/org/gnuhpc/bigdata/model/CustomConfigEntry.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import org.apache.kafka.clients.admin.ConfigEntry.ConfigSource; 6 | 7 | @Data 8 | @AllArgsConstructor 9 | public class CustomConfigEntry { 10 | 11 | private String name; 12 | private String value; 13 | private boolean isSensitive; 14 | private boolean isReadOnly; 15 | private ConfigSource source; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/CustomTopicPartitionInfo.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.io.Serializable; 4 | import lombok.AccessLevel; 5 | import lombok.EqualsAndHashCode; 6 | import lombok.Getter; 7 | import lombok.Setter; 8 | import lombok.ToString; 9 | import org.apache.commons.collections4.CollectionUtils; 10 | import org.apache.kafka.common.TopicPartitionInfo; 11 | 12 | @Getter 13 | @Setter 14 | @ToString 15 | @EqualsAndHashCode 16 | public class CustomTopicPartitionInfo 17 | implements Comparable, Serializable { 18 | 19 | private TopicPartitionInfo topicPartitionInfo; 20 | private boolean in_sync; 21 | private long startOffset; 22 | private long endOffset; 23 | 24 | @Setter(AccessLevel.NONE) 25 | private long messageAvailable; 26 | 27 | public void setIn_sync() { 28 | if (topicPartitionInfo.isr() != null 29 | && topicPartitionInfo.replicas() != null 30 | && topicPartitionInfo.isr().size() == topicPartitionInfo.replicas().size()) { 31 | in_sync = 32 | CollectionUtils.isEqualCollection( 33 | topicPartitionInfo.isr(), topicPartitionInfo.replicas()); 34 | } else { 35 | in_sync = false; 36 | } 37 | } 38 | 39 | public void setMessageAvailable() { 40 | this.messageAvailable = this.endOffset - this.startOffset; 41 
| } 42 | 43 | @Override 44 | public int compareTo(CustomTopicPartitionInfo topicPartitionInfo) { 45 | if (this.topicPartitionInfo.partition() < topicPartitionInfo.topicPartitionInfo.partition()) { 46 | return -1; 47 | } else if (this.topicPartitionInfo.partition() 48 | == topicPartitionInfo.topicPartitionInfo.partition()) { 49 | return 0; 50 | } else { 51 | return 1; 52 | } 53 | } 54 | 55 | public TopicPartitionInfo getTopicPartitionInfo() { 56 | return this.topicPartitionInfo; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/GeneralResponse.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.ToString; 7 | import lombok.extern.log4j.Log4j2; 8 | import org.gnuhpc.bigdata.constant.GeneralResponseState; 9 | 10 | @Data 11 | @Log4j2 12 | @AllArgsConstructor 13 | @Builder 14 | @ToString 15 | public class GeneralResponse { 16 | 17 | private GeneralResponseState state; 18 | private String msg; 19 | private Object data; 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/HealthCheckResult.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.annotation.JsonFormat; 4 | import java.time.LocalDateTime; 5 | import lombok.Data; 6 | import lombok.Getter; 7 | import lombok.Setter; 8 | 9 | @Getter 10 | @Setter 11 | @Data 12 | public class HealthCheckResult { 13 | 14 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") 15 | private LocalDateTime timestamp; 16 | 17 | public String status; 18 | public String msg; 19 | 20 | public HealthCheckResult() { 21 | this.timestamp = LocalDateTime.now(); 22 | } 23 
| } 24 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXClient.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import static com.google.common.base.Preconditions.checkNotNull; 4 | 5 | import java.io.IOException; 6 | import java.net.MalformedURLException; 7 | import java.net.SocketTimeoutException; 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | import java.util.concurrent.ArrayBlockingQueue; 11 | import java.util.concurrent.BlockingQueue; 12 | import java.util.concurrent.ExecutorService; 13 | import java.util.concurrent.Executors; 14 | import java.util.concurrent.ThreadFactory; 15 | import java.util.concurrent.TimeUnit; 16 | import javax.management.remote.JMXConnector; 17 | import javax.management.remote.JMXConnectorFactory; 18 | import javax.management.remote.JMXServiceURL; 19 | import lombok.Getter; 20 | import lombok.Setter; 21 | import lombok.extern.log4j.Log4j2; 22 | import org.gnuhpc.bigdata.config.JMXConfig; 23 | import org.gnuhpc.bigdata.exception.CollectorException; 24 | 25 | @Getter 26 | @Setter 27 | @Log4j2 28 | public class JMXClient { 29 | 30 | private String ip; 31 | private String port; 32 | private JMXConnector jmxConnector = null; 33 | private static final ThreadFactory daemonThreadFactory = new DaemonThreadFactory(); 34 | private String jmxServiceURL; 35 | private Map jmxEnv; 36 | private static final long CONNECTION_TIMEOUT = 10000; 37 | private static final long JMX_TIMEOUT = 20; 38 | 39 | public JMXClient() { 40 | jmxEnv = new HashMap<>(); 41 | jmxEnv.put(JMXConfig.JMX_CONNECT_TIMEOUT, CONNECTION_TIMEOUT); 42 | } 43 | 44 | public JMXClient(String host) { 45 | this(); 46 | String[] ipAndPort = host.split(":"); 47 | this.ip = ipAndPort[0]; 48 | this.port = ipAndPort[1]; 49 | this.jmxServiceURL = 50 | new StringBuilder() 51 | .append(JMXConfig.JMX_PROTOCOL) 52 | 
.append(this.ip) 53 | .append(":") 54 | .append(this.port) 55 | .append("/jmxrmi") 56 | .toString(); 57 | } 58 | 59 | public JMXConnector connect() throws CollectorException { 60 | try { 61 | JMXServiceURL jmxServiceURL = new JMXServiceURL(this.jmxServiceURL); 62 | jmxConnector = JMXConnectorFactory.connect(jmxServiceURL, jmxEnv); 63 | } catch (MalformedURLException e) { 64 | throw new CollectorException( 65 | String.format( 66 | "%s occurred. URL: %s. Reason: %s", 67 | e.getClass().getCanonicalName(), this.jmxServiceURL, e.getCause()), 68 | e); 69 | } catch (IOException e) { 70 | throw new CollectorException( 71 | String.format( 72 | "%s occurred. URL: %s. Reason: %s", 73 | e.getClass().getCanonicalName(), this.jmxServiceURL, e.getCause()), 74 | e); 75 | } 76 | return jmxConnector; 77 | } 78 | 79 | /** 80 | * This code comes from Datadog jmxFetch. 81 | * https://github.com/DataDog/jmxfetch/blob/master/src/main/java/org/datadog/jmxfetch/Connection.java 82 | */ 83 | public JMXConnector connectWithTimeout() throws IOException, InterruptedException { 84 | JMXServiceURL url = new JMXServiceURL(this.jmxServiceURL); 85 | 86 | BlockingQueue mailbox = new ArrayBlockingQueue(1); 87 | 88 | ExecutorService executor = Executors.newSingleThreadExecutor(daemonThreadFactory); 89 | executor.submit( 90 | () -> { 91 | try { 92 | JMXConnector connector = JMXConnectorFactory.connect(url, jmxEnv); 93 | if (!mailbox.offer(connector)) { 94 | connector.close(); 95 | } 96 | } catch (Throwable t) { 97 | mailbox.offer(t); 98 | } 99 | }); 100 | Object result; 101 | try { 102 | result = mailbox.poll(JMX_TIMEOUT, TimeUnit.SECONDS); 103 | if (result == null) { 104 | if (!mailbox.offer("")) { 105 | result = mailbox.take(); 106 | } 107 | } 108 | } catch (InterruptedException e) { 109 | throw e; 110 | } finally { 111 | executor.shutdown(); 112 | } 113 | if (result == null) { 114 | log.warn("Connection timed out: " + url); 115 | throw new SocketTimeoutException("Connection timed out: " + url); 116 | 
} 117 | if (result instanceof JMXConnector) { 118 | jmxConnector = (JMXConnector) result; 119 | return jmxConnector; 120 | } 121 | try { 122 | throw (Throwable) result; 123 | } catch (Throwable e) { 124 | throw new IOException(e.toString(), e); 125 | } 126 | } 127 | 128 | public void close() throws CollectorException { 129 | checkNotNull(jmxConnector); 130 | try { 131 | jmxConnector.close(); 132 | } catch (IOException e) { 133 | throw new CollectorException("Cannot close connection. ", e); 134 | } 135 | } 136 | 137 | private static class DaemonThreadFactory implements ThreadFactory { 138 | 139 | public Thread newThread(Runnable r) { 140 | Thread t = Executors.defaultThreadFactory().newThread(r); 141 | t.setDaemon(true); 142 | return t; 143 | } 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXComplexAttribute.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.HashMap; 6 | import java.util.Iterator; 7 | import java.util.LinkedHashMap; 8 | import java.util.LinkedList; 9 | import java.util.Map; 10 | import javax.management.AttributeNotFoundException; 11 | import javax.management.InstanceNotFoundException; 12 | import javax.management.MBeanAttributeInfo; 13 | import javax.management.MBeanException; 14 | import javax.management.MBeanServerConnection; 15 | import javax.management.ObjectName; 16 | import javax.management.ReflectionException; 17 | import javax.management.openmbean.CompositeData; 18 | 19 | public class JMXComplexAttribute extends JMXAttribute { 20 | 21 | private HashMap> subAttributeList; 22 | 23 | public JMXComplexAttribute( 24 | MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { 25 | super(attribute, beanName, connection); 26 | this.subAttributeList = new 
HashMap<>(); 27 | } 28 | 29 | @Override 30 | public LinkedList> getMetrics() 31 | throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, 32 | ReflectionException, IOException { 33 | 34 | LinkedList> metrics = new LinkedList>(); 35 | 36 | for (Map.Entry> pair : subAttributeList.entrySet()) { 37 | String subAttribute = pair.getKey(); 38 | HashMap metric = pair.getValue(); 39 | if (metric.get(ALIAS) == null) { 40 | metric.put(ALIAS, convertMetricName(getAlias(subAttribute))); 41 | } 42 | if (metric.get(METRIC_TYPE) == null) { 43 | metric.put("domain", getBeanName().getDomain()); 44 | metric.put("beanName", getBeanName().toString()); 45 | metric.put("attributeName", subAttribute); 46 | metric.put(METRIC_TYPE, getMetricType(subAttribute)); 47 | } 48 | 49 | /* 50 | if (metric.get("tags") == null) { 51 | metric.put("tags", getTags()); 52 | } 53 | */ 54 | 55 | metric.put("value", castToDouble(getValue(subAttribute), subAttribute)); 56 | metrics.add(metric); 57 | } 58 | return metrics; 59 | } 60 | 61 | private Object getMetricType(String subAttribute) { 62 | String subAttributeName = getAttribute().getName() + "." 
+ subAttribute; 63 | String metricType = null; 64 | 65 | JMXFilter include = getMatchingConf().getInclude(); 66 | if (include.getAttribute() instanceof LinkedHashMap) { 67 | LinkedHashMap> attribute = 68 | (LinkedHashMap>) (include.getAttribute()); 69 | metricType = attribute.get(subAttributeName).get(METRIC_TYPE); 70 | if (metricType == null) { 71 | metricType = attribute.get(subAttributeName).get("type"); 72 | } 73 | } 74 | 75 | if (metricType == null) { 76 | metricType = "gauge"; 77 | } 78 | 79 | return metricType; 80 | } 81 | 82 | private Object getValue(String subAttribute) 83 | throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, 84 | ReflectionException, IOException { 85 | 86 | Object value = this.getJmxValue(); 87 | String attributeType = getAttribute().getType(); 88 | 89 | if ("javax.management.openmbean.CompositeData".equals(attributeType)) { 90 | CompositeData data = (CompositeData) value; 91 | return data.get(subAttribute); 92 | } else if (("java.util.HashMap".equals(attributeType)) 93 | || ("java.util.Map".equals(attributeType))) { 94 | Map data = (Map) value; 95 | return data.get(subAttribute); 96 | } 97 | throw new NumberFormatException(); 98 | } 99 | 100 | @Override 101 | public boolean match(JMXConfiguration configuration) { 102 | if (!matchDomain(configuration) 103 | || !matchBean(configuration) 104 | || excludeMatchDomain(configuration) 105 | || excludeMatchBean(configuration)) { 106 | return false; 107 | } 108 | 109 | try { 110 | populateSubAttributeList(getJmxValue()); 111 | } catch (Exception e) { 112 | return false; 113 | } 114 | 115 | return matchAttribute(configuration) && !excludeMatchAttribute(configuration); 116 | } 117 | 118 | private void populateSubAttributeList(Object attributeValue) { 119 | String attributeType = getAttribute().getType(); 120 | if ("javax.management.openmbean.CompositeData".equals(attributeType)) { 121 | CompositeData data = (CompositeData) attributeValue; 122 | for (String key : 
data.getCompositeType().keySet()) { 123 | this.subAttributeList.put(key, new HashMap()); 124 | } 125 | } else if (("java.util.HashMap".equals(attributeType)) 126 | || ("java.util.Map".equals(attributeType))) { 127 | Map data = (Map) attributeValue; 128 | for (String key : data.keySet()) { 129 | this.subAttributeList.put(key, new HashMap()); 130 | } 131 | } 132 | } 133 | 134 | private boolean excludeMatchAttribute(JMXConfiguration configuration) { 135 | JMXFilter exclude = configuration.getExclude(); 136 | if (exclude == null) { 137 | return false; 138 | } 139 | if (exclude.getAttribute() != null && matchSubAttribute(exclude, getAttributeName(), false)) { 140 | return true; 141 | } 142 | 143 | Iterator it = subAttributeList.keySet().iterator(); 144 | while (it.hasNext()) { 145 | String subAttribute = it.next(); 146 | if (matchSubAttribute(exclude, getAttributeName() + "." + subAttribute, false)) { 147 | it.remove(); 148 | } 149 | } 150 | return subAttributeList.size() <= 0; 151 | } 152 | 153 | private boolean matchAttribute(JMXConfiguration configuration) { 154 | if (matchSubAttribute(configuration.getInclude(), getAttributeName(), true)) { 155 | return true; 156 | } 157 | 158 | Iterator it = subAttributeList.keySet().iterator(); 159 | 160 | while (it.hasNext()) { 161 | String subAttribute = it.next(); 162 | if (!matchSubAttribute( 163 | configuration.getInclude(), getAttributeName() + "." 
+ subAttribute, true)) { 164 | it.remove(); 165 | } 166 | } 167 | 168 | return subAttributeList.size() > 0; 169 | } 170 | 171 | private boolean matchSubAttribute( 172 | JMXFilter params, String subAttributeName, boolean matchOnEmpty) { 173 | if ((params.getAttribute() instanceof LinkedHashMap) 174 | && ((LinkedHashMap) (params.getAttribute())) 175 | .containsKey(subAttributeName)) { 176 | return true; 177 | } else if ((params.getAttribute() instanceof ArrayList 178 | && ((ArrayList) (params.getAttribute())).contains(subAttributeName))) { 179 | return true; 180 | } else if (params.getAttribute() == null) { 181 | return matchOnEmpty; 182 | } 183 | return false; 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXConfiguration.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import java.util.ArrayList; 6 | import java.util.HashMap; 7 | import java.util.HashSet; 8 | import java.util.Iterator; 9 | import java.util.LinkedHashMap; 10 | import java.util.LinkedList; 11 | import java.util.Map; 12 | import java.util.Set; 13 | import lombok.Getter; 14 | import lombok.Setter; 15 | 16 | @Getter 17 | @Setter 18 | public class JMXConfiguration { 19 | 20 | private JMXFilter include; 21 | private JMXFilter exclude; 22 | 23 | /** 24 | * Access JMXConfiguration elements more easily 25 | * 26 | *

Also provides helper methods to extract common information among JMXFilters. 27 | */ 28 | @JsonCreator 29 | public JMXConfiguration( 30 | @JsonProperty("include") JMXFilter include, @JsonProperty("exclude") JMXFilter exclude) { 31 | this.include = include; 32 | this.exclude = exclude; 33 | } 34 | 35 | private Boolean hasInclude() { 36 | return getInclude() != null; 37 | } 38 | 39 | /** 40 | * JMXFilter a configuration list to keep the ones with `include` JMXFilters. 41 | * 42 | * @param configurationList the configuration list to JMXFilter 43 | * @return a configuration list 44 | */ 45 | private static LinkedList getIncludeConfigurationList( 46 | LinkedList configurationList) { 47 | LinkedList includeConfigList = 48 | new LinkedList(configurationList); 49 | Iterator confItr = includeConfigList.iterator(); 50 | 51 | while (confItr.hasNext()) { 52 | JMXConfiguration conf = confItr.next(); 53 | if (!conf.hasInclude()) { 54 | confItr.remove(); 55 | } 56 | } 57 | return includeConfigList; 58 | } 59 | 60 | /** 61 | * Extract `include` JMXFilters from the configuration list and index then by domain name. 62 | * 63 | * @param configurationList the configuration list to process 64 | * @return JMXFilters by domain name 65 | */ 66 | private static HashMap> getIncludeJMXFiltersByDomain( 67 | LinkedList configurationList) { 68 | HashMap> includeJMXFiltersByDomain = 69 | new HashMap>(); 70 | 71 | for (JMXConfiguration conf : configurationList) { 72 | JMXFilter JMXFilter = conf.getInclude(); 73 | LinkedList JMXFilters = new LinkedList(); 74 | 75 | // Convert bean name, to a proper JMXFilter, i.e. 
a hash 76 | if (!JMXFilter.isEmptyBeanName()) { 77 | ArrayList beanNames = JMXFilter.getBeanNames(); 78 | 79 | for (String beanName : beanNames) { 80 | String[] splitBeanName = beanName.split(":"); 81 | String domain = splitBeanName[0]; 82 | String rawBeanParameters = splitBeanName[1]; 83 | HashMap beanParametersHash = 84 | JMXAttribute.getBeanParametersHash(rawBeanParameters); 85 | beanParametersHash.put("domain", domain); 86 | JMXFilters.add(new JMXFilter(beanParametersHash)); 87 | } 88 | } else { 89 | JMXFilters.add(JMXFilter); 90 | } 91 | 92 | for (JMXFilter f : JMXFilters) { 93 | // Retrieve the existing JMXFilters for the domain, add the new JMXFilters 94 | LinkedList domainJMXFilters; 95 | String domainName = f.getDomain(); 96 | 97 | if (includeJMXFiltersByDomain.containsKey(domainName)) { 98 | domainJMXFilters = includeJMXFiltersByDomain.get(domainName); 99 | } else { 100 | domainJMXFilters = new LinkedList(); 101 | } 102 | 103 | domainJMXFilters.add(f); 104 | includeJMXFiltersByDomain.put(domainName, domainJMXFilters); 105 | } 106 | } 107 | return includeJMXFiltersByDomain; 108 | } 109 | 110 | /** 111 | * Extract, among JMXFilters, bean key parameters in common. 
112 | * 113 | * @param JMXFiltersByDomain JMXFilters by domain name 114 | * @return common bean key parameters by domain name 115 | */ 116 | private static HashMap> getCommonBeanKeysByDomain( 117 | HashMap> JMXFiltersByDomain) { 118 | HashMap> beanKeysIntersectionByDomain = new HashMap>(); 119 | 120 | for (Map.Entry> JMXFiltersEntry : JMXFiltersByDomain.entrySet()) { 121 | String domainName = JMXFiltersEntry.getKey(); 122 | LinkedList mJMXFilters = JMXFiltersEntry.getValue(); 123 | 124 | // Compute keys intersection 125 | Set keysIntersection = new HashSet(mJMXFilters.getFirst().keySet()); 126 | 127 | for (JMXFilter f : mJMXFilters) { 128 | keysIntersection.retainAll(f.keySet()); 129 | } 130 | 131 | // Remove special parameters 132 | for (String param : JMXAttribute.getExcludedBeanParams()) { 133 | keysIntersection.remove(param); 134 | } 135 | beanKeysIntersectionByDomain.put(domainName, keysIntersection); 136 | } 137 | 138 | return beanKeysIntersectionByDomain; 139 | } 140 | 141 | /** 142 | * Build a map of common bean keys->values, with the specified bean keys, among the given 143 | * JMXFilters. 
144 | * 145 | * @param beanKeysByDomain bean keys by domain name 146 | * @param JMXFiltersByDomain JMXFilters by domain name 147 | * @return bean pattern (keys->values) by domain name 148 | */ 149 | private static HashMap> getCommonScopeByDomain( 150 | HashMap> beanKeysByDomain, 151 | HashMap> JMXFiltersByDomain) { 152 | // Compute a common scope a among JMXFilters by domain name 153 | HashMap> commonScopeByDomain = 154 | new HashMap>(); 155 | 156 | for (Map.Entry> commonParametersByDomainEntry : 157 | beanKeysByDomain.entrySet()) { 158 | String domainName = commonParametersByDomainEntry.getKey(); 159 | Set commonParameters = commonParametersByDomainEntry.getValue(); 160 | LinkedList JMXFilters = JMXFiltersByDomain.get(domainName); 161 | LinkedHashMap commonScope = new LinkedHashMap(); 162 | 163 | for (String parameter : commonParameters) { 164 | // Check if all values associated with the parameters are the same 165 | String commonValue = null; 166 | Boolean hasCommonValue = true; 167 | 168 | for (JMXFilter f : JMXFilters) { 169 | ArrayList parameterValues = f.getParameterValues(parameter); 170 | 171 | if (parameterValues.size() != 1 172 | || (commonValue != null && !commonValue.equals(parameterValues.get(0)))) { 173 | hasCommonValue = false; 174 | break; 175 | } 176 | commonValue = parameterValues.get(0); 177 | } 178 | if (hasCommonValue) { 179 | commonScope.put(parameter, commonValue); 180 | } 181 | } 182 | commonScopeByDomain.put(domainName, commonScope); 183 | } 184 | 185 | return commonScopeByDomain; 186 | } 187 | 188 | /** 189 | * Stringify a bean pattern. 190 | * 191 | * @param domain domain name 192 | * @param beanScope map of bean keys-> values 193 | * @return string pattern identifying the bean scope 194 | */ 195 | private static String beanScopeToString(String domain, LinkedHashMap beanScope) { 196 | StringBuffer resultBuf = new StringBuffer(); 197 | 198 | // Domain 199 | domain = (domain != null) ? 
domain : "*"; 200 | resultBuf.append(domain); 201 | resultBuf.append(":"); 202 | 203 | // Scope parameters 204 | for (Map.Entry beanScopeEntry : beanScope.entrySet()) { 205 | String param = beanScopeEntry.getKey(); 206 | String value = beanScopeEntry.getValue(); 207 | 208 | resultBuf.append(param); 209 | resultBuf.append("="); 210 | resultBuf.append(value); 211 | resultBuf.append(","); 212 | } 213 | 214 | resultBuf.append("*"); 215 | 216 | return resultBuf.toString(); 217 | } 218 | 219 | /** 220 | * Find, among the JMXConfiguration list, a potential common bean pattern by domain name. 221 | * 222 | * @param JMXConfigurationList the JMXConfiguration list to process 223 | * @return common bean pattern strings 224 | */ 225 | public static LinkedList getGreatestCommonScopes( 226 | LinkedList JMXConfigurationList) { 227 | LinkedList result = new LinkedList(); 228 | if (JMXConfigurationList == null || JMXConfigurationList.isEmpty()) { 229 | return result; 230 | } 231 | LinkedList includeConfigList = 232 | getIncludeConfigurationList(JMXConfigurationList); 233 | HashMap> includeJMXFiltersByDomain = 234 | getIncludeJMXFiltersByDomain(includeConfigList); 235 | HashMap> parametersIntersectionByDomain = 236 | getCommonBeanKeysByDomain(includeJMXFiltersByDomain); 237 | HashMap> commonBeanScopeByDomain = 238 | getCommonScopeByDomain(parametersIntersectionByDomain, includeJMXFiltersByDomain); 239 | 240 | for (Map.Entry> beanScopeEntry : 241 | commonBeanScopeByDomain.entrySet()) { 242 | String domain = beanScopeEntry.getKey(); 243 | LinkedHashMap beanScope = beanScopeEntry.getValue(); 244 | 245 | result.add(beanScopeToString(domain, beanScope)); 246 | } 247 | 248 | return result; 249 | } 250 | } 251 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXFilter.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import 
com.fasterxml.jackson.annotation.JsonCreator; 4 | import java.util.ArrayList; 5 | import java.util.HashMap; 6 | import java.util.Set; 7 | import java.util.regex.Pattern; 8 | import lombok.Getter; 9 | import lombok.Setter; 10 | 11 | @Getter 12 | @Setter 13 | public class JMXFilter { 14 | 15 | HashMap filter; 16 | Pattern domainRegex; 17 | ArrayList beanRegexes = null; 18 | // ArrayList excludeTags = null; 19 | // HashMap additionalTags = null; 20 | 21 | /** 22 | * A simple class to manipulate include/exclude filter elements more easily A filter may contain: 23 | * - A domain (key: 'domain') or a domain regex (key: 'domain_regex') - Bean names (key: 'bean' or 24 | * 'bean_name') or bean regexes (key: 'bean_regex') - Attributes (key: 'attribute') - Additional 25 | * bean parameters (other keys) 26 | */ 27 | @JsonCreator 28 | @SuppressWarnings("unchecked") 29 | public JMXFilter(Object filter) { 30 | HashMap castFilter; 31 | if (filter != null) { 32 | castFilter = (HashMap) filter; 33 | } else { 34 | castFilter = new HashMap(); 35 | } 36 | this.filter = castFilter; 37 | } 38 | 39 | public String toString() { 40 | return this.filter.toString(); 41 | } 42 | 43 | public Set keySet() { 44 | return filter.keySet(); 45 | } 46 | 47 | @SuppressWarnings({"unchecked", "serial"}) 48 | private static ArrayList toStringArrayList(final Object toCast) { 49 | // Return object as an ArrayList wherever it's defined as 50 | // list or not 51 | // 52 | // ### Example 53 | // object: 54 | // - firstValue 55 | // - secondValue 56 | // ### OR 57 | // object: singleValue 58 | // ### 59 | if (toCast instanceof String) { 60 | ArrayList toCastList = new ArrayList<>(); 61 | toCastList.add(toCast.toString()); 62 | return toCastList; 63 | } 64 | return (ArrayList) toCast; 65 | } 66 | 67 | public ArrayList getBeanNames() { 68 | if (isEmptyBeanName()) { 69 | return new ArrayList(); 70 | } 71 | final Object beanNames = 72 | (filter.get("bean") != null) ? 
filter.get("bean") : filter.get("bean_name"); 73 | // Return bean names as an ArrayList wherever it's defined as 74 | // list or not 75 | // 76 | // ### Example 77 | // bean: 78 | // - 79 | // org.apache.cassandra.db:type=Caches,keyspace=system,cache=HintsColumnFamilyKeyCache 80 | // - org.datadog.jmxfetch.test:type=type=SimpleTestJavaApp 81 | // ### OR 82 | // bean: org.datadog.jmxfetch.test:type=type=SimpleTestJavaApp 83 | // ### 84 | return toStringArrayList(beanNames); 85 | } 86 | 87 | private static ArrayList toPatternArrayList(final Object toCast) { 88 | ArrayList patternArrayList = new ArrayList(); 89 | ArrayList stringArrayList = toStringArrayList(toCast); 90 | for (String string : stringArrayList) { 91 | patternArrayList.add(Pattern.compile(string)); 92 | } 93 | 94 | return patternArrayList; 95 | } 96 | 97 | public ArrayList getBeanRegexes() { 98 | // Return bean regexes as an ArrayList of Pattern whether it's defined as 99 | // a list or not 100 | 101 | if (this.beanRegexes == null) { 102 | if (filter.get("bean_regex") == null) { 103 | this.beanRegexes = new ArrayList(); 104 | } else { 105 | final Object beanRegexNames = filter.get("bean_regex"); 106 | this.beanRegexes = toPatternArrayList(beanRegexNames); 107 | } 108 | } 109 | 110 | return this.beanRegexes; 111 | } 112 | 113 | /* 114 | public ArrayList getExcludeTags() { 115 | // Return excluded tags as an ArrayList whether it's defined as a list or not 116 | 117 | if (this.excludeTags == null) { 118 | if (filter.get("exclude_tags") == null){ 119 | this.excludeTags = new ArrayList(); 120 | } else { 121 | final Object exclude_tags = filter.get("exclude_tags"); 122 | this.excludeTags = toStringArrayList(exclude_tags); 123 | } 124 | } 125 | 126 | return this.excludeTags; 127 | } 128 | 129 | public HashMap getAdditionalTags() { 130 | // Return additional tags 131 | if (this.additionalTags == null) { 132 | if (filter.get("tags") == null){ 133 | this.additionalTags = new HashMap(); 134 | } else { 135 | 
this.additionalTags = (HashMap)filter.get("tags"); 136 | } 137 | } 138 | 139 | return this.additionalTags; 140 | } 141 | */ 142 | 143 | public String getDomain() { 144 | return (String) filter.get("domain"); 145 | } 146 | 147 | public Pattern getDomainRegex() { 148 | if (this.filter.get("domain_regex") == null) { 149 | return null; 150 | } 151 | 152 | if (this.domainRegex == null) { 153 | this.domainRegex = Pattern.compile((String) this.filter.get("domain_regex")); 154 | } 155 | 156 | return this.domainRegex; 157 | } 158 | 159 | public Object getAttribute() { 160 | return filter.get("attribute"); 161 | } 162 | 163 | public ArrayList getParameterValues(String parameterName) { 164 | // Return bean attributes values as an ArrayList wherever it's defined as 165 | // list or not 166 | // 167 | // ### Example 168 | // bean_parameter: 169 | // - exampleType1 170 | // - exampleType2 171 | // ### OR 172 | // bean_parameter: onlyOneType 173 | // ### 174 | final Object beanValues = filter.get(parameterName); 175 | return toStringArrayList(beanValues); 176 | } 177 | 178 | public boolean isEmptyBeanName() { 179 | return (filter.get("bean") == null && filter.get("bean_name") == null); 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXMetricData.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.annotation.JsonFormat; 4 | import java.time.LocalDateTime; 5 | import java.util.HashMap; 6 | import java.util.LinkedList; 7 | import lombok.Getter; 8 | import lombok.Setter; 9 | 10 | @Getter 11 | @Setter 12 | public class JMXMetricData { 13 | 14 | private String host; 15 | 16 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") 17 | private LocalDateTime timestamp; 18 | 19 | private Boolean collected; 20 | private LinkedList> metrics; 21 | private String msg; 
22 | 23 | public JMXMetricData(String host, LinkedList> metrics) { 24 | this.host = host; 25 | this.timestamp = LocalDateTime.now(); 26 | this.metrics = metrics; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXMetricDataV1.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.annotation.JsonFormat; 4 | import java.time.LocalDateTime; 5 | import java.util.Map; 6 | import lombok.Getter; 7 | import lombok.Setter; 8 | 9 | @Getter 10 | @Setter 11 | public class JMXMetricDataV1 { 12 | 13 | private String host; 14 | 15 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") 16 | private LocalDateTime timestamp; 17 | 18 | private Boolean collected; 19 | private Map mbeanInfo; 20 | private String msg; 21 | 22 | public JMXMetricDataV1(String host, Map mbeanInfo) { 23 | this.host = host; 24 | this.timestamp = LocalDateTime.now(); 25 | this.mbeanInfo = mbeanInfo; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXQuery.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.LinkedList; 4 | import lombok.Getter; 5 | import lombok.Setter; 6 | 7 | @Getter 8 | @Setter 9 | public class JMXQuery { 10 | 11 | private LinkedList filters; 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/JMXSimpleAttribute.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.HashMap; 6 | import java.util.LinkedHashMap; 7 | import java.util.LinkedList; 8 | 
import javax.management.AttributeNotFoundException; 9 | import javax.management.InstanceNotFoundException; 10 | import javax.management.MBeanAttributeInfo; 11 | import javax.management.MBeanException; 12 | import javax.management.MBeanServerConnection; 13 | import javax.management.ObjectName; 14 | import javax.management.ReflectionException; 15 | import lombok.Getter; 16 | import lombok.Setter; 17 | 18 | @Getter 19 | @Setter 20 | public class JMXSimpleAttribute extends JMXAttribute { 21 | 22 | private String metricType; 23 | 24 | public JMXSimpleAttribute( 25 | MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { 26 | super(attribute, beanName, connection); 27 | } 28 | 29 | @Override 30 | public LinkedList> getMetrics() 31 | throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, 32 | ReflectionException, IOException { 33 | HashMap metric = new HashMap(); 34 | 35 | metric.put("domain", getBeanName().getDomain()); 36 | metric.put("beanName", getBeanName().toString()); 37 | metric.put("attributeName", getAttributeName()); 38 | metric.put("alias", getAlias()); 39 | metric.put("value", castToDouble(getValue(), null)); 40 | // metric.put("tags", getTags()); 41 | metric.put("metric_type", getMetricType()); 42 | LinkedList> metrics = new LinkedList>(); 43 | metrics.add(metric); 44 | return metrics; 45 | } 46 | 47 | public boolean match(JMXConfiguration configuration) { 48 | return matchDomain(configuration) 49 | && matchBean(configuration) 50 | && matchAttribute(configuration) 51 | && !(excludeMatchDomain(configuration) 52 | || excludeMatchBean(configuration) 53 | || excludeMatchAttribute(configuration)); 54 | } 55 | 56 | private boolean matchAttribute(JMXConfiguration configuration) { 57 | JMXFilter include = configuration.getInclude(); 58 | if (include.getAttribute() == null) { 59 | return true; 60 | } else if ((include.getAttribute() instanceof LinkedHashMap) 61 | && ((LinkedHashMap) (include.getAttribute())) 62 | 
.containsKey(getAttributeName())) { 63 | return true; 64 | 65 | } else if ((include.getAttribute() instanceof ArrayList 66 | && ((ArrayList) (include.getAttribute())).contains(getAttributeName()))) { 67 | return true; 68 | } 69 | 70 | return false; 71 | } 72 | 73 | private boolean excludeMatchAttribute(JMXConfiguration configuration) { 74 | JMXFilter exclude = configuration.getExclude(); 75 | if (exclude == null) { 76 | return false; 77 | } 78 | if (exclude.getAttribute() == null) { 79 | return false; 80 | } else if ((exclude.getAttribute() instanceof LinkedHashMap) 81 | && ((LinkedHashMap) (exclude.getAttribute())) 82 | .containsKey(getAttributeName())) { 83 | return true; 84 | 85 | } else if ((exclude.getAttribute() instanceof ArrayList 86 | && ((ArrayList) (exclude.getAttribute())).contains(getAttributeName()))) { 87 | return true; 88 | } 89 | return false; 90 | } 91 | 92 | private Object getValue() 93 | throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, 94 | ReflectionException, IOException, NumberFormatException { 95 | return this.getJmxValue(); 96 | } 97 | 98 | private String getMetricType() { 99 | JMXFilter include = getMatchingConf().getInclude(); 100 | if (metricType != null) { 101 | return metricType; 102 | } else if (include.getAttribute() instanceof LinkedHashMap) { 103 | LinkedHashMap> attribute = 104 | (LinkedHashMap>) (include.getAttribute()); 105 | metricType = attribute.get(getAttributeName()).get(METRIC_TYPE); 106 | if (metricType == null) { 107 | metricType = attribute.get(getAttributeName()).get("type"); 108 | } 109 | } 110 | 111 | if (metricType == null) { // Default to gauge 112 | metricType = "gauge"; 113 | } 114 | 115 | return metricType; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/MemberDescription.java: -------------------------------------------------------------------------------- 1 | package 
org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | import org.apache.kafka.common.TopicPartition; 8 | 9 | @AllArgsConstructor 10 | @Getter 11 | @Setter 12 | public class MemberDescription implements Comparable { 13 | 14 | private String memberId; 15 | private String clientId; 16 | private String host; 17 | private List assignment; 18 | 19 | @Override 20 | public int compareTo(MemberDescription that) { 21 | return this.getClientId().compareTo(that.clientId); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/PartitionAssignmentState.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import org.apache.kafka.common.Node; 7 | 8 | @Data 9 | @Builder 10 | @AllArgsConstructor 11 | public class PartitionAssignmentState implements Comparable{ 12 | 13 | private String group; 14 | private Node coordinator; 15 | private String topic; 16 | private int partition; 17 | private long offset; 18 | private long lag; 19 | private String consumerId; 20 | private String host; 21 | private String clientId; 22 | private long logEndOffset; 23 | 24 | @Override 25 | public int compareTo(PartitionAssignmentState that) { 26 | if (this.getGroup().equals(that.getGroup())) { 27 | if (this.getTopic().equals(that.getTopic())) { 28 | return (this.partition - that.getPartition()); 29 | } else { 30 | return this.getTopic().compareTo(that.getTopic()); 31 | } 32 | } else { 33 | return this.getGroup().compareTo(that.getGroup()); 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ReassignJsonWrapper.java: 
-------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | import lombok.ToString; 8 | 9 | /** Created by gnuhpc on 2017/7/25. */ 10 | @Setter 11 | @Getter 12 | @ToString 13 | public class ReassignJsonWrapper { 14 | 15 | private List> topics; 16 | private int version = 1; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ReassignModel.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.annotation.JsonCreator; 4 | import com.fasterxml.jackson.annotation.JsonCreator.Mode; 5 | import com.fasterxml.jackson.annotation.JsonProperty; 6 | import java.beans.ConstructorProperties; 7 | import java.util.List; 8 | import lombok.AllArgsConstructor; 9 | import lombok.Builder; 10 | import lombok.Data; 11 | import lombok.Getter; 12 | import lombok.NoArgsConstructor; 13 | import lombok.Setter; 14 | 15 | /* 16 | { 17 | "version": 1, 18 | "partitions": [ 19 | { 20 | "topic": "first", 21 | "partition": 1, 22 | "replicas": [ 23 | 115 24 | ], 25 | "log_dirs": [ 26 | "any" 27 | ] 28 | }, 29 | { 30 | "topic": "first", 31 | "partition": 0, 32 | "replicas": [ 33 | 113 34 | ], 35 | "log_dirs": [ 36 | "any" 37 | ] 38 | } 39 | ] 40 | } 41 | */ 42 | @Getter 43 | @Setter 44 | @Data 45 | @NoArgsConstructor 46 | @AllArgsConstructor 47 | public class ReassignModel { 48 | public int version = 1; 49 | public List partitions; 50 | } 51 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ReassignStatus.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.Map; 4 | import 
lombok.Data; 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | import org.apache.kafka.common.TopicPartition; 8 | import org.apache.kafka.common.TopicPartitionReplica; 9 | 10 | @Getter 11 | @Setter 12 | @Data 13 | public class ReassignStatus { 14 | Map partitionsReassignStatus; 15 | Map replicasReassignStatus; 16 | boolean removeThrottle; 17 | String msg; 18 | } 19 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ReassignWrapper.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import java.util.ArrayList; 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | import lombok.Getter; 10 | import lombok.Setter; 11 | import lombok.ToString; 12 | import lombok.extern.log4j.Log4j2; 13 | import org.apache.kafka.common.errors.ApiException; 14 | 15 | /** Created by gnuhpc on 2017/7/25. */ 16 | @Getter 17 | @Setter 18 | @ToString 19 | @Log4j2 20 | public class ReassignWrapper { 21 | 22 | private List topics; 23 | private List brokers; 24 | 25 | public String generateReassignJsonString() { 26 | ReassignJsonWrapper reassignJsonWrapper = new ReassignJsonWrapper(); 27 | List> topicList = new ArrayList<>(); 28 | for (String topic : topics) { 29 | Map topicMap = new HashMap<>(); 30 | topicMap.put("topic", topic); 31 | topicList.add(topicMap); 32 | } 33 | reassignJsonWrapper.setTopics(topicList); 34 | try { 35 | ObjectMapper objectMapper = new ObjectMapper(); 36 | return objectMapper.writeValueAsString(reassignJsonWrapper); 37 | } catch (JsonProcessingException exeception) { 38 | log.error("Serialize ReassignWrapper object to string error." + exeception); 39 | throw new ApiException("Serialize ReassignWrapper object to string error." 
+ exeception); 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/Record.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.nio.ByteBuffer; 4 | import lombok.Builder; 5 | import lombok.Data; 6 | import lombok.Getter; 7 | import lombok.Setter; 8 | import lombok.extern.log4j.Log4j2; 9 | import org.apache.kafka.common.errors.ApiException; 10 | import org.apache.kafka.common.utils.Bytes; 11 | import org.gnuhpc.bigdata.utils.KafkaUtils; 12 | 13 | @Data 14 | @Getter 15 | @Setter 16 | @Builder 17 | @Log4j2 18 | public class Record { 19 | public String topic; 20 | public long offset; 21 | public Object key; 22 | public Object value; 23 | public long timestamp; 24 | String keyDecoder; 25 | String valueDecoder; 26 | 27 | public String getValueByDecoder(String decoder, Object value) { 28 | if (value == null) return null; 29 | Class type = KafkaUtils.DESERIALIZER_TYPE_MAP.get(decoder); 30 | try { 31 | if (String.class.isAssignableFrom(type)) { 32 | return value.toString(); 33 | } 34 | 35 | if (Short.class.isAssignableFrom(type)) { 36 | return value.toString(); 37 | } 38 | 39 | if (Integer.class.isAssignableFrom(type)) { 40 | return value.toString(); 41 | } 42 | 43 | if (Long.class.isAssignableFrom(type)) { 44 | return value.toString(); 45 | } 46 | 47 | if (Float.class.isAssignableFrom(type)) { 48 | return value.toString(); 49 | } 50 | 51 | if (Double.class.isAssignableFrom(type)) { 52 | return value.toString(); 53 | } 54 | 55 | if (Bytes.class.isAssignableFrom(type)) { 56 | Bytes bytes = (Bytes) value; 57 | return bytes.toString(); 58 | } 59 | 60 | if (byte[].class.isAssignableFrom(type)) { 61 | if (decoder.contains("AvroDeserializer")) { 62 | return value.toString(); 63 | } else { 64 | byte[] byteArray = (byte[]) value; 65 | return new String(byteArray); 66 | } 67 | } 68 | 69 | if 
(ByteBuffer.class.isAssignableFrom(type)) { 70 | ByteBuffer byteBuffer = (ByteBuffer) value; 71 | return new String(byteBuffer.array()); 72 | } 73 | } catch (Exception exception) { 74 | throw new ApiException("Record Cast exception." + exception); 75 | } 76 | 77 | throw new ApiException( 78 | "Unknown class. Supported types are: " 79 | + "String, Short, Integer, Long, Float, Double, ByteArray, ByteBuffer, Bytes"); 80 | } 81 | 82 | public String getValue() { 83 | log.info("getValue for value:" + value + " by decoder:" + valueDecoder); 84 | return getValueByDecoder(valueDecoder, value); 85 | } 86 | 87 | public String getKey() { 88 | log.info("getKeyValue for key:" + key + " by decoder:" + keyDecoder); 89 | return getValueByDecoder(keyDecoder, key); 90 | } 91 | 92 | @Override 93 | public String toString() { 94 | if (value != null) { 95 | return "topic:" 96 | + topic 97 | + ", offset:" 98 | + offset 99 | + ", key:" 100 | + getKey() 101 | + ", value:" 102 | + getValue() 103 | + ", timestamp:" 104 | + timestamp 105 | + ", keyDecoder:" 106 | + keyDecoder 107 | + ", valueDecoder:" 108 | + valueDecoder; 109 | } else { 110 | return "topic:" 111 | + topic 112 | + ", offset:" 113 | + offset 114 | + ", key:" 115 | + key 116 | + ", value:" 117 | + value 118 | + ", timestamp:" 119 | + timestamp 120 | + ", keyDecoder:" 121 | + keyDecoder 122 | + ", valueDecoder:" 123 | + valueDecoder; 124 | } 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/SchemaRegistryMetadata.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.Builder; 4 | import lombok.Data; 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | 8 | @Data 9 | @Getter 10 | @Setter 11 | @Builder 12 | public class SchemaRegistryMetadata { 13 | private String subject; 14 | private int id; 15 | private int version; 16 | private String schema; 
17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/TopicBrief.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | @Data 8 | @AllArgsConstructor 9 | @NoArgsConstructor 10 | public class TopicBrief { 11 | 12 | private String topic; 13 | private int numPartition; 14 | private double isrRate; 15 | private int replicationFactor; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/TopicDetail.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 Intel Corporation 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package org.gnuhpc.bigdata.model; 18 | 19 | import java.util.List; 20 | import java.util.Map; 21 | import java.util.Properties; 22 | import lombok.AllArgsConstructor; 23 | import lombok.Builder; 24 | import lombok.Getter; 25 | import lombok.NoArgsConstructor; 26 | import lombok.Setter; 27 | import lombok.ToString; 28 | 29 | @Setter 30 | @Getter 31 | @ToString 32 | @Builder 33 | @AllArgsConstructor 34 | @NoArgsConstructor 35 | public class TopicDetail { 36 | 37 | public static final int DEFAULT_PARTITION_NUMBER = 2; 38 | public static final int DEFAULT_REPLICATION_FACTOR = 2; 39 | 40 | private int partitions; 41 | private int factor; 42 | private String name; 43 | private Properties prop; 44 | private Map> replicasAssignments; 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/TopicMeta.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | import lombok.Getter; 5 | import lombok.Setter; 6 | import lombok.extern.log4j.Log4j2; 7 | 8 | /** Created by gnuhpc on 2017/7/21. 
*/ 9 | @Log4j2 10 | @Getter 11 | @Setter 12 | public class TopicMeta { 13 | 14 | private String topicName; 15 | private boolean internal; 16 | private int partitionCount; 17 | private int replicationFactor; 18 | private List topicPartitionInfos; 19 | 20 | public TopicMeta(String topicName) { 21 | this.topicName = topicName; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/TopicPartition.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.Data; 4 | import lombok.Getter; 5 | import lombok.Setter; 6 | 7 | @Getter 8 | @Setter 9 | @Data 10 | public class TopicPartition { 11 | String topic; 12 | int partition; 13 | 14 | @Override 15 | public String toString() { 16 | return topic + "-" + partition; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/TopicPartitionReplicaAssignment.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | import lombok.AllArgsConstructor; 5 | import lombok.Builder; 6 | import lombok.Data; 7 | import lombok.Getter; 8 | import lombok.NoArgsConstructor; 9 | import lombok.Setter; 10 | 11 | @Builder 12 | @Getter 13 | @Setter 14 | @Data 15 | @NoArgsConstructor 16 | @AllArgsConstructor 17 | public class TopicPartitionReplicaAssignment implements Comparable{ 18 | String topic; 19 | int partition; 20 | List replicas; 21 | List log_dirs; 22 | 23 | @Override 24 | public int compareTo(TopicPartitionReplicaAssignment that) { 25 | if (this.topic.equals(that.topic)) { 26 | return (this.partition - that.partition); 27 | } else { 28 | return this.topic.compareTo(that.topic); 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- 
/src/main/java/org/gnuhpc/bigdata/model/TwoTuple.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class TwoTuple { 7 | 8 | private A first; 9 | private B second; 10 | 11 | public TwoTuple(A a, B b) { 12 | this.first = a; 13 | this.second = b; 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/User.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import javax.validation.constraints.NotBlank; 4 | import javax.validation.constraints.NotNull; 5 | import lombok.AllArgsConstructor; 6 | import lombok.Getter; 7 | import lombok.NoArgsConstructor; 8 | import lombok.Setter; 9 | 10 | @Getter 11 | @Setter 12 | @AllArgsConstructor 13 | @NoArgsConstructor 14 | public class User { 15 | 16 | @NotNull(message = "Username can not be null.") 17 | @NotBlank(message = "Username can not be blank.") 18 | private String username; 19 | 20 | @NotNull(message = "Password can not be null.") 21 | @NotBlank(message = "Password can not be blank.") 22 | private String password; 23 | 24 | @NotNull(message = "Role can not be null.") 25 | @NotBlank(message = "Role can not be blank.") 26 | private String role; 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ZkServerClient.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import lombok.AllArgsConstructor; 4 | import lombok.Data; 5 | 6 | @Data 7 | @AllArgsConstructor 8 | public class ZkServerClient { 9 | 10 | private final String host; 11 | private final Integer port; 12 | private final Integer ops; 13 | private final Integer queued; 14 | private final Integer received; 15 | private final 
Integer sent; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ZkServerEnvironment.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.HashMap; 4 | import java.util.Map; 5 | 6 | public class ZkServerEnvironment { 7 | 8 | private final Map attributes = new HashMap(); 9 | 10 | public void add(final String attribute, final String value) { 11 | attributes.put(attribute, value); 12 | } 13 | 14 | public Map getAttributes() { 15 | return attributes; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/model/ZkServerStat.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.model; 2 | 3 | import java.util.List; 4 | import lombok.Builder; 5 | import lombok.NoArgsConstructor; 6 | import org.gnuhpc.bigdata.constant.ZkServerMode; 7 | 8 | @Builder 9 | public class ZkServerStat { 10 | 11 | private final String version; 12 | private final String buildDate; 13 | private final List clients; 14 | private final Integer minLatency; 15 | private final Integer avgLatency; 16 | private final Integer maxLatency; 17 | private final Integer received; 18 | private final Integer sent; 19 | private final Integer connections; 20 | private final Integer outstanding; 21 | private final String zxId; 22 | private final ZkServerMode mode; 23 | private final Integer nodes; 24 | private final String msg; 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/security/BasicAuthenticationPoint.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.security; 2 | 3 | import com.fasterxml.jackson.core.JsonGenerator; 4 | import 
com.fasterxml.jackson.databind.JsonSerializer; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import com.fasterxml.jackson.databind.SerializerProvider; 7 | import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; 8 | import java.io.IOException; 9 | import java.time.LocalDateTime; 10 | import java.time.format.DateTimeFormatter; 11 | import javax.servlet.ServletException; 12 | import javax.servlet.http.HttpServletRequest; 13 | import javax.servlet.http.HttpServletResponse; 14 | import lombok.NoArgsConstructor; 15 | import org.gnuhpc.bigdata.exception.RestErrorResponse; 16 | import org.springframework.boot.jackson.JsonComponent; 17 | import org.springframework.http.HttpStatus; 18 | import org.springframework.security.core.AuthenticationException; 19 | import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; 20 | import org.springframework.stereotype.Component; 21 | 22 | @Component 23 | public class BasicAuthenticationPoint extends BasicAuthenticationEntryPoint { 24 | 25 | @Override 26 | public void commence( 27 | HttpServletRequest request, HttpServletResponse response, AuthenticationException authEx) 28 | throws IOException, ServletException { 29 | response.addHeader("WWW-Authenticate", "Basic realm=" + getRealmName()); 30 | response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); 31 | String error = "Authenciation Error:" + authEx.getClass().getCanonicalName(); 32 | RestErrorResponse restAuthenticationError = 33 | new RestErrorResponse(HttpStatus.UNAUTHORIZED, error, authEx); 34 | ObjectMapper mapper = new ObjectMapper(); 35 | JavaTimeModule javaTimeModule = new JavaTimeModule(); 36 | javaTimeModule.addSerializer(LocalDateTime.class, new LocalDateTimeSerializer()); 37 | mapper.registerModule(javaTimeModule); 38 | response.getWriter().print(mapper.writeValueAsString(restAuthenticationError)); 39 | } 40 | 41 | @Override 42 | public void afterPropertiesSet() throws Exception { 43 | setRealmName("Contact Big Data 
Infrastructure Team to get available accounts."); 44 | super.afterPropertiesSet(); 45 | } 46 | 47 | @JsonComponent 48 | @NoArgsConstructor 49 | private static class LocalDateTimeSerializer extends JsonSerializer { 50 | 51 | @Override 52 | public void serialize(LocalDateTime value, JsonGenerator gen, SerializerProvider sp) 53 | throws IOException { 54 | gen.writeString(value.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/security/UserDetailsServiceImp.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.security; 2 | 3 | import com.google.common.util.concurrent.ThreadFactoryBuilder; 4 | 5 | import java.io.File; 6 | import java.io.IOException; 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | import java.util.Map; 10 | import java.util.concurrent.Executors; 11 | import java.util.concurrent.ScheduledExecutorService; 12 | import java.util.concurrent.TimeUnit; 13 | import lombok.extern.log4j.Log4j2; 14 | import org.gnuhpc.bigdata.config.WebSecurityConfig; 15 | import org.gnuhpc.bigdata.model.User; 16 | import org.gnuhpc.bigdata.utils.CommonUtils; 17 | import org.springframework.core.io.ClassPathResource; 18 | import org.springframework.core.io.Resource; 19 | import org.springframework.security.core.userdetails.User.UserBuilder; 20 | import org.springframework.security.core.userdetails.UserDetails; 21 | import org.springframework.security.core.userdetails.UserDetailsService; 22 | import org.springframework.security.core.userdetails.UsernameNotFoundException; 23 | import org.springframework.util.ResourceUtils; 24 | 25 | @Log4j2 26 | public class UserDetailsServiceImp implements UserDetailsService { 27 | 28 | private ScheduledExecutorService securityFileChecker; 29 | private ArrayList userList = new ArrayList<>(); 30 | 31 | public 
UserDetailsServiceImp( 32 | boolean checkSecurity, int checkInitDelay, int checkSecurityInterval) { 33 | if (checkSecurity) { 34 | securityFileChecker = 35 | Executors.newSingleThreadScheduledExecutor( 36 | new ThreadFactoryBuilder().setNameFormat("securityFileChecker").build()); 37 | securityFileChecker.scheduleWithFixedDelay( 38 | new SecurityFileCheckerRunnable(), 39 | checkInitDelay, 40 | checkSecurityInterval, 41 | TimeUnit.SECONDS); 42 | userList = fetchUserListFromSecurtiyFile(); 43 | } 44 | } 45 | 46 | @Override 47 | public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException { 48 | User user = findUserByUsername(username); 49 | 50 | UserBuilder builder; 51 | if (user != null) { 52 | builder = org.springframework.security.core.userdetails.User.withUsername(username); 53 | builder.password(user.getPassword()); 54 | builder.roles(user.getRole()); 55 | } else { 56 | throw new UsernameNotFoundException("User not found."); 57 | } 58 | 59 | return builder.build(); 60 | } 61 | 62 | private User findUserByUsername(String username) { 63 | for (User user : userList) { 64 | if (username.equals(user.getUsername())) { 65 | return user; 66 | } 67 | } 68 | return null; 69 | } 70 | 71 | private ArrayList fetchUserListFromSecurtiyFile() { 72 | String securityFilePath = WebSecurityConfig.SECURITY_FILE_PATH; 73 | 74 | try { 75 | Resource resource = new ClassPathResource(securityFilePath); 76 | File file = resource.getFile(); 77 | 78 | HashMap accounts = CommonUtils.yamlParse(file); 79 | userList.clear(); 80 | accounts.forEach( 81 | (key, value) -> { 82 | String username = (String) key; 83 | Map userInfo = (Map) value; 84 | userList.add(new User(username, userInfo.get("password"), userInfo.get("role"))); 85 | }); 86 | } catch (IOException ioException) { 87 | log.error("Security file process exception.", ioException); 88 | } 89 | 90 | return userList; 91 | } 92 | 93 | private class SecurityFileCheckerRunnable implements Runnable { 94 | 95 | @Override 
96 | public void run() { 97 | try { 98 | userList = fetchUserListFromSecurtiyFile(); 99 | } catch (Throwable t) { 100 | log.error("Uncaught exception in SecurityFileChecker thread", t); 101 | } 102 | } 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/serializer/JsonJodaDateTimeSerializer.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.serializer; 2 | 3 | import com.fasterxml.jackson.core.JsonGenerator; 4 | import com.fasterxml.jackson.databind.JsonSerializer; 5 | import com.fasterxml.jackson.databind.SerializerProvider; 6 | import java.io.IOException; 7 | import org.joda.time.DateTime; 8 | import org.joda.time.format.DateTimeFormatter; 9 | import org.joda.time.format.ISODateTimeFormat; 10 | 11 | /** Created by gnuhpc on 2017/7/19. */ 12 | public class JsonJodaDateTimeSerializer extends JsonSerializer { 13 | 14 | private static DateTimeFormatter formatter = ISODateTimeFormat.dateTime(); 15 | 16 | @Override 17 | public void serialize( 18 | DateTime value, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) 19 | throws IOException { 20 | jsonGenerator.writeString(formatter.print(value)); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/service/ConfluentSchemaService.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.service; 2 | 3 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; 4 | import io.confluent.kafka.schemaregistry.client.SchemaMetadata; 5 | import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; 6 | import io.confluent.kafka.serializers.KafkaAvroDeserializer; 7 | import io.confluent.kafka.serializers.KafkaAvroSerializer; 8 | import java.util.ArrayList; 9 | import java.util.Collection; 10 | import 
java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | import java.util.stream.Collectors; 14 | import javax.annotation.PostConstruct; 15 | import org.apache.avro.Schema; 16 | import org.apache.avro.generic.GenericRecord; 17 | import org.apache.kafka.common.errors.ApiException; 18 | import org.gnuhpc.bigdata.config.KafkaConfig; 19 | import org.gnuhpc.bigdata.model.SchemaRegistryMetadata; 20 | import org.springframework.beans.factory.annotation.Autowired; 21 | import org.springframework.context.annotation.Lazy; 22 | import org.springframework.stereotype.Service; 23 | 24 | @Service 25 | public class ConfluentSchemaService { 26 | @Lazy 27 | @Autowired 28 | private KafkaConfig kafkaConfig; 29 | 30 | private KafkaAvroSerializer avroSerializer; 31 | private KafkaAvroSerializer avroSerializerForKey; 32 | private KafkaAvroDeserializer avroDeserializer; 33 | private KafkaAvroDeserializer avroDeserializerForKey; 34 | private CachedSchemaRegistryClient schemaRegistryClient; 35 | 36 | @PostConstruct 37 | public void init() { 38 | String schemaRegistryURL = kafkaConfig.getSchemaregistry(); 39 | List schemaRegistryURLList = new ArrayList<>(); 40 | for (String url : schemaRegistryURL.split(",")) { 41 | schemaRegistryURLList.add(url); 42 | } 43 | 44 | int maxSchemaObject = AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT; 45 | this.schemaRegistryClient = new CachedSchemaRegistryClient( 46 | schemaRegistryURLList, maxSchemaObject); 47 | 48 | Map propMap = new HashMap<>(); 49 | propMap.put("schema.registry.url", schemaRegistryURL); 50 | 51 | avroSerializer = new KafkaAvroSerializer(); 52 | avroSerializer.configure(propMap, false); 53 | avroSerializerForKey = new KafkaAvroSerializer(); 54 | avroSerializerForKey.configure(propMap, true); 55 | 56 | avroDeserializer = new KafkaAvroDeserializer(); 57 | avroDeserializer.configure(propMap, false); 58 | avroDeserializerForKey = new KafkaAvroDeserializer(); 59 | avroDeserializerForKey.configure(propMap, 
true); 60 | } 61 | 62 | public byte[] serializeAvroToBytes(String topic, GenericRecord avroRecord) { 63 | return avroSerializer.serialize(topic, avroRecord); 64 | } 65 | 66 | public byte[] serializeAvroToBytesForKey(String topic, GenericRecord avroRecord) { 67 | return avroSerializerForKey.serialize(topic, avroRecord); 68 | } 69 | 70 | public Object deserializeBytesToObject(String topic, byte[] avroBytearray) { 71 | return avroDeserializer.deserialize(topic, avroBytearray); 72 | } 73 | 74 | public Object deserializeBytesToObject(String topic, byte[] avroBytearray, Schema schema) { 75 | return avroDeserializer.deserialize(topic, avroBytearray, schema); 76 | } 77 | 78 | public Object deserializeBytesToObjectForKey(String topic, byte[] avroBytearray) { 79 | return avroDeserializerForKey.deserialize(topic, avroBytearray); 80 | } 81 | 82 | public List getAllSubjects() { 83 | try { 84 | Collection subjects = this.schemaRegistryClient.getAllSubjects(); 85 | List allSubjects = new ArrayList<>(); 86 | SchemaMetadata schemaMetadata; 87 | SchemaRegistryMetadata schemaRegistryMetadata; 88 | 89 | for (String subject : subjects) { 90 | schemaMetadata = schemaRegistryClient.getLatestSchemaMetadata(subject); 91 | schemaRegistryMetadata = SchemaRegistryMetadata.builder().subject(subject) 92 | .id(schemaMetadata.getId()) 93 | .version(schemaMetadata.getVersion()).schema(schemaMetadata.getSchema()).build(); 94 | allSubjects.add(schemaRegistryMetadata); 95 | } 96 | return allSubjects; 97 | } catch (Exception exception) { 98 | throw new ApiException("ConfluentSchemaService getAllSubjects exception : " + exception); 99 | } 100 | } 101 | 102 | public int registerSchema(String subject, String schemaStr) { 103 | int schemaId; 104 | Schema schema = null; 105 | try { 106 | schema = new Schema.Parser().parse(schemaStr); 107 | schemaId = schemaRegistryClient.register(subject, schema); 108 | return schemaId; 109 | } catch (Exception exception) { 110 | throw new ApiException( 111 | 
"ConfluentSchemaService registerSchema for subject:" + subject + " with schema:" 112 | + schemaStr + " exception : " + exception); 113 | } 114 | } 115 | 116 | public SchemaRegistryMetadata getSchemaById(int schemaId) { 117 | try { 118 | List allSubjects = getAllSubjects(); 119 | List filteredSubjects = allSubjects.stream() 120 | .filter(p -> p.getId() == schemaId).collect(Collectors.toList()); 121 | SchemaRegistryMetadata schemaRegistryMetadata = 122 | filteredSubjects.size() >= 1 ? filteredSubjects.get(0) : null; 123 | return schemaRegistryMetadata; 124 | } catch (Exception exception) { 125 | throw new ApiException( 126 | "ConfluentSchemaService getSchemaById for schemaId:" + schemaId + " exception : " 127 | + exception); 128 | } 129 | } 130 | 131 | public SchemaMetadata getSchemaBySubject(String subject) { 132 | try { 133 | SchemaMetadata schemaMetadata = schemaRegistryClient.getLatestSchemaMetadata(subject); 134 | return schemaMetadata; 135 | } catch (Exception exception) { 136 | throw new ApiException( 137 | "ConfluentSchemaService getSchemaBySubject for subject:" + subject + " exception : " 138 | + exception); 139 | } 140 | } 141 | 142 | public SchemaMetadata getSchemaBySubjectAndVersion(String subject, int version) { 143 | try { 144 | SchemaMetadata schemaMetadata = schemaRegistryClient.getSchemaMetadata(subject, version); 145 | return schemaMetadata; 146 | } catch (Exception exception) { 147 | throw new ApiException( 148 | "ConfluentSchemaService getSchemaBySubjectAndVersion for subject:" + subject 149 | + ", version:" + version + " exception : " + exception); 150 | } 151 | } 152 | 153 | public SchemaRegistryMetadata checkSchemaExist(String subject, String schemaStr) { 154 | try { 155 | Schema schema = new Schema.Parser().parse(schemaStr); 156 | int schemaId = schemaRegistryClient.getId(subject, schema); 157 | return getSchemaById(schemaId); 158 | } catch (Exception exception) { 159 | throw new ApiException( 160 | "ConfluentSchemaService checkSchemaExist for 
subject:" + subject 161 | + ", schemaStr:" + schemaStr + " exception : " + exception); 162 | } 163 | } 164 | 165 | public List getAllVersions(String subject) { 166 | try { 167 | List allVersions = schemaRegistryClient.getAllVersions(subject); 168 | return allVersions; 169 | } catch (Exception exception) { 170 | throw new ApiException( 171 | "ConfluentSchemaService getAllVersions for subject:" + subject + " exception : " 172 | + exception); 173 | } 174 | } 175 | 176 | public List deleteSubject(String subject) { 177 | try { 178 | List allDeletedVersions = schemaRegistryClient.deleteSubject(subject); 179 | return allDeletedVersions; 180 | } catch (Exception exception) { 181 | throw new ApiException( 182 | "ConfluentSchemaService deleteSubject for subject:" + subject + " exception : " 183 | + exception); 184 | } 185 | } 186 | } 187 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/service/KafkaConsumerService.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.service; 2 | 3 | import java.nio.ByteBuffer; 4 | import java.util.Arrays; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import kafka.common.OffsetAndMetadata; 8 | import kafka.coordinator.group.GroupMetadataManager; 9 | import kafka.coordinator.group.GroupTopicPartition; 10 | import kafka.coordinator.group.OffsetKey; 11 | import lombok.extern.log4j.Log4j2; 12 | import org.apache.kafka.clients.consumer.ConsumerRecord; 13 | import org.apache.kafka.common.TopicPartition; 14 | import org.gnuhpc.bigdata.componet.OffsetStorage; 15 | import org.springframework.beans.factory.annotation.Autowired; 16 | import org.springframework.kafka.annotation.KafkaListener; 17 | import org.springframework.kafka.listener.ConsumerSeekAware; 18 | import org.springframework.kafka.support.KafkaHeaders; 19 | import org.springframework.messaging.handler.annotation.Header; 20 | 21 | @Log4j2 22 | 
public class KafkaConsumerService implements ConsumerSeekAware { 23 | 24 | @Autowired private OffsetStorage offsetStorage; 25 | 26 | private boolean[] resetInitArray; 27 | 28 | private final ThreadLocal seekCallBack = new ThreadLocal<>(); 29 | 30 | public KafkaConsumerService(int internalTopicPartitions) { 31 | this.resetInitArray = new boolean[internalTopicPartitions]; 32 | Arrays.fill(resetInitArray, true); 33 | } 34 | 35 | /** Listening offset thread method. */ 36 | @KafkaListener(topics = "${kafka.offset.topic}") 37 | public void onMessage( 38 | ConsumerRecord record, 39 | @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition, 40 | @Header(KafkaHeaders.RECEIVED_TOPIC) String topic) { 41 | // set the offset of the partition being processed to the beginning. If already set, ignore it. 42 | // if (resetInitArray[partition]) { 43 | // long beginningOffset = kafkaAdminService.getBeginningOffset(topic, partition); 44 | // this.seekCallBack.get().seek(topic, partition, beginningOffset + 1); 45 | // resetInitArray[partition] = false; 46 | // } 47 | 48 | // Parse the commit offset message and store it in offsetStorage 49 | Map offsetMap; 50 | if (record.key() != null) { 51 | Object offsetKey = GroupMetadataManager.readMessageKey(record.key()); 52 | 53 | log.debug(offsetKey); 54 | if (offsetKey instanceof OffsetKey) { 55 | GroupTopicPartition groupTopicPartition = ((OffsetKey) offsetKey).key(); 56 | if (offsetStorage.get(groupTopicPartition.group()) != null) { 57 | offsetMap = offsetStorage.get(groupTopicPartition.group()); 58 | } else { 59 | offsetMap = new HashMap<>(); 60 | } 61 | OffsetAndMetadata offsetValue = GroupMetadataManager.readOffsetMessageValue(record.value()); 62 | offsetMap.put(groupTopicPartition, offsetValue); 63 | offsetStorage.put(groupTopicPartition.group(), offsetMap); 64 | } 65 | } 66 | } 67 | 68 | @Override 69 | public void registerSeekCallback(ConsumerSeekCallback consumerSeekCallback) { 70 | // set the seekcallback for resetting the offset 71 
| this.seekCallBack.set(consumerSeekCallback); 72 | } 73 | 74 | @Override 75 | public void onPartitionsAssigned( 76 | Map map, ConsumerSeekCallback consumerSeekCallback) { 77 | System.out.println(); 78 | } 79 | 80 | @Override 81 | public void onIdleContainer( 82 | Map map, ConsumerSeekCallback consumerSeekCallback) {} 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/service/KafkaProducerService.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.service; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.kafka.core.KafkaTemplate; 5 | import org.springframework.stereotype.Service; 6 | 7 | @Service 8 | public class KafkaProducerService { 9 | 10 | @Autowired private KafkaTemplate kafkaTemplate; 11 | 12 | public void send(String topic, String data) { 13 | kafkaTemplate.send(topic, data); 14 | kafkaTemplate.flush(); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/service/UserService.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.service; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.ArrayList; 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import lombok.Getter; 9 | import lombok.Setter; 10 | import lombok.extern.log4j.Log4j2; 11 | import org.gnuhpc.bigdata.config.WebSecurityConfig; 12 | import org.gnuhpc.bigdata.constant.GeneralResponseState; 13 | import org.gnuhpc.bigdata.model.GeneralResponse; 14 | import org.gnuhpc.bigdata.model.User; 15 | import org.gnuhpc.bigdata.utils.CommonUtils; 16 | import org.springframework.core.io.ClassPathResource; 17 | import org.springframework.core.io.Resource; 18 | import org.springframework.stereotype.Service; 19 | 20 | @Getter 21 | @Setter 
22 | @Log4j2 23 | @Service 24 | public class UserService { 25 | 26 | private HashMap accounts; 27 | 28 | public List listUser() { 29 | List userList = new ArrayList<>(); 30 | try { 31 | Resource resource = new ClassPathResource(WebSecurityConfig.SECURITY_FILE_PATH); 32 | File file = resource.getFile(); 33 | accounts = CommonUtils.yamlParse(file); 34 | accounts.forEach( 35 | (username, value) -> { 36 | userList.add((String) username); 37 | }); 38 | } catch (IOException ioException) { 39 | log.error("Failed to get user list. Reason : " + ioException.getLocalizedMessage()); 40 | } 41 | 42 | return userList; 43 | } 44 | 45 | public GeneralResponse addUser(User user) { 46 | String username = user.getUsername(); 47 | try { 48 | boolean exist = checkUserExist(username); 49 | if (!exist) { 50 | return saveUserInfo(user); 51 | } else { 52 | log.info("Failed to add user. Reason : User " + username + " already exists."); 53 | return GeneralResponse.builder() 54 | .state(GeneralResponseState.failure) 55 | .msg("Failed to add user. Reason : User " + username + " already exists.") 56 | .build(); 57 | } 58 | } catch (IOException ioException) { 59 | log.error( 60 | "Failed to add user " + username + ". Reason : " + ioException.getLocalizedMessage()); 61 | return GeneralResponse.builder() 62 | .state(GeneralResponseState.failure) 63 | .msg("Failed to add user " + username + ". Reason : " + ioException.getLocalizedMessage()) 64 | .build(); 65 | } 66 | } 67 | 68 | public GeneralResponse modifyUser(User user) { 69 | String username = user.getUsername(); 70 | try { 71 | boolean exist = checkUserExist(username); 72 | if (exist) { 73 | return saveUserInfo(user); 74 | } else { 75 | log.info("Failed to modify user. Reason : User " + username + " does not exist."); 76 | return GeneralResponse.builder() 77 | .state(GeneralResponseState.failure) 78 | .msg("Failed to modify user. 
Reason : User " + username + " does not exist.") 79 | .build(); 80 | } 81 | } catch (IOException ioException) { 82 | log.error( 83 | "Failed to modify user " + username + ". Reason : " + ioException.getLocalizedMessage()); 84 | return GeneralResponse.builder() 85 | .state(GeneralResponseState.failure) 86 | .msg( 87 | "Failed to modify user " 88 | + username 89 | + ". Reason : " 90 | + ioException.getLocalizedMessage()) 91 | .build(); 92 | } 93 | } 94 | 95 | public GeneralResponse delUser(String username) { 96 | try { 97 | boolean exist = checkUserExist(username); 98 | if (exist) { 99 | accounts.remove(username); 100 | CommonUtils.yamlWrite(WebSecurityConfig.SECURITY_FILE_PATH, accounts); 101 | return GeneralResponse.builder() 102 | .state(GeneralResponseState.success) 103 | .msg("Delete user " + username + " successfully.") 104 | .build(); 105 | } else { 106 | log.info("Failed to delete user. Reason : User " + username + " does not exist."); 107 | return GeneralResponse.builder() 108 | .state(GeneralResponseState.failure) 109 | .msg("Failed to delete user. Reason : User " + username + " does not exist.") 110 | .build(); 111 | } 112 | } catch (IOException ioException) { 113 | log.error( 114 | "Failed to delete user " + username + ". Reason : " + ioException.getLocalizedMessage()); 115 | return GeneralResponse.builder() 116 | .state(GeneralResponseState.failure) 117 | .msg( 118 | "Failed to delete user " 119 | + username 120 | + ". 
Reason : " 121 | + ioException.getLocalizedMessage()) 122 | .build(); 123 | } 124 | } 125 | 126 | public boolean checkUserExist(String username) throws IOException { 127 | Resource resource = new ClassPathResource(WebSecurityConfig.SECURITY_FILE_PATH); 128 | File file = resource.getFile(); 129 | accounts = CommonUtils.yamlParse(file); 130 | if (accounts.containsKey(username)) { 131 | return true; 132 | } 133 | return false; 134 | } 135 | 136 | public GeneralResponse saveUserInfo(User user) throws IOException { 137 | String username = user.getUsername(); 138 | String encodedPassword = CommonUtils.encode(user.getPassword()); 139 | HashMap userInfo = new HashMap<>(); 140 | 141 | userInfo.put("password", encodedPassword); 142 | userInfo.put("role", user.getRole()); 143 | accounts.put(username, userInfo); 144 | Resource resource = new ClassPathResource(WebSecurityConfig.SECURITY_FILE_PATH); 145 | File file = resource.getFile(); 146 | CommonUtils.yamlWrite(file, accounts); 147 | return GeneralResponse.builder() 148 | .state(GeneralResponseState.success) 149 | .msg("Save user " + username + " info successfully.") 150 | .build(); 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/service/ZookeeperService.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.service; 2 | 3 | import com.google.common.net.HostAndPort; 4 | import java.util.HashMap; 5 | import java.util.List; 6 | import java.util.Map; 7 | import lombok.extern.log4j.Log4j2; 8 | import org.gnuhpc.bigdata.constant.ZkServerCommand; 9 | import org.gnuhpc.bigdata.exception.ServiceNotAvailableException; 10 | import org.gnuhpc.bigdata.model.ZkServerEnvironment; 11 | import org.gnuhpc.bigdata.model.ZkServerStat; 12 | import org.gnuhpc.bigdata.utils.ZookeeperUtils; 13 | import org.springframework.beans.factory.annotation.Autowired; 14 | import 
org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

/** Queries each configured ZooKeeper server with four-letter commands (stat/envi). */
@Service
@Log4j2
public class ZookeeperService {

  @Lazy
  @Autowired private ZookeeperUtils zookeeperUtils;

  /**
   * Runs the "stat" four-letter command against every configured ZooKeeper host.
   *
   * <p>Unreachable hosts still appear in the result, carrying the failure state/message instead
   * of real statistics, so callers see the full cluster picture.
   */
  public Map<HostAndPort, ZkServerStat> stat() {
    List<HostAndPort> hostAndPortList = zookeeperUtils.getZookeeperConfig().getHostAndPort();
    Map<HostAndPort, ZkServerStat> result = new HashMap<>();
    for (HostAndPort hp : hostAndPortList) {
      try {
        result.put(
            hp,
            zookeeperUtils.parseStatResult(
                zookeeperUtils.executeCommand(
                    hp.getHostText(), hp.getPort(), ZkServerCommand.stat.toString())));
      } catch (ServiceNotAvailableException serviceNotAvailbleException) {
        log.warn(
            "Execute "
                + ZkServerCommand.stat.toString()
                + " command failed. Exception:"
                + serviceNotAvailbleException);
        result.put(
            hp,
            ZkServerStat.builder()
                .mode(serviceNotAvailbleException.getServiceState())
                .msg(serviceNotAvailbleException.getMessage())
                .build());
      }
    }
    return result;
  }

  /**
   * Runs the "envi" four-letter command against every configured ZooKeeper host; unreachable
   * hosts get an environment containing only "mode" and "msg" entries describing the failure.
   */
  public Map<HostAndPort, ZkServerEnvironment> environment() {
    List<HostAndPort> hostAndPortList = zookeeperUtils.getZookeeperConfig().getHostAndPort();
    Map<HostAndPort, ZkServerEnvironment> result = new HashMap<>();
    for (HostAndPort hp : hostAndPortList) {
      try {
        result.put(
            hp,
            zookeeperUtils.parseEnvResult(
                zookeeperUtils.executeCommand(
                    hp.getHostText(), hp.getPort(), ZkServerCommand.envi.toString())));
      } catch (ServiceNotAvailableException serviceNotAvailbleException) {
        log.warn(
            "Execute "
                + ZkServerCommand.envi.toString()
                + " command failed. Exception:"
                + serviceNotAvailbleException);
        ZkServerEnvironment zkServerEnvironment = new ZkServerEnvironment();
        zkServerEnvironment.add("mode", serviceNotAvailbleException.getServiceState().toString());
        zkServerEnvironment.add("msg", serviceNotAvailbleException.getMessage());
        result.put(hp, zkServerEnvironment);
      }
    }
    return result;
  }
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/task/FetchOffSetFromZkResult.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.task;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/** Result holder for one (topic, partition) consumer offset fetched from ZooKeeper. */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class FetchOffSetFromZkResult {

  private String topic;
  // NOTE(review): "parition" is a typo, but renaming it would change the Lombok-generated
  // accessor names (getParition/setParition) that callers may use, so it is kept as-is.
  private int parition;
  private long offset;
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/utils/ByteUtils.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.utils;

import java.nio.ByteBuffer;

/** Conversions between long values and their big-endian 8-byte representations. */
public class ByteUtils {

  public static byte[] longToBytes(long x) {
    // Allocate a fresh buffer per call: the previous shared static buffer was not
    // thread-safe, and bytesToLong() left it positioned at its limit, so any second
    // call overflowed with a BufferOverflowException.
    ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
    buffer.putLong(0, x);
    return buffer.array();
  }

  public static long bytesToLong(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
    buffer.put(bytes, 0, bytes.length);
    buffer.flip(); // switch from writing to reading
    return buffer.getLong();
  }
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/utils/CommonUtils.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.utils;

import com.fasterxml.jackson.databind.ObjectMapper;
import
com.fasterxml.jackson.dataformat.yaml.YAMLFactory; 5 | import java.io.File; 6 | import java.io.IOException; 7 | import java.util.HashMap; 8 | import lombok.Getter; 9 | import lombok.Setter; 10 | import lombok.extern.log4j.Log4j2; 11 | import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; 12 | 13 | @Log4j2 14 | @Getter 15 | @Setter 16 | public class CommonUtils { 17 | 18 | public static final String PROJECT_ROOT_FOLDER = CommonUtils.getProjectRootPath(); 19 | 20 | public static String encode(CharSequence rawPassword) { 21 | return new BCryptPasswordEncoder().encode(rawPassword); 22 | } 23 | 24 | public static String getProjectRootPath() { 25 | String workingDir = System.getProperty("user.dir"); 26 | log.info("Current working directory : " + workingDir); 27 | return workingDir; 28 | } 29 | 30 | public static HashMap yamlParse(String filePath) throws IOException { 31 | ObjectMapper mapperForYaml = new ObjectMapper(new YAMLFactory()); 32 | File file = new File(filePath); 33 | HashMap yamlHash = new HashMap<>(); 34 | yamlHash = mapperForYaml.readValue(file, yamlHash.getClass()); 35 | 36 | return yamlHash; 37 | } 38 | 39 | public static HashMap yamlParse(File file) throws IOException { 40 | ObjectMapper mapperForYaml = new ObjectMapper(new YAMLFactory()); 41 | HashMap yamlHash = new HashMap<>(); 42 | yamlHash = mapperForYaml.readValue(file, yamlHash.getClass()); 43 | 44 | return yamlHash; 45 | } 46 | 47 | public static void yamlWrite(String filePath, Object object) throws IOException { 48 | File file = new File(filePath); 49 | ObjectMapper mapperForYaml = new ObjectMapper(new YAMLFactory()); 50 | mapperForYaml.writeValue(file, object); 51 | } 52 | 53 | public static void yamlWrite(File file, Object object) throws IOException { 54 | ObjectMapper mapperForYaml = new ObjectMapper(new YAMLFactory()); 55 | mapperForYaml.writeValue(file, object); 56 | } 57 | 58 | public static void main(String[] args) throws IOException { 59 | /* 60 | String rawPassword = 
"admin"; 61 | String encodedPassword = CommonUtils.encode(rawPassword); 62 | System.out.println("rawPassword:" + rawPassword + ", encodedPassword:" + encodedPassword); 63 | System.out.println("workingDir:" + CommonUtils.PROJECT_ROOT_FOLDER); 64 | */ 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/utils/KafkaUtils.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.utils; 2 | 3 | import kafka.admin.AdminClient; 4 | import lombok.Getter; 5 | import lombok.Setter; 6 | import lombok.extern.log4j.Log4j2; 7 | import org.apache.kafka.clients.CommonClientConfigs; 8 | import org.apache.kafka.clients.consumer.ConsumerConfig; 9 | import org.apache.kafka.clients.consumer.KafkaConsumer; 10 | import org.apache.kafka.clients.producer.KafkaProducer; 11 | import org.apache.kafka.clients.producer.ProducerConfig; 12 | import org.apache.kafka.common.Node; 13 | import org.apache.kafka.common.PartitionInfo; 14 | import org.apache.kafka.common.config.SaslConfigs; 15 | import org.apache.kafka.common.serialization.Serdes; 16 | import org.apache.kafka.common.serialization.StringDeserializer; 17 | import org.apache.kafka.common.serialization.StringSerializer; 18 | import org.apache.kafka.common.utils.Bytes; 19 | import org.gnuhpc.bigdata.config.KafkaConfig; 20 | import org.gnuhpc.bigdata.config.ZookeeperConfig; 21 | import org.springframework.beans.factory.annotation.Autowired; 22 | import org.springframework.context.annotation.Configuration; 23 | import org.springframework.context.annotation.Lazy; 24 | 25 | import java.nio.ByteBuffer; 26 | import java.util.HashMap; 27 | import java.util.List; 28 | import java.util.Map; 29 | import java.util.Properties; 30 | 31 | /** 32 | * Created by gnuhpc on 2017/7/12. 
33 | */ 34 | @Log4j2 35 | @Getter 36 | @Setter 37 | @Lazy 38 | @Configuration 39 | public class KafkaUtils { 40 | 41 | @Autowired 42 | private KafkaConfig kafkaConfig; 43 | @Autowired 44 | private ZookeeperConfig zookeeperConfig; 45 | 46 | private KafkaProducer producer; 47 | 48 | private AdminClient adminClient = null; 49 | 50 | public static final String DEFAULTCP = "kafka-rest-consumergroup"; 51 | public static final Map> DESERIALIZER_TYPE_MAP = new HashMap() { 52 | { 53 | put("StringDeserializer", String.class); 54 | put("ShortDeserializer", Short.class); 55 | put("IntegerDeserializer", Integer.class); 56 | put("LongDeserializer", Long.class); 57 | put("FloatDeserializer", Float.class); 58 | put("DoubleDeserializer", Double.class); 59 | put("ByteArrayDeserializer", byte[].class); 60 | put("ByteBufferDeserializer", ByteBuffer.class); 61 | put("BytesDeserializer", Bytes.class); 62 | put("AvroDeserializer", byte[].class); 63 | put("KafkaAvroDeserializer", byte[].class); 64 | } 65 | }; 66 | 67 | public static final Map> SERIALIZER_TYPE_MAP = new HashMap() { 68 | { 69 | put("StringSerializer", String.class); 70 | put("ShortSerializer", Short.class); 71 | put("IntegerSerializer", Integer.class); 72 | put("LongSerializer", Long.class); 73 | put("FloatSerializer", Float.class); 74 | put("DoubleSerializer", Double.class); 75 | put("ByteArraySerializer", byte[].class); 76 | put("ByteBufferSerializer", ByteBuffer.class); 77 | put("BytesSerializer", Bytes.class); 78 | put("AvroSerializer", byte[].class); 79 | put("KafkaAvroSerializer", byte[].class); 80 | } 81 | }; 82 | 83 | public void init() { 84 | } 85 | 86 | public void destroy() { 87 | log.info("Kafka destorying..."); 88 | } 89 | 90 | public KafkaConsumer createNewConsumer() { 91 | return createNewConsumer(DEFAULTCP); 92 | } 93 | 94 | public KafkaConsumer createNewConsumer(String consumerGroup) { 95 | Properties properties = initProps(); 96 | properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup); 97 | 
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); 98 | properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); 99 | properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, "100000000"); 100 | properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "5"); 101 | properties.put( 102 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName()); 103 | properties.put( 104 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 105 | StringDeserializer.class.getCanonicalName()); 106 | 107 | return new KafkaConsumer(properties); 108 | } 109 | 110 | public KafkaConsumer createNewConsumerByClientId(String consumerGroup, String clientId) { 111 | Properties properties = initProps(); 112 | properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup); 113 | properties.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId); 114 | properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); 115 | properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); 116 | properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, "100000000"); 117 | properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "5"); 118 | properties.put( 119 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName()); 120 | properties.put( 121 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 122 | StringDeserializer.class.getCanonicalName()); 123 | 124 | return new KafkaConsumer(properties); 125 | } 126 | 127 | public KafkaConsumer createNewConsumer(String consumerGroup, String keyDecoder, 128 | String valueDecoder, int maxRecords) 129 | throws ClassNotFoundException { 130 | Properties properties = initProps(); 131 | if (keyDecoder == null || keyDecoder.isEmpty()) { 132 | properties.put( 133 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, 134 | StringDeserializer.class.getCanonicalName()); 135 | } else { 136 | Class keyType = KafkaUtils.DESERIALIZER_TYPE_MAP.get(keyDecoder); 137 | String keyDese = 
Serdes.serdeFrom(keyType).deserializer().getClass().getCanonicalName(); 138 | properties.put( 139 | ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, 140 | Class.forName(keyDese).getCanonicalName()); 141 | } 142 | 143 | if (valueDecoder == null || valueDecoder.isEmpty()) { 144 | properties.put( 145 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 146 | StringDeserializer.class.getCanonicalName()); 147 | } else { 148 | Class valueType = KafkaUtils.DESERIALIZER_TYPE_MAP.get(valueDecoder); 149 | String valDese = Serdes.serdeFrom(valueType).deserializer().getClass().getCanonicalName(); 150 | properties.put( 151 | ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 152 | Class.forName(valDese).getCanonicalName()); 153 | } 154 | 155 | properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup); 156 | properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); 157 | properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); 158 | properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, "100000000"); 159 | properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxRecords); 160 | 161 | log.info("Consumer properties:" + properties); 162 | KafkaConsumer kafkaConsumer = new KafkaConsumer(properties); 163 | return kafkaConsumer; 164 | } 165 | 166 | public KafkaProducer createProducer() { 167 | Properties prop = initProps(); 168 | prop.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, 169 | "org.apache.kafka.common.serialization.StringSerializer"); 170 | prop.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, 171 | "org.apache.kafka.common.serialization.StringSerializer"); 172 | prop.setProperty(ProducerConfig.RETRIES_CONFIG, "3"); 173 | prop.setProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000"); 174 | producer = new KafkaProducer(prop); 175 | 176 | return producer; 177 | } 178 | 179 | public KafkaProducer createProducer(String keyEncoder, String valueEncoder) throws ClassNotFoundException { 180 | Properties prop = initProps(); 181 | if 
(keyEncoder == null || keyEncoder.isEmpty()) { 182 | prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName()); 183 | } else { 184 | Class keyType = KafkaUtils.SERIALIZER_TYPE_MAP.get(keyEncoder); 185 | String keySe = Serdes.serdeFrom(keyType).serializer().getClass().getCanonicalName(); 186 | prop.put( 187 | ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, 188 | Class.forName(keySe).getCanonicalName()); 189 | } 190 | 191 | if (valueEncoder == null || valueEncoder.isEmpty()) { 192 | prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName()); 193 | } else { 194 | Class valueType = KafkaUtils.SERIALIZER_TYPE_MAP.get(valueEncoder); 195 | String valSe = Serdes.serdeFrom(valueType).serializer().getClass().getCanonicalName(); 196 | prop.put( 197 | ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, 198 | Class.forName(valSe).getCanonicalName()); 199 | } 200 | 201 | prop.setProperty(ProducerConfig.RETRIES_CONFIG, "3"); 202 | prop.setProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000"); 203 | producer = new KafkaProducer(prop); 204 | 205 | return producer; 206 | } 207 | 208 | public Node getLeader(String topic, int partitionId) { 209 | KafkaConsumer consumer = createNewConsumer(DEFAULTCP); 210 | List tmList = consumer.partitionsFor(topic); 211 | 212 | PartitionInfo partitionInfo = 213 | tmList.stream().filter(pi -> pi.partition() == partitionId).findFirst().get(); 214 | consumer.close(); 215 | return partitionInfo.leader(); 216 | } 217 | 218 | public AdminClient createAdminClient() { 219 | Properties props = initProps(); 220 | 221 | if(adminClient == null){ 222 | synchronized (AdminClient.class){ 223 | adminClient = AdminClient.create(props); 224 | } 225 | } 226 | return adminClient; 227 | } 228 | 229 | private Properties initProps() { 230 | Properties p = new Properties(); 231 | 232 | p.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getBrokers()); 233 | 234 | if 
(kafkaConfig.isKafkaSaslEnabled()) { 235 | p.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, kafkaConfig.getSaslSecurityProtocol()); 236 | p.put(SaslConfigs.SASL_MECHANISM, kafkaConfig.getSaslMechianism() ); 237 | } 238 | 239 | return p; 240 | } 241 | } 242 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/utils/TimestampDeserializer.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.utils; 2 | 3 | import com.fasterxml.jackson.core.JsonParser; 4 | import com.fasterxml.jackson.core.JsonProcessingException; 5 | import com.fasterxml.jackson.databind.DeserializationContext; 6 | import com.fasterxml.jackson.databind.JsonDeserializer; 7 | import java.io.IOException; 8 | import lombok.Getter; 9 | import lombok.Setter; 10 | import org.joda.time.DateTime; 11 | 12 | @Getter 13 | @Setter 14 | public class TimestampDeserializer extends JsonDeserializer { 15 | public TimestampDeserializer() { 16 | 17 | } 18 | 19 | @Override 20 | public DateTime deserialize( 21 | JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { 22 | String timestamp = jp.getText().trim(); 23 | 24 | try { 25 | return new DateTime(Long.valueOf(timestamp)); 26 | } catch (NumberFormatException e) { 27 | return null; 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/ConsumerGroupExistConstraint.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.validator; 2 | 3 | import java.lang.annotation.Documented; 4 | import java.lang.annotation.ElementType; 5 | import java.lang.annotation.Retention; 6 | import java.lang.annotation.RetentionPolicy; 7 | import java.lang.annotation.Target; 8 | import javax.validation.Constraint; 9 | import javax.validation.Payload; 10 | 11 | @Documented 12 | 
@Constraint(validatedBy = ConsumerGroupExistValidator.class)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
public @interface ConsumerGroupExistConstraint {

  /** Error message reported when the consumer group does not exist. */
  String message() default "Non-exist Consumer Group!";

  Class<?>[] groups() default {};

  Class<? extends Payload>[] payload() default {};
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/ConsumerGroupExistValidator.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.validator;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.gnuhpc.bigdata.service.KafkaAdminService;
import org.springframework.beans.factory.annotation.Autowired;

/** Validates that a consumer group exists, accepting both new- and old-style groups. */
public class ConsumerGroupExistValidator
    implements ConstraintValidator<ConsumerGroupExistConstraint, String> {

  @Autowired private KafkaAdminService kafkaAdminService;

  @Override
  public void initialize(ConsumerGroupExistConstraint constraint) {}

  @Override
  public boolean isValid(String consumerGroup, ConstraintValidatorContext context) {
    return kafkaAdminService.isNewConsumerGroup(consumerGroup)
        || kafkaAdminService.isOldConsumerGroup(consumerGroup);
  }
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/TopicExistConstraint.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.validator;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import javax.validation.Payload;

@Documented
@Constraint(validatedBy = TopicExistValidator.class)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
public @interface TopicExistConstraint {

  /** Error message reported when the topic does not exist. */
  String message() default "Non-exist Topic!";

  Class<?>[] groups() default {};

  Class<? extends Payload>[] payload() default {};
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/TopicExistValidator.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.validator;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.gnuhpc.bigdata.service.KafkaAdminService;
import org.springframework.beans.factory.annotation.Autowired;

/** Validates that a Kafka topic exists. */
public class TopicExistValidator implements ConstraintValidator<TopicExistConstraint, String> {

  @Autowired private KafkaAdminService kafkaAdminService;

  @Override
  public void initialize(TopicExistConstraint constraint) {}

  @Override
  public boolean isValid(String topic, ConstraintValidatorContext context) {
    return kafkaAdminService.existTopic(topic);
  }
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/ZkNodePathExistConstraint.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.validator;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import javax.validation.Payload;

@Constraint(validatedBy = ZkNodePathExistValidator.class)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
public @interface ZkNodePathExistConstraint {

  /** Error message reported when the ZooKeeper node path does not exist. */
  String message() default "Non-exist ZooKeeper Node path!";

  Class<?>[] groups() default {};

  Class<? extends Payload>[] payload() default {};
}
-------------------------------------------------------------------------------- /src/main/java/org/gnuhpc/bigdata/validator/ZkNodePathExistValidator.java: --------------------------------------------------------------------------------
package org.gnuhpc.bigdata.validator;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.gnuhpc.bigdata.utils.ZookeeperUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;

/** Validates that a ZooKeeper node path exists (its stat is non-null). */
public class ZkNodePathExistValidator
    implements ConstraintValidator<ZkNodePathExistConstraint, String> {

  @Lazy
  @Autowired private ZookeeperUtils zookeeperUtils;

  @Override
  public void initialize(ZkNodePathExistConstraint constraint) {}

  @Override
  public boolean isValid(String path, ConstraintValidatorContext context) {
    return (zookeeperUtils.getNodePathStat(path) != null);
  }
}
-------------------------------------------------------------------------------- /src/main/resources/application-dev.yml: --------------------------------------------------------------------------------
kafka:
  brokers: DPFTMP01:9092,DPFTMP03:9092,DPFTMP04:9092,DPFTMP05:9092,DPFTMP06:9092,DPFTMP07:9092,DPFTMP08:9092,DPFTMP09:9092,DPFTMP10:9092
  schemaregistry: http://localhost:8081
  offset:
    topic: "__consumer_offsets"
    partitions: 50
    reset: true
  healthcheck:
    topic: "health"
  sasl:
    enable: false
    security.protocol: "SASL_PLAINTEXT"
    mechanism: "SCRAM-SHA-512"

zookeeper:
  uris: DPFTMP06:2181,DPFTMP07:2181,DPFTMP08:2181,DPFTMP09:2181,DPFTMP10:2181

jmx:
  kafka:
    jmxurl: localhost:19999,localhost:29999,localhost:39999
  zookeeper:
    jmxurl: localhost:49999

server:
  port: 8121
  context-path:
/ 27 | debug: true 28 | security: 29 | check: false 30 | checkInitDelay: 30 31 | checkSecurityInterval: 5 32 | 33 | spring: 34 | kafka: 35 | bootstrap-servers: ${kafka.brokers} 36 | consumer: 37 | group-id: "__kafka-zk-rest" 38 | mvc: 39 | throw-exception-if-no-handler-found: true 40 | output: 41 | ansi: 42 | enabled: detect 43 | aop: 44 | auto: true 45 | main: 46 | allow-bean-definition-overriding: true 47 | 48 | -------------------------------------------------------------------------------- /src/main/resources/application-home.yml: -------------------------------------------------------------------------------- 1 | kafka: 2 | brokers: 192.168.42.28:19092,192.168.42.28:19093,192.168.42.28:19095 3 | schemaregistry: http://localhost:8081 4 | offset: 5 | topic: "__consumer_offsets" 6 | partitions: 50 7 | reset: true 8 | healthcheck: 9 | topic: "health" 10 | sasl: 11 | enable: false 12 | security.protocol: "SASL_PLAINTEXT" 13 | mechanism: "SCRAM-SHA-512" 14 | 15 | 16 | zookeeper: 17 | uris: 192.168.42.28:2183 18 | 19 | jmx: 20 | kafka: 21 | jmxurl: localhost:19999,localhost:29999,localhost:39999 22 | zookeeper: 23 | jmxurl: localhost:49999 24 | 25 | server: 26 | port: 8121 27 | context-path: / 28 | debug: true 29 | security: 30 | check: false 31 | checkInitDelay: 30 32 | checkSecurityInterval: 5 33 | 34 | spring: 35 | kafka: 36 | bootstrap-servers: ${kafka.brokers} 37 | consumer: 38 | group-id: "kafka-zk-rest" 39 | mvc: 40 | throw-exception-if-no-handler-found: true 41 | output: 42 | ansi: 43 | enabled: detect 44 | aop: 45 | auto: true 46 | main: 47 | allow-bean-definition-overriding: true 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /src/main/resources/application-tina.yml: -------------------------------------------------------------------------------- 1 | kafka: 2 | brokers: 192.168.42.28:19092,192.168.42.28:19093,192.168.42.28:19095 3 | schemaregistry: http://localhost:8081 4 | offset: 5 | topic: 
"__consumer_offsets" 6 | partitions: 50 7 | reset: true 8 | healthcheck: 9 | topic: "health" 10 | sasl: 11 | enable: false 12 | security.protocol: "SASL_PLAINTEXT" 13 | mechanism: "SCRAM-SHA-512" 14 | 15 | 16 | zookeeper: 17 | uris: 192.168.42.28:2183 18 | 19 | jmx: 20 | kafka: 21 | jmxurl: localhost:19999,localhost:29999,localhost:39999 22 | zookeeper: 23 | jmxurl: localhost:49999 24 | 25 | server: 26 | port: 8121 27 | context-path: / 28 | debug: true 29 | security: 30 | check: false 31 | checkInitDelay: 30 32 | checkSecurityInterval: 5 33 | 34 | spring: 35 | kafka: 36 | bootstrap-servers: ${kafka.brokers} 37 | consumer: 38 | group-id: "kafka-zk-rest" 39 | mvc: 40 | throw-exception-if-no-handler-found: true 41 | output: 42 | ansi: 43 | enabled: detect 44 | aop: 45 | auto: true 46 | main: 47 | allow-bean-definition-overriding: true 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | profiles: 3 | active: tina 4 | 5 | main: 6 | allow-bean-definition-overriding: true 7 | 8 | server: 9 | error: 10 | whitelabel: 11 | enabled: false 12 | -------------------------------------------------------------------------------- /src/main/resources/jmxtemplates/KafkaBrokerFilter.yml: -------------------------------------------------------------------------------- 1 | filters: 2 | # 3 | # Kafka volume metrics. 4 | # Notice: BytesInPerSec, BytesOutPerSec and MessagesInPerSec are performance metrics. 
5 | # 6 | - include: 7 | domain: kafka.server 8 | bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec 9 | attribute: 10 | Count: 11 | metric_type: KAFKA_BROKER_VOLUMN 12 | alias: BytesIn 13 | OneMinuteRate: 14 | metric_type: KAFKA_BROKER_PERF 15 | alias: BytesInPerSec 16 | - include: 17 | domain: kafka.server 18 | bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec 19 | attribute: 20 | Count: 21 | metric_type: KAFKA_BROKER_VOLUMN 22 | alias: BytesOut 23 | OneMinuteRate: 24 | metric_type: KAFKA_BROKER_PERF 25 | alias: BytesOutPerSec 26 | - include: 27 | domain: kafka.server 28 | bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec 29 | attribute: 30 | Count: 31 | metric_type: KAFKA_BROKER_VOLUMN 32 | alias: MessagesIn 33 | OneMinuteRate: 34 | metric_type: KAFKA_BROKER_PERF 35 | alias: MessagesInPerSec 36 | 37 | # 38 | # Kafka performance metrics 39 | # 40 | - include: 41 | domain: kafka.network 42 | bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=Produce 43 | attribute: 44 | OneMinuteRate: 45 | metric_type: KAFKA_BROKER_PERF 46 | alias: RequestsPerSec_Produce 47 | - include: 48 | domain: kafka.network 49 | bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchConsumer 50 | attribute: 51 | OneMinuteRate: 52 | metric_type: KAFKA_BROKER_PERF 53 | alias: RequestsPerSec_FetchConsumer 54 | - include: 55 | domain: kafka.network 56 | bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchFollower 57 | attribute: 58 | OneMinuteRate: 59 | metric_type: KAFKA_BROKER_PERF 60 | alias: RequestsPerSec_FetchFollower 61 | - include: 62 | domain: kafka.network 63 | bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=Produce 64 | attribute: 65 | Mean: 66 | metric_type: KAFKA_BROKER_PERF 67 | alias: TotalTimeMs_Produce_Mean 68 | Count: 69 | metric_type: KAFKA_BROKER_PERF 70 | alias: TotalTimeMs_Produce_Count 71 | - include: 72 | domain: kafka.network 73 | bean: 
kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchConsumer 74 | attribute: 75 | Mean: 76 | metric_type: KAFKA_BROKER_PERF 77 | alias: TotalTimeMs_FetchConsumer_Mean 78 | Count: 79 | metric_type: KAFKA_BROKER_PERF 80 | alias: TotalTimeMs_FetchConsumer_Count 81 | - include: 82 | domain: kafka.network 83 | bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchFollower 84 | attribute: 85 | Mean: 86 | metric_type: KAFKA_BROKER_PERF 87 | alias: TotalTimeMs_FetchFollower_Mean 88 | Count: 89 | metric_type: KAFKA_BROKER_PERF 90 | alias: TotalTimeMs_FetchFollower_Count 91 | - include: 92 | domain: kafka.network 93 | bean: kafka.network:type=SocketServer,name=NetworkProcessorAvgIdlePercent 94 | attribute: 95 | Value: 96 | metric_type: KAFKA_BROKER_PERF 97 | alias: NetworkProcessorAvgIdlePercent 98 | - include: 99 | domain: kafka.server 100 | bean: kafka.server:type=KafkaRequestHandlerPool,name=RequestHandlerAvgIdlePercent 101 | attribute: 102 | FiveMinuteRate: 103 | metric_type: KAFKA_BROKER_PERF 104 | alias: RequestHandlerAvgIdlePercent 105 | - include: 106 | domain: kafka.server 107 | bean: kafka.server:type=ReplicaFetcherManager,name=MaxLag,clientId=Replica 108 | attribute: 109 | Value: 110 | metric_type: KAFKA_BROKER_PERF 111 | alias: MaxLagBtwFollowerLeader 112 | # 113 | # Kafka availability metrics 114 | # 115 | - include: 116 | domain: java.lang 117 | bean: java.lang:type=Runtime 118 | attribute: 119 | Uptime: 120 | metric_type: KAFKA_BROKER_AVAIL 121 | alias: JVMUptime 122 | - include: 123 | domain: kafka.controller 124 | bean: kafka.controller:type=KafkaController,name=OfflinePartitionsCount 125 | attribute: 126 | Value: 127 | metric_type: KAFKA_BROKER_AVAIL 128 | alias: OfflinePartitionsCount 129 | - include: 130 | domain: kafka.controller 131 | bean: kafka.controller:type=KafkaController,name=ActiveControllerCount 132 | attribute: 133 | Value: 134 | metric_type: KAFKA_BROKER_AVAIL 135 | alias: ActiveControllerCount 136 | - include: 137 
| domain: kafka.controller 138 | bean: kafka.controller:type=ControllerStats,name=LeaderElectionRateAndTimeMs 139 | attribute: 140 | Count: 141 | metric_type: KAFKA_BROKER_AVAIL 142 | alias: LeaderElectionRateAndTimeMs 143 | - include: 144 | domain: kafka.controller 145 | bean: kafka.controller:type=ControllerStats,name=UncleanLeaderElectionsPerSec 146 | attribute: 147 | Count: 148 | metric_type: KAFKA_BROKER_AVAIL 149 | alias: UncleanLeaderElectionsPerSec 150 | - include: 151 | domain: kafka.server 152 | bean: kafka.server:type=ReplicaManager,name=UnderReplicatedPartitions 153 | attribute: 154 | Value: 155 | metric_type: KAFKA_BROKER_AVAIL 156 | alias: UnderReplicatedPartitions 157 | - include: 158 | domain: kafka.server 159 | bean: kafka.server:type=ReplicaManager,name=PartitionCount 160 | attribute: 161 | Value: 162 | metric_type: KAFKA_BROKER_AVAIL 163 | alias: PartitionCount 164 | - include: 165 | domain: kafka.server 166 | bean: kafka.server:type=ReplicaManager,name=IsrShrinksPerSec 167 | attribute: 168 | Count: 169 | metric_type: KAFKA_BROKER_AVAIL 170 | alias: IsrShrinksPerSec 171 | - include: 172 | domain: kafka.server 173 | bean: kafka.server:type=ReplicaManager,name=IsrExpandsPerSec 174 | attribute: 175 | Count: 176 | metric_type: KAFKA_BROKER_AVAIL 177 | alias: IsrExpandsPerSec 178 | - include: 179 | domain: kafka.server 180 | bean: kafka.server:type=ReplicaManager,name=LeaderCount 181 | attribute: 182 | Value: 183 | metric_type: KAFKA_BROKER_AVAIL 184 | alias: LeaderCount 185 | -------------------------------------------------------------------------------- /src/main/resources/jmxtemplates/KafkaConsumerFilter.yml: -------------------------------------------------------------------------------- 1 | filters: 2 | - include: 3 | domain: kafka.consumer 4 | bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=([-.\w]+) 5 | attribute: 6 | Value: 7 | metric_type: KAFKA_CONSUMER_OLD_HIGH 8 | alias: MaxLag 9 | - include: 10 | domain: 
kafka.consumer 11 | bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MinFetchRate,clientId=([-.\w]+) 12 | attribute: 13 | Value: 14 | metric_type: KAFKA_CONSUMER_OLD_HIGH 15 | alias: MinFetchRate 16 | - include: 17 | domain: kafka.consumer 18 | bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=MessagesPerSec,clientId=([-.\w]+) 19 | attribute: 20 | Count: 21 | metric_type: KAFKA_CONSUMER_OLD_HIGH 22 | alias: MessagesPerSec 23 | - include: 24 | domain: kafka.consumer 25 | bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=BytesPerSec,clientId=([-.\w]+) 26 | attribute: 27 | Count: 28 | metric_type: KAFKA_CONSUMER_OLD_HIGH 29 | alias: BytesPerSec 30 | - include: 31 | domain: kafka.consumer 32 | bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=([-.\w]+) 33 | attribute: 34 | Count: 35 | metric_type: KAFKA_CONSUMER_OLD_HIGH 36 | alias: KafkaCommitsPerSec 37 | - include: 38 | domain: kafka.consumer 39 | bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=OwnedPartitionsCount,clientId=([-.\w]+),groupId=([-.\w]+) 40 | attribute: 41 | Value: 42 | metric_type: KAFKA_CONSUMER_OLD_HIGH 43 | alias: OwnedPartitionsCount -------------------------------------------------------------------------------- /src/main/resources/jmxtemplates/KafkaProducerFilter.yml: -------------------------------------------------------------------------------- 1 | filters: 2 | - include: 3 | domain: kafka.producer 4 | bean_regex: kafka.producer:type=producer-metrics,client-id=([-.\w]+) 5 | attribute: 6 | request-rate: 7 | metric_type: KAFKA_PRODUCER 8 | alias: request-rate 9 | request-size-avg: 10 | metric_type: KAFKA_PRODUCER 11 | alias: request-size-avg 12 | # There is some problem. 
I can't find producer-topic-metrics in Kafka 0.10 13 | - include: 14 | domain: kafka.producer 15 | bean_regex: kafka.producer:type=producer-topic-metrics,client-id=([-.\w]+) 16 | attribute: 17 | byte-rate: 18 | metric_type: KAFKA_PRODUCER 19 | alias: byte-rate-topic 20 | 21 | -------------------------------------------------------------------------------- /src/main/resources/jmxtemplates/KafkaTopicFilter.yml: -------------------------------------------------------------------------------- 1 | filters: 2 | - include: 3 | domain: kafka.server 4 | bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec,topic={topicname} 5 | attribute: 6 | Count: 7 | metric_type: KAFKA_TOPIC_PERF 8 | alias: BytesInPerSec 9 | - include: 10 | domain: kafka.server 11 | bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec,topic={topicname} 12 | attribute: 13 | Count: 14 | metric_type: KAFKA_TOPIC_PERF 15 | alias: BytesOutPerSec 16 | - include: 17 | domain: kafka.server 18 | bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec,topic={topicname} 19 | attribute: 20 | Count: 21 | metric_type: KAFKA_TOPIC_PERF 22 | alias: MessagesInPerSec 23 | - include: 24 | domain: kafka.server 25 | bean: kafka.server:type=BrokerTopicMetrics,name=BytesRejectedPerSec,topic={topicname} 26 | attribute: 27 | Count: 28 | metric_type: KAFKA_TOPIC_PERF 29 | alias: BytesRejectedPerSec 30 | - include: 31 | domain: kafka.server 32 | bean: kafka.server:type=BrokerTopicMetrics,name=FailedFetchRequestsPerSec,topic={topicname} 33 | attribute: 34 | Count: 35 | metric_type: KAFKA_TOPIC_PERF 36 | alias: FailedFetchRequestsPerSec 37 | - include: 38 | domain: kafka.server 39 | bean: kafka.server:type=BrokerTopicMetrics,name=FailedProduceRequestsPerSec,topic={topicname} 40 | attribute: 41 | Count: 42 | metric_type: KAFKA_TOPIC_PERF 43 | alias: FailedProduceRequestsPerSec 44 | - include: 45 | domain: kafka.server 46 | bean: 
kafka.server:type=BrokerTopicMetrics,name=TotalFetchRequestsPerSec,topic={topicname} 47 | attribute: 48 | Count: 49 | metric_type: KAFKA_TOPIC_PERF 50 | alias: TotalFetchRequestsPerSec 51 | - include: 52 | domain: kafka.server 53 | bean: kafka.server:type=BrokerTopicMetrics,name=TotalProduceRequestsPerSec,topic={topicname} 54 | attribute: 55 | Count: 56 | metric_type: KAFKA_TOPIC_PERF 57 | alias: TotalProduceRequestsPerSec -------------------------------------------------------------------------------- /src/main/resources/kafka-zk-springboot-distribution.xml: -------------------------------------------------------------------------------- 1 | 2 | assembly 3 | 4 | tar 5 | zip 6 | 7 | 8 | 9 | src/main/resources 10 | bin 11 | 12 | start.sh 13 | start.bat 14 | 15 | 0755 16 | 17 | 18 | src/main/resources 19 | config 20 | 21 | *.properties 22 | *.xml 23 | *.yml 24 | env/dev/* 25 | env/qa/* 26 | env/uat/* 27 | env/prod/* 28 | jmxtemplates/* 29 | 30 | 31 | 32 | target 33 | lib 34 | 35 | *release*.jar 36 | 37 | 38 | 39 | src/main/resources 40 | logs 41 | 0755 42 | 43 | **/* 44 | 45 | 46 | 47 | ${project.build.directory}/asciidoc 48 | docs 49 | 50 | md/* 51 | html/* 52 | pdf/* 53 | 54 | 0755 55 | 56 | 57 | -------------------------------------------------------------------------------- /src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | status = warn 2 | 3 | appender.console.type = Console 4 | appender.console.name = LogToConsole 5 | appender.console.layout.type = PatternLayout 6 | appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %c{1} - %msg%n 7 | 8 | # Rotate log file 9 | appender.rolling.type = RollingFile 10 | appender.rolling.name = LogToRollingFile 11 | appender.rolling.fileName = logs/app.log 12 | appender.rolling.filePattern = logs/$${date:yyyy-MM}/app-%d{MM-dd-yyyy}-%i.log.gz 13 | appender.rolling.layout.type = PatternLayout 14 | appender.rolling.layout.pattern = %d 
%p %C{1.} [%t] %m%n 15 | appender.rolling.policies.type = Policies 16 | appender.rolling.policies.time.type = TimeBasedTriggeringPolicy 17 | appender.rolling.policies.size.type = SizeBasedTriggeringPolicy 18 | appender.rolling.policies.size.size=10MB 19 | appender.rolling.strategy.type = DefaultRolloverStrategy 20 | appender.rolling.strategy.max = 10 21 | 22 | # Log to console and rolling file 23 | logger.app.name = org.gnuhpc.bigdata 24 | logger.app.level = debug 25 | logger.app.additivity = false 26 | logger.app.appenderRef.rolling.ref = LogToRollingFile 27 | logger.app.appenderRef.console.ref = LogToConsole 28 | 29 | rootLogger.level = info 30 | rootLogger.appenderRef.stdout.ref = LogToConsole 31 | 32 | 33 | -------------------------------------------------------------------------------- /src/main/resources/security.yml: -------------------------------------------------------------------------------- 1 | --- 2 | admin: 3 | password: "$2a$10$cwkLeAFbPSNWEvjnL.w2FeoEPIv.MMEb0Pk541TiuqGRHP.x8ReoK" 4 | role: "admin" 5 | -------------------------------------------------------------------------------- /src/main/resources/start.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | ENV=$1 3 | if [[ $ENV == '' ]] 4 | then 5 | ENV="dev" 6 | fi 7 | 8 | 9 | unset CDPATH 10 | export basedir=$(cd `dirname $0`/..; pwd) 11 | configdir=${basedir}/config 12 | libdir=${basedir}/lib 13 | logdir=${basedir}/logs 14 | 15 | chmod 755 ${logdir} 16 | 17 | java -Xms512m -Xmx512m -server -Xloggc:${logdir}/gc.log -verbose:gc -XX:+PrintGCDetails -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${logdir} -cp ${basedir}:${configdir}/*:${libdir}/* -Dbasedir=${basedir} -Dlogging.config=${configdir}/log4j2.properties -Dspring.config.location=${configdir}/ -jar ${libdir}/kafka*-rest-springboot*.jar 18 | --------------------------------------------------------------------------------
/src/main/scala/org.gnuhpc.bigdata/utils/CollectionConvertor.scala: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.utils 2 | 3 | import scala.collection.JavaConverters 4 | 5 | object CollectionConvertor { 6 | 7 | def seqConvertJavaList[A](seq: Seq[A]): java.util.List[A] = { 8 | return JavaConverters.seqAsJavaListConverter(seq).asJava 9 | } 10 | 11 | def mapConvertJavaMap[A, B](map: scala.collection.Map[A, B]): java.util.Map[A, B] = { 12 | return JavaConverters.mapAsJavaMapConverter(map).asJava 13 | } 14 | 15 | def listConvertJavaList[A](list: List[A]): java.util.List[A] = { 16 | return JavaConverters.bufferAsJavaListConverter(list.toBuffer).asJava; 17 | } 18 | 19 | def optionListConvertJavaList[A](list: List[A]): java.util.List[A] = { 20 | return JavaConverters.bufferAsJavaListConverter(list.toBuffer).asJava; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/test/java/org/gnuhpc/bigdata/Swagger2MarkupTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * Copyright 2015 the original author or authors. 4 | * 5 | * Licensed under the Apache License, Version 2.0 (the "License"); 6 | * you may not use this file except in compliance with the License. 7 | * You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | * 17 | * 18 | */ 19 | package org.gnuhpc.bigdata; 20 | 21 | import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; 22 | import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; 23 | 24 | import java.io.BufferedWriter; 25 | import java.nio.charset.StandardCharsets; 26 | import java.nio.file.Files; 27 | import java.nio.file.Paths; 28 | import org.gnuhpc.bigdata.config.SwaggerConfig; 29 | import org.junit.Test; 30 | import org.junit.runner.RunWith; 31 | import org.springframework.beans.factory.annotation.Autowired; 32 | import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs; 33 | import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; 34 | import org.springframework.boot.test.context.SpringBootTest; 35 | import org.springframework.http.MediaType; 36 | import org.springframework.mock.web.MockHttpServletResponse; 37 | import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 38 | import org.springframework.test.context.web.WebAppConfiguration; 39 | import org.springframework.test.web.servlet.MockMvc; 40 | import org.springframework.test.web.servlet.MvcResult; 41 | 42 | @WebAppConfiguration 43 | @RunWith(SpringJUnit4ClassRunner.class) 44 | @AutoConfigureRestDocs(outputDir = "build/asciidoc/snippets") 45 | @SpringBootTest(classes = {KafkaRestSpringbootApplication.class, SwaggerConfig.class}) 46 | @AutoConfigureMockMvc 47 | public class Swagger2MarkupTest { 48 | 49 | @Autowired private MockMvc mockMvc; 50 | 51 | @Test 52 | public void createSpringfoxSwaggerJson() throws Exception { 53 | // String designFirstSwaggerLocation = 54 | // Swagger2MarkupTest.class.getResource("/swagger.yaml").getPath(); 55 | 56 | String outputDir = System.getProperty("io.springfox.staticdocs.outputDir"); 57 | MvcResult mvcResult = 58 | this.mockMvc 59 | .perform(get("/v2/api-docs").accept(MediaType.APPLICATION_JSON)) 60 | .andExpect(status().isOk()) 61 | 
.andReturn(); 62 | 63 | MockHttpServletResponse response = mvcResult.getResponse(); 64 | String swaggerJson = response.getContentAsString(); 65 | Files.createDirectories(Paths.get(outputDir)); 66 | try (BufferedWriter writer = 67 | Files.newBufferedWriter(Paths.get(outputDir, "swagger.json"), StandardCharsets.UTF_8)) { 68 | writer.write(swaggerJson); 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/test/java/org/gnuhpc/bigdata/service/avro/user.avsc: -------------------------------------------------------------------------------- 1 | {"namespace": "org.gnuhpc.bigdata.service.avro", 2 | "type": "record", 3 | "name": "User", 4 | "fields": [ 5 | {"name": "name", "type": "string"}, 6 | {"name": "favorite_number", "type": ["int", "null"]}, 7 | {"name": "favorite_color", "type": ["string", "null"]} 8 | ] 9 | } -------------------------------------------------------------------------------- /src/test/java/org/gnuhpc/bigdata/utils/KafkaStarterUtils.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.utils; 2 | 3 | import java.io.File; 4 | import java.util.Properties; 5 | import kafka.admin.TopicCommand; 6 | import kafka.server.KafkaConfig; 7 | import kafka.server.KafkaServerStartable; 8 | import kafka.zk.KafkaZkClient; 9 | import org.I0Itec.zkclient.ZkClient; 10 | import org.apache.kafka.common.errors.TopicExistsException; 11 | import org.apache.kafka.common.utils.Time; 12 | 13 | /** 14 | * Utilities to start Kafka during unit tests. 
15 | */ 16 | public class KafkaStarterUtils { 17 | 18 | public static final int DEFAULT_KAFKA_PORT = 19099; 19 | public static final int DEFAULT_BROKER_ID = 0; 20 | public static final String DEFAULT_ZK_STR = ZkStarter.DEFAULT_ZK_STR + "/kafka"; 21 | public static final String DEFAULT_KAFKA_BROKER = "localhost:" + DEFAULT_KAFKA_PORT; 22 | 23 | public static Properties getDefaultKafkaConfiguration() { 24 | return new Properties(); 25 | } 26 | 27 | public static KafkaServerStartable startServer(final int port, final int brokerId, 28 | final String zkStr, final Properties configuration) { 29 | // Create the ZK nodes for Kafka, if needed 30 | int indexOfFirstSlash = zkStr.indexOf('/'); 31 | if (indexOfFirstSlash != -1) { 32 | String bareZkUrl = zkStr.substring(0, indexOfFirstSlash); 33 | String zkNodePath = zkStr.substring(indexOfFirstSlash); 34 | ZkClient client = new ZkClient(bareZkUrl); 35 | client.createPersistent(zkNodePath, true); 36 | client.close(); 37 | } 38 | 39 | File logDir = new File("/tmp/kafka-" + Double.toHexString(Math.random())); 40 | logDir.mkdirs(); 41 | logDir.deleteOnExit(); 42 | 43 | configureKafkaPort(configuration, port); 44 | configureZkConnectionString(configuration, zkStr); 45 | configureBrokerId(configuration, brokerId); 46 | configureKafkaLogDirectory(configuration, logDir); 47 | KafkaConfig config = new KafkaConfig(configuration); 48 | 49 | KafkaServerStartable serverStartable = new KafkaServerStartable(config); 50 | serverStartable.startup(); 51 | 52 | return serverStartable; 53 | } 54 | 55 | public static void configureSegmentSizeBytes(Properties properties, int segmentSize) { 56 | properties.put("log.segment.bytes", Integer.toString(segmentSize)); 57 | } 58 | 59 | public static void configureLogRetentionSizeBytes(Properties properties, int logRetentionSizeBytes) { 60 | properties.put("log.retention.bytes", Integer.toString(logRetentionSizeBytes)); 61 | } 62 | 63 | public static void configureKafkaLogDirectory(Properties configuration, 
File logDir) { 64 | configuration.put("log.dirs", logDir.getAbsolutePath()); 65 | } 66 | 67 | public static void configureBrokerId(Properties configuration, int brokerId) { 68 | configuration.put("broker.id", Integer.toString(brokerId)); 69 | } 70 | 71 | public static void configureZkConnectionString(Properties configuration, String zkStr) { 72 | configuration.put("zookeeper.connect", zkStr); 73 | } 74 | 75 | public static void configureKafkaPort(Properties configuration, int port) { 76 | configuration.put("port", Integer.toString(port)); 77 | } 78 | 79 | public static void stopServer(KafkaServerStartable serverStartable) { 80 | serverStartable.shutdown(); 81 | } 82 | 83 | public static void createTopic(String kafkaTopic, String zkStr) { 84 | // TopicCommand.main() will call System.exit() finally, which will break maven-surefire-plugin 85 | try { 86 | String[] args = new String[]{"--create", "--zookeeper", zkStr, "--replication-factor", "1", 87 | "--partitions", "1", "--topic", kafkaTopic}; 88 | KafkaZkClient zkClient = KafkaZkClient.apply(zkStr, false, 30000, 30000, Integer.MAX_VALUE, Time.SYSTEM,"kafka.server", 89 | "SessionExpireListener"); 90 | TopicCommand.TopicCommandOptions opts = new TopicCommand.TopicCommandOptions(args); 91 | TopicCommand.createTopic(zkClient, opts); 92 | } catch (TopicExistsException e) { 93 | // Catch TopicExistsException otherwise it will break maven-surefire-plugin 94 | System.out.println("Topic already existed"); 95 | } 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/test/java/org/gnuhpc/bigdata/utils/ZkStarter.java: -------------------------------------------------------------------------------- 1 | package org.gnuhpc.bigdata.utils; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.net.ServerSocket; 6 | import java.net.Socket; 7 | import java.util.Random; 8 | import java.util.concurrent.TimeUnit; 9 | import lombok.extern.log4j.Log4j2; 10 | import 
org.I0Itec.zkclient.ZkClient; 11 | import org.apache.zookeeper.server.ZooKeeperServerMain; 12 | import org.apache.zookeeper.server.quorum.QuorumPeerConfig; 13 | 14 | @Log4j2 15 | public class ZkStarter { 16 | 17 | // private static final Logger LOGGER = LoggerFactory.getLogger(ZkStarter.class); 18 | public static final int DEFAULT_ZK_TEST_PORT = getAvailablePort(); 19 | public static final String DEFAULT_ZK_STR = "localhost:" + DEFAULT_ZK_TEST_PORT; 20 | 21 | private static PublicZooKeeperServerMain _zookeeperServerMain = null; 22 | private static String _zkDataDir = null; 23 | 24 | private static int getAvailablePort() { 25 | int port = 0; 26 | 27 | while (true) { 28 | try { 29 | port = new Random().nextInt(10000) + 10000; 30 | (new Socket("127.0.0.1", port)).close(); 31 | new ServerSocket(port).close(); 32 | // Successful connection means the port is taken. 33 | } catch (Exception e) { 34 | // Could not connect. 35 | break; 36 | } 37 | } 38 | 39 | return port; 40 | } 41 | 42 | /** 43 | * Silly class to make protected methods public. 
44 | */ 45 | static class PublicZooKeeperServerMain extends ZooKeeperServerMain { 46 | 47 | @Override 48 | public void initializeAndRun(String[] args) 49 | throws QuorumPeerConfig.ConfigException, IOException { 50 | super.initializeAndRun(args); 51 | } 52 | 53 | @Override 54 | public void shutdown() { 55 | super.shutdown(); 56 | } 57 | } 58 | 59 | /** 60 | * Starts an empty local Zk instance on the default port 61 | */ 62 | public static void startLocalZkServer() { 63 | // DEFAULT_ZK_TEST_PORT = new Random().nextInt(10000) + 10000; 64 | // DEFAULT_ZK_STR = "localhost:" + DEFAULT_ZK_TEST_PORT; 65 | try { 66 | startLocalZkServer(DEFAULT_ZK_TEST_PORT); 67 | } catch (Exception e) { 68 | log.error("Failed to start ZK: " + e); 69 | } 70 | } 71 | 72 | /** 73 | * Starts a local Zk instance with a generated empty data directory 74 | * 75 | * @param port The port to listen on 76 | */ 77 | public static void startLocalZkServer(final int port) { 78 | startLocalZkServer(port, org.apache.commons.io.FileUtils.getTempDirectoryPath() + File.separator 79 | + "test-" + System.currentTimeMillis()); 80 | } 81 | 82 | /** 83 | * Starts a local Zk instance 84 | * 85 | * @param port The port to listen on 86 | * @param dataDirPath The path for the Zk data directory 87 | */ 88 | public synchronized static void startLocalZkServer(final int port, final String dataDirPath) { 89 | if (_zookeeperServerMain != null) { 90 | throw new RuntimeException("Zookeeper server is already started!"); 91 | } 92 | 93 | // Start the local ZK server 94 | try { 95 | _zookeeperServerMain = new PublicZooKeeperServerMain(); 96 | log.info("Zookeeper data path - " + dataDirPath); 97 | _zkDataDir = dataDirPath; 98 | final String[] args = new String[]{ 99 | Integer.toString(port), dataDirPath 100 | }; 101 | new Thread() { 102 | @Override 103 | public void run() { 104 | try { 105 | _zookeeperServerMain.initializeAndRun(args); 106 | } catch (QuorumPeerConfig.ConfigException e) { 107 | log.warn("Caught exception while 
starting ZK", e); 108 | } catch (IOException e) { 109 | log.warn("Caught exception while starting ZK", e); 110 | } 111 | } 112 | }.start(); 113 | } catch (Exception e) { 114 | log.warn("Caught exception while starting ZK", e); 115 | throw new RuntimeException(e); 116 | } 117 | 118 | // Wait until the ZK server is started 119 | ZkClient client = new ZkClient("localhost:" + port, 10000); 120 | client.waitUntilConnected(10L, TimeUnit.SECONDS); 121 | client.close(); 122 | } 123 | 124 | /** 125 | * Stops a local Zk instance, deleting its data directory 126 | */ 127 | public static void stopLocalZkServer() { 128 | try { 129 | stopLocalZkServer(true); 130 | } catch (Exception e) { 131 | log.error("Failed to stop ZK: " + e); 132 | } 133 | } 134 | 135 | /** 136 | * Stops a local Zk instance. 137 | * 138 | * @param deleteDataDir Whether or not to delete the data directory 139 | */ 140 | public synchronized static void stopLocalZkServer(final boolean deleteDataDir) { 141 | if (_zookeeperServerMain != null) { 142 | try { 143 | // Shut down ZK 144 | _zookeeperServerMain.shutdown(); 145 | _zookeeperServerMain = null; 146 | 147 | // Delete the data dir 148 | if (deleteDataDir) { 149 | org.apache.commons.io.FileUtils.deleteDirectory(new File(_zkDataDir)); 150 | } 151 | } catch (Exception e) { 152 | log.warn("Caught exception while stopping ZK server", e); 153 | throw new RuntimeException(e); 154 | } 155 | } 156 | } 157 | } 158 | 159 | --------------------------------------------------------------------------------