├── .gitignore ├── LICENSE ├── README.md ├── pom.xml └── src └── main ├── assembly └── src.xml ├── bin ├── kafkaUI.bat └── kafkaUI.sh ├── java └── com │ └── jq │ └── kafkaui │ ├── KafkaUIApplication.java │ ├── conf │ ├── LogInterceptor.java │ ├── WebConfig.java │ ├── WebSocketConfig.java │ └── WebSocketServer.java │ ├── controller │ ├── KafkaController.java │ ├── RedisController.java │ └── ZookeeperController.java │ ├── dao │ ├── KafkaSourceDao.java │ ├── RedisSourceDao.java │ └── ZKSourceDao.java │ ├── domain │ ├── Auth.java │ ├── KafkaSource.java │ ├── RedisSource.java │ ├── Result.java │ ├── Topic.java │ └── ZKSource.java │ ├── dto │ ├── ResponseDto.java │ └── SourceInfo.java │ ├── service │ ├── KafkaService.java │ ├── RedisService.java │ └── ZKService.java │ └── util │ ├── IPUtil.java │ ├── KafkaUtil.java │ ├── RedisUtil.java │ └── ZKProcessor.java ├── resources ├── application.properties └── data.db ├── sql ├── ddl_mysql.sql └── ddl_sqlite.sql └── webapp ├── .browserslistrc ├── .gitignore ├── babel.config.js ├── package.json ├── public ├── favicon.ico └── index.html ├── src ├── App.vue ├── assets │ └── logo.png ├── components │ ├── HelloWorld.vue │ ├── about │ │ ├── auth │ │ │ ├── commonAuth.vue │ │ │ └── zkAuth.vue │ │ ├── authority.vue │ │ └── donate.vue │ ├── common │ │ ├── GroupTable.vue │ │ ├── dataTag.vue │ │ └── list.vue │ ├── kafka │ │ ├── config.vue │ │ ├── consumer.vue │ │ ├── kafkaSelect.vue │ │ ├── manage.vue │ │ ├── manage │ │ │ ├── cluster.vue │ │ │ ├── group.vue │ │ │ └── topic.vue │ │ ├── operate │ │ │ ├── consume.vue │ │ │ └── produce.vue │ │ └── producer.vue │ ├── redis │ │ ├── addKey.vue │ │ ├── config.vue │ │ └── manage.vue │ ├── tool │ │ ├── json.vue │ │ └── time.vue │ └── zk │ │ ├── config.vue │ │ └── manage.vue ├── i18n │ ├── i18n.js │ └── langs │ │ ├── cn.js │ │ ├── en.js │ │ └── index.js ├── js │ └── auth.js ├── main.js ├── plugins │ ├── element.js │ └── vxe.js ├── router │ └── index.js ├── store │ └── index.js ├── theme │ ├── fonts │ 
│ ├── element-icons.ttf │ │ └── element-icons.woff │ └── index.css └── views │ └── Home.vue └── vue.config.js /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | target/ 3 | *.iml 4 | logs/ 5 | src/main/resources/static -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # kafkaUI-lite 2 | 3 | ## 介绍 4 | 5 | - 史上最轻便好用的kafka ui界面客户端工具,可以在生产消息、消费消息、管理topic、管理group;可以支持管理多个kafka集群 6 | - 部署简便,可以一键启动,不需要配置数据库、不需要搭建web容器 7 | - 支持zookeeper ui界面化操作;支持多环境管理 8 | - 支持redis ui界面化操作;支持多环境管理 9 | - 支持权限控制,可以自定义不同环境的新增、修改、删除权限;默认分配只读权限,避免用户的误操作 10 | 11 | ## 官网地址 12 |

13 | 👉 https://kafkaui.51dbapi.com 👈 14 |

15 | 16 | ## 体验地址 17 | 18 | - 试用请点击[体验地址](https://support.51dbapi.com/#/kafkaui/demo) 19 | 20 | ## 软件截图 21 | 22 | ### kafka操作 23 | 24 | #### 查看所有kafka集群 25 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/kafka_config.png) 26 | 27 | #### 消费消息 28 | ![](https://freakchicken.gitee.io/images/kafkaui/20210605/consume.png) 29 | 30 | #### 生产消息 31 | ![](https://freakchicken.gitee.io/images/kafkaui/20210605/produce.png) 32 | 33 | #### 管理topic 34 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/topic_manage.png) 35 | 36 | #### 查看topic详情 37 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/topic_detail.png) 38 | 39 | #### 查看topic被消费的所有group 40 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/view_group.png) 41 | 42 | #### 管理集群 43 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/cluster_manage.png) 44 | 45 | #### 管理group 46 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/group_manage.png) 47 | 48 | #### 查看group消费偏移量详情 49 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/group_detail.png) 50 | 51 | #### 创建topic 52 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/add_topic.png) 53 | 54 | ### zookeeper操作 55 | #### 查看所有zk集群 56 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/zk_config.png) 57 | #### 添加zk集群 58 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/zk_env_add.png) 59 | 60 | #### 查看节点数据 61 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/zk_manage.png) 62 | #### 添加节点 63 | ![](https://freakchicken.gitee.io/images/kafkaui/20210522/zk_add_node.png) 64 | 65 | ### redis操作 66 | 67 | #### 添加redis环境 68 | 69 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_add_source_20210112143453.jpg) 70 | 71 | #### 查看所有redis环境 72 | 73 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_sources_20210112143546.jpg) 74 | 75 | #### 添加redis key 76 | 77 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_add_hash_20210112143738.jpg) 78 | 
![](https://freakchicken.gitee.io/images/kafkaui/redis_add_string_20210112143815.jpg) 79 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_add_list_20210112143759.jpg) 80 | 81 | #### 查看数据 82 | 83 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_string_detail_20210112143708.jpg) 84 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_set_detail_20210112143642.jpg) 85 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_hash_detail_20210112143618.jpg) 86 | 87 | #### 删除redis key 88 | 89 | ![](https://freakchicken.gitee.io/images/kafkaui/redis_delete_key_20210112143842.jpg) 90 | 91 | ### 权限控制 92 | 93 | ![](https://freakchicken.gitee.io/images/kafkaui/20210201/auth.jpg) 94 | 95 | 96 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | com.jq 8 | kafka-ui-lite 9 | 1.2.11 10 | 11 | 12 | freakchicken 13 | 14 | 15 | 16 | org.springframework.boot 17 | spring-boot-starter-parent 18 | 2.1.3.RELEASE 19 | 20 | 21 | 22 | 23 | 24 | tar 25 | 26 | 27 | 28 | org.apache.maven.plugins 29 | maven-jar-plugin 30 | 31 | 32 | **/*.xml 33 | **/*.properties 34 | **/*.db 35 | 36 | 37 | 38 | 39 | default-jar 40 | package 41 | 42 | jar 43 | 44 | 45 | 46 | 47 | 48 | 49 | maven-assembly-plugin 50 | 51 | 52 | src/main/assembly/src.xml 53 | 54 | 55 | 56 | 57 | make-assembly 58 | package 59 | 60 | single 61 | 62 | 63 | 64 | 65 | 66 | com.spotify 67 | docker-maven-plugin 68 | 1.2.2 69 | 70 | 71 | 72 | ${docker.image.prefix}/${project.artifactId} 73 | openjdk:8 74 | 75 | 76 | 77 | 78 | 79 | 80 | ${project.build.finalName}/bin 81 | ["sh", "kafkaUI.sh", "start"] 82 | 83 | 8889 84 | 85 | 86 | ${project.version} 87 | 88 | true 89 | 90 | 91 | / 92 | ${project.build.directory} 93 | 94 | ${project.build.finalName}-bin.tar.gz 95 | 96 | 97 | docker-hub 98 | https://index.docker.io/v1/ 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 
| 107 | 108 | singleJar 109 | 110 | true 111 | 112 | 113 | 114 | 115 | org.springframework.boot 116 | spring-boot-maven-plugin 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | org.springframework.boot 127 | spring-boot-starter-web 128 | 129 | 130 | 131 | org.springframework.boot 132 | spring-boot-starter-websocket 133 | 134 | 135 | 136 | org.apache.kafka 137 | kafka-clients 138 | 2.6.0 139 | 140 | 141 | 142 | org.xerial 143 | sqlite-jdbc 144 | 3.36.0.3 145 | 146 | 147 | org.mybatis.spring.boot 148 | mybatis-spring-boot-starter 149 | 1.3.1 150 | 151 | 152 | com.alibaba 153 | druid 154 | 1.1.9 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | org.projectlombok 165 | lombok 166 | true 167 | 168 | 169 | 170 | redis.clients 171 | jedis 172 | 2.9.0 173 | 174 | 175 | 176 | org.apache.curator 177 | curator-framework 178 | 2.12.0 179 | 180 | 181 | org.apache.curator 182 | curator-recipes 183 | 2.12.0 184 | 185 | 186 | com.alibaba 187 | fastjson 188 | 1.2.62 189 | 190 | 191 | commons-io 192 | commons-io 193 | 2.6 194 | 195 | 196 | mysql 197 | mysql-connector-java 198 | 5.1.34 199 | 200 | 201 | 202 | 203 | 204 | 205 | kafka-ui-lite-${project.version} 206 | 207 | 208 | 209 | maven-clean-plugin 210 | 3.1.0 211 | 212 | true 213 | 214 | 215 | ${basedir}/src/main/resources/static/ 216 | 217 | **/*.* 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | org.codehaus.mojo 227 | exec-maven-plugin 228 | 229 | 230 | 231 | exec-cnpm-install 232 | validate 233 | 234 | exec 235 | 236 | 237 | cnpm 238 | 239 | install 240 | 241 | ${basedir}/src/main/webapp 242 | 243 | 244 | 245 | 246 | exec-cnpm-run-build 247 | validate 248 | 249 | exec 250 | 251 | 252 | npm 253 | 254 | run 255 | build 256 | 257 | ${basedir}/src/main/webapp 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | org.apache.maven.plugins 266 | maven-antrun-plugin 267 | 3.0.0 268 | 269 | 270 | move-dist-to-static 271 | validate 272 | 273 | run 274 | 275 | 276 | 277 | 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 
| 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | 294 | 295 | 296 | 297 | 298 | 299 | -------------------------------------------------------------------------------- /src/main/assembly/src.xml: -------------------------------------------------------------------------------- 1 | 2 | bin 3 | 4 | tar.gz 5 | 6 | true 7 | ${project.build.finalName} 8 | 9 | 10 | true 11 | lib 12 | false 13 | 14 | 15 | 16 | 17 | ${project.basedir}/src/main/bin 18 | bin 19 | unix 20 | 744 21 | 22 | *.sh 23 | 24 | 25 | 26 | ${project.basedir}/src/main/bin 27 | bin 28 | windows 29 | 30 | *.bat 31 | 32 | 33 | 34 | ${project.basedir}/src/main/resources 35 | conf 36 | 37 | 38 | 39 | 40 | *.properties 41 | *.xml 42 | *.db 43 | 44 | 45 | 46 | 47 | ${project.basedir}/src/main/sql 48 | sql 49 | unix 50 | 644 51 | 52 | *.sql 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /src/main/bin/kafkaUI.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | set home=%~dp0 3 | 4 | set conf_dir=%home%..\conf 5 | set lib_dir=%home%..\lib\* 6 | set log_dir=%home%..\logs 7 | 8 | 9 | java -Dlogging.file=%log_dir%\kafkaUI-lite.log -classpath %conf_dir%;%lib_dir% com.jq.kafkaui.KafkaUIApplication 10 | pause -------------------------------------------------------------------------------- /src/main/bin/kafkaUI.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | bool=false 4 | 5 | while getopts d opt; 6 | do 7 | case $opt in 8 | d) echo "running in daemon" 9 | bool=true 10 | shift 11 | ;; 12 | ?) echo "$opt is an invalid option" 13 | ;; 14 | esac 15 | done 16 | 17 | 18 | BIN_DIR=$(dirname $0) 19 | BIN_DIR=$( 20 | cd "$BIN_DIR" 21 | pwd 22 | ) 23 | # 安装包部署的目录 24 | HOME=$BIN_DIR/.. 
25 | PID=$BIN_DIR/server.pid 26 | 27 | export CONF_DIR=$HOME/conf 28 | export LIB_JARS=$HOME/lib/* 29 | export LOG_DIR=$HOME/logs 30 | 31 | 32 | 33 | if [ $1 = "start" ]; then 34 | if [ "$bool" = "false" ]; then 35 | java -Dlogging.file=$LOG_DIR/kafkaUI-lite.log -classpath $CONF_DIR:$LIB_JARS com.jq.kafkaui.KafkaUIApplication 36 | else 37 | nohup java -Dlogging.file=$LOG_DIR/kafkaUI-lite.log -classpath $CONF_DIR:$LIB_JARS com.jq.kafkaui.KafkaUIApplication >/dev/null 2>&1 & 38 | echo $! >$PID 39 | fi 40 | elif [ $1 = "stop" ]; then 41 | TARGET_PID=$(cat $PID) 42 | kill $TARGET_PID 43 | 44 | else 45 | echo "parameter invalid" 46 | fi 47 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/KafkaUIApplication.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.scheduling.annotation.EnableAsync; 6 | 7 | /** 8 | * @program: kafkaUI 9 | * @description: 10 | * @author: jiangqiang 11 | * @create: 2020-10-26 18:03 12 | **/ 13 | @EnableAsync 14 | @SpringBootApplication 15 | public class KafkaUIApplication { 16 | public static void main(String[] args) { 17 | SpringApplication.run(KafkaUIApplication.class, args); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/conf/LogInterceptor.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.conf; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.jq.kafkaui.util.IPUtil; 5 | import lombok.extern.slf4j.Slf4j; 6 | import org.springframework.stereotype.Component; 7 | import org.springframework.web.servlet.HandlerInterceptor; 8 | import org.springframework.web.servlet.ModelAndView; 9 | 10 | import 
javax.servlet.http.HttpServletRequest; 11 | import javax.servlet.http.HttpServletResponse; 12 | import java.util.Map; 13 | 14 | /** 15 | * 日志拦截器 16 | * 17 | * @author jiangqiang 18 | * @date 2019年3月19日下午4:30:56 19 | */ 20 | @Component 21 | @Slf4j 22 | public class LogInterceptor implements HandlerInterceptor { 23 | 24 | @Override 25 | public void afterCompletion(HttpServletRequest arg0, HttpServletResponse arg1, Object arg2, Exception arg3) throws Exception { 26 | } 27 | 28 | @Override 29 | public void postHandle(HttpServletRequest arg0, HttpServletResponse arg1, Object arg2, ModelAndView arg3) throws Exception { 30 | 31 | } 32 | 33 | @Override 34 | public boolean preHandle(HttpServletRequest request, HttpServletResponse arg1, Object arg2) throws Exception { 35 | String servletPath = request.getServletPath(); 36 | Map map = request.getParameterMap(); 37 | 38 | log.info("url={};params={}", servletPath, JSON.toJSONString(map)); 39 | 40 | return true; 41 | } 42 | 43 | } -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/conf/WebConfig.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.conf; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.web.servlet.config.annotation.InterceptorRegistry; 6 | import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; 7 | 8 | /** 9 | * 添加日志拦截 10 | * @author jiangqiang 11 | * @date 2019年4月14日上午12:10:36 12 | */ 13 | @Configuration 14 | public class WebConfig implements WebMvcConfigurer { 15 | 16 | @Autowired 17 | private LogInterceptor logInterceptor; 18 | 19 | /** 20 | * 添加拦截器 21 | */ 22 | @Override 23 | public void addInterceptors(InterceptorRegistry registry) { 24 | registry.addInterceptor(logInterceptor).addPathPatterns("/**").excludePathPatterns("/health"); 25 | } 26 | 27 | 28 | } 
-------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/conf/WebSocketConfig.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.conf; 2 | 3 | import org.springframework.context.annotation.Bean; 4 | import org.springframework.context.annotation.Configuration; 5 | import org.springframework.web.socket.server.standard.ServerEndpointExporter; 6 | 7 | @Configuration 8 | public class WebSocketConfig { 9 | 10 | @Bean 11 | public ServerEndpointExporter serverEndpointExporter() { 12 | return new ServerEndpointExporter(); 13 | } 14 | 15 | } -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/conf/WebSocketServer.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.conf; 2 | 3 | import com.jq.kafkaui.dao.KafkaSourceDao; 4 | import com.jq.kafkaui.dto.SourceInfo; 5 | import com.jq.kafkaui.service.KafkaService; 6 | import com.jq.kafkaui.util.KafkaUtil; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.apache.kafka.clients.consumer.ConsumerRecord; 9 | import org.apache.kafka.clients.consumer.ConsumerRecords; 10 | import org.apache.kafka.clients.consumer.KafkaConsumer; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.stereotype.Component; 13 | 14 | import javax.websocket.*; 15 | import javax.websocket.server.PathParam; 16 | import javax.websocket.server.ServerEndpoint; 17 | import java.io.IOException; 18 | import java.time.Duration; 19 | import java.util.HashMap; 20 | import java.util.Map; 21 | import java.util.Optional; 22 | import java.util.concurrent.CopyOnWriteArraySet; 23 | 24 | @Component 25 | @ServerEndpoint("/push/websocket") 26 | @Slf4j 27 | public class WebSocketServer { 28 | 29 | // @Autowired 30 | // KafkaSourceDao kafkaSourceDao; 31 | 32 | //静态变量,用来记录当前在线连接数。应该把它设计成线程安全的。 33 | 
private static int onlineCount = 0; 34 | //concurrent包的线程安全Set,用来存放每个客户端对应的MyWebSocket对象。 35 | private static CopyOnWriteArraySet webSocketSet = new CopyOnWriteArraySet(); 36 | 37 | //与某个客户端的连接会话,需要通过它来给客户端发送数据 38 | private Session session; 39 | 40 | //接收sid 41 | private String sid = ""; 42 | 43 | Map params = new HashMap<>(); 44 | 45 | public static KafkaService kafkaService; 46 | 47 | @Autowired 48 | public void setKafkaService(KafkaService kafkaService) { 49 | WebSocketServer.kafkaService = kafkaService; 50 | } 51 | 52 | 53 | 54 | public WebSocketServer() { 55 | } 56 | 57 | /** 58 | * 连接建立成功调用的方法 59 | */ 60 | @OnOpen 61 | public void onOpen(Session session, @PathParam("sid") String sid) { 62 | this.session = session; 63 | webSocketSet.add(this); //加入set中 64 | addOnlineCount(); //在线数加1 65 | log.info("有新窗口开始监听:" + sid + ",当前在线人数为" + getOnlineCount()); 66 | this.sid = sid; 67 | 68 | String queryString = session.getQueryString(); 69 | log.info(queryString); 70 | String[] array = queryString.split("&"); 71 | for (String p : array) { 72 | String[] split = p.split("="); 73 | params.put(split[0], split[1]); 74 | } 75 | Integer sourceId = Optional.ofNullable(params.get("sourceId")).map(Integer::parseInt) 76 | .orElseThrow(() -> new RuntimeException("缺乏参数 sourceId 无法建立链接")); 77 | 78 | // int sourceId = Integer.parseInt(params.get("sourceId")); 79 | // System.out.println(kafkaSourceDao); 80 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 81 | consume(this.session, sourceInfo, params.get("topic"), params.get("group"), params.get("offset")); 82 | } 83 | 84 | public void consume(Session session, SourceInfo sourceInfo, String topic, String group, String offset) { 85 | 86 | new Thread(new Runnable() { 87 | @Override 88 | public void run() { 89 | KafkaConsumer consumer = KafkaUtil.getConsumer(sourceInfo, topic, group, offset); 90 | while (session.isOpen()) { 91 | ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); 92 | for (ConsumerRecord record : 
records) { 93 | try { 94 | session.getBasicRemote().sendText(record.value()); 95 | } catch (IOException e) { 96 | e.printStackTrace(); 97 | } 98 | } 99 | } 100 | consumer.close(); 101 | log.info("kafka consumer closed"); 102 | 103 | } 104 | }).start(); 105 | 106 | } 107 | 108 | /** 109 | * 连接关闭调用的方法 110 | */ 111 | @OnClose 112 | public void onClose() { 113 | webSocketSet.remove(this); //从set中删除 114 | subOnlineCount(); //在线数减1 115 | log.info("有一连接关闭!当前在线人数为" + getOnlineCount()); 116 | } 117 | 118 | /** 119 | * 收到客户端消息后调用的方法 120 | * 121 | * @param message 客户端发送过来的消息 122 | */ 123 | @OnMessage 124 | public void onMessage(String message, Session session) { 125 | //log.info("收到来自窗口"+sid+"的信息:"+message); 126 | if ("heart".equals(message)) { 127 | try { 128 | sendMessage("heartOk"); 129 | } catch (IOException e) { 130 | e.printStackTrace(); 131 | } 132 | } 133 | } 134 | 135 | /** 136 | * @param session 137 | * @param error 138 | */ 139 | @OnError 140 | public void onError(Session session, Throwable error) { 141 | log.error("发生错误"); 142 | error.printStackTrace(); 143 | } 144 | 145 | /** 146 | * 实现服务器主动推送 147 | */ 148 | public void sendMessage(String message) throws IOException { 149 | this.session.getBasicRemote().sendText(message); 150 | } 151 | 152 | /** 153 | * 群发自定义消息 154 | */ 155 | public static void sendInfo(String message) throws IOException { 156 | 157 | for (WebSocketServer item : webSocketSet) { 158 | try { 159 | //这里可以设定只推送给这个sid的,为null则全部推送 160 | // if(sid==null) { 161 | 162 | item.sendMessage(message); 163 | log.info("推送消息到窗口" + item.sid + ",推送内容:" + message); 164 | // }else if(item.sid.equals(sid)){ 165 | // item.sendMessage(message); 166 | // } 167 | } catch (IOException e) { 168 | continue; 169 | } 170 | } 171 | } 172 | 173 | public static synchronized int getOnlineCount() { 174 | return onlineCount; 175 | } 176 | 177 | public static synchronized void addOnlineCount() { 178 | WebSocketServer.onlineCount++; 179 | } 180 | 181 | public static synchronized void 
subOnlineCount() { 182 | WebSocketServer.onlineCount--; 183 | } 184 | 185 | } -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/controller/KafkaController.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.controller; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import com.jq.kafkaui.domain.KafkaSource; 5 | import com.jq.kafkaui.dto.ResponseDto; 6 | import com.jq.kafkaui.dto.SourceInfo; 7 | import com.jq.kafkaui.service.KafkaService; 8 | import com.jq.kafkaui.util.KafkaUtil; 9 | import org.apache.kafka.clients.producer.Producer; 10 | import org.apache.kafka.clients.producer.ProducerRecord; 11 | import org.apache.kafka.clients.producer.RecordMetadata; 12 | import org.springframework.beans.factory.annotation.Autowired; 13 | import org.springframework.web.bind.annotation.PathVariable; 14 | import org.springframework.web.bind.annotation.RequestMapping; 15 | import org.springframework.web.bind.annotation.RequestParam; 16 | import org.springframework.web.bind.annotation.RestController; 17 | 18 | import javax.servlet.http.HttpServletRequest; 19 | import java.util.List; 20 | import java.util.Optional; 21 | import java.util.concurrent.ExecutionException; 22 | import java.util.concurrent.Future; 23 | 24 | /** 25 | * @program: kafkaUI 26 | * @description: 27 | * @author: jiangqiang 28 | * @create: 2020-10-28 20:06 29 | **/ 30 | @RestController 31 | @RequestMapping("/kafka") 32 | public class KafkaController { 33 | 34 | @Autowired 35 | KafkaService kafkaService; 36 | 37 | @RequestMapping("/getTopics") 38 | public ResponseDto getTopics(Integer sourceId) { 39 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 40 | return KafkaUtil.listTopicsWithOptions(sourceInfo, null); 41 | } 42 | 43 | @RequestMapping("/getIp") 44 | public String getIpAndPort(HttpServletRequest request) { 45 | //获取浏览器访问地址中的ip和端口,防止容器运行时候产生问题 46 | return 
request.getServerName() + ":" + request.getServerPort(); 47 | } 48 | 49 | @RequestMapping("/getSource") 50 | public List getAllSource() { 51 | return kafkaService.getAllSource(); 52 | } 53 | 54 | @RequestMapping("/getAllSourceAuth") 55 | public List getSourceAuth() { 56 | return kafkaService.getAllSourceAuth(); 57 | } 58 | 59 | @RequestMapping("/deleteSource/{id}") 60 | public String deleteSource(@PathVariable Integer id) { 61 | kafkaService.deleteSource(id); 62 | return "success"; 63 | } 64 | 65 | @RequestMapping("/add") 66 | public String addSource(KafkaSource source) { 67 | kafkaService.add(source); 68 | return "success"; 69 | } 70 | 71 | @RequestMapping("/getBroker") 72 | public String getBroker(Integer sourceId) { 73 | return Optional.ofNullable(kafkaService.getSourceInfo(sourceId)) 74 | .map(SourceInfo::getBroker).orElse(null); 75 | } 76 | 77 | @RequestMapping("/createTopic") 78 | public String createTopic(Integer sourceId, String name, 79 | @RequestParam(defaultValue = "1") Integer partition, 80 | @RequestParam(defaultValue = "1") Integer replica) throws Exception { 81 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 82 | KafkaUtil.createTopic(sourceInfo, name, partition, replica); 83 | return "success"; 84 | 85 | } 86 | 87 | @RequestMapping("/deleteTopic") 88 | public boolean deleteTopic(Integer sourceId, String topic) { 89 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 90 | KafkaUtil.deleteTopic(sourceInfo, topic); 91 | return true; 92 | 93 | } 94 | 95 | @RequestMapping("/searchTopic") 96 | public ResponseDto searchTopic(Integer sourceId, String topic) { 97 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 98 | ResponseDto responseDto = KafkaUtil.listTopicsWithOptions(sourceInfo, topic); 99 | return responseDto; 100 | 101 | } 102 | 103 | @RequestMapping("/getTopicDetail") 104 | public JSONObject getTopicDetail(Integer sourceId, String topic) throws Exception { 105 | SourceInfo sourceInfo = 
kafkaService.getSourceInfo(sourceId); 106 | return KafkaUtil.getTopicDetail(sourceInfo, topic); 107 | } 108 | 109 | @RequestMapping("/produce") 110 | public String produce(Integer sourceId, String topic, String message, Boolean batch) throws ExecutionException, InterruptedException { 111 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 112 | Producer producer = KafkaUtil.getProducer(sourceInfo); 113 | if (batch) { 114 | String[] messages = message.split("\n"); 115 | for (String ms : messages) { 116 | Future send = producer.send(new ProducerRecord<>(topic, ms)); 117 | send.get(); 118 | } 119 | } else { 120 | Future send = producer.send(new ProducerRecord<>(topic, message)); 121 | send.get(); 122 | } 123 | producer.close(); 124 | return "success"; 125 | } 126 | 127 | @RequestMapping("/cluster/info") 128 | public ResponseDto getClusterInfo(Integer sourceId) { 129 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 130 | return KafkaUtil.clusterInfo(sourceInfo); 131 | } 132 | 133 | @RequestMapping("/group/all") 134 | public ResponseDto getAllGroups(Integer sourceId) { 135 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 136 | ResponseDto allGroups = KafkaUtil.getAllGroups(sourceInfo, null); 137 | return allGroups; 138 | } 139 | 140 | @RequestMapping("/group/search") 141 | public ResponseDto getAllGroups(Integer sourceId, String keyword) { 142 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 143 | ResponseDto allGroups = KafkaUtil.getAllGroups(sourceInfo, keyword); 144 | return allGroups; 145 | } 146 | 147 | @RequestMapping("/group/detail") 148 | public ResponseDto getGroupDetail(Integer sourceId, String group) { 149 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 150 | ResponseDto groupInfo = KafkaUtil.getGroupInfo(sourceInfo, group); 151 | return groupInfo; 152 | } 153 | 154 | @RequestMapping("/group/delete") 155 | public ResponseDto deleteGroup(Integer sourceId, String group) { 156 | SourceInfo 
sourceInfo = kafkaService.getSourceInfo(sourceId); 157 | return KafkaUtil.deleteGroup(sourceInfo, group); 158 | } 159 | 160 | @RequestMapping("/auth") 161 | public void auth(String param) throws Exception { 162 | kafkaService.auth(param); 163 | } 164 | 165 | 166 | @RequestMapping("/getGroupsByTopic") 167 | public ResponseDto getGroupByTopic(Integer sourceId, String topic) { 168 | SourceInfo sourceInfo = kafkaService.getSourceInfo(sourceId); 169 | return KafkaUtil.getGroupByTopic(sourceInfo, topic); 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/controller/RedisController.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.controller; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import com.jq.kafkaui.domain.RedisSource; 5 | import com.jq.kafkaui.domain.Result; 6 | import com.jq.kafkaui.service.RedisService; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.springframework.beans.factory.annotation.Autowired; 9 | import org.springframework.web.bind.annotation.PathVariable; 10 | import org.springframework.web.bind.annotation.RequestMapping; 11 | import org.springframework.web.bind.annotation.RestController; 12 | 13 | import java.util.List; 14 | import java.util.Set; 15 | 16 | /** 17 | * @program: kafkaUI 18 | * @description: 19 | * @author: jiangqiang 20 | * @create: 2020-11-12 17:38 21 | **/ 22 | @Slf4j 23 | @RestController 24 | @RequestMapping("/redis") 25 | public class RedisController { 26 | 27 | @Autowired 28 | RedisService redisService; 29 | 30 | @RequestMapping("/getAllSource") 31 | public List getAllSource() { 32 | return redisService.getAllSource(); 33 | } 34 | 35 | @RequestMapping("/getAllSourceAuth") 36 | public List getAllSourceAuth() { 37 | return redisService.getAllSourceAuth(); 38 | } 39 | 40 | @RequestMapping("/deleteSource/{id}") 41 | public String deleteSource(@PathVariable Integer id) { 42 | 
redisService.deleteSource(id); 43 | return "success"; 44 | } 45 | 46 | @RequestMapping("/add") 47 | public String addSource(RedisSource source) { 48 | redisService.addSource(source); 49 | return "success"; 50 | } 51 | 52 | @RequestMapping("/getAllKeys") 53 | public Set getAllKeys(Integer sourceId, Integer db) { 54 | return redisService.getAllKeys(sourceId, db); 55 | } 56 | 57 | @RequestMapping("/getData") 58 | public JSONObject getData(Integer sourceId, Integer db, String key) { 59 | JSONObject data = redisService.getData(sourceId, db, key); 60 | return data; 61 | } 62 | 63 | @RequestMapping("/connect") 64 | public boolean connect(RedisSource source) { 65 | boolean connect = redisService.connect(source); 66 | return connect; 67 | } 68 | 69 | @RequestMapping("/addKey") 70 | public Result addKey(Integer sourceId, Integer db, String key, String type, String value) { 71 | try { 72 | 73 | Result result = redisService.addKey(sourceId, db, key, type, value); 74 | return result; 75 | } catch (Exception e) { 76 | log.error(e.getMessage(), e); 77 | return Result.fail(e.getMessage()); 78 | } 79 | } 80 | 81 | @RequestMapping("/deleteKey") 82 | public void deleteKey(Integer sourceId, Integer db, String key) { 83 | redisService.deleteKey(sourceId, db, key); 84 | } 85 | 86 | @RequestMapping("/auth") 87 | public void auth(String param) throws Exception { 88 | redisService.auth(param); 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/controller/ZookeeperController.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.controller; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import com.jq.kafkaui.domain.ZKSource; 5 | import com.jq.kafkaui.service.ZKService; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.web.bind.annotation.PathVariable; 8 | import 
org.springframework.web.bind.annotation.RequestMapping; 9 | import org.springframework.web.bind.annotation.RestController; 10 | 11 | import java.util.List; 12 | 13 | /** 14 | * @program: kafkaUI 15 | * @description: 16 | * @author: jiangqiang 17 | * @create: 2020-11-13 15:26 18 | **/ 19 | @RestController 20 | @RequestMapping("/zookeeper") 21 | public class ZookeeperController { 22 | 23 | @Autowired 24 | ZKService zkService; 25 | 26 | @RequestMapping("/getAllSource") 27 | public List getAllSource() { 28 | return zkService.getAllSource(); 29 | } 30 | 31 | @RequestMapping("/getAllSourceAuth") 32 | public List getAllSourceAuth() { 33 | return zkService.getAllSourceAuth(); 34 | } 35 | 36 | @RequestMapping("/deleteSource/{id}") 37 | public String deleteSource(@PathVariable Integer id) { 38 | zkService.deleteSource(id); 39 | return "success"; 40 | } 41 | 42 | @RequestMapping("/add") 43 | public String addSource(ZKSource source) { 44 | zkService.addSource(source); 45 | return "success"; 46 | } 47 | 48 | @RequestMapping("/getAllNodes") 49 | public List getAllNodes(Integer sourceId) { 50 | String address = zkService.getAddressById(sourceId); 51 | return zkService.getAllNodes(address); 52 | } 53 | 54 | @RequestMapping("/getRootNodes") 55 | public List getRootNodes(Integer sourceId) throws Exception { 56 | String address = zkService.getAddressById(sourceId); 57 | return zkService.getRootNodes(address); 58 | } 59 | 60 | @RequestMapping("/getNodes") 61 | public List getNodes(Integer sourceId, String path) throws Exception { 62 | String address = zkService.getAddressById(sourceId); 63 | return zkService.getNodes(address, path); 64 | } 65 | 66 | @RequestMapping("/getData") 67 | public String getData(Integer sourceId, String path) { 68 | 69 | String address = zkService.getAddressById(sourceId); 70 | return zkService.getData(address, path); 71 | } 72 | 73 | @RequestMapping("/setData") 74 | public boolean setData(Integer sourceId, String path, String data) throws Exception { 75 | 
String address = zkService.getAddressById(sourceId); 76 | zkService.setData(address, path, data); 77 | return true; 78 | } 79 | 80 | @RequestMapping("/createNode") 81 | public boolean createNode(Integer sourceId, String path, String data, Boolean recursion) throws Exception { 82 | String address = zkService.getAddressById(sourceId); 83 | zkService.createNode(address, path, data, recursion); 84 | return true; 85 | } 86 | 87 | @RequestMapping("/removeNode") 88 | public boolean createNode(Integer sourceId, String path) throws Exception { 89 | String address = zkService.getAddressById(sourceId); 90 | zkService.removeNode(address, path); 91 | return true; 92 | } 93 | 94 | @RequestMapping("/connect") 95 | public boolean connect(String address) { 96 | // String address = zkService.getAddressById(sourceId); 97 | boolean connect = zkService.connect(address); 98 | return connect; 99 | } 100 | 101 | @RequestMapping("/auth") 102 | public void auth(String param) throws Exception { 103 | zkService.auth(param); 104 | } 105 | 106 | } 107 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/dao/KafkaSourceDao.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.dao; 2 | 3 | import com.jq.kafkaui.domain.Auth; 4 | import com.jq.kafkaui.domain.KafkaSource; 5 | import com.jq.kafkaui.dto.SourceInfo; 6 | import org.apache.ibatis.annotations.*; 7 | 8 | import java.util.List; 9 | 10 | @Mapper 11 | public interface KafkaSourceDao { 12 | 13 | @Select("SELECT * FROM source") 14 | @Results({@Result(property = "name", column = "name"), 15 | @Result(property = "source", column = "source"), 16 | @Result(property = "id", column = "id")}) 17 | List getAll(); 18 | 19 | @Insert("insert into source (name, broker, username, password) values (#{name}, #{broker}, #{username}, #{password})") 20 | @Options(useGeneratedKeys = true, keyProperty = "id", keyColumn = "id") 21 | void 
insert(KafkaSource source); 22 | 23 | @Delete("delete from source where id = #{id}") 24 | void delete(Integer id); 25 | 26 | @Delete("delete from kafka_auth where source_id = #{sourceId}") 27 | int deleteAuth(Integer sourceId); 28 | 29 | @Insert("insert into kafka_auth (source_id,add_auth,update_auth,remove_auth) values(#{sourceId},#{add},#{update}, #{remove})") 30 | void insertAuth(Integer sourceId, Integer add, Integer update, Integer remove); 31 | 32 | @Select("select add_auth,update_auth,remove_auth from kafka_auth where source_id = #{sourceId}") 33 | Auth getAuthBySource(Integer sourceId); 34 | 35 | @Update({"update kafka_auth set add_auth=#{add}, update_auth=#{update}, remove_auth=#{remove} where source_id = #{id}"}) 36 | int updateAuth(int id, int add, int update, int remove); 37 | 38 | @Select("SELECT broker,username,password FROM source where id=#{sourceId}") 39 | SourceInfo selectById(Integer sourceId); 40 | } 41 | 42 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/dao/RedisSourceDao.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.dao; 2 | 3 | import com.jq.kafkaui.domain.Auth; 4 | import com.jq.kafkaui.domain.RedisSource; 5 | import org.apache.ibatis.annotations.*; 6 | 7 | import java.util.List; 8 | 9 | /** 10 | * @program: kafkaUI 11 | * @description: 12 | * @author: jiangqiang 13 | * @create: 2020-11-12 17:42 14 | **/ 15 | @Mapper 16 | public interface RedisSourceDao { 17 | 18 | @Select("SELECT * FROM redis_source") 19 | @Results({@Result(property = "name", column = "name"), 20 | @Result(property = "ip", column = "ip"), 21 | @Result(property = "port", column = "port"), 22 | @Result(property = "password", column = "password"), 23 | @Result(property = "id", column = "id")}) 24 | List getAll(); 25 | 26 | @Select("SELECT * FROM redis_source where id = #{id}") 27 | RedisSource selectById(Integer id); 28 | 29 | @Insert({"insert 
into redis_source (name, ip,port,password) values (#{name}, #{ip}, #{port}, #{password})"}) 30 | @Options(useGeneratedKeys = true, keyProperty = "id", keyColumn = "id") 31 | void insert(RedisSource source); 32 | 33 | @Delete("delete from redis_source where id = #{id}") 34 | void delete(Integer id); 35 | 36 | 37 | @Delete("delete from redis_auth where source_id = #{sourceId}") 38 | int deleteAuth(Integer sourceId); 39 | 40 | @Insert("insert into redis_auth (source_id,add_auth,update_auth,remove_auth) values(#{sourceId},#{add},#{update}, #{remove})") 41 | void insertAuth(Integer sourceId, Integer add, Integer update, Integer remove); 42 | 43 | @Select("select add_auth,update_auth,remove_auth from redis_auth where source_id = #{sourceId}") 44 | Auth getAuthBySource(Integer sourceId); 45 | 46 | @Update({"update redis_auth set add_auth=#{add}, update_auth=#{update}, remove_auth=#{remove} where source_id = #{id}"}) 47 | int updateAuth(int id, int add, int update, int remove); 48 | 49 | } 50 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/dao/ZKSourceDao.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.dao; 2 | 3 | import com.jq.kafkaui.domain.Auth; 4 | import com.jq.kafkaui.domain.ZKSource; 5 | import org.apache.ibatis.annotations.*; 6 | 7 | import java.util.List; 8 | 9 | /** 10 | * @program: kafkaUI 11 | * @description: 12 | * @author: jiangqiang 13 | * @create: 2020-11-13 15:28 14 | **/ 15 | @Mapper 16 | public interface ZKSourceDao { 17 | 18 | @Results({@Result(property = "name", column = "name"), 19 | @Result(property = "address", column = "address"), 20 | @Result(property = "id", column = "id")}) 21 | 22 | @Select("SELECT * FROM zookeeper_source") 23 | List getAll(); 24 | 25 | @Insert({"insert into zookeeper_source (name, address) values (#{name}, #{address})"}) 26 | @Options(useGeneratedKeys = true, keyProperty = "id", keyColumn = "id") 
27 | void insert(ZKSource source); 28 | 29 | @Delete("delete from zookeeper_source where id = #{id}") 30 | void delete(Integer id); 31 | 32 | @Select("SELECT address FROM zookeeper_source where id = #{id}") 33 | String getAddress(Integer id); 34 | 35 | @Delete("delete from zookeeper_auth where source_id = #{sourceId}") 36 | int deleteAuth(Integer sourceId); 37 | 38 | @Insert("insert into zookeeper_auth (source_id,add_auth,update_auth,remove_auth) values(#{sourceId},#{add},#{update}, #{remove})") 39 | void insertAuth(Integer sourceId, Integer add, Integer update, Integer remove); 40 | 41 | @Select("select add_auth,update_auth,remove_auth from zookeeper_auth where source_id = #{sourceId}") 42 | Auth getAuthBySource(Integer sourceId); 43 | 44 | @Update({"update zookeeper_auth set add_auth=#{add}, update_auth=#{update}, remove_auth=#{remove} where source_id = #{id}"}) 45 | int updateAuth(int id, int add, int update, int remove); 46 | 47 | // @Select("SELECT broker FROM source where id=#{sourceId}") 48 | // String selectById(Integer sourceId); 49 | } 50 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/Auth.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @program: kafkaUI 7 | * @description: 8 | * @author: jiangqiang 9 | * @create: 2021-01-15 17:05 10 | **/ 11 | @Data 12 | public class Auth { 13 | Integer add_auth; 14 | Integer update_auth; 15 | Integer remove_auth; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/KafkaSource.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import lombok.Data; 5 | 6 | @Data 7 | public class KafkaSource { 8 | Integer id; 9 | String name; 10 | String 
broker; 11 | String password; 12 | String username; 13 | 14 | JSONObject auth; 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/RedisSource.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import lombok.Data; 5 | 6 | /** 7 | * @program: kafkaUI 8 | * @description: 9 | * @author: jiangqiang 10 | * @create: 2020-11-12 17:40 11 | **/ 12 | @Data 13 | public class RedisSource { 14 | 15 | Integer id; 16 | String name; 17 | String ip; 18 | Integer port; 19 | Integer db; 20 | String password; 21 | 22 | JSONObject auth; 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/Result.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @program: kafkaUI 7 | * @description: 8 | * @author: jiangqiang 9 | * @create: 2021-01-12 14:15 10 | **/ 11 | @Data 12 | public class Result { 13 | boolean success; 14 | Object data; 15 | String message; 16 | 17 | public static Result fail(String message) { 18 | Result result = new Result(); 19 | result.setSuccess(false); 20 | result.setMessage(message); 21 | return result; 22 | } 23 | 24 | public static Result success(String message) { 25 | Result result = new Result(); 26 | result.setSuccess(true); 27 | result.setMessage(message); 28 | return result; 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/Topic.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @program: kafkaUI 7 | * @description: 8 | * @author: jiangqiang 9 | * @create: 2020-10-30 10:02 
10 | **/ 11 | @Data 12 | public class Topic { 13 | 14 | String name; 15 | boolean isInternal; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/domain/ZKSource.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.domain; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import lombok.Data; 5 | 6 | /** 7 | * @program: kafkaUI 8 | * @description: 9 | * @author: jiangqiang 10 | * @create: 2020-11-13 15:27 11 | **/ 12 | @Data 13 | public class ZKSource { 14 | Integer id; 15 | String name; 16 | String address; 17 | 18 | JSONObject auth; 19 | } 20 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/dto/ResponseDto.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.dto; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class ResponseDto { 7 | boolean success; 8 | String message; 9 | Object data; 10 | 11 | public static ResponseDto fail(String message) { 12 | ResponseDto responseDto = new ResponseDto(); 13 | responseDto.setSuccess(false); 14 | responseDto.setMessage(message); 15 | return responseDto; 16 | } 17 | 18 | public static ResponseDto success(String message, Object data) { 19 | ResponseDto responseDto = new ResponseDto(); 20 | responseDto.setSuccess(true); 21 | responseDto.setData(data); 22 | responseDto.setMessage(message); 23 | return responseDto; 24 | } 25 | 26 | public static ResponseDto success(Object data) { 27 | ResponseDto responseDto = new ResponseDto(); 28 | responseDto.setSuccess(true); 29 | responseDto.setData(data); 30 | return responseDto; 31 | } 32 | 33 | public static ResponseDto success() { 34 | ResponseDto responseDto = new ResponseDto(); 35 | responseDto.setSuccess(true); 36 | return responseDto; 37 | } 38 | } 39 | 
-------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/dto/SourceInfo.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.dto; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class SourceInfo { 7 | 8 | String broker; 9 | String userName; 10 | String password; 11 | 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/service/KafkaService.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.service; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.alibaba.fastjson.JSONObject; 5 | import com.jq.kafkaui.dao.KafkaSourceDao; 6 | import com.jq.kafkaui.domain.Auth; 7 | import com.jq.kafkaui.domain.KafkaSource; 8 | import com.jq.kafkaui.dto.SourceInfo; 9 | import com.jq.kafkaui.util.IPUtil; 10 | import lombok.extern.slf4j.Slf4j; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.beans.factory.annotation.Value; 13 | import org.springframework.stereotype.Service; 14 | import org.springframework.transaction.annotation.Transactional; 15 | 16 | import javax.servlet.http.HttpServletRequest; 17 | import java.util.List; 18 | import java.util.Set; 19 | 20 | @Service 21 | @Slf4j 22 | public class KafkaService { 23 | @Value("${server.port}") 24 | String port; 25 | 26 | @Autowired 27 | KafkaSourceDao kafkaSourceDao; 28 | 29 | public List getAllSource() { 30 | return kafkaSourceDao.getAll(); 31 | } 32 | 33 | public List getAllSourceAuth() { 34 | List list = kafkaSourceDao.getAll(); 35 | list.stream().forEach(t -> { 36 | Auth auth = kafkaSourceDao.getAuthBySource(t.getId()); 37 | JSONObject authO = new JSONObject(); 38 | authO.put("add", auth.getAdd_auth().intValue() == 1 ? true : false); 39 | authO.put("update", auth.getUpdate_auth().intValue() == 1 ? 
true : false); 40 | authO.put("remove", auth.getRemove_auth().intValue() == 1 ? true : false); 41 | t.setAuth(authO); 42 | }); 43 | return list; 44 | 45 | } 46 | 47 | // public String getIpAndPort(HttpServletRequest request) { 48 | // // 通过命令行读取host参数 java -Dhost=192.168.33.201 -jar kafkaUI.jar 49 | // String ip = System.getProperty("host"); 50 | // if (ip == null) 51 | //// ip = IPUtil.getIpAddress(); 52 | // ip = request.getServerName(); 53 | // return ip + ":" + port; 54 | // 55 | // } 56 | @Transactional 57 | public void add(KafkaSource source) { 58 | kafkaSourceDao.insert(source); 59 | 60 | kafkaSourceDao.insertAuth(source.getId(), 0, 0, 0); 61 | } 62 | 63 | @Transactional 64 | public void deleteSource(Integer id) { 65 | kafkaSourceDao.delete(id); 66 | kafkaSourceDao.deleteAuth(id); 67 | } 68 | 69 | @Transactional 70 | public void auth(String param) { 71 | JSONObject jo = JSON.parseObject(param); 72 | Set keys = jo.keySet(); 73 | keys.stream().forEach(key -> { 74 | JSONObject auth = jo.getJSONObject(key); 75 | int add = auth.getBoolean("add") ? 1 : 0; 76 | int update = auth.getBoolean("update") ? 1 : 0; 77 | int remove = auth.getBoolean("remove") ? 
1 : 0; 78 | int i = kafkaSourceDao.updateAuth(Integer.parseInt(key), add, update, remove); 79 | }); 80 | 81 | } 82 | 83 | public SourceInfo getSourceInfo(Integer sourceId) { 84 | return kafkaSourceDao.selectById(sourceId); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/service/RedisService.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.service; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.alibaba.fastjson.JSONObject; 5 | import com.jq.kafkaui.dao.RedisSourceDao; 6 | import com.jq.kafkaui.domain.Auth; 7 | import com.jq.kafkaui.domain.RedisSource; 8 | import com.jq.kafkaui.domain.Result; 9 | import com.jq.kafkaui.domain.ZKSource; 10 | import com.jq.kafkaui.util.RedisUtil; 11 | import lombok.extern.slf4j.Slf4j; 12 | import org.springframework.beans.factory.annotation.Autowired; 13 | import org.springframework.stereotype.Service; 14 | import org.springframework.transaction.annotation.Transactional; 15 | import redis.clients.jedis.Jedis; 16 | 17 | import java.util.HashMap; 18 | import java.util.List; 19 | import java.util.Map; 20 | import java.util.Set; 21 | import java.util.stream.Collectors; 22 | 23 | /** 24 | * @program: kafkaUI 25 | * @description: 26 | * @author: jiangqiang 27 | * @create: 2020-11-12 17:39 28 | **/ 29 | @Service 30 | @Slf4j 31 | public class RedisService { 32 | 33 | @Autowired 34 | RedisSourceDao sourceDao; 35 | 36 | public void addSource(RedisSource source) { 37 | sourceDao.insert(source); 38 | sourceDao.insertAuth(source.getId(), 0, 0, 0); 39 | } 40 | 41 | public void deleteSource(Integer id) { 42 | sourceDao.delete(id); 43 | sourceDao.deleteAuth(id); 44 | } 45 | 46 | public List getAllSource() { 47 | return sourceDao.getAll(); 48 | } 49 | 50 | public Set getAllKeys(Integer sourceId, int db) { 51 | RedisSource redisSource = sourceDao.selectById(sourceId); 52 | 53 | RedisUtil redisPool = 
new RedisUtil(); 54 | Jedis client = redisPool.getClient(redisSource.getIp(), redisSource.getPort(), redisSource.getPassword(), db); 55 | Set allKeys = redisPool.getAllKeys(client); 56 | client.close(); 57 | return allKeys; 58 | } 59 | 60 | public JSONObject getData(Integer sourceId, Integer db, String key) { 61 | JSONObject jo = new JSONObject(); 62 | 63 | RedisSource redisSource = sourceDao.selectById(sourceId); 64 | 65 | RedisUtil redisUtil = new RedisUtil(); 66 | Jedis jedis = redisUtil.getClient(redisSource.getIp(), redisSource.getPort(), redisSource.getPassword(), db); 67 | String type = jedis.type(key); 68 | jo.put("type", type); 69 | 70 | if (type.equalsIgnoreCase("string")) { 71 | String data = jedis.get(key); 72 | jo.put("value", data); 73 | 74 | } else if (type.equalsIgnoreCase("hash")) { 75 | Map data = jedis.hgetAll(key); 76 | List collect = data.keySet().stream().map(t -> { 77 | JSONObject object = new JSONObject(); 78 | object.put("key", t); 79 | object.put("value", data.get(t)); 80 | return object; 81 | }).collect(Collectors.toList()); 82 | jo.put("value", collect); 83 | 84 | } else if (type.equalsIgnoreCase("list")) { 85 | List data = redisUtil.getList(jedis, key); 86 | List list = data.stream().map(t -> { 87 | JSONObject oo = new JSONObject(); 88 | oo.put("value", t); 89 | return oo; 90 | }).collect(Collectors.toList()); 91 | jo.put("value", list); 92 | 93 | } else if (type.equalsIgnoreCase("set")) { 94 | Set data = redisUtil.getSet(jedis, key); 95 | List list = data.stream().map(t -> { 96 | JSONObject oo = new JSONObject(); 97 | oo.put("value", t); 98 | return oo; 99 | }).collect(Collectors.toList()); 100 | jo.put("value", list); 101 | 102 | } 103 | redisUtil.closeConnction(jedis); 104 | return jo; 105 | 106 | } 107 | 108 | public boolean connect(RedisSource redisSource) { 109 | RedisUtil redisUtil = new RedisUtil(); 110 | try { 111 | 112 | Jedis jedis = redisUtil.getClient(redisSource.getIp(), redisSource.getPort(), redisSource.getPassword(), 
0); 113 | jedis.close(); 114 | return true; 115 | } catch (Exception e) { 116 | log.error(e.getMessage(), e); 117 | return false; 118 | } 119 | 120 | } 121 | 122 | public Result addKey(Integer sourceId, Integer db, String key, String type, String value) { 123 | RedisSource redisSource = sourceDao.selectById(sourceId); 124 | RedisUtil redisUtil = new RedisUtil(); 125 | 126 | Jedis jedis = redisUtil.getClient(redisSource.getIp(), redisSource.getPort(), redisSource.getPassword(), db); 127 | if (jedis.exists(key)) { 128 | return Result.fail("key已存在,不可添加,添加可能覆盖数据"); 129 | } 130 | if ("string".equals(type)) { 131 | jedis.set(key, value); 132 | } else if ("set".equals(type)) { 133 | List list = JSON.parseArray(value, JSONObject.class); 134 | List data = list.stream().map(t -> t.getString("value")).collect(Collectors.toList()); 135 | redisUtil.setSet(jedis, key, data); 136 | } else if ("list".equals(type)) { 137 | List list = JSON.parseArray(value, JSONObject.class); 138 | List data = list.stream().map(t -> t.getString("value")).collect(Collectors.toList()); 139 | redisUtil.listSet(jedis, key, data); 140 | } else if ("hash".equals(type)) { 141 | Map map = new HashMap<>(); 142 | List list = JSON.parseArray(value, JSONObject.class); 143 | list.stream().forEach(t -> { 144 | map.put(t.getString("key"), t.getString("value")); 145 | }); 146 | redisUtil.hashSet(jedis, key, map); 147 | } 148 | 149 | redisUtil.closeConnction(jedis); 150 | return Result.success("添加redis key 成功"); 151 | } 152 | 153 | public void deleteKey(Integer sourceId, Integer db, String key) { 154 | RedisSource redisSource = sourceDao.selectById(sourceId); 155 | RedisUtil redisUtil = new RedisUtil(); 156 | Jedis jedis = redisUtil.getClient(redisSource.getIp(), redisSource.getPort(), redisSource.getPassword(), db); 157 | jedis.del(key); 158 | redisUtil.closeConnction(jedis); 159 | } 160 | 161 | public List getAllSourceAuth() { 162 | List all = sourceDao.getAll(); 163 | all.stream().forEach(t -> { 164 | Auth auth 
= sourceDao.getAuthBySource(t.getId()); 165 | JSONObject authO = new JSONObject(); 166 | authO.put("add", auth.getAdd_auth().intValue() == 1 ? true : false); 167 | authO.put("update", auth.getUpdate_auth().intValue() == 1 ? true : false); 168 | authO.put("remove", auth.getRemove_auth().intValue() == 1 ? true : false); 169 | t.setAuth(authO); 170 | }); 171 | return all; 172 | } 173 | 174 | @Transactional 175 | public void auth(String param) { 176 | JSONObject jo = JSON.parseObject(param); 177 | Set keys = jo.keySet(); 178 | keys.stream().forEach(key -> { 179 | JSONObject auth = jo.getJSONObject(key); 180 | int add = auth.getBoolean("add") ? 1 : 0; 181 | int update = auth.getBoolean("update") ? 1 : 0; 182 | int remove = auth.getBoolean("remove") ? 1 : 0; 183 | int i = sourceDao.updateAuth(Integer.parseInt(key), add, update, remove); 184 | }); 185 | 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/service/ZKService.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.service; 2 | 3 | import com.alibaba.fastjson.JSON; 4 | import com.alibaba.fastjson.JSONObject; 5 | import com.jq.kafkaui.dao.ZKSourceDao; 6 | import com.jq.kafkaui.domain.Auth; 7 | import com.jq.kafkaui.domain.ZKSource; 8 | import com.jq.kafkaui.util.ZKProcessor; 9 | import lombok.extern.slf4j.Slf4j; 10 | import org.apache.curator.framework.CuratorFramework; 11 | import org.springframework.beans.factory.annotation.Autowired; 12 | import org.springframework.stereotype.Service; 13 | import org.springframework.transaction.annotation.Transactional; 14 | 15 | import java.util.List; 16 | import java.util.Set; 17 | import java.util.concurrent.TimeUnit; 18 | 19 | /** 20 | * @program: kafkaUI 21 | * @description: 22 | * @author: jiangqiang 23 | * @create: 2020-11-13 15:30 24 | **/ 25 | @Service 26 | @Slf4j 27 | public class ZKService { 28 | 29 | @Autowired 30 | 
ZKSourceDao sourceDao; 31 | 32 | @Transactional 33 | public void addSource(ZKSource source) { 34 | sourceDao.insert(source); 35 | sourceDao.insertAuth(source.getId(), 0, 0, 0); 36 | } 37 | 38 | @Transactional 39 | public void deleteSource(Integer id) { 40 | sourceDao.delete(id); 41 | sourceDao.deleteAuth(id); 42 | } 43 | 44 | public List getAllSource() { 45 | return sourceDao.getAll(); 46 | } 47 | 48 | public List getAllNodes(String address) { 49 | ZKProcessor zkProcessor = new ZKProcessor(address); 50 | return zkProcessor.getAllNode(); 51 | } 52 | 53 | public String getData(String address, String path) { 54 | ZKProcessor zkProcessor = new ZKProcessor(address); 55 | return zkProcessor.getValue(path); 56 | } 57 | 58 | public List getRootNodes(String address) throws Exception { 59 | ZKProcessor zkProcessor = new ZKProcessor(address); 60 | CuratorFramework client = zkProcessor.getClient(); 61 | List allSon = zkProcessor.getAllSon(client, "/"); 62 | client.close(); 63 | return allSon; 64 | } 65 | 66 | public List getNodes(String address, String path) throws Exception { 67 | ZKProcessor zkProcessor = new ZKProcessor(address); 68 | CuratorFramework client = zkProcessor.getClient(); 69 | List allSon = zkProcessor.getAllSon(client, path); 70 | client.close(); 71 | return allSon; 72 | } 73 | 74 | public boolean connect(String address) { 75 | CuratorFramework client = null; 76 | try { 77 | ZKProcessor zkProcessor = new ZKProcessor(address); 78 | client = zkProcessor.getClient(); 79 | // client.blockUntilConnected(5, TimeUnit.SECONDS); 80 | List allSon = zkProcessor.getAllSon(client, "/"); 81 | return true; 82 | } catch (Exception e) { 83 | log.error(e.getMessage(), e); 84 | return false; 85 | } finally { 86 | client.close(); 87 | } 88 | } 89 | 90 | public void setData(String address, String path, String data) throws Exception { 91 | ZKProcessor zkProcessor = new ZKProcessor(address); 92 | zkProcessor.setValue(path, data); 93 | } 94 | 95 | public void createNode(String 
address, String path, String data, boolean recursion) throws Exception { 96 | ZKProcessor zkProcessor = new ZKProcessor(address); 97 | zkProcessor.createNode(path, data, recursion); 98 | } 99 | 100 | public void removeNode(String address, String path) throws Exception { 101 | ZKProcessor zkProcessor = new ZKProcessor(address); 102 | zkProcessor.removeNode(path); 103 | } 104 | 105 | public List getAllSourceAuth() { 106 | List all = sourceDao.getAll(); 107 | all.stream().forEach(t -> { 108 | Auth auth = sourceDao.getAuthBySource(t.getId()); 109 | JSONObject authO = new JSONObject(); 110 | authO.put("add", auth.getAdd_auth().intValue() == 1 ? true : false); 111 | authO.put("update", auth.getUpdate_auth().intValue() == 1 ? true : false); 112 | authO.put("remove", auth.getRemove_auth().intValue() == 1 ? true : false); 113 | t.setAuth(authO); 114 | }); 115 | return all; 116 | } 117 | 118 | @Transactional 119 | public void auth(String param) { 120 | JSONObject jo = JSON.parseObject(param); 121 | Set keys = jo.keySet(); 122 | keys.stream().forEach(key -> { 123 | JSONObject auth = jo.getJSONObject(key); 124 | int add = auth.getBoolean("add") ? 1 : 0; 125 | int update = auth.getBoolean("update") ? 1 : 0; 126 | int remove = auth.getBoolean("remove") ? 
1 : 0; 127 | int i = sourceDao.updateAuth(Integer.parseInt(key), add, update, remove); 128 | }); 129 | 130 | } 131 | 132 | public String getAddressById(Integer sourceId) { 133 | return sourceDao.getAddress(sourceId); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/util/IPUtil.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.util; 2 | 3 | import java.net.Inet4Address; 4 | import java.net.InetAddress; 5 | import java.net.NetworkInterface; 6 | import java.util.Enumeration; 7 | 8 | public class IPUtil { 9 | 10 | public static void main(String[] args) { 11 | System.out.println("本机IP:" + getIpAddress()); 12 | } 13 | 14 | public static String getIpAddress() { 15 | try { 16 | Enumeration allNetInterfaces = NetworkInterface.getNetworkInterfaces(); 17 | InetAddress ip = null; 18 | while (allNetInterfaces.hasMoreElements()) { 19 | NetworkInterface netInterface = (NetworkInterface) allNetInterfaces.nextElement(); 20 | if (netInterface.isLoopback() || netInterface.isVirtual() || !netInterface.isUp()) { 21 | continue; 22 | } else { 23 | Enumeration addresses = netInterface.getInetAddresses(); 24 | while (addresses.hasMoreElements()) { 25 | ip = addresses.nextElement(); 26 | if (ip != null && ip instanceof Inet4Address) { 27 | return ip.getHostAddress(); 28 | } 29 | } 30 | } 31 | } 32 | } catch (Exception e) { 33 | System.err.println("IP地址获取失败" + e.toString()); 34 | } 35 | return ""; 36 | } 37 | 38 | } -------------------------------------------------------------------------------- /src/main/java/com/jq/kafkaui/util/KafkaUtil.java: -------------------------------------------------------------------------------- 1 | package com.jq.kafkaui.util; 2 | 3 | import com.alibaba.fastjson.JSONObject; 4 | import com.jq.kafkaui.domain.Topic; 5 | import com.jq.kafkaui.dto.ResponseDto; 6 | import com.jq.kafkaui.dto.SourceInfo; 7 | import 
lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.*;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.protocol.types.Field;
import org.springframework.util.StringUtils;

import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

/**
 * Static helpers wrapping the Kafka AdminClient / producer / consumer APIs
 * for the service layer.
 *
 * <p>All admin operations open a short-lived {@link AdminClient} per call and
 * close it via try-with-resources, so a failed connection no longer leaks the
 * client or throws a NullPointerException out of a {@code finally} block
 * (the previous code called {@code adminClient.close()} unconditionally even
 * when creation itself had failed).
 *
 * @program: kafkaUI
 * @author: jiangqiang
 * @create: 2020-10-28 20:05
 **/
@Slf4j
public class KafkaUtil {

    /**
     * Builds an AdminClient for the given source.
     * Timeouts are kept deliberately short (2s) so an unreachable broker
     * fails fast in the UI instead of hanging a request thread.
     */
    public static AdminClient createAdminClientByProperties(SourceInfo sourceInfo) {
        Properties prop = getCommonProperties(sourceInfo);
        prop.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, sourceInfo.getBroker());
        prop.setProperty(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "2000");
        prop.setProperty(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, "2000");
        return AdminClient.create(prop);
    }

    /**
     * Common SASL/PLAIN settings shared by admin, producer and consumer.
     * Credentials are only applied when both username and password are set.
     * The JAAS values are quoted, as required by the JAAS config grammar —
     * the unquoted form breaks for passwords containing spaces or ';'.
     */
    private static Properties getCommonProperties(SourceInfo sourceInfo) {
        Properties prop = new Properties();
        String userName = sourceInfo.getUserName();
        String password = sourceInfo.getPassword();
        if (!StringUtils.isEmpty(userName) && !StringUtils.isEmpty(password)) {
            prop.put("sasl.jaas.config",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""
                            + userName + "\" password=\"" + password + "\";");
            prop.put("security.protocol", "SASL_PLAINTEXT");
            prop.put("sasl.mechanism", "PLAIN");
        }
        return prop;
    }

    /**
     * Lists all topics (including internal ones such as __consumer_offsets),
     * sorted by name and optionally filtered by a substring keyword.
     *
     * @param keyword substring filter; {@code null} means "no filter"
     * @return success DTO carrying {@code List<Topic>}, or a fail DTO with the error message
     */
    public static ResponseDto listTopicsWithOptions(SourceInfo sourceInfo, String keyword) {
        try (AdminClient adminClient = createAdminClientByProperties(sourceInfo)) {
            ListTopicsOptions options = new ListTopicsOptions();
            // include internal topics in the listing
            options.listInternal(true);

            Collection<TopicListing> topicListings = adminClient.listTopics(options).listings().get();

            List<Topic> topics = topicListings.stream().map(t -> {
                Topic topic = new Topic();
                topic.setName(t.name());
                topic.setInternal(t.isInternal());
                return topic;
            }).sorted(Comparator.comparing(Topic::getName)).collect(Collectors.toList());

            if (keyword != null) {
                topics = topics.stream()
                        .filter(t -> t.getName().contains(keyword))
                        .collect(Collectors.toList());
            }
            return ResponseDto.success(topics);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

    /**
     * Creates a topic and blocks until the broker acknowledges it.
     * Failures now propagate to the caller; the previous empty catch block
     * silently swallowed every error despite the {@code throws} declaration.
     *
     * @param partition number of partitions
     * @param replica   replication factor (narrowed to short as the API requires)
     */
    public static void createTopic(SourceInfo sourceInfo, String topic, Integer partition, Integer replica) throws Exception {
        try (AdminClient adminClient = createAdminClientByProperties(sourceInfo)) {
            NewTopic newTopic = new NewTopic(topic, partition, replica.shortValue());
            CreateTopicsResult result = adminClient.createTopics(Collections.singletonList(newTopic));
            // wait for the broker's answer so errors (topic exists, too few brokers…) surface here
            result.all().get();
        }
    }

    /**
     * Builds a String/String producer for the source. Caller owns the
     * producer and must close it.
     */
    public static Producer<String, String> getProducer(SourceInfo sourceInfo) {
        Properties props = getCommonProperties(sourceInfo);
        props.put("bootstrap.servers", sourceInfo.getBroker());
        props.put("acks", "all");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(props);
    }

    /**
     * Shared consumer configuration for both {@code getConsumer} overloads.
     *
     * @param offset value for auto.offset.reset ("earliest"/"latest")
     */
    private static Properties consumerProperties(SourceInfo sourceInfo, String group, String offset) {
        Properties props = getCommonProperties(sourceInfo);
        props.setProperty("bootstrap.servers", sourceInfo.getBroker());
        props.setProperty("group.id", group);
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.commit.interval.ms", "1000");
        props.setProperty("auto.offset.reset", offset);
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return props;
    }

    /**
     * Builds a consumer subscribed to a single topic.
     * Caller owns the consumer and must close it.
     */
    public static KafkaConsumer<String, String> getConsumer(SourceInfo sourceInfo, String topic, String group, String offset) {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProperties(sourceInfo, group, offset));
        consumer.subscribe(Collections.singleton(topic));
        return consumer;
    }

    /**
     * Builds a consumer subscribed to several topics.
     * Caller owns the consumer and must close it.
     */
    public static KafkaConsumer<String, String> getConsumer(SourceInfo sourceInfo, Collection<String> topics, String group, String offset) {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProperties(sourceInfo, group, offset));
        consumer.subscribe(topics);
        return consumer;
    }

    /** Scratch entry point kept from development; intentionally empty. */
    public static void main(String[] args) throws Exception {
    }

    /**
     * Fire-and-forget topic deletion. {@code close()} flushes pending
     * requests, so the delete is submitted before this method returns; the
     * client is now closed even if {@code deleteTopics} throws.
     */
    public static void deleteTopic(SourceInfo sourceInfo, String name) {
        try (AdminClient adminClient = createAdminClientByProperties(sourceInfo)) {
            adminClient.deleteTopics(Collections.singletonList(name));
        }
    }

    /** Serializes a broker node to the JSON shape the frontend expects. */
    public static JSONObject node2Json(Node node) {
        JSONObject json = new JSONObject();
        json.put("id", node.id());
        json.put("host", node.host());
        json.put("port", node.port());
        json.put("rack", node.rack());
        return json;
    }

    /**
     * Describes one topic: name, internal flag and, per partition, leader /
     * replicas / ISR plus beginning and end offsets.
     * Both the AdminClient and the temporary consumer are closed on every
     * path (the consumer previously leaked).
     */
    public static JSONObject getTopicDetail(SourceInfo sourceInfo, String topic) throws Exception {
        try (AdminClient adminClient = createAdminClientByProperties(sourceInfo);
             KafkaConsumer<String, String> consumer = getConsumer(sourceInfo, topic, "KafkaUI-lite", "earliest")) {

            TopicDescription topicDescription = adminClient
                    .describeTopics(Collections.singletonList(topic))
                    .all().get()
                    .get(topic);

            JSONObject res = new JSONObject();
            res.put("isInternal", topicDescription.isInternal());
            res.put("name", topicDescription.name());

            List<TopicPartition> topicPartitions = topicDescription.partitions().stream()
                    .map(t -> new TopicPartition(topic, t.partition()))
                    .collect(Collectors.toList());
            Map<TopicPartition, Long> endOffsets = consumer.endOffsets(topicPartitions);
            Map<TopicPartition, Long> beginningOffsets = consumer.beginningOffsets(topicPartitions);

            List<JSONObject> partitions = topicDescription.partitions().stream().map(t -> {
                TopicPartition tp = new TopicPartition(topic, t.partition());
                JSONObject p = new JSONObject();
                p.put("partition", t.partition());
                p.put("leader", node2Json(t.leader()));
                p.put("replicas", t.replicas().stream().map(KafkaUtil::node2Json).collect(Collectors.toList()));
                p.put("isr", t.isr().stream().map(KafkaUtil::node2Json).collect(Collectors.toList()));
                p.put("endOffset", endOffsets.get(tp));
                p.put("beginningOffset", beginningOffsets.get(tp));
                return p;
            }).collect(Collectors.toList());
            res.put("partitions", partitions);

            log.debug(res.toJSONString());
            return res;
        }
    }

    /**
     * Lists all brokers in the cluster, flagging the current controller.
     *
     * @return success DTO with one JSON object per broker
     */
    public static ResponseDto clusterInfo(SourceInfo sourceInfo) {
        try (AdminClient client = createAdminClientByProperties(sourceInfo)) {
            DescribeClusterResult described = client.describeCluster();
            Node controller = described.controller().get();
            Collection<Node> nodes = described.nodes().get();
            List<JSONObject> brokers = nodes.stream().map(node -> {
                JSONObject jo = new JSONObject();
                jo.put("host", node.host());
                jo.put("port", node.port());
                jo.put("idStr", node.idString());
                jo.put("id", node.id());
                // controller may be unknown during an election — treat as "not controller"
                jo.put("controller", controller != null && node.id() == controller.id());
                return jo;
            }).collect(Collectors.toList());
            return ResponseDto.success(brokers);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

    /**
     * Lists all consumer groups, optionally filtered by a substring keyword.
     *
     * @param keyword substring filter on the group name; {@code null} means "no filter"
     */
    public static ResponseDto getAllGroups(SourceInfo sourceInfo, String keyword) {
        try (AdminClient client = createAdminClientByProperties(sourceInfo)) {
            Collection<ConsumerGroupListing> listings = client.listConsumerGroups().all().get();
            List<JSONObject> groups = listings.stream().map(t -> {
                JSONObject jo = new JSONObject();
                jo.put("name", t.groupId());
                return jo;
            }).collect(Collectors.toList());
            if (keyword != null) {
                groups = groups.stream()
                        .filter(t -> t.getString("name").contains(keyword))
                        .collect(Collectors.toList());
            }
            return ResponseDto.success(groups);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

    /**
     * Finds all consumer groups that have committed offsets for the given
     * topic. Groups whose offsets cannot be read are skipped (logged), and
     * the thread's interrupt status is restored instead of being swallowed.
     */
    public static ResponseDto getGroupByTopic(SourceInfo sourceInfo, String topic) {
        try (AdminClient client = createAdminClientByProperties(sourceInfo)) {
            // the try-with-resources variable is effectively final, so it can be used in the lambdas directly
            List<JSONObject> groups = client.listConsumerGroups().all().get().parallelStream()
                    .map(ConsumerGroupListing::groupId)
                    .filter(group -> {
                        try {
                            return client.listConsumerGroupOffsets(group)
                                    .partitionsToOffsetAndMetadata().get().keySet().stream()
                                    .anyMatch(p -> p.topic().equals(topic));
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            return false;
                        } catch (ExecutionException e) {
                            log.error("failed to read offsets for group {}", group, e);
                            return false;
                        }
                    })
                    .map(groupId -> {
                        JSONObject object = new JSONObject();
                        object.put("value", groupId);
                        return object;
                    })
                    .collect(Collectors.toList());
            return ResponseDto.success(groups);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

    /**
     * Per-partition lag report for a consumer group: committed offset,
     * current end offset and the difference (lag), sorted by topic then
     * partition. The temporary consumer used to read end offsets is now
     * closed (it previously leaked).
     */
    public static ResponseDto getGroupInfo(SourceInfo sourceInfo, String group) {
        try (AdminClient client = createAdminClientByProperties(sourceInfo)) {
            Map<TopicPartition, OffsetAndMetadata> committed =
                    client.listConsumerGroupOffsets(group).partitionsToOffsetAndMetadata().get();

            Set<TopicPartition> topicPartitions = committed.keySet();
            Set<String> topics = topicPartitions.stream().map(TopicPartition::topic).collect(Collectors.toSet());

            Map<TopicPartition, Long> endOffsets;
            try (KafkaConsumer<String, String> consumer = getConsumer(sourceInfo, topics, group, "earliest")) {
                endOffsets = consumer.endOffsets(topicPartitions);
            }

            List<JSONObject> rows = topicPartitions.stream().map(t -> {
                long offset = committed.get(t).offset();
                Long endOffset = endOffsets.get(t);
                JSONObject row = new JSONObject();
                row.put("topic", t.topic());
                row.put("partition", t.partition());
                row.put("offset", offset);
                row.put("endOffset", endOffset);
                row.put("lag", endOffset == null ? null : endOffset - offset);
                return row;
            }).sorted(Comparator.comparing(KafkaUtil::comparingByName)
                            .thenComparing(KafkaUtil::comparingByPartition))
                    .collect(Collectors.toList());

            return ResponseDto.success(rows);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

    /** Sort key: topic name of a lag row. */
    private static String comparingByName(JSONObject jo) {
        return jo.getString("topic");
    }

    /** Sort key: partition number of a lag row. */
    private static Integer comparingByPartition(JSONObject jo) {
        return jo.getInteger("partition");
    }

    /**
     * Deletes a consumer group, waiting for the broker's acknowledgement so
     * failures are reported to the caller as a fail DTO.
     */
    public static ResponseDto deleteGroup(SourceInfo sourceInfo, String group) {
        try (AdminClient client = createAdminClientByProperties(sourceInfo)) {
            client.deleteConsumerGroups(Collections.singletonList(group)).all().get();
            return ResponseDto.success();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return ResponseDto.fail(e.getMessage());
        }
    }

}
--------------------------------------------------------------------------------
/src/main/java/com/jq/kafkaui/util/RedisUtil.java:
--------------------------------------------------------------------------------
package com.jq.kafkaui.util;

import org.springframework.util.StringUtils;
import redis.clients.jedis.Jedis;

import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Thin wrapper around Jedis used by the Redis service layer.
 * Connections returned by {@link #getClient} are owned by the caller, who
 * must release them via {@link #closeConnction} (name kept as-is for
 * existing callers).
 */
public class RedisUtil {

    /**
     * Opens a connection, authenticates when a password is configured, and
     * selects the requested database. If auth/select fails the socket is
     * closed before the exception is rethrown (it previously leaked).
     *
     * @param db database index passed to SELECT
     */
    public Jedis getClient(String ip, int port, String password, int db) {
        Jedis jedis = new Jedis(ip, port);
        try {
            if (!StringUtils.isEmpty(password)) {
                jedis.auth(password);
            }
            jedis.select(db);
            return jedis;
        } catch (RuntimeException e) {
            // don't leak the socket when auth or select fails
            jedis.close();
            throw e;
        }
    }

    /**
     * Returns every key in the selected database.
     * NOTE(review): KEYS * blocks the server on large datasets — fine for a
     * small admin UI, but SCAN would be safer for big instances.
     */
    public Set<String> getAllKeys(Jedis jedis) {
        return jedis.keys("*");
    }

    /** Value of a string key, or null when absent. */
    public String getString(Jedis jedis, String key) {
        return jedis.get(key);
    }

    /** All field/value pairs of a hash key. */
    public Map<String, String> getHash(Jedis jedis, String key) {
        return jedis.hgetAll(key);
    }

    /** All members of a set key. */
    public Set<String> getSet(Jedis jedis, String key) {
        return jedis.smembers(key);
    }

    /** Full contents of a list key (LRANGE 0 -1). */
    public List<String> getList(Jedis jedis, String key) {
        return jedis.lrange(key, 0, -1);
    }

    /** Writes all entries of the map into a hash key. */
    public void hashSet(Jedis jedis, String key, Map<String, String> map) {
        jedis.hmset(key, map);
    }

    /** Adds every element of the list to a set key. */
    public void setSet(Jedis jedis, String key, List<String> list) {
        jedis.sadd(key, list.toArray(new String[0]));
    }

    /** Appends every element of the list to a list key (RPUSH). */
    public void listSet(Jedis jedis, String key, List<String> list) {
        jedis.rpush(key, list.toArray(new String[0]));
    }

    /** Null-safe close. Method name typo kept for binary/source compatibility. */
    public void closeConnction(Jedis jedis) {
        if (jedis != null) {
            jedis.close();
        }
    }

    /** Manual smoke test against a developer instance; not used by the app. */
    public static void main(String[] args) {
        RedisUtil redisUtil = new RedisUtil();
        Jedis jedis = redisUtil.getClient("10.1.254.38", 6379, "123456", 6);
        try {
            redisUtil.getList(jedis, "test3").forEach(System.out::println);
        } finally {
            redisUtil.closeConnction(jedis);
        }
    }

}
--------------------------------------------------------------------------------
/src/main/java/com/jq/kafkaui/util/ZKProcessor.java:
--------------------------------------------------------------------------------
package com.jq.kafkaui.util;

import com.alibaba.fastjson.JSONObject;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.zookeeper.CreateMode;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * ZooKeeper helper built on Curator. Each public operation opens a fresh
 * client and closes it in a {@code finally} block, so a failing ZK call no
 * longer leaks the connection. Node data is read and written as UTF-8 on
 * both sides (previously writes were UTF-8 but reads used the platform
 * default charset).
 */
public class ZKProcessor {

    // connect string, e.g. "host1:2181,host2:2181"
    String url;

    public ZKProcessor(String url) {
        this.url = url;
    }

    /**
     * Builds and starts a Curator client for {@link #url}.
     * Short timeouts and a single retry keep the admin UI responsive when
     * the ensemble is unreachable.
     *
     * @return a started client; caller must close it
     */
    public CuratorFramework getClient() {
        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 1);
        CuratorFramework client = CuratorFrameworkFactory.builder()
                .connectString(url)
                .sessionTimeoutMs(1000)     // session timeout
                .connectionTimeoutMs(1000)  // connection timeout
                .retryPolicy(retryPolicy)
                .build();
        client.start();
        return client;
    }

    /**
     * Recursively collects all descendants of {@code path} as a tree of
     * JSON objects ({@code label}, {@code path}, optional {@code children}).
     * Node values are deliberately not read here to keep the full-tree walk cheap.
     */
    public List<JSONObject> getAllChildren(CuratorFramework client, String path) throws Exception {
        List<JSONObject> nodes = new ArrayList<>();
        for (String child : client.getChildren().forPath(path)) {
            JSONObject node = new JSONObject(true); // ordered keys for stable UI rendering
            node.put("label", child);
            // avoid the double slash when the parent is the root node
            String childPath = path + (path.equals("/") ? "" : "/") + child;
            node.put("path", childPath);

            List<JSONObject> grandChildren = getAllChildren(client, childPath);
            if (!grandChildren.isEmpty()) {
                node.put("children", grandChildren);
            }
            nodes.add(node);
        }
        return nodes;
    }

    /**
     * Collects the direct children of {@code path}, including each child's
     * data decoded as UTF-8 ({@code value} is omitted for nodes without data).
     */
    public List<JSONObject> getAllSon(CuratorFramework client, String path) throws Exception {
        List<JSONObject> sons = new ArrayList<>();
        for (String child : client.getChildren().forPath(path)) {
            JSONObject node = new JSONObject(true);
            node.put("label", child);
            String childPath = path + (path.equals("/") ? "" : "/") + child;
            node.put("path", childPath);
            byte[] bytes = client.getData().forPath(childPath);
            if (bytes != null) {
                node.put("value", new String(bytes, StandardCharsets.UTF_8));
            }
            sons.add(node);
        }
        return sons;
    }

    /**
     * Snapshot of the whole tree starting at the root.
     *
     * @return the tree, or {@code null} when ZooKeeper is unreachable
     *         (kept from the original contract; callers check for null)
     */
    public List<JSONObject> getAllNode() {
        CuratorFramework client = getClient();
        try {
            return getAllChildren(client, "/");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            client.close();
        }
        return null;
    }

    /**
     * Deletes a node and all of its children; guaranteed deletion retries in
     * the background if the first attempt fails. The client is now closed
     * even when the delete throws.
     */
    public void removeNode(String path) throws Exception {
        CuratorFramework client = getClient();
        try {
            client.delete().guaranteed().deletingChildrenIfNeeded().forPath(path);
        } finally {
            client.close();
        }
    }

    /**
     * Reads a node's data as UTF-8.
     *
     * @return the value, or {@code null} on error (original contract)
     */
    public String getValue(String path) {
        CuratorFramework client = getClient();
        try {
            byte[] bytes = client.getData().forPath(path);
            return new String(bytes, StandardCharsets.UTF_8);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            client.close();
        }
        return null;
    }

    /** Overwrites a node's data with the UTF-8 bytes of {@code data}. */
    public void setValue(String path, String data) throws Exception {
        CuratorFramework client = getClient();
        try {
            client.setData().forPath(path, data.getBytes(StandardCharsets.UTF_8));
        } finally {
            client.close();
        }
    }

    /**
     * Creates a persistent node with the given UTF-8 data.
     *
     * @param recursion when true, missing parent nodes are created as well
     */
    public void createNode(String path, String data, boolean recursion) throws Exception {
        CuratorFramework client = getClient();
        try {
            if (recursion) {
                client.create().creatingParentsIfNeeded()
                        .withMode(CreateMode.PERSISTENT)
                        .forPath(path, data.getBytes(StandardCharsets.UTF_8));
            } else {
                client.create().withMode(CreateMode.PERSISTENT)
                        .forPath(path, data.getBytes(StandardCharsets.UTF_8));
            }
        } finally {
            client.close();
        }
    }

    /** Scratch entry point kept from development; intentionally does nothing. */
    public static void main(String[] args) throws Exception {
        ZKProcessor zkProcessor = new ZKProcessor("localhost:2181");
    }

}
--------------------------------------------------------------------------------
/src/main/resources/application.properties:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/resources/application.properties
--------------------------------------------------------------------------------
/src/main/resources/data.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/resources/data.db
--------------------------------------------------------------------------------
/src/main/sql/ddl_mysql.sql:
--------------------------------------------------------------------------------
CREATE TABLE `kafka_auth`
(
    `id`          int(11) NOT NULL AUTO_INCREMENT,
    `source_id`   int(11) NOT NULL,
    `add_auth`    int(11),
    `update_auth` int(11),
    `remove_auth` int(11),
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
  DEFAULT CHARSET = utf8;

CREATE TABLE `zookeeper_auth`
(
    `id`          int(11) NOT NULL AUTO_INCREMENT,
    `source_id`   int(11) NOT NULL,
    `add_auth`    int(11),
    `update_auth` int(11),
    `remove_auth` int(11),
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
  DEFAULT CHARSET = utf8;

CREATE TABLE `redis_auth`
(
    `id`          int(11) NOT NULL AUTO_INCREMENT,
    `source_id`   int(11) NOT NULL,
    `add_auth`    int(11),
    `update_auth` int(11),
    `remove_auth` int(11),
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
  DEFAULT CHARSET = utf8;

CREATE TABLE `source`
(
    `id`       int(11) NOT NULL AUTO_INCREMENT,
    `name`     varchar(255) DEFAULT NULL,
    `broker`   varchar(255) DEFAULT NULL,
    `username` varchar(255) DEFAULT NULL,
    -- fix: comma was missing after this column, which made the script fail on MySQL
    `password` varchar(255) DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
  DEFAULT CHARSET = utf8;

CREATE TABLE `zookeeper_source`
(
    `id`      int(11) NOT NULL AUTO_INCREMENT,
    `address` varchar(255) DEFAULT NULL,
    `name`    varchar(255) DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
  DEFAULT CHARSET = utf8;

CREATE TABLE `redis_source`
(
    `id`       int(11) NOT NULL AUTO_INCREMENT,
    `name`     varchar(255) DEFAULT NULL,
    `ip`       varchar(255) DEFAULT NULL,
    `port`     int(11),
    `password` varchar(255) DEFAULT NULL,
    PRIMARY KEY (`id`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8; 64 | -------------------------------------------------------------------------------- /src/main/sql/ddl_sqlite.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE "kafka_auth" 2 | ( 3 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, 4 | "source_id" INTEGER NOT NULL, 5 | "add_auth" integer, 6 | "update_auth" integer, 7 | "remove_auth" integer 8 | ); 9 | 10 | CREATE TABLE "zookeeper_auth" 11 | ( 12 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, 13 | "source_id" INTEGER NOT NULL, 14 | "add_auth" integer, 15 | "update_auth" integer, 16 | "remove_auth" integer 17 | ); 18 | 19 | CREATE TABLE "redis_auth" 20 | ( 21 | "id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, 22 | "source_id" INTEGER NOT NULL, 23 | "add_auth" integer, 24 | "update_auth" integer, 25 | "remove_auth" integer 26 | ); 27 | 28 | CREATE TABLE "source" 29 | ( 30 | "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, 31 | "name" text(64), 32 | "broker" text(64), 33 | "username" text(64), 34 | "password" text(64) 35 | ); 36 | 37 | CREATE TABLE "zookeeper_source" 38 | ( 39 | "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, 40 | "address" TEXT, 41 | "name" TEXT 42 | ); 43 | 44 | CREATE TABLE "redis_source" 45 | ( 46 | "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, 47 | "name" TEXT, 48 | "ip" TEXT, 49 | "port" integer, 50 | "password" TEXT 51 | ); 52 | -------------------------------------------------------------------------------- /src/main/webapp/.browserslistrc: -------------------------------------------------------------------------------- 1 | > 1% 2 | last 2 versions 3 | not dead 4 | -------------------------------------------------------------------------------- /src/main/webapp/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules/ 3 | package-lock.json 4 | /dist 5 | 6 | 7 | # local env files 8 | .env.local 9 | .env.*.local 10 | 11 | # Log files 12 | 
npm-debug.log* 13 | yarn-debug.log* 14 | yarn-error.log* 15 | pnpm-debug.log* 16 | 17 | # Editor directories and files 18 | .idea 19 | .vscode 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | -------------------------------------------------------------------------------- /src/main/webapp/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "presets": [ 3 | "@vue/cli-plugin-babel/preset" 4 | ], 5 | "plugins": [ 6 | [ 7 | "component", 8 | { 9 | "libraryName": "element-ui", 10 | "styleLibraryName": "theme-chalk" 11 | } 12 | ], 13 | [ 14 | "import", 15 | { 16 | "libraryName": "vxe-table", 17 | "style": true // 样式是否也按需加载 18 | } 19 | ] 20 | ] 21 | } -------------------------------------------------------------------------------- /src/main/webapp/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafka-ui-lite", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "serve": "vue-cli-service serve", 7 | "build": "vue-cli-service build" 8 | }, 9 | "dependencies": { 10 | "axios": "^0.21.0", 11 | "core-js": "^3.6.5", 12 | "element-ui": "^2.4.5", 13 | "qs.js": "^0.1.12", 14 | "vue": "^2.6.11", 15 | "vue-axios": "^3.1.3", 16 | "vue-clipboard2": "^0.3.1", 17 | "vue-i18n": "^8.22.4", 18 | "vue-router": "^3.2.0", 19 | "vuex": "^3.4.0", 20 | "vxe-table": "^3.3.9", 21 | "xe-utils": "^3.3.0" 22 | }, 23 | "devDependencies": { 24 | "@vue/cli-plugin-babel": "~4.5.0", 25 | "@vue/cli-plugin-router": "~4.5.0", 26 | "@vue/cli-plugin-vuex": "^4.5.10", 27 | "@vue/cli-service": "~4.5.0", 28 | "babel-plugin-component": "^1.1.1", 29 | "babel-plugin-import": "^1.13.3", 30 | "node-sass": "^5.0.0", 31 | "sass-loader": "^10.0.5", 32 | "vue-cli-plugin-element": "^1.0.1", 33 | "vue-template-compiler": "^2.6.11" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- 
/src/main/webapp/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/webapp/public/favicon.ico -------------------------------------------------------------------------------- /src/main/webapp/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | <%= htmlWebpackPlugin.options.title %> 9 | 10 | 11 | 12 | 15 | 16 |
17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /src/main/webapp/src/App.vue: -------------------------------------------------------------------------------- 1 | 6 | 7 | 18 | 19 | 66 | -------------------------------------------------------------------------------- /src/main/webapp/src/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/webapp/src/assets/logo.png -------------------------------------------------------------------------------- /src/main/webapp/src/components/HelloWorld.vue: -------------------------------------------------------------------------------- 1 | 32 | 33 | 41 | 42 | 43 | 59 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/about/auth/commonAuth.vue: -------------------------------------------------------------------------------- 1 | 20 | 21 | 42 | 43 | 46 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/about/auth/zkAuth.vue: -------------------------------------------------------------------------------- 1 | 20 | 21 | 62 | 63 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/about/authority.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 91 | 92 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/about/donate.vue: -------------------------------------------------------------------------------- 1 | 16 | 17 | 22 | 23 | 34 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/common/GroupTable.vue: 
-------------------------------------------------------------------------------- 1 | 16 | 17 | 44 | 45 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/common/dataTag.vue: -------------------------------------------------------------------------------- 1 | 10 | 11 | 17 | 18 | 49 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/common/list.vue: -------------------------------------------------------------------------------- 1 | 6 | 7 | 18 | 19 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/config.vue: -------------------------------------------------------------------------------- 1 | 47 | 48 | 101 | 102 | 105 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/consumer.vue: -------------------------------------------------------------------------------- 1 | 52 | 53 | 215 | 216 | 312 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/kafkaSelect.vue: -------------------------------------------------------------------------------- 1 | 6 | 7 | 42 | 43 | 46 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/manage.vue: -------------------------------------------------------------------------------- 1 | 18 | 19 | 43 | 44 | 47 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/manage/cluster.vue: -------------------------------------------------------------------------------- 1 | 24 | 25 | 61 | 62 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/manage/group.vue: 
-------------------------------------------------------------------------------- 1 | 48 | 49 | 118 | 119 | 124 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/manage/topic.vue: -------------------------------------------------------------------------------- 1 | 128 | 129 | 252 | 253 | 266 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/operate/consume.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 61 | 62 | 79 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/operate/produce.vue: -------------------------------------------------------------------------------- 1 | 19 | 20 | 60 | 61 | 64 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/kafka/producer.vue: -------------------------------------------------------------------------------- 1 | 55 | 56 | 150 | 151 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/redis/addKey.vue: -------------------------------------------------------------------------------- 1 | 72 | 73 | 139 | 140 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/redis/config.vue: -------------------------------------------------------------------------------- 1 | 47 | 48 | 119 | 120 | 123 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/redis/manage.vue: -------------------------------------------------------------------------------- 1 | 112 | 113 | 228 | 229 | 258 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/tool/json.vue: 
-------------------------------------------------------------------------------- 1 | 11 | 12 | 35 | 36 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/tool/time.vue: -------------------------------------------------------------------------------- 1 | 33 | 34 | 78 | 79 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/zk/config.vue: -------------------------------------------------------------------------------- 1 | 42 | 43 | 98 | 99 | 102 | -------------------------------------------------------------------------------- /src/main/webapp/src/components/zk/manage.vue: -------------------------------------------------------------------------------- 1 | 66 | 67 | 210 | 211 | 230 | -------------------------------------------------------------------------------- /src/main/webapp/src/i18n/i18n.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import locale from 'element-ui/lib/locale'; 3 | import VueI18n from 'vue-i18n' 4 | import messages from './langs' 5 | Vue.use(VueI18n) 6 | //从localStorage获取语言选择。 7 | const i18n = new VueI18n({ 8 | locale: localStorage.lang || 'cn', //初始未选择默认 cn 中文 9 | messages, 10 | }) 11 | locale.i18n((key, value) => i18n.t(key, value)) //兼容element 12 | 13 | export default i18n 14 | -------------------------------------------------------------------------------- /src/main/webapp/src/i18n/langs/cn.js: -------------------------------------------------------------------------------- 1 | 2 | import zhLocale from 'element-ui/lib/locale/lang/zh-CN' //引入element语言包 3 | const cn = { 4 | config: '配置', 5 | manage: '管理', 6 | ...zhLocale 7 | } 8 | 9 | export default cn; 10 | -------------------------------------------------------------------------------- /src/main/webapp/src/i18n/langs/en.js: 
-------------------------------------------------------------------------------- 1 | import enLocale from 'element-ui/lib/locale/lang/en' //引入element语言包 2 | const en = { 3 | config: 'config', 4 | manage: 'manage', 5 | ...enLocale 6 | } 7 | 8 | export default en; 9 | -------------------------------------------------------------------------------- /src/main/webapp/src/i18n/langs/index.js: -------------------------------------------------------------------------------- 1 | import en from './en'; 2 | import cn from './cn'; 3 | export default { 4 | en: en, 5 | cn: cn 6 | } 7 | -------------------------------------------------------------------------------- /src/main/webapp/src/js/auth.js: -------------------------------------------------------------------------------- 1 | //vuex 中的权限信息刷新 2 | 3 | function initZK(that) { 4 | 5 | that.axios.post("/zookeeper/getAllSourceAuth").then((response) => { 6 | let obj = {} 7 | for (let item of response.data) { 8 | obj[item.id] = item.auth 9 | } 10 | console.log('我被调用成功') 11 | that.$store.commit('setZKAuth', obj) 12 | }).catch((error) => { 13 | that.$message.error("查询所有zookeeper权限失败") 14 | }) 15 | } 16 | 17 | function initKafka(that) { 18 | that.axios.post("/kafka/getAllSourceAuth").then((response) => { 19 | let obj = {} 20 | for (let item of response.data) { 21 | obj[item.id] = item.auth 22 | } 23 | that.$store.commit('setKafkaAuth', obj) 24 | }).catch((error) => { 25 | that.$message.error("查询所有kafka权限失败") 26 | }) 27 | } 28 | 29 | function initRedis(that) { 30 | that.axios.post("/redis/getAllSourceAuth").then((response) => { 31 | let obj = {} 32 | for (let item of response.data) { 33 | obj[item.id] = item.auth 34 | } 35 | that.$store.commit('setRedisAuth', obj) 36 | }).catch((error) => { 37 | that.$message.error("查询所有redis权限失败") 38 | }) 39 | } 40 | 41 | export { 42 | initZK, initKafka, initRedis 43 | } -------------------------------------------------------------------------------- /src/main/webapp/src/main.js: 
-------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import App from './App.vue' 3 | import router from './router' 4 | import './plugins/element.js' 5 | import axios from 'axios' 6 | import VueAxios from 'vue-axios' 7 | import qs from 'qs' 8 | 9 | import './theme/index.css' 10 | 11 | import i18n from './i18n/i18n' 12 | 13 | import VueClipboard from 'vue-clipboard2' 14 | import store from './store' 15 | 16 | Vue.config.productionTip = false 17 | 18 | //使用vue-axios,这样才可以全局使用this.axios调用 19 | Vue.use(VueAxios, axios); 20 | Vue.use(VueClipboard) 21 | 22 | import './plugins/vxe.js' 23 | // import 'xe-utils' 24 | // import VXETable from 'vxe-table' 25 | // import 'vxe-table/lib/style.css' 26 | // 27 | // Vue.use(VXETable) 28 | 29 | // axios.defaults.baseURL = '/api' 30 | 31 | axios.defaults.headers = {'Content-Type': 'application/x-www-form-urlencoded'} 32 | //全局拦截post请求的参数,用qs序列化 33 | axios.interceptors.request.use(config => { 34 | //form表单提交multipart/form-data的时候,不需要序列化参数 35 | if (config.method === 'post' && config.headers['Content-Type'] === 'application/x-www-form-urlencoded') { 36 | config.data = qs.stringify(config.data, {indices: false}); 37 | } 38 | return config 39 | }) 40 | 41 | // axios.interceptors.response.use(response => { 42 | // // console.log(response.data.success) 43 | // // if (!response.data.success) { 44 | // // this.$message.error(response.data.message) 45 | // // } 46 | // }, error => { 47 | // // console.log(error) 48 | // }) 49 | 50 | new Vue({ 51 | router, 52 | store, 53 | i18n, 54 | render: h => h(App) 55 | }).$mount('#app') 56 | -------------------------------------------------------------------------------- /src/main/webapp/src/plugins/element.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | // import { Button,ELCONTAINER } from 'element-ui' 3 | 4 | import { 5 | // Pagination, 6 | Link, 7 | Dialog, 8 | Autocomplete, 9 | 
// Dropdown, 10 | // DropdownMenu, 11 | // DropdownItem, 12 | Menu, 13 | Submenu, 14 | MenuItem, 15 | MenuItemGroup, 16 | Input, 17 | InputNumber, 18 | Radio, 19 | RadioGroup, 20 | RadioButton, 21 | Checkbox, 22 | CheckboxButton, 23 | CheckboxGroup, 24 | // Switch, 25 | Select, 26 | Option, 27 | OptionGroup, 28 | Button, 29 | ButtonGroup, 30 | Table, 31 | TableColumn, 32 | DatePicker, 33 | // TimeSelect, 34 | // TimePicker, 35 | Popover, 36 | Popconfirm, 37 | Tooltip, 38 | Breadcrumb, 39 | BreadcrumbItem, 40 | Form, 41 | FormItem, 42 | Tabs, 43 | TabPane, 44 | Tag, 45 | Tree, 46 | Alert, 47 | // Slider, 48 | // Icon, 49 | Row, 50 | Col, 51 | // Upload, 52 | // Progress, 53 | // Spinner, 54 | // Badge, 55 | // Card, 56 | // Rate, 57 | // Steps, 58 | // Step, 59 | // Carousel, 60 | // CarouselItem, 61 | Collapse, 62 | CollapseItem, 63 | // Cascader, 64 | // ColorPicker, 65 | // Transfer, 66 | Container, 67 | Header, 68 | Aside, 69 | Main, 70 | Footer, 71 | // Timeline, 72 | // TimelineItem, 73 | // Link, 74 | // Divider, 75 | // Image, 76 | // Calendar, 77 | // Backtop, 78 | // PageHeader, 79 | // CascaderPanel, 80 | // Loading, 81 | // MessageBox, 82 | Message 83 | // Notification 84 | } from 'element-ui'; 85 | 86 | // Vue.use(Pagination); 87 | Vue.use(Link); 88 | Vue.use(Dialog); 89 | Vue.use(Autocomplete); 90 | // Vue.use(Dropdown); 91 | // Vue.use(DropdownMenu); 92 | // Vue.use(DropdownItem); 93 | Vue.use(Menu); 94 | Vue.use(Submenu); 95 | Vue.use(MenuItem); 96 | Vue.use(MenuItemGroup); 97 | Vue.use(Input); 98 | Vue.use(InputNumber); 99 | Vue.use(Radio); 100 | Vue.use(RadioGroup); 101 | Vue.use(RadioButton); 102 | Vue.use(Checkbox); 103 | 104 | Vue.use(CheckboxButton); 105 | Vue.use(CheckboxGroup); 106 | // Vue.use(Switch); 107 | 108 | 109 | Vue.use(Select); 110 | Vue.use(Option); 111 | Vue.use(OptionGroup); 112 | Vue.use(Button); 113 | Vue.use(ButtonGroup); 114 | Vue.use(Table); 115 | Vue.use(TableColumn); 116 | 117 | Vue.use(DatePicker); 118 | // 
Vue.use(TimeSelect); 119 | // Vue.use(TimePicker); 120 | Vue.use(Popover); 121 | Vue.use(Popconfirm ); 122 | 123 | Vue.use(Tooltip); 124 | Vue.use(Breadcrumb); 125 | Vue.use(BreadcrumbItem); 126 | 127 | Vue.use(Form); 128 | Vue.use(FormItem); 129 | Vue.use(Tree); 130 | // 131 | Vue.use(Tabs); 132 | Vue.use(TabPane); 133 | Vue.use(Tag); 134 | // 135 | Vue.use(Alert); 136 | // Vue.use(Slider); 137 | // Vue.use(Icon); 138 | Vue.use(Row); 139 | Vue.use(Col); 140 | // Vue.use(Upload); 141 | // Vue.use(Progress); 142 | // Vue.use(Spinner); 143 | // Vue.use(Badge); 144 | // Vue.use(Card); 145 | // Vue.use(Rate); 146 | // Vue.use(Steps); 147 | // Vue.use(Step); 148 | // Vue.use(Carousel); 149 | // Vue.use(CarouselItem); 150 | Vue.use(Collapse); 151 | Vue.use(CollapseItem); 152 | // Vue.use(Cascader); 153 | // Vue.use(ColorPicker); 154 | // Vue.use(Transfer); 155 | Vue.use(Container); 156 | Vue.use(Header); 157 | Vue.use(Aside); 158 | Vue.use(Main); 159 | Vue.use(Footer); 160 | /* 161 | Vue.use(Timeline); 162 | Vue.use(TimelineItem); 163 | Vue.use(Link); 164 | Vue.use(Divider); 165 | Vue.use(Image); 166 | Vue.use(Calendar); 167 | Vue.use(Backtop); 168 | Vue.use(PageHeader); 169 | Vue.use(CascaderPanel); 170 | 171 | Vue.use(Loading.directive); 172 | */ 173 | 174 | Vue.prototype.$message = Message 175 | // Vue.use(Button) 176 | -------------------------------------------------------------------------------- /src/main/webapp/src/plugins/vxe.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import XEUtils from 'xe-utils' 3 | import { 4 | VXETable, 5 | Column, 6 | Header, 7 | Table 8 | } from 'vxe-table' 9 | import zhCN from 'vxe-table/lib/locale/lang/zh-CN' 10 | 11 | // 按需加载的方式默认是不带国际化的,自定义国际化需要自行解析占位符 '{0}',例如: 12 | VXETable.setup({ 13 | i18n: (key, args) => XEUtils.toFormatString(XEUtils.get(zhCN, key), args) 14 | }) 15 | 16 | 17 | Vue.use(Column) 18 | Vue.use(Header) 19 | Vue.use(Table) 
-------------------------------------------------------------------------------- /src/main/webapp/src/router/index.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import VueRouter from 'vue-router' 3 | import Home from '../views/Home.vue' 4 | import kafkaConfig from '../components/kafka/config' 5 | import produce from '../components/kafka/operate/produce' 6 | import consume from '../components/kafka/operate/consume' 7 | import kafkaManage from '../components/kafka/manage' 8 | import zkConfig from '../components/zk/config' 9 | import zkManage from '../components/zk/manage' 10 | import redisConfig from '../components/redis/config' 11 | import redisManage from '../components/redis/manage' 12 | import donate from '../components/about/donate' 13 | import authority from '../components/about/authority' 14 | import json from '../components/tool/json' 15 | import time from '../components/tool/time' 16 | 17 | Vue.use(VueRouter) 18 | 19 | const routes = [ 20 | { 21 | path: '/', name: 'Home', component: Home, 22 | children: [ 23 | {path: '/kafka/config', name: 'kafkaConfig', component: kafkaConfig}, 24 | {path: '/kafka/produce', name: 'produce', component: produce}, 25 | {path: '/kafka/consume', name: 'consume', component: consume}, 26 | {path: '/kafka/manage', name: 'kafkaManage', component: kafkaManage}, 27 | {path: '/zk/config', name: 'zkConfig', component: zkConfig}, 28 | {path: '/zk/manage', name: 'zkManage', component: zkManage}, 29 | {path: '/redis/config', name: 'redisConfig', component: redisConfig}, 30 | {path: '/redis/manage', name: 'redisManage', component: redisManage}, 31 | {path: '/about/donate', name: 'donate', component: donate}, 32 | {path: '/about/authority', name: 'authority', component: authority}, 33 | {path: '/tool/json', name: 'json', component: json}, 34 | {path: '/tool/time', name: 'time', component: time} 35 | ] 36 | }/*, 37 | { 38 | path: '/about', 39 | name: 'About', 40 | // route 
level code-splitting 41 | // this generates a separate chunk (about.[hash].js) for this route 42 | // which is lazy-loaded when the route is visited. 43 | component: () => import(/!* webpackChunkName: "about" *!/ '../views/About.vue') 44 | }*/ 45 | ] 46 | 47 | const router = new VueRouter({ 48 | routes 49 | }) 50 | 51 | export default router 52 | -------------------------------------------------------------------------------- /src/main/webapp/src/store/index.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import Vuex from 'vuex' 3 | 4 | Vue.use(Vuex) 5 | 6 | export default new Vuex.Store({ 7 | state: { 8 | kafkaAuth: {}, 9 | zkAuth: {}, 10 | redisAuth: {} 11 | }, 12 | mutations: { 13 | setKafkaAuth(state, payload) { 14 | state.kafkaAuth = payload 15 | }, 16 | setZKAuth(state, payload) { 17 | state.zkAuth = payload 18 | }, 19 | setRedisAuth(state, payload) { 20 | state.redisAuth = payload 21 | } 22 | }, 23 | actions: {}, 24 | getters: { 25 | getKafkaAuth: (state) => (sourceId) => { 26 | let a = state.kafkaAuth[sourceId] 27 | console.log("getters...", a) 28 | return a 29 | }, 30 | getZKAuth: (state) => (sourceId) => { 31 | let a = state.zkAuth[sourceId] 32 | return a 33 | }, 34 | getRedisAuth: (state) => (sourceId) => { 35 | return state.redisAuth[sourceId] 36 | } 37 | } 38 | 39 | }) 40 | -------------------------------------------------------------------------------- /src/main/webapp/src/theme/fonts/element-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/webapp/src/theme/fonts/element-icons.ttf -------------------------------------------------------------------------------- /src/main/webapp/src/theme/fonts/element-icons.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/freakchick/kafkaUI-lite/d3bc2248d28005398e8975413835adb7f661573f/src/main/webapp/src/theme/fonts/element-icons.woff -------------------------------------------------------------------------------- /src/main/webapp/src/views/Home.vue: -------------------------------------------------------------------------------- 1 | 85 | 86 | 114 | 115 | 157 | -------------------------------------------------------------------------------- /src/main/webapp/vue.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | devServer: { 3 | proxy: "http://127.0.0.1:8889", //开发环境的跨域问题解决 4 | port: 8181 5 | } 6 | } 7 | --------------------------------------------------------------------------------