├── .github └── workflows │ ├── docker.yml │ └── kafka-map.yml ├── .gitignore ├── .mvn └── wrapper │ ├── maven-wrapper.jar │ └── maven-wrapper.properties ├── Dockerfile ├── LICENSE ├── README-zh_CN.md ├── README.md ├── base.dockerignore ├── build.sh ├── graalvm └── reflect-config.json ├── pom.xml ├── screenshot ├── brokers.png ├── clusters.png ├── consumer-message.png ├── consumer-subscription.png ├── consumers.png ├── delay-message.png ├── import-cluster.png ├── producer-message.png ├── topic-info-broker.png ├── topic-info-config.png ├── topic-info-consumer-reset-offset.png ├── topic-info-consumer.png ├── topic-info-partition.png └── topics.png ├── src ├── main │ ├── java │ │ └── cn │ │ │ └── typesafe │ │ │ └── km │ │ │ ├── KafkaMapApplication.java │ │ │ ├── config │ │ │ ├── CacheConfig.java │ │ │ ├── Constant.java │ │ │ ├── WebConfig.java │ │ │ └── WebFluxConfig.java │ │ │ ├── controller │ │ │ ├── AccountController.java │ │ │ ├── BrokerController.java │ │ │ ├── ClusterController.java │ │ │ ├── ConsumerGroupController.java │ │ │ ├── TopicController.java │ │ │ ├── dto │ │ │ │ ├── LoginAccount.java │ │ │ │ ├── PageResult.java │ │ │ │ └── PasswordChange.java │ │ │ └── handle │ │ │ │ └── GlobalExceptionHandler.java │ │ │ ├── delay │ │ │ ├── DelayMessage.java │ │ │ ├── DelayMessageHelper.java │ │ │ ├── DelayMessageListener.java │ │ │ └── DelayMessageRunner.java │ │ │ ├── entity │ │ │ ├── Cluster.java │ │ │ └── User.java │ │ │ ├── interceptor │ │ │ └── AuthInterceptor.java │ │ │ ├── repository │ │ │ ├── ClusterRepository.java │ │ │ └── UserRepository.java │ │ │ ├── service │ │ │ ├── BrokerService.java │ │ │ ├── ClusterService.java │ │ │ ├── ConsumerGroupService.java │ │ │ ├── MessageService.java │ │ │ ├── TopicService.java │ │ │ ├── UserService.java │ │ │ └── dto │ │ │ │ ├── Broker.java │ │ │ │ ├── ConsumerGroup.java │ │ │ │ ├── ConsumerGroupDescribe.java │ │ │ │ ├── ConsumerGroupInfo.java │ │ │ │ ├── ConsumerMessage.java │ │ │ │ ├── LiveMessage.java │ │ │ │ ├── 
Partition.java │ │ │ │ ├── ResetOffset.java │ │ │ │ ├── ServerConfig.java │ │ │ │ ├── Topic.java │ │ │ │ ├── TopicData.java │ │ │ │ ├── TopicForCreate.java │ │ │ │ ├── TopicInfo.java │ │ │ │ └── TopicOffset.java │ │ │ └── util │ │ │ ├── ID.java │ │ │ ├── Json.java │ │ │ ├── Networks.java │ │ │ ├── Sign.java │ │ │ └── Web.java │ └── resources │ │ ├── application.yml │ │ └── logback.xml └── test │ └── java │ └── cn │ └── typesafe │ └── km │ ├── DelayQueueTest.java │ └── LogTest.java └── web ├── .env ├── .gitignore ├── index.html ├── package.json ├── src ├── App.css ├── App.jsx ├── assets │ └── react.svg ├── common │ ├── env.js │ └── request.js ├── components │ ├── Broker.jsx │ ├── BrokerConfig.jsx │ ├── Cluster.jsx │ ├── ClusterModal.jsx │ ├── ConsumerGroup.jsx │ ├── ConsumerGroupInfo.jsx │ ├── Info.jsx │ ├── Login.css │ ├── Login.jsx │ ├── SendMessageModal.jsx │ ├── Topic.jsx │ ├── TopicBroker.jsx │ ├── TopicConfig.jsx │ ├── TopicConsumerGroup.jsx │ ├── TopicConsumerGroupOffset.jsx │ ├── TopicData.jsx │ ├── TopicDataLive.jsx │ ├── TopicInfo.jsx │ ├── TopicModal.jsx │ └── TopicPartition.jsx ├── hook │ └── withRouter.jsx ├── index.css ├── locales │ ├── en_US.js │ └── zh_CN.js ├── main.jsx ├── router.jsx └── utils │ └── utils.jsx ├── vite.config.js └── yarn.lock /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image Build 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | tags: 7 | - "v*" 8 | 9 | jobs: 10 | docker_image_build: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: read 14 | packages: write 15 | steps: 16 | - name: Get version 17 | id: get_version 18 | run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} 19 | - name: Private Actions Checkout 20 | uses: actions/checkout@v2.3.4 21 | - name: Docker Setup QEMU 22 | uses: docker/setup-qemu-action@v1.2.0 23 | - name: Docker Setup Buildx 24 | uses: docker/setup-buildx-action@v1.6.0 25 | - name: Docker Login 26 | 
uses: docker/login-action@v1.10.0 27 | with: 28 | username: ${{ secrets.DOCKERHUB_USERNAME }} 29 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 30 | - name: Build and push Docker images 31 | uses: docker/build-push-action@v2.7.0 32 | with: 33 | context: . 34 | platforms: linux/arm64,linux/amd64 35 | file: Dockerfile 36 | push: true 37 | tags: | 38 | ${{ secrets.DOCKERHUB_USERNAME }}/kafka-map:latest 39 | ${{ secrets.DOCKERHUB_USERNAME }}/kafka-map:${{ steps.get_version.outputs.VERSION }} 40 | docker_hub_description: 41 | needs: [ docker_image_build ] 42 | name: Change DockerHub Description 43 | runs-on: ubuntu-latest 44 | steps: 45 | - name: Private Actions Checkout 46 | uses: actions/checkout@v2.3.4 47 | - name: Docker Hub Description 48 | uses: peter-evans/dockerhub-description@v2 49 | with: 50 | username: ${{ secrets.DOCKERHUB_USERNAME }} 51 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 52 | repository: ${{ secrets.DOCKERHUB_USERNAME }}/kafka-map 53 | -------------------------------------------------------------------------------- /.github/workflows/kafka-map.yml: -------------------------------------------------------------------------------- 1 | name: KafkaMap Build 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | tags: 7 | - "v*" 8 | 9 | jobs: 10 | kafka-map-build: 11 | name: KafkaMap Build 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Get version 15 | id: get_version 16 | run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} 17 | - name: Private Actions Checkout 18 | uses: actions/checkout@v2.3.4 19 | - name: node Setup 20 | uses: actions/setup-node@v2 21 | with: 22 | node-version: '16' 23 | - name: npm install 24 | run: | 25 | cd web 26 | npm install --global yarn 27 | yarn 28 | - uses: actions/setup-java@v3 29 | with: 30 | distribution: 'temurin' 31 | java-version: '17' 32 | cache: 'maven' 33 | - name: Build with Maven 34 | run: | 35 | sh build.sh 36 | mkdir kafka-map 37 | cp target/kafka-map*.jar kafka-map/kafka-map.jar 38 | cp 
src/main/resources/application.yml kafka-map/application.yml 39 | tar zcvf kafka-map.tar.gz kafka-map/ 40 | - name: release 41 | uses: softprops/action-gh-release@v1 42 | with: 43 | files: kafka-map.tar.gz 44 | env: 45 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | HELP.md 2 | target/ 3 | !.mvn/wrapper/maven-wrapper.jar 4 | !**/src/main/**/target/ 5 | !**/src/test/**/target/ 6 | 7 | ### STS ### 8 | .apt_generated 9 | .classpath 10 | .factorypath 11 | .project 12 | .settings 13 | .springBeans 14 | .sts4-cache 15 | 16 | ### IntelliJ IDEA ### 17 | .idea 18 | *.iws 19 | *.iml 20 | *.ipr 21 | 22 | ### NetBeans ### 23 | /nbproject/private/ 24 | /nbbuild/ 25 | /dist/ 26 | /nbdist/ 27 | /.nb-gradle/ 28 | build/ 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### VS Code ### 33 | .vscode/ 34 | 35 | /data/ 36 | /logs/ 37 | /screenshot/ 38 | /src/main/web/build 39 | /src/main/web/node_modules 40 | -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/.mvn/wrapper/maven-wrapper.jar -------------------------------------------------------------------------------- /.mvn/wrapper/maven-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip 2 | wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar 3 | -------------------------------------------------------------------------------- /Dockerfile: 
-------------------------------------------------------------------------------- 1 | # 2 | # Build stage 3 | # 4 | FROM node:16 AS front-build 5 | 6 | WORKDIR /app 7 | 8 | COPY web . 9 | 10 | RUN yarn config set network-timeout 300000 && yarn && yarn build 11 | 12 | FROM maven:3-amazoncorretto-17 AS build 13 | 14 | WORKDIR /app 15 | 16 | COPY src src 17 | COPY pom.xml pom.xml 18 | COPY LICENSE LICENSE 19 | COPY --from=front-build /app/dist src/main/resources/static 20 | 21 | RUN mvn -f pom.xml clean package -Dmaven.test.skip=true 22 | 23 | 24 | # base image to build a JRE 25 | FROM amazoncorretto:17.0.8-alpine as corretto-jdk 26 | 27 | # required for strip-debug to work 28 | RUN apk add --no-cache binutils 29 | 30 | # Build small JRE image 31 | RUN $JAVA_HOME/bin/jlink \ 32 | --verbose \ 33 | --add-modules ALL-MODULE-PATH \ 34 | --strip-debug \ 35 | --no-man-pages \ 36 | --no-header-files \ 37 | --compress=2 \ 38 | --output /customjre 39 | 40 | # 41 | # Package stage 42 | # 43 | FROM alpine:latest 44 | 45 | ENV JAVA_HOME=/jre 46 | ENV PATH="${JAVA_HOME}/bin:${PATH}" 47 | 48 | # copy JRE from the base image 49 | COPY --from=corretto-jdk /customjre $JAVA_HOME 50 | 51 | ENV SERVER_PORT 8080 52 | ENV DEFAULT_USERNAME admin 53 | ENV DEFAULT_PASSWORD admin 54 | 55 | WORKDIR /usr/local/kafka-map 56 | 57 | COPY --from=build /app/target/*.jar kafka-map.jar 58 | COPY --from=build /app/LICENSE LICENSE 59 | 60 | EXPOSE $SERVER_PORT 61 | 62 | ENTRYPOINT ["/jre/bin/java", "-Djava.security.egd=file:/dev/./urandom", "-jar", "/usr/local/kafka-map/kafka-map.jar", "--server.port=${SERVER_PORT}", "--default.username=${DEFAULT_USERNAME}", "--default.password=${DEFAULT_PASSWORD}"] -------------------------------------------------------------------------------- /README-zh_CN.md: -------------------------------------------------------------------------------- 1 | # kafka map 2 | 3 | [English](./README.md) | 简体中文 4 | 5 | 为编程插上翅膀,给`kafka`装上导航。 6 | 7 | ## 简介 8 | 9 | 
`kafka-map`是使用`Java17`和`React`开发的一款`kafka`可视化工具。 10 | 11 | 目前支持的功能有: 12 | 13 | - 多集群管理 14 | - 集群状态监控(分区数量、副本数量、存储大小、offset) 15 | - 主题创建、删除、扩容(删除需配置delete.topic.enable = true) 16 | - broker状态监控 17 | - 消费者组查看、删除 18 | - 重置offset 19 | - 消息查询(支持String和json方式展示) 20 | - 发送消息(支持向指定的topic和partition发送字符串消息) 21 | - 延迟消息(通过扩展使kafka支持18个级别的延迟消息) 22 | 23 | ## 截图 24 | 25 | ### 添加集群 26 | 27 | ![添加集群](./screenshot/import-cluster.png) 28 | 29 | ### 集群管理 30 | 31 | ![集群管理](./screenshot/clusters.png) 32 | 33 | ### broker 34 | 35 | ![broker](./screenshot/brokers.png) 36 | 37 | ### 主题管理 38 | 39 | ![主题管理](./screenshot/topics.png) 40 | 41 | ### 消费组 42 | 43 | ![消费组](./screenshot/consumers.png) 44 | 45 | ### 查看消费组已订阅主题 46 | 47 | ![消费组详情](./screenshot/consumer-subscription.png) 48 | 49 | ### topic详情——分区 50 | 51 | ![topic详情——分区](./screenshot/topic-info-partition.png) 52 | 53 | ### topic详情——broker 54 | 55 | ![topic详情——broker](./screenshot/topic-info-broker.png) 56 | 57 | ### topic详情——消费组 58 | 59 | ![topic详情——消费组](./screenshot/topic-info-consumer.png) 60 | 61 | ### topic详情——消费组重置offset 62 | 63 | ![topic详情——消费组重置offset](./screenshot/topic-info-consumer-reset-offset.png) 64 | 65 | ### topic详情——配置信息 66 | 67 | ![topic详情——配置信息](./screenshot/topic-info-config.png) 68 | 69 | ### 生产消息 70 | 71 | ![消费消息](./screenshot/producer-message.png) 72 | 73 | ### 消费消息 74 | 75 | ![消费消息](./screenshot/consumer-message.png) 76 | 77 | ### 延迟消息 78 | 79 | ![延迟消息](./screenshot/delay-message.png) 80 | 81 | ## 协议与条款 82 | 83 | 如您需要在企业网络中使用 `kafka-map` ,建议先征求 IT 管理员的同意。下载、使用或分发 `kafka-map` 前,您必须同意 [协议](./LICENSE) 条款与限制。本项目不提供任何担保,亦不承担任何责任。 84 | 85 | ## 依赖环境 86 | 87 | - Java17 或更高版本 88 | - Apache Kafka 1.1.0 或更高版本 89 | 90 | ## docker方式安装 91 | 92 | 环境变量 93 | 94 | | 参数 | 含义 | 95 | |---|---| 96 | | DEFAULT_USERNAME | 初始登录账号 | 97 | | DEFAULT_PASSWORD | 初始登录密码 | 98 | 99 | ```shell 100 | docker run -d \ 101 | -p 8080:8080 \ 102 | -v /opt/kafka-map/data:/usr/local/kafka-map/data \ 103 | -e DEFAULT_USERNAME=admin \ 104 | -e 
DEFAULT_PASSWORD=admin \ 105 | --name kafka-map \ 106 | --restart always dushixiang/kafka-map:latest 107 | ``` 108 | 109 | ## 原生方式安装 110 | 111 | 下载 112 | ```shell 113 | wget https://github.com/dushixiang/kafka-map/releases/latest/download/kafka-map.tgz 114 | ``` 115 | 116 | 解压 117 | ```shell 118 | tar -zxvf kafka-map.tgz -C /usr/local/ 119 | ``` 120 | 121 | ### 前台运行 122 | ```shell 123 | # kafka-map文件夹 124 | cd /usr/local/kafka-map 125 | # 根据需求自行修改配置 126 | vi application.yml 127 | # 启动 128 | java -jar kafka-map.jar 129 | ``` 130 | 131 | ### 系统服务方式运行 132 | 133 | ```shell 134 | cat <> /etc/systemd/system/kafka-map.service 135 | [Unit] 136 | Description=kafka map service 137 | After=network.target 138 | 139 | [Service] 140 | WorkingDirectory=/usr/local/kafka-map 141 | ExecStart=/usr/bin/java -jar /usr/local/kafka-map/kafka-map.jar 142 | Restart=on-failure 143 | 144 | [Install] 145 | WantedBy=multi-user.target 146 | EOF 147 | ``` 148 | 149 | 重载系统服务&&设置开机启动&&启动服务&&查看状态 150 | 151 | ```shell 152 | systemctl daemon-reload 153 | systemctl enable kafka-map 154 | systemctl start kafka-map 155 | systemctl status kafka-map 156 | ``` 157 | 158 | ### 使用 159 | 160 | 接下来使用浏览器打开服务器的 `8080` 端口即可访问。 161 | 162 | ## FAQ 163 | 164 |
165 | 不想使用8080端口怎么办? 166 | 167 | 在启动命令上增加 `--server.port=1234` 即可修改端口为 `1234`。 168 | 169 | ```shell 170 | # 示例 171 | java -jar kafka-map.jar --server.port=1234 172 | ``` 173 |
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # kafka map 2 | 3 | English | [简体中文](./README-zh_CN.md) 4 | 5 | Add wings to programming and install navigation to `kafka`。 6 | 7 | ## Introduction 8 | 9 | `kafka-map` is a `kafka` visualization tool developed using `Java17` and `React`. 10 | 11 | Supported features: 12 | 13 | - Multi-cluster management. 14 | - Cluster status monitoring (number of partitions, number of replicas, storage size, offset). 15 | - Topic create, delete, expansion (delete needs to configure delete.topic.enable = true). 16 | - Broker status monitoring. 17 | - Consumer group view and delete. 18 | - Reset offset. 19 | - Topic data view and search (Support String and json display). 20 | - Send message to Topic 21 | - Delay message (supports 18 levels of delayed messages). 22 | 23 | ## Screenshot 24 | 25 | ### Import cluster 26 | 27 | ![添加集群](./screenshot/import-cluster.png) 28 | 29 | ### Clusters 30 | 31 | ![集群管理](./screenshot/clusters.png) 32 | 33 | ### Brokers 34 | 35 | ![broker](./screenshot/brokers.png) 36 | 37 | ### Topics 38 | 39 | ![主题管理](./screenshot/topics.png) 40 | 41 | ### Consumer Groups 42 | 43 | ![消费组](./screenshot/consumers.png) 44 | 45 | ### Consumer Group Subscription 46 | 47 | ![消费组详情](./screenshot/consumer-subscription.png) 48 | 49 | ### Topic Partition 50 | 51 | ![topic详情——分区](./screenshot/topic-info-partition.png) 52 | 53 | ### Topic Brokers 54 | 55 | ![topic详情——broker](./screenshot/topic-info-broker.png) 56 | 57 | ### Topic Consumer Groups 58 | 59 | ![topic详情——消费组](./screenshot/topic-info-consumer.png) 60 | 61 | ### Topic Consumer Groups Reset Offset 62 | 63 | ![topic详情——消费组重置offset](./screenshot/topic-info-consumer-reset-offset.png) 64 | 65 | ### Topic Configs 66 | 67 | ![topic详情——配置信息](./screenshot/topic-info-config.png) 68 | 69 | ### Produce Message 70 | 71 | 
![消费消息](./screenshot/producer-message.png) 72 | 73 | ### Consume Message 74 | 75 | ![消费消息](./screenshot/consumer-message.png) 76 | 77 | ### Delay Message 78 | 79 | ![延迟消息](./screenshot/delay-message.png) 80 | 81 | ## Disclaimer 82 | 83 | Developers wishing to use `kafka-map` within a corporate network are advised to seek approval from their administrators or management before using the tool. By downloading, using, or distributing `kafka-map`, you agree to the [LICENSE](./LICENSE) terms & conditions. No warranty or liability is provided. 84 | 85 | ## Required 86 | 87 | - Java17 or higher 88 | - Apache Kafka 1.1.0 or higher 89 | 90 | ## install by docker 91 | 92 | end 93 | 94 | | Param | Description | 95 | |---|---| 96 | | DEFAULT_USERNAME | Initial login username | 97 | | DEFAULT_PASSWORD | Initial login password | 98 | 99 | ```shell 100 | docker run -d \ 101 | -p 8080:8080 \ 102 | -v /opt/kafka-map/data:/usr/local/kafka-map/data \ 103 | -e DEFAULT_USERNAME=admin \ 104 | -e DEFAULT_PASSWORD=admin \ 105 | --name kafka-map \ 106 | --restart always dushixiang/kafka-map:latest 107 | ``` 108 | 109 | ## install by native 110 | 111 | download 112 | ```shell 113 | wget https://github.com/dushixiang/kafka-map/releases/latest/download/kafka-map.tgz 114 | ``` 115 | 116 | unzip 117 | ```shell 118 | tar -zxvf kafka-map.tgz -C /usr/local/ 119 | ``` 120 | 121 | ### Running in the foreground 122 | ```shell 123 | # kafka-map dir 124 | cd /usr/local/kafka-map 125 | # Modify the configuration according to your needs 126 | vi application.yml 127 | # run 128 | java -jar kafka-map.jar 129 | ``` 130 | 131 | ### Running in System service 132 | 133 | ```shell 134 | cat <> /etc/systemd/system/kafka-map.service 135 | [Unit] 136 | Description=kafka map service 137 | After=network.target 138 | 139 | [Service] 140 | WorkingDirectory=/usr/local/kafka-map 141 | ExecStart=/usr/bin/java -jar /usr/local/kafka-map/kafka-map.jar 142 | Restart=on-failure 143 | 144 | [Install] 145 | 
WantedBy=multi-user.target 146 | EOF 147 | ``` 148 | 149 | Reload system service && set boot auto-start && start service && view status 150 | 151 | ```shell 152 | systemctl daemon-reload 153 | systemctl enable kafka-map 154 | systemctl start kafka-map 155 | systemctl status kafka-map 156 | ``` 157 | 158 | ### Usage 159 | 160 | Then use the browser to open the server's port `8080` to access. 161 | 162 | ## FAQ 163 | 164 |
165 | What if I don't want to use port 8080? 166 | 167 | Add `--server.port=1234` to the startup command to modify the port to `1234`. 168 | 169 | ```shell 170 | # example 171 | java -jar kafka-map.jar --server.port=1234 172 | ``` 173 |
-------------------------------------------------------------------------------- /base.dockerignore: -------------------------------------------------------------------------------- 1 | .github 2 | .idea 3 | data 4 | logs 5 | screenshot 6 | target 7 | src/main/web/node_modules/ 8 | base.dockerignore 9 | README.md 10 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | cd web 2 | yarn && yarn build 3 | rm -rf ../src/main/resources/static 4 | mv dist ../src/main/resources/static 5 | 6 | echo "build frontend success" 7 | 8 | cd ../ 9 | mvn -f pom.xml clean package -Dmaven.test.skip=true 10 | 11 | echo "build kafka-map success" -------------------------------------------------------------------------------- /graalvm/reflect-config.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "com.github.benmanes.caffeine.cache.PSW", 4 | "allDeclaredConstructors": true 5 | }, 6 | { 7 | "name": "com.github.benmanes.caffeine.cache.PSWMS", 8 | "allDeclaredConstructors": true 9 | }, 10 | { 11 | "name": "com.github.benmanes.caffeine.cache.SSLA", 12 | "allDeclaredConstructors": true 13 | }, 14 | { 15 | "name": "com.github.benmanes.caffeine.cache.SSLMSW", 16 | "allDeclaredConstructors": true 17 | }, 18 | { 19 | "name": "com.github.benmanes.caffeine.cache.SSMSW", 20 | "allDeclaredConstructors": true 21 | }, 22 | { 23 | "name": "com.github.benmanes.caffeine.cache.SSLMSA", 24 | "allDeclaredConstructors": true 25 | }, 26 | { 27 | "name": "com.github.benmanes.caffeine.cache.PSAMS", 28 | "allDeclaredConstructors": true 29 | }, 30 | { 31 | "name": "org.hibernate.community.dialect.SQLiteDialect", 32 | "allDeclaredConstructors": true 33 | } 34 | ] -------------------------------------------------------------------------------- /pom.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | 7 | org.springframework.boot 8 | spring-boot-starter-parent 9 | 3.0.6 10 | 11 | 12 | 13 | cn.typesafe 14 | kafka-map 15 | 1.3.2 16 | kafka-map 17 | a simple kafka manager 18 | 19 | 20 | 17 21 | 22 | 23 | 24 | 25 | org.springframework.boot 26 | spring-boot-starter-data-jpa 27 | 28 | 29 | org.springframework.boot 30 | spring-boot-starter-web 31 | 32 | 33 | 34 | org.projectlombok 35 | lombok 36 | true 37 | 38 | 39 | 40 | org.springframework.boot 41 | spring-boot-starter-test 42 | test 43 | 44 | 45 | 46 | org.xerial 47 | sqlite-jdbc 48 | 3.39.4.1 49 | 50 | 51 | 52 | org.hibernate.orm 53 | hibernate-community-dialects 54 | 55 | 56 | 57 | org.springframework.boot 58 | spring-boot-starter-validation 59 | 60 | 61 | 62 | compile 63 | com.google.guava 64 | guava 65 | 31.1-jre 66 | 67 | 68 | compile 69 | com.github.ben-manes.caffeine 70 | caffeine 71 | 3.1.5 72 | 73 | 74 | compile 75 | org.springframework.security 76 | spring-security-crypto 77 | 6.0.2 78 | 79 | 80 | compile 81 | org.apache.kafka 82 | kafka-clients 83 | 3.4.0 84 | 85 | 86 | 87 | org.springframework.boot 88 | spring-boot-starter-webflux 89 | 90 | 91 | 92 | 93 | 94 | 95 | spring-repo 96 | Spring Repository 97 | https://repo.spring.io/release 98 | 99 | 100 | 101 | 102 | 103 | 104 | org.graalvm.buildtools 105 | native-maven-plugin 106 | 107 | false 108 | 109 | -H:ReflectionConfigurationFiles=./graalvm/reflect-config.json 110 | 111 | 112 | 113 | 114 | org.springframework.boot 115 | spring-boot-maven-plugin 116 | 117 | 118 | org.apache.maven.plugins 119 | maven-compiler-plugin 120 | 121 | 17 122 | 17 123 | 124 | 125 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /screenshot/brokers.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/brokers.png -------------------------------------------------------------------------------- /screenshot/clusters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/clusters.png -------------------------------------------------------------------------------- /screenshot/consumer-message.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/consumer-message.png -------------------------------------------------------------------------------- /screenshot/consumer-subscription.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/consumer-subscription.png -------------------------------------------------------------------------------- /screenshot/consumers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/consumers.png -------------------------------------------------------------------------------- /screenshot/delay-message.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/delay-message.png -------------------------------------------------------------------------------- /screenshot/import-cluster.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/import-cluster.png -------------------------------------------------------------------------------- /screenshot/producer-message.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/producer-message.png -------------------------------------------------------------------------------- /screenshot/topic-info-broker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topic-info-broker.png -------------------------------------------------------------------------------- /screenshot/topic-info-config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topic-info-config.png -------------------------------------------------------------------------------- /screenshot/topic-info-consumer-reset-offset.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topic-info-consumer-reset-offset.png -------------------------------------------------------------------------------- /screenshot/topic-info-consumer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topic-info-consumer.png -------------------------------------------------------------------------------- /screenshot/topic-info-partition.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topic-info-partition.png -------------------------------------------------------------------------------- /screenshot/topics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dushixiang/kafka-map/cd9f6261df172bfa37ef776f3666447e75a39386/screenshot/topics.png -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/KafkaMapApplication.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km; 2 | 3 | import cn.typesafe.km.service.ClusterService; 4 | import cn.typesafe.km.service.UserService; 5 | import jakarta.annotation.Resource; 6 | import lombok.SneakyThrows; 7 | import lombok.extern.slf4j.Slf4j; 8 | import org.springframework.boot.CommandLineRunner; 9 | import org.springframework.boot.SpringApplication; 10 | import org.springframework.boot.autoconfigure.SpringBootApplication; 11 | import org.springframework.context.annotation.Configuration; 12 | import org.springframework.scheduling.annotation.EnableScheduling; 13 | 14 | import java.nio.file.Files; 15 | import java.nio.file.Path; 16 | import java.nio.file.Paths; 17 | 18 | @Configuration 19 | //@RegisterReflectionForBinding({SQLiteDialect.class,}) 20 | @Slf4j 21 | @EnableScheduling 22 | @SpringBootApplication 23 | public class KafkaMapApplication implements CommandLineRunner { 24 | 25 | public static void main(String[] args) { 26 | initDatabaseDir(); 27 | SpringApplication.run(KafkaMapApplication.class, args); 28 | } 29 | 30 | @SneakyThrows 31 | public static void initDatabaseDir() { 32 | Path dbPath = Paths.get("data"); 33 | if (!Files.exists(dbPath)) { 34 | Files.createDirectory(dbPath); 35 | log.debug("create dir: {}", dbPath); 36 | } 37 | } 38 | 39 | @Resource 40 | private UserService userService; 41 | @Resource 42 | private 
ClusterService clusterService; 43 | 44 | @Override 45 | public void run(String... args) throws Exception { 46 | userService.initUser(); 47 | clusterService.restore(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/config/CacheConfig.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.config; 2 | 3 | import cn.typesafe.km.entity.User; 4 | import com.github.benmanes.caffeine.cache.Cache; 5 | import com.github.benmanes.caffeine.cache.Caffeine; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | 9 | import java.util.concurrent.TimeUnit; 10 | 11 | /** 12 | * @author dushixiang 13 | * @date 2021/6/12 1:58 下午 14 | */ 15 | @Configuration 16 | public class CacheConfig { 17 | 18 | @Bean(name = "tokenManager") 19 | public Cache tokenManager() { 20 | return Caffeine.newBuilder() 21 | .maximumSize(100) 22 | .expireAfterWrite(2, TimeUnit.HOURS) 23 | .build(); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/config/Constant.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.config; 2 | 3 | /** 4 | * @author dushixiang 5 | * @date 2021/4/10 3:17 下午 6 | */ 7 | public final class Constant { 8 | public static final String CONSUMER_GROUP_ID = "kafka-map"; 9 | 10 | public static final String DELAY_MESSAGE_ENABLED = "enabled"; 11 | public static final String DELAY_MESSAGE_DISABLED = "disabled"; 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/config/WebConfig.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.config; 2 | 3 | import cn.typesafe.km.interceptor.AuthInterceptor; 4 | import 
lombok.SneakyThrows; 5 | import org.springframework.context.annotation.Bean; 6 | import org.springframework.context.annotation.Configuration; 7 | import org.springframework.web.servlet.config.annotation.CorsRegistry; 8 | import org.springframework.web.servlet.config.annotation.InterceptorRegistry; 9 | import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; 10 | 11 | import jakarta.annotation.PostConstruct; 12 | import jakarta.annotation.Resource; 13 | import java.nio.file.Files; 14 | import java.nio.file.Path; 15 | import java.nio.file.Paths; 16 | 17 | /** 18 | * @author dushixiang 19 | * @date 2021/3/28 5:37 下午 20 | */ 21 | @Configuration 22 | public class WebConfig { 23 | 24 | @Resource 25 | private AuthInterceptor authInterceptor; 26 | 27 | @Bean 28 | public WebMvcConfigurer corsConfigurer() { 29 | return new WebMvcConfigurer() { 30 | 31 | @Override 32 | public void addCorsMappings(CorsRegistry registry) { 33 | registry.addMapping("/**") 34 | .allowedOrigins("*") 35 | .allowCredentials(false) 36 | .allowedMethods("GET", "POST", "PUT", "DELETE", "OPTIONS") 37 | .allowedHeaders("*") 38 | .exposedHeaders("*"); 39 | } 40 | 41 | @Override 42 | public void addInterceptors(InterceptorRegistry registry) { 43 | registry.addInterceptor(authInterceptor) 44 | .addPathPatterns("/info", "change-password", "/brokers/**", "/clusters/**", "/consumerGroups/**", "/topics/**"); 45 | } 46 | }; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/config/WebFluxConfig.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.config; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.web.reactive.config.CorsRegistry; 5 | import org.springframework.web.reactive.config.WebFluxConfigurer; 6 | 7 | @Configuration 8 | public class WebFluxConfig implements WebFluxConfigurer { 9 | 10 | @Override 
public void addCorsMappings(CorsRegistry registry) {
    // Mirror of the MVC CORS policy for the reactive (WebFlux) endpoints,
    // e.g. the SSE live-data stream.
    registry.addMapping("/**")
            .allowedOrigins("*")
            .allowCredentials(false)
            .allowedMethods("GET", "POST", "PUT", "DELETE", "OPTIONS")
            .allowedHeaders("*")
            .exposedHeaders("*");
}
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/AccountController.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller;

import cn.typesafe.km.controller.dto.LoginAccount;
import cn.typesafe.km.controller.dto.PasswordChange;
import cn.typesafe.km.entity.User;
import cn.typesafe.km.service.UserService;
import cn.typesafe.km.util.Web;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

import jakarta.annotation.Resource;
import java.util.Map;

/**
 * Session endpoints: login/logout, current-user lookup and password changes.
 * The session token is carried by the X-Auth-Token header (see AuthInterceptor).
 *
 * @author dushixiang
 * @date 2021/6/12 2:33 PM
 */
@RestController
public class AccountController {

    @Resource
    private UserService userService;

    // Authenticates the account and returns the newly issued token.
    @PostMapping("/login")
    public Map<String, String> login(@RequestBody LoginAccount loginAccount) {
        String token = userService.login(loginAccount);
        return Map.of(
                "token", token
        );
    }

    // Invalidates the caller's token.
    @PostMapping("/logout")
    public void logout() {
        String token = Web.getToken();
        userService.logout(token);
    }

    // Returns the user bound to the caller's token.
    @GetMapping("/info")
    public User info() {
        String token = Web.getToken();
        return userService.info(token);
    }

    // Changes the password, then invalidates the token to force re-login.
    @PostMapping("/change-password")
    public void changePassword(@RequestBody PasswordChange passwordChange) {
        String token = Web.getToken();
        userService.changePassword(token, passwordChange);
        userService.logout(token);
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/BrokerController.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller;

import cn.typesafe.km.service.BrokerService;
import cn.typesafe.km.service.dto.Broker;
import cn.typesafe.km.service.dto.ServerConfig;
import org.springframework.web.bind.annotation.*;

import jakarta.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * REST endpoints for inspecting and tuning Kafka brokers.
 *
 * @author dushixiang
 * @date 2021/4/2 20:42 PM
 */
@RequestMapping("/brokers")
@RestController
public class BrokerController {

    @Resource
    private BrokerService brokerService;

    // Lists every broker of the given cluster (null = no topic filter).
    @GetMapping("")
    public List<Broker> brokers(@RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return brokerService.brokers(null, clusterId);
    }

    // Reads the configuration entries of one broker.
    @GetMapping("/{id}/configs")
    public List<ServerConfig> configs(@PathVariable String id, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return brokerService.getConfigs(id, clusterId);
    }

    // Applies the submitted configuration overrides to one broker.
    @PutMapping("/{id}/configs")
    public void updateConfigs(@PathVariable String id, @RequestParam String clusterId, @RequestBody Map<String, String> configs) throws ExecutionException, InterruptedException {
        brokerService.setConfigs(id, clusterId, configs);
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/ClusterController.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller;

import cn.typesafe.km.controller.dto.PageResult;
import cn.typesafe.km.entity.Cluster;
import
cn.typesafe.km.repository.ClusterRepository;
import cn.typesafe.km.service.BrokerService;
import cn.typesafe.km.service.ClusterService;
import cn.typesafe.km.service.ConsumerGroupService;
import cn.typesafe.km.service.TopicService;
import org.springframework.data.domain.*;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import jakarta.annotation.Resource;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;

/**
 * REST endpoints for managing registered Kafka clusters.
 *
 * @author dushixiang
 * @date 2021/3/27 20:15 AM
 */
@RequestMapping("/clusters")
@RestController
public class ClusterController {

    @Resource
    private ClusterService clusterService;
    @Resource
    private TopicService topicService;
    @Resource
    private BrokerService brokerService;
    @Resource
    private ConsumerGroupService consumerGroupService;
    @Resource
    private ClusterRepository clusterRepository;

    /**
     * Pages through registered clusters, optionally filtered by a
     * "name contains" match, newest first.
     */
    @GetMapping("/paging")
    public PageResult<Cluster> page(@RequestParam(defaultValue = "1") Integer pageIndex,
                                    @RequestParam(defaultValue = "10") Integer pageSize,
                                    String name) throws ExecutionException, InterruptedException {

        // pageIndex is 1-based in the API but 0-based in Spring Data.
        PageRequest pageRequest = PageRequest.of(pageIndex - 1, pageSize, Sort.Direction.DESC, "created");

        ExampleMatcher exampleMatcher = ExampleMatcher.matching()
                .withMatcher("name", ExampleMatcher.GenericPropertyMatchers.contains());

        Cluster query = new Cluster();
        query.setName(name);

        Example<Cluster> example = Example.of(query, exampleMatcher);
        Page<Cluster> page = clusterRepository.findAll(example, pageRequest);

        return PageResult.of(page.getTotalElements(), page.getContent());
    }

    // Cluster detail enriched with live topic/broker/consumer-group counts.
    @GetMapping("/{clusterId}")
    public Cluster detail(@PathVariable String clusterId) throws ExecutionException, InterruptedException {
        Cluster cluster = clusterService.findById(clusterId);
        Set<String> topicNames = topicService.topicNames(cluster.getId());
        cluster.setTopicCount(topicNames.size());
        cluster.setBrokerCount(brokerService.countBroker(cluster.getId()));
        cluster.setConsumerCount(consumerGroupService.countConsumerGroup(cluster.getId()));
        return cluster;
    }

    // Unpaged listing of all registered clusters.
    @GetMapping("")
    public List<Cluster> items() {
        return clusterRepository.findAll();
    }

    @PostMapping("")
    @ResponseStatus(value = HttpStatus.CREATED)
    public void create(@RequestBody Cluster cluster) throws ExecutionException, InterruptedException {
        clusterService.create(cluster);
    }

    // Deletes one or more clusters; ids arrive comma-separated in the path.
    @ResponseStatus(value = HttpStatus.NO_CONTENT)
    @DeleteMapping("/{ids}")
    public void delete(@PathVariable String ids) {
        clusterService.deleteByIdIn(Arrays.asList(ids.split(",")));
    }

    // Renames a cluster; only the name field of the body is used.
    @PutMapping("/{clusterId}")
    public void updateName(@PathVariable String clusterId, @RequestBody Cluster cluster) {
        clusterService.updateNameById(clusterId, cluster.getName());
    }

    @PostMapping("/{clusterId}/enableDelayMessage")
    public void enableDelayMessage(@PathVariable String clusterId) {
        clusterService.enableDelayMessage(clusterId);
    }

    // Disabling also removes the internal delay-message topics.
    @PostMapping("/{clusterId}/disableDelayMessage")
    public void disableDelayMessage(@PathVariable String clusterId) {
        clusterService.disableDelayMessage(clusterId);
        topicService.deleteDelayMessageTopics(clusterId);
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/ConsumerGroupController.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller;

import cn.typesafe.km.service.ConsumerGroupService;
import cn.typesafe.km.service.dto.ConsumerGroup;
import cn.typesafe.km.service.dto.ConsumerGroupDescribe;
import
cn.typesafe.km.service.dto.ConsumerGroupInfo;
import org.springframework.web.bind.annotation.*;

import jakarta.annotation.Resource;
import java.util.List;
import java.util.concurrent.ExecutionException;

/**
 * REST endpoints for inspecting and deleting Kafka consumer groups.
 *
 * @author dushixiang
 * @date 2021/3/27 7:11 PM
 */
@RequestMapping("/consumerGroups")
@RestController
public class ConsumerGroupController {

    @Resource
    private ConsumerGroupService consumerGroupService;

    // Summary information for one group.
    @GetMapping("/{groupId}")
    public ConsumerGroupInfo info(@PathVariable String groupId, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return consumerGroupService.info(clusterId, groupId);
    }

    // Per-member/partition assignment details for one group.
    @GetMapping("/{groupId}/describe")
    public List<ConsumerGroupDescribe> describe(@PathVariable String groupId, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return consumerGroupService.describe(clusterId, groupId);
    }

    // Lists the groups of a cluster, optionally filtered by groupId.
    @GetMapping("")
    public List<ConsumerGroup> list(@RequestParam String clusterId, String groupId) throws ExecutionException, InterruptedException {
        return consumerGroupService.consumerGroup(clusterId, groupId);
    }

    @DeleteMapping("/{groupId}")
    public void delete(@PathVariable String groupId, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        consumerGroupService.delete(clusterId, groupId);
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/TopicController.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller;

import cn.typesafe.km.service.BrokerService;
import cn.typesafe.km.service.ConsumerGroupService;
import cn.typesafe.km.service.MessageService;
import cn.typesafe.km.service.TopicService;
import cn.typesafe.km.service.dto.*;
import
org.springframework.http.codec.ServerSentEvent;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import jakarta.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

/**
 * REST endpoints for topics: CRUD, partitions, per-group offsets, message
 * browsing/producing and configuration.
 *
 * @author dushixiang
 * @date 2021/3/27 20:02
 */
@RequestMapping("/topics")
@RestController
public class TopicController {

    @Resource
    private TopicService topicService;
    @Resource
    private BrokerService brokerService;
    @Resource
    private ConsumerGroupService consumerGroupService;
    @Resource
    private MessageService messageService;

    @GetMapping("/names")
    public Set<String> topicNames(@RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return topicService.topicNames(clusterId);
    }

    // Lists topics, optionally filtered by name.
    @GetMapping("")
    public List<Topic> topics(@RequestParam String clusterId, String name) throws ExecutionException, InterruptedException {
        return topicService.topics(clusterId, name);
    }

    @PostMapping("")
    public void create(@RequestBody TopicForCreate topic) throws ExecutionException, InterruptedException {
        topicService.createTopic(topic);
    }

    // Deletes the listed topics in one call.
    @PostMapping("/batch-delete")
    public void delete(@RequestBody ArrayList<String> topics, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        topicService.deleteTopic(clusterId, topics);
    }

    @GetMapping("/{topic}")
    public TopicInfo info(@PathVariable String topic, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return topicService.info(clusterId, topic);
    }

    @GetMapping("/{topic}/partitions")
    public List<Partition> partitions(@PathVariable String topic, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return topicService.partitions(topic, clusterId);
    }

    // Brokers hosting at least one partition of this topic.
    @GetMapping("/{topic}/brokers")
    public List<Broker> brokers(@PathVariable String topic, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return brokerService.brokers(Set.of(topic), clusterId);
    }

    @GetMapping("/{topic}/consumerGroups")
    public List<ConsumerGroup> consumerGroups(@PathVariable String topic, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return consumerGroupService.consumerGroups(topic, clusterId);
    }

    // Committed offsets of one group on this topic, per partition.
    @GetMapping("/{topic}/consumerGroups/{groupId}/offset")
    public List<TopicOffset> offset(@PathVariable String topic, @PathVariable String groupId, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return consumerGroupService.offset(topic, groupId, clusterId);
    }

    // Rewinds/advances a group's committed offsets.
    @PutMapping("/{topic}/consumerGroups/{groupId}/offset")
    public void resetOffset(@PathVariable String topic, @PathVariable String groupId, @RequestParam String clusterId, @RequestBody ResetOffset resetOffset) throws ExecutionException, InterruptedException {
        consumerGroupService.resetOffset(topic, groupId, clusterId, resetOffset);
    }

    // Grows the topic to totalCount partitions (Kafka cannot shrink).
    @PostMapping("/{topic}/partitions")
    public void createPartitions(@PathVariable String topic, @RequestParam String clusterId, @RequestParam Integer totalCount) throws ExecutionException, InterruptedException {
        topicService.createPartitions(clusterId, topic, totalCount);
    }

    // Reads up to `count` messages from one partition starting at `offset`,
    // optionally filtered on key/value substrings.
    @GetMapping("/{topic}/data")
    public List<ConsumerMessage> data(@PathVariable String topic, @RequestParam String clusterId,
                                      @RequestParam(defaultValue = "0") Integer partition,
                                      @RequestParam(defaultValue = "0") Long offset,
                                      @RequestParam(defaultValue = "100") Integer count,
                                      String keyFilter,
                                      String valueFilter) {
        return messageService.data(clusterId, topic, partition, offset, count, keyFilter, valueFilter);
    }

    // Server-sent-events stream of new messages on one partition.
    @GetMapping("/{topic}/data/live")
    public Flux<ServerSentEvent<String>> liveData(@PathVariable String topic, @RequestParam String clusterId,
                                                  @RequestParam(defaultValue = "0") Integer partition,
                                                  String keyFilter,
                                                  String valueFilter) {
        return messageService.liveData(clusterId, topic, partition, keyFilter, valueFilter);
    }

    // Produces a message; returns the offset it was written at.
    @PostMapping("/{topic}/data")
    public long data(@PathVariable String topic, @RequestParam String clusterId, @RequestBody TopicData topicData) {
        return messageService.sendData(clusterId, topic, topicData);
    }

    @GetMapping("/{topic}/configs")
    public List<ServerConfig> getConfigs(@PathVariable String topic, @RequestParam String clusterId) throws ExecutionException, InterruptedException {
        return topicService.getConfigs(topic, clusterId);
    }

    @PutMapping("/{topic}/configs")
    public void updateConfigs(@PathVariable String topic, @RequestParam String clusterId, @RequestBody Map<String, String> configs) throws ExecutionException, InterruptedException {
        topicService.setConfigs(topic, clusterId, configs);
    }

}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/dto/LoginAccount.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller.dto;

import lombok.Data;

/**
 * Request body for POST /login.
 *
 * @author dushixiang
 * @date 2021/6/10 2:38 PM
 */
@Data
public class LoginAccount {
    private String username;
    private String password;
    // when true the issued token gets a longer lifetime
    private Boolean remember;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/dto/PageResult.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller.dto;

import lombok.Data;

import java.util.List;

/**
 * @author dushixiang
 *
@date 2021/3/27 11:53
 */
@Data
public class PageResult<T> {
    // the page of items
    private List<T> items;
    // total number of matching rows across all pages
    private long total;

    /** Convenience factory bundling a page of items with the total count. */
    public static <T> PageResult<T> of(long total, List<T> items) {
        PageResult<T> pageResult = new PageResult<>();
        pageResult.setItems(items);
        pageResult.setTotal(total);
        return pageResult;
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/dto/PasswordChange.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller.dto;

import lombok.Data;

/**
 * Request body for POST /change-password.
 *
 * @author dushixiang
 * @date 2021/6/12 5:11 PM
 */
@Data
public class PasswordChange {
    private String oldPassword;
    private String newPassword;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/controller/handle/GlobalExceptionHandler.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.controller.handle;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.TopicExistsException;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;

import java.util.HashMap;
import java.util.Map;

/**
 * Maps Kafka client exceptions to a uniform {"message": ...} JSON error body
 * with an appropriate HTTP status.
 */
@Slf4j
@ControllerAdvice
public class GlobalExceptionHandler {

    /** Builds the uniform error payload every handler returns. */
    private Map<String, String> payload(String message) {
        Map<String, String> data = new HashMap<>();
        data.put("message", message);
        return data;
    }

    // 400: topic already exists on create.
    @ResponseStatus(value = HttpStatus.BAD_REQUEST)
    @ResponseBody
    @ExceptionHandler(value = TopicExistsException.class)
    public Map<String, String> topicExistsException(TopicExistsException e) {
        return payload(e.getMessage());
    }

    // 400: broker rejected the request as malformed.
    @ResponseStatus(value = HttpStatus.BAD_REQUEST)
    @ResponseBody
    @ExceptionHandler(value = InvalidRequestException.class)
    public Map<String, String> invalidRequestException(InvalidRequestException e) {
        return payload(e.getMessage());
    }

    // 400: bad arguments from the caller.
    @ResponseStatus(value = HttpStatus.BAD_REQUEST)
    @ResponseBody
    @ExceptionHandler(value = IllegalArgumentException.class)
    public Map<String, String> illegalArgumentException(IllegalArgumentException e) {
        return payload(e.getMessage());
    }

    // 504: the Kafka cluster did not answer in time; logged for diagnosis.
    @ResponseStatus(value = HttpStatus.GATEWAY_TIMEOUT)
    @ResponseBody
    @ExceptionHandler(value = TimeoutException.class)
    public Map<String, String> timeoutException(TimeoutException e) {
        log.error("kafka timeout", e);
        return payload(e.getMessage());
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/delay/DelayMessage.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.delay;

import lombok.Data;

/**
 * Envelope posted to the external "delay-message" topic; after the configured
 * delay elapses, key/value are forwarded to the target topic.
 */
@Data
public class DelayMessage {
    // delay level, 18 levels in total (index into the internal level topics)
    private int level;
    // target topic to deliver to
    private String topic;
    // target message key
    private String key;
    // target message value
    private String value;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/delay/DelayMessageHelper.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.delay;

import com.google.common.util.concurrent.ThreadFactoryBuilder; 4 | 5 | import java.util.ArrayList; 6 | import java.util.LinkedHashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | import java.util.concurrent.ExecutorService; 10 | import java.util.concurrent.Executors; 11 | 12 | 13 | public class DelayMessageHelper { 14 | 15 | private final Map levels = new LinkedHashMap<>(); 16 | private final String servers; 17 | private final String groupId; 18 | private ExecutorService executorService; 19 | private final List runners = new ArrayList<>(); 20 | ; 21 | private DelayMessageListener delayMessageListener; 22 | 23 | public DelayMessageHelper(String servers, String groupId) { 24 | this.servers = servers; 25 | this.groupId = groupId; 26 | 27 | levels.put("__delay-seconds-1", 1000L); 28 | levels.put("__delay-seconds-5", 1000L * 5); 29 | levels.put("__delay-seconds-10", 1000L * 10); 30 | levels.put("__delay-seconds-30", 1000L * 30); 31 | levels.put("__delay-minutes-1", 1000L * 60); 32 | levels.put("__delay-minutes-2", 1000L * 60 * 2); 33 | levels.put("__delay-minutes-3", 1000L * 60 * 3); 34 | levels.put("__delay-minutes-4", 1000L * 60 * 4); 35 | levels.put("__delay-minutes-5", 1000L * 60 * 5); 36 | levels.put("__delay-minutes-6", 1000L * 60 * 6); 37 | levels.put("__delay-minutes-7", 1000L * 60 * 7); 38 | levels.put("__delay-minutes-8", 1000L * 60 * 8); 39 | levels.put("__delay-minutes-9", 1000L * 60 * 9); 40 | levels.put("__delay-minutes-10", 1000L * 60 * 10); 41 | levels.put("__delay-minutes-20", 1000L * 60 * 20); 42 | levels.put("__delay-minutes-30", 1000L * 60 * 30); 43 | levels.put("__delay-hours-1", 1000L * 60 * 60); 44 | levels.put("__delay-hours-2", 1000L * 60 * 60 * 2); 45 | } 46 | 47 | public void start() { 48 | this.executorService = Executors.newFixedThreadPool(levels.size() + 1, new ThreadFactoryBuilder().setNameFormat("level-%d").build()); 49 | 50 | for (Map.Entry entry : levels.entrySet()) { 51 | String topic = entry.getKey(); 52 | Long delayTime 
= entry.getValue(); 53 | DelayMessageRunner delayMessageRunner = new DelayMessageRunner(servers, groupId, topic, delayTime); 54 | this.executorService.execute(delayMessageRunner); 55 | this.runners.add(delayMessageRunner); 56 | } 57 | this.delayMessageListener = new DelayMessageListener(servers, groupId, new ArrayList<>(this.levels.keySet())); 58 | this.executorService.execute(this.delayMessageListener); 59 | } 60 | 61 | public void stop() { 62 | for (DelayMessageRunner runner : this.runners) { 63 | runner.shutdown(); 64 | } 65 | this.runners.clear(); 66 | 67 | if (this.delayMessageListener != null) { 68 | this.delayMessageListener.shutdown(); 69 | } 70 | if (this.executorService != null) { 71 | this.executorService.shutdown(); 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/delay/DelayMessageListener.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.delay; 2 | 3 | import cn.typesafe.km.util.Json; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.apache.kafka.clients.consumer.ConsumerConfig; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.clients.consumer.ConsumerRecords; 8 | import org.apache.kafka.clients.consumer.KafkaConsumer; 9 | import org.apache.kafka.clients.producer.KafkaProducer; 10 | import org.apache.kafka.clients.producer.ProducerConfig; 11 | import org.apache.kafka.clients.producer.ProducerRecord; 12 | import org.apache.kafka.common.serialization.StringDeserializer; 13 | import org.apache.kafka.common.serialization.StringSerializer; 14 | 15 | import java.time.Duration; 16 | import java.util.Collections; 17 | import java.util.List; 18 | import java.util.Properties; 19 | 20 | 21 | @Slf4j 22 | public class DelayMessageListener implements Runnable { 23 | 24 | private final KafkaConsumer consumer; 25 | private final KafkaProducer producer; 26 | private volatile 
boolean running = true;
    private final List<String> levelTopics;

    public DelayMessageListener(String servers, String groupId, List<String> levelTopics) {
        this.levelTopics = levelTopics;
        this.consumer = createConsumer(servers, groupId);
        this.producer = createProducer(servers);
    }

    // Consumer for the public "delay-message" entry topic; auto-commit,
    // read_committed so aborted transactional writes are never dispatched.
    private KafkaConsumer<String, String> createConsumer(String servers, String groupId) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "5000");
        return new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
    }

    private KafkaProducer<String, String> createProducer(String servers) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }

    /**
     * Dispatch loop: consumes DelayMessage envelopes from "delay-message" and
     * routes each one either directly to its target topic (out-of-range level)
     * or to the matching internal level topic, keeping the original JSON as
     * the value so the level runner can forward it later.
     */
    @Override
    public void run() {
        consumer.subscribe(Collections.singletonList("delay-message"));
        do {
            ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(200));
            for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                String value = consumerRecord.value();
                log.debug("pulled delay message: {}", value);
                try {
                    DelayMessage delayMessage = Json.toJavaObject(value, DelayMessage.class);
                    if (delayMessage.getLevel() < 0 || delayMessage.getLevel() >= levelTopics.size()) {
                        // Unknown level: deliver immediately, no delay applied.
                        ProducerRecord<String, String> record = new ProducerRecord<>(delayMessage.getTopic(), delayMessage.getKey(), delayMessage.getValue());
                        producer.send(record);
                        log.debug("send normal message to user topic: {}", delayMessage.getTopic());
                    } else {
                        // Park the raw envelope on the level topic; the runner
                        // for that level forwards it once it is due.
                        String internalDelayTopic = levelTopics.get(delayMessage.getLevel());
                        ProducerRecord<String, String> record = new ProducerRecord<>(internalDelayTopic, null, value);
                        producer.send(record);
                        log.debug("send delay message to internal topic: {}", internalDelayTopic);
                    }
                } catch (Exception e) {
                    log.error("解析消息失败", e);
                }
            }
        } while (running);

        consumer.close();
        log.debug("close external topic consumer");
        producer.close();
        log.debug("close external topic producer");
    }

    // Called from another thread; the loop exits after the current poll.
    public void shutdown() {
        this.running = false;
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/delay/DelayMessageRunner.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.delay;

import cn.typesafe.km.util.Json;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.ExecutionException;

@Slf4j
public class DelayMessageRunner implements Runnable {

    private final KafkaConsumer<String, String> consumer;
    private final KafkaProducer<String, String> producer;
    private final Object lock = new Object();

    private final String topic;
    private final Long delayTime;
    private final
Timer timer = new Timer();
    private volatile boolean running = true;

    public DelayMessageRunner(String servers, String groupId, String topic, Long delayTime) {
        this.topic = topic;
        this.delayTime = delayTime;
        this.consumer = createConsumer(servers, groupId);
        this.producer = createProducer(servers);

        consumer.subscribe(Collections.singletonList(topic));

        // Every 100 ms: resume any paused partitions and wake the worker so it
        // re-checks whether the head message has become due.
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                synchronized (lock) {
                    consumer.resume(consumer.paused());
                    lock.notify();
                }
            }
        }, 0, 100);
    }

    // Manual commit: an offset is committed only after the message has been
    // forwarded successfully (at-least-once delivery).
    private KafkaConsumer<String, String> createConsumer(String servers, String groupId) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // must also process earlier, not-yet-due messages
        props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "5000");
        return new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer());
    }

    KafkaProducer<String, String> createProducer(String servers) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(props);
    }

    public void shutdown() {
        this.timer.cancel();
        this.running = false;
        // manually wake the blocked worker thread so it can exit the loop
        synchronized (lock) {
            this.lock.notify();
        }
    }

    /**
     * Worker loop for one delay level. Messages in a level topic are naturally
     * ordered by timestamp, so as soon as one message is not yet due the
     * partition is paused, the position rewound, and the thread sleeps until
     * the timer wakes it again.
     */
    @SneakyThrows
    @Override
    public void run() {
        do {
            synchronized (lock) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(200));

                if (consumerRecords.isEmpty()) {
                    // nothing resumable/pending; wait for the timer tick
                    lock.wait();
                    continue;
                }

                log.debug("pulled {} messages form {}.", consumerRecords.count(), topic);
                boolean timed = false;
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    long timestamp = consumerRecord.timestamp();
                    TopicPartition topicPartition = new TopicPartition(consumerRecord.topic(), consumerRecord.partition());
                    if (timestamp + delayTime < System.currentTimeMillis()) {
                        // due: unwrap the envelope and forward to the app topic
                        String value = consumerRecord.value();
                        DelayMessage delayMessage;
                        try {
                            delayMessage = Json.toJavaObject(value, DelayMessage.class);
                        } catch (Exception e) {
                            log.warn("Failed to parse json", e);
                            continue;
                        }
                        String appTopic = delayMessage.getTopic();
                        String appKey = delayMessage.getKey();
                        String appValue = delayMessage.getValue();

                        // send to application topic
                        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(appTopic, appKey, appValue);
                        try {
                            RecordMetadata recordMetadata = producer.send(producerRecord).get();
                            log.debug("send normal message to user topic={}, key={}, value={}, offset={}", appTopic, appKey, appValue, recordMetadata.offset());
                            // success. commit message
                            OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(consumerRecord.offset() + 1);
                            HashMap<TopicPartition, OffsetAndMetadata> metadataHashMap = new HashMap<>();
                            metadataHashMap.put(topicPartition, offsetAndMetadata);
                            consumer.commitSync(metadataHashMap);
                        } catch (ExecutionException e) {
                            // send failed: rewind and retry this record later
                            consumer.pause(Collections.singletonList(topicPartition));
                            consumer.seek(topicPartition, consumerRecord.offset());
                            timed = true;
                            break;
                        }
                    } else {
                        // not yet due: later records are even younger, so pause
                        // the partition, rewind, and wait for the timer
                        consumer.pause(Collections.singletonList(topicPartition));
                        consumer.seek(topicPartition, consumerRecord.offset());
                        timed = true;
                        break;
                    }
                }

                if (timed) {
                    lock.wait();
                }
            }
        } while (running);

        this.consumer.close();
        log.debug("close internal topic consumer");
        this.producer.close();
        log.debug("close internal topic producer");
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/entity/Cluster.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.entity;

import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;

import jakarta.persistence.*;
import java.util.Date;

/**
 * Persistent registration of one Kafka cluster and its connection settings.
 *
 * @author dushixiang
 * @date 2021/3/27 9:45 PM
 */
@Table
@Entity
@Data
public class Cluster {

    @Column(length = 36)
    @Id
    private String id;
    @Column(length = 200)
    private String name;
    // bootstrap servers, comma-separated host:port pairs
    @Column(length = 500)
    private String servers;
    @Column(length = 20)
    private String delayMessageStatus;
    private String controller;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date created;

    // optional SASL/TLS connection settings
    @Column(length = 200)
    private String securityProtocol;
    @Column(length = 200)
    private String saslMechanism;
    @Column(length = 200)
private String authUsername;
    @Column(length = 200)
    private String authPassword;

    // live counters filled in by ClusterController.detail(); not persisted
    @Transient
    private Integer topicCount;
    @Transient
    private Integer brokerCount;
    @Transient
    private Integer consumerCount;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/entity/User.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.entity;

import lombok.Data;

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Table;

/**
 * Persistent account used to log into kafka-map itself.
 *
 * @author dushixiang
 * @date 2021/6/10 1:11 PM
 */
@Table
@Entity
@Data
public class User {
    @Column(length = 36)
    @Id
    private String id;
    @Column(length = 100, nullable = false, unique = true)
    private String username;
    // stored hashed; length 300 leaves room for the hash encoding
    @Column(length = 300)
    private String password;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/interceptor/AuthInterceptor.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.interceptor;

import cn.typesafe.km.entity.User;
import com.github.benmanes.caffeine.cache.Cache;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
import org.springframework.web.servlet.HandlerInterceptor;

import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import java.io.PrintWriter;

/**
 * @author dushixiang
 * @date 2021/6/12 1:30 PM
 */
@Order(1)
@Component
public class AuthInterceptor implements HandlerInterceptor {

@Resource 24 | private Cache tokenManager; 25 | 26 | @Override 27 | public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { 28 | 29 | if ("OPTIONS".equalsIgnoreCase(request.getMethod())) { 30 | return true; 31 | } 32 | 33 | String tokenHeader = request.getHeader("X-Auth-Token"); 34 | if (!StringUtils.hasText(tokenHeader)) { 35 | tokenHeader = request.getParameter("X-Auth-Token"); 36 | } 37 | 38 | if (StringUtils.hasText(tokenHeader)) { 39 | User user = tokenManager.getIfPresent(tokenHeader); 40 | if (user != null) { 41 | return true; 42 | } 43 | } 44 | 45 | try (PrintWriter writer = response.getWriter()) { 46 | response.addHeader("Content-Type", "application/json"); 47 | response.setStatus(401); 48 | writer.write("{\"code\":\"401\",\"message\":\"Unauthorized\"}"); 49 | } 50 | return false; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/repository/ClusterRepository.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.repository; 2 | 3 | import cn.typesafe.km.entity.Cluster; 4 | import org.springframework.data.jpa.repository.JpaRepository; 5 | 6 | /** 7 | * @author dushixiang 8 | * @date 2021/3/27 11:14 上午 9 | */ 10 | public interface ClusterRepository extends JpaRepository { 11 | 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/repository/UserRepository.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.repository; 2 | 3 | import cn.typesafe.km.entity.User; 4 | import org.springframework.data.jpa.repository.JpaRepository; 5 | 6 | import java.util.Optional; 7 | 8 | /** 9 | * @author dushixiang 10 | * @date 2021/6/10 1:13 下午 11 | */ 12 | public interface UserRepository extends JpaRepository { 13 | 14 | Optional 
findByUsername(String username); 15 | 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/BrokerService.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service; 2 | 3 | import cn.typesafe.km.service.dto.Broker; 4 | import cn.typesafe.km.service.dto.ServerConfig; 5 | import jakarta.annotation.Resource; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.kafka.clients.admin.*; 8 | import org.apache.kafka.common.Node; 9 | import org.apache.kafka.common.TopicPartitionInfo; 10 | import org.apache.kafka.common.config.ConfigResource; 11 | import org.springframework.stereotype.Service; 12 | import org.springframework.util.CollectionUtils; 13 | 14 | import java.util.*; 15 | import java.util.concurrent.ExecutionException; 16 | import java.util.stream.Collectors; 17 | 18 | /** 19 | * @author dushixiang 20 | * @date 2021/4/2 20:45 21 | */ 22 | @Slf4j 23 | @Service 24 | public class BrokerService { 25 | 26 | @Resource 27 | private ClusterService clusterService; 28 | 29 | public int countBroker(String clusterId) throws ExecutionException, InterruptedException { 30 | AdminClient adminClient = clusterService.getAdminClient(clusterId); 31 | DescribeClusterResult describeClusterResult = adminClient.describeCluster(); 32 | return describeClusterResult.nodes().get().size(); 33 | } 34 | 35 | public List brokers(Set topicNames, String clusterId) throws ExecutionException, InterruptedException { 36 | 37 | AdminClient adminClient = clusterService.getAdminClient(clusterId); 38 | DescribeClusterResult describeClusterResult = adminClient.describeCluster(); 39 | Collection clusterDetails = describeClusterResult.nodes().get(); 40 | List brokers = new ArrayList<>(clusterDetails.size()); 41 | for (Node node : clusterDetails) { 42 | Broker broker = new Broker(); 43 | broker.setId(node.id()); 44 | broker.setHost(node.host()); 45 | 
broker.setPort(node.port()); 46 | brokers.add(broker); 47 | } 48 | 49 | 50 | if (CollectionUtils.isEmpty(topicNames)) { 51 | topicNames = adminClient.listTopics().names().get(); 52 | } 53 | Map stringTopicDescriptionMap = adminClient.describeTopics(topicNames).all().get(); 54 | for (TopicDescription topicDescription : stringTopicDescriptionMap.values()) { 55 | List partitions = topicDescription.partitions(); 56 | for (TopicPartitionInfo partitionInfo : partitions) { 57 | Node leader = partitionInfo.leader(); 58 | for (Broker broker : brokers) { 59 | if (leader != null && broker.getId() == leader.id()) { 60 | broker.getLeaderPartitions().add(partitionInfo.partition()); 61 | break; 62 | } 63 | } 64 | 65 | List replicas = partitionInfo.replicas(); 66 | for (Broker broker : brokers) { 67 | for (Node replica : replicas) { 68 | if (broker.getId() == replica.id()) { 69 | broker.getFollowerPartitions().add(partitionInfo.partition()); 70 | break; 71 | } 72 | } 73 | } 74 | } 75 | } 76 | 77 | if (!CollectionUtils.isEmpty(topicNames)) { 78 | // 使用topic过滤时只展示相关的broker 79 | brokers = brokers.stream() 80 | .filter(broker -> broker.getFollowerPartitions().size() > 0 || broker.getLeaderPartitions().size() > 0) 81 | .collect(Collectors.toList()); 82 | } 83 | 84 | return brokers; 85 | } 86 | 87 | public List getConfigs(String id, String clusterId) throws ExecutionException, InterruptedException { 88 | AdminClient adminClient = clusterService.getAdminClient(clusterId); 89 | ConfigResource configResource = new ConfigResource(ConfigResource.Type.BROKER, id); 90 | 91 | Config config = adminClient.describeConfigs(Collections.singletonList(configResource)).all().get().get(configResource); 92 | 93 | return config.entries() 94 | .stream() 95 | .map(entry -> { 96 | ServerConfig topicConfig = new ServerConfig(); 97 | topicConfig.setName(entry.name()); 98 | topicConfig.setValue(entry.value()); 99 | topicConfig.set_default(entry.isDefault()); 100 | topicConfig.setReadonly(entry.isReadOnly()); 
101 | topicConfig.setSensitive(entry.isSensitive()); 102 | return topicConfig; 103 | }) 104 | .collect(Collectors.toList()); 105 | } 106 | 107 | public void setConfigs(String id, String clusterId, Map configs) throws ExecutionException, InterruptedException { 108 | AdminClient adminClient = clusterService.getAdminClient(clusterId); 109 | ConfigResource configResource = new ConfigResource(ConfigResource.Type.BROKER, id); 110 | 111 | List alterConfigOps = configs.entrySet() 112 | .stream() 113 | .map(e -> { 114 | String key = e.getKey(); 115 | String value = e.getValue(); 116 | ConfigEntry configEntry = new ConfigEntry(key, value); 117 | return new AlterConfigOp(configEntry, AlterConfigOp.OpType.SET); 118 | }) 119 | .collect(Collectors.toList()); 120 | 121 | Map> data = new HashMap<>(); 122 | data.put(configResource, alterConfigOps); 123 | 124 | adminClient.incrementalAlterConfigs(data).all().get(); 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/MessageService.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service; 2 | 3 | import cn.typesafe.km.entity.Cluster; 4 | import cn.typesafe.km.service.dto.ConsumerMessage; 5 | import cn.typesafe.km.service.dto.LiveMessage; 6 | import cn.typesafe.km.service.dto.TopicData; 7 | import lombok.SneakyThrows; 8 | import org.apache.kafka.clients.consumer.ConsumerRecord; 9 | import org.apache.kafka.clients.consumer.KafkaConsumer; 10 | import org.apache.kafka.clients.producer.KafkaProducer; 11 | import org.apache.kafka.clients.producer.ProducerRecord; 12 | import org.apache.kafka.clients.producer.RecordMetadata; 13 | import org.apache.kafka.common.TopicPartition; 14 | import org.springframework.http.codec.ServerSentEvent; 15 | import org.springframework.stereotype.Service; 16 | import org.springframework.util.CollectionUtils; 17 | import org.springframework.util.StringUtils; 18 
| import reactor.core.publisher.Flux; 19 | 20 | import jakarta.annotation.Resource; 21 | import java.time.Duration; 22 | import java.util.ArrayList; 23 | import java.util.Collections; 24 | import java.util.List; 25 | import java.util.stream.Collectors; 26 | 27 | /** 28 | * @author dushixiang 29 | * @date 2021/4/10 0:59 30 | */ 31 | @Service 32 | public class MessageService { 33 | 34 | @Resource 35 | private ClusterService clusterService; 36 | 37 | public List data(String clusterId, String topicName, Integer tPartition, Long startOffset, int count, String keyFilter, String valueFilter) { 38 | try (KafkaConsumer kafkaConsumer = clusterService.createConsumer(clusterId)) { 39 | 40 | TopicPartition topicPartition = new TopicPartition(topicName, tPartition); 41 | List topicPartitions = Collections.singletonList(topicPartition); 42 | kafkaConsumer.assign(topicPartitions); 43 | 44 | Long beginningOffset = kafkaConsumer.beginningOffsets(topicPartitions).get(topicPartition); 45 | if (startOffset < beginningOffset) { 46 | startOffset = beginningOffset; 47 | } 48 | kafkaConsumer.seek(topicPartition, startOffset); 49 | 50 | Long endOffset = kafkaConsumer.endOffsets(topicPartitions).get(topicPartition); 51 | long currentOffset = startOffset - 1; 52 | 53 | List> records = new ArrayList<>(count); 54 | 55 | int emptyPoll = 0; 56 | while (records.size() < count && currentOffset < endOffset) { 57 | List> polled = kafkaConsumer.poll(Duration.ofMillis(200)).records(topicPartition); 58 | 59 | if (!CollectionUtils.isEmpty(polled)) { 60 | 61 | for (ConsumerRecord consumerRecord : polled) { 62 | if (StringUtils.hasText(keyFilter)) { 63 | String key = consumerRecord.key(); 64 | if (StringUtils.hasText(key) && key.toLowerCase().contains(keyFilter.toLowerCase())) { 65 | records.add(consumerRecord); 66 | } 67 | continue; 68 | } 69 | 70 | if (StringUtils.hasText(valueFilter)) { 71 | String value = consumerRecord.value(); 72 | if (StringUtils.hasText(value) && 
value.toLowerCase().contains(valueFilter.toLowerCase())) { 73 | records.add(consumerRecord); 74 | } 75 | continue; 76 | } 77 | records.add(consumerRecord); 78 | } 79 | currentOffset = polled.get(polled.size() - 1).offset(); 80 | emptyPoll = 0; 81 | } else if (++emptyPoll == 3) { 82 | break; 83 | } 84 | } 85 | 86 | return records 87 | .subList(0, Math.min(count, records.size())) 88 | .stream() 89 | .map(record -> { 90 | int partition = record.partition(); 91 | long timestamp = record.timestamp(); 92 | String key = record.key(); 93 | String value = record.value(); 94 | long offset = record.offset(); 95 | 96 | ConsumerMessage consumerMessage = new ConsumerMessage(); 97 | consumerMessage.setTopic(topicName); 98 | consumerMessage.setOffset(offset); 99 | consumerMessage.setPartition(partition); 100 | consumerMessage.setTimestamp(timestamp); 101 | consumerMessage.setKey(key); 102 | consumerMessage.setValue(value); 103 | 104 | return consumerMessage; 105 | }).collect(Collectors.toList()); 106 | } 107 | } 108 | 109 | @SneakyThrows 110 | public long sendData(String clusterId, String topic, TopicData topicData) { 111 | Cluster cluster = clusterService.findById(clusterId); 112 | KafkaProducer kafkaProducer = clusterService.createProducer(cluster.getServers(), cluster.getSecurityProtocol(), cluster.getSaslMechanism(), cluster.getAuthUsername(), cluster.getAuthPassword()); 113 | ProducerRecord producerRecord = new ProducerRecord<>(topic, topicData.getPartition(), topicData.getKey(), topicData.getValue()); 114 | RecordMetadata recordMetadata = kafkaProducer.send(producerRecord).get(); 115 | return recordMetadata.offset(); 116 | } 117 | 118 | public Flux> liveData(String clusterId, String topicName, Integer tPartition, String keyFilter, String valueFilter) { 119 | 120 | KafkaConsumer kafkaConsumer = clusterService.createConsumer(clusterId); 121 | TopicPartition topicPartition = new TopicPartition(topicName, tPartition); 122 | List topicPartitions = 
Collections.singletonList(topicPartition); 123 | kafkaConsumer.assign(topicPartitions); 124 | 125 | Long endOffset = kafkaConsumer.endOffsets(topicPartitions).get(topicPartition); 126 | kafkaConsumer.seek(topicPartition, endOffset); 127 | 128 | return Flux 129 | .interval(Duration.ofSeconds(1)) 130 | .doFinally(x -> { 131 | kafkaConsumer.close(); 132 | }) 133 | .map(sequence -> { 134 | 135 | List> records = new ArrayList<>(); 136 | 137 | List> polled = kafkaConsumer.poll(Duration.ofMillis(200)).records(topicPartition); 138 | 139 | if (!CollectionUtils.isEmpty(polled)) { 140 | 141 | for (ConsumerRecord consumerRecord : polled) { 142 | if (StringUtils.hasText(keyFilter)) { 143 | String key = consumerRecord.key(); 144 | if (StringUtils.hasText(key) && key.toLowerCase().contains(keyFilter.toLowerCase())) { 145 | records.add(consumerRecord); 146 | } 147 | continue; 148 | } 149 | 150 | if (StringUtils.hasText(valueFilter)) { 151 | String value = consumerRecord.value(); 152 | if (StringUtils.hasText(value) && value.toLowerCase().contains(valueFilter.toLowerCase())) { 153 | records.add(consumerRecord); 154 | } 155 | continue; 156 | } 157 | records.add(consumerRecord); 158 | } 159 | } 160 | 161 | List data = records 162 | .stream() 163 | .map(record -> { 164 | int partition = record.partition(); 165 | long timestamp = record.timestamp(); 166 | String key = record.key(); 167 | String value = record.value(); 168 | long offset = record.offset(); 169 | 170 | ConsumerMessage consumerMessage = new ConsumerMessage(); 171 | consumerMessage.setTopic(topicName); 172 | consumerMessage.setOffset(offset); 173 | consumerMessage.setPartition(partition); 174 | consumerMessage.setTimestamp(timestamp); 175 | consumerMessage.setKey(key); 176 | consumerMessage.setValue(value); 177 | 178 | return consumerMessage; 179 | }).collect(Collectors.toList()); 180 | 181 | Long currBeginningOffset = kafkaConsumer.beginningOffsets(topicPartitions).get(topicPartition); 182 | Long currEndOffset = 
kafkaConsumer.endOffsets(topicPartitions).get(topicPartition); 183 | 184 | LiveMessage liveMessage = new LiveMessage(); 185 | liveMessage.setBeginningOffset(currBeginningOffset); 186 | liveMessage.setEndOffset(currEndOffset); 187 | liveMessage.setPartition(tPartition); 188 | liveMessage.setMessages(data); 189 | 190 | return ServerSentEvent.builder() 191 | .id(String.valueOf(sequence)) 192 | .event("topic-message-event") 193 | .data(liveMessage) 194 | .build(); 195 | }); 196 | } 197 | } 198 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/UserService.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service; 2 | 3 | import cn.typesafe.km.controller.dto.LoginAccount; 4 | import cn.typesafe.km.controller.dto.PasswordChange; 5 | import cn.typesafe.km.entity.User; 6 | import cn.typesafe.km.repository.UserRepository; 7 | import cn.typesafe.km.util.ID; 8 | import com.github.benmanes.caffeine.cache.Cache; 9 | import lombok.extern.slf4j.Slf4j; 10 | import org.springframework.beans.factory.annotation.Value; 11 | import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; 12 | import org.springframework.security.crypto.password.PasswordEncoder; 13 | import org.springframework.stereotype.Service; 14 | import org.springframework.util.Assert; 15 | import org.springframework.util.CollectionUtils; 16 | 17 | import jakarta.annotation.Resource; 18 | import java.util.NoSuchElementException; 19 | 20 | /** 21 | * @author dushixiang 22 | * @date 2021/6/12 1:17 下午 23 | */ 24 | @Slf4j 25 | @Service 26 | public class UserService { 27 | 28 | @Value("${default.username}") 29 | private String username; 30 | @Value("${default.password}") 31 | private String password; 32 | 33 | private final PasswordEncoder passwordEncoder = new BCryptPasswordEncoder(); 34 | 35 | @Resource 36 | private UserRepository userRepository; 37 | @Resource 38 | private 
Cache<String, User> tokenManager; // token -> authenticated user (generic parameters restored; the dump stripped them)

    /**
     * Seeds the default account (from ${default.username}/${default.password})
     * when the user table is empty, e.g. on first startup.
     */
    public void initUser() {
        // count() avoids materialising every row just to test for emptiness.
        if (userRepository.count() == 0) {
            User user = new User();
            user.setId(ID.uuid());
            user.setUsername(username);
            user.setPassword(passwordEncoder.encode(password));
            userRepository.saveAndFlush(user);
            // NOTE(review): this logs the initial credential at INFO — convenient as a
            // first-run hint, but consider masking it in shared log environments.
            log.info("初始用户名和密码为: {}/{}", username, password);
        }
    }

    /**
     * Verifies the credentials and returns a fresh opaque token on success.
     *
     * @throws IllegalArgumentException when the user is unknown or the password mismatches
     */
    public String login(LoginAccount loginAccount) {
        User user = userRepository.findByUsername(loginAccount.getUsername())
                .orElseThrow(() -> new IllegalArgumentException("用户名或密码错误"));
        if (!passwordEncoder.matches(loginAccount.getPassword(), user.getPassword())) {
            // Same message for "unknown user" and "wrong password" so the response
            // does not reveal which usernames exist.
            throw new IllegalArgumentException("用户名或密码错误");
        }
        String token = ID.uuid();
        tokenManager.put(token, user);
        return token;
    }

    /** Invalidates the token so subsequent requests carrying it are rejected. */
    public void logout(String token) {
        tokenManager.invalidate(token);
    }

    /**
     * Returns the user bound to the token with the password omitted,
     * or null when the token is unknown/expired.
     *
     * FIX: the password was previously nulled on the cached instance itself,
     * mutating shared cache state; a detached copy is returned instead.
     */
    public User info(String token) {
        User cached = tokenManager.getIfPresent(token);
        if (cached == null) {
            return null;
        }
        User view = new User();
        view.setId(cached.getId());
        view.setUsername(cached.getUsername());
        return view;
    }

    /**
     * Changes the password of the token's user after verifying the old one.
     *
     * @throws IllegalArgumentException when the old password does not match
     * @throws NoSuchElementException   when the user no longer exists in the database
     */
    public void changePassword(String token, PasswordChange passwordChange) {
        User tokenUser = tokenManager.getIfPresent(token);
        Assert.isTrue(tokenUser != null, "获取用户信息失败");
        User user = userRepository.findById(tokenUser.getId())
                .orElseThrow(() -> new NoSuchElementException("获取用户信息失败"));
        if (!passwordEncoder.matches(passwordChange.getOldPassword(), user.getPassword())) {
            throw new IllegalArgumentException("原密码不正确");
        }
        user.setPassword(passwordEncoder.encode(passwordChange.getNewPassword()));
        userRepository.saveAndFlush(user);
    }
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/service/dto/Broker.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.service.dto;

import lombok.Data;

import java.util.ArrayList;
import java.util.List;

/**
 * Broker view model: node identity plus the partitions it leads/follows.
 *
 * @author dushixiang
 * @date 2021/4/2 20:41 下午
 */
@Data
public class Broker {
    private int id;
    private String host;
    private int port;
    // Partition numbers; generic parameters restored — the flattened dump stripped them.
    private List<Integer> leaderPartitions = new ArrayList<>();
    private List<Integer> followerPartitions = new ArrayList<>();
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/service/dto/ConsumerGroup.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.service.dto;

import lombok.Data;

import java.util.Set;

/**
 * Consumer group list row: id, total lag, and the topics it consumes.
 *
 * @author dushixiang
 * @date 2021/4/2 20:35 下午
 */
@Data
public class ConsumerGroup {
    private String groupId;
    private Long lag;
    private Set<String> topics;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/service/dto/ConsumerGroupDescribe.java:
--------------------------------------------------------------------------------
package cn.typesafe.km.service.dto;

import lombok.Data;

/**
 * Per-partition offset/lag detail for one consumer group member.
 *
 * @author dushixiang
 * @date 2021/4/18 10:10 下午
 */
@Data
public class ConsumerGroupDescribe {
    private String groupId;
    private String topic;
    private int partition;
    private Long currentOffset;
    private Long logBeginningOffset;
    private Long logEndOffset;
    private Long lag;
    private String consumerId;
    private String host;
    private String clientId;
}
--------------------------------------------------------------------------------
/src/main/java/cn/typesafe/km/service/dto/ConsumerGroupInfo.java:
-------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.Set; 6 | 7 | /** 8 | * @author dushixiang 9 | * @date 2021/4/11 7:31 下午 10 | */ 11 | @Data 12 | public class ConsumerGroupInfo { 13 | private String groupId; 14 | private Set topics; 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/ConsumerMessage.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/4/10 0:56 8 | */ 9 | @Data 10 | public class ConsumerMessage { 11 | private String topic; 12 | private int partition; 13 | private Long offset; 14 | private Long timestamp; 15 | private String key; 16 | private String value; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/LiveMessage.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.List; 6 | 7 | @Data 8 | public class LiveMessage { 9 | private int partition; 10 | private long beginningOffset; 11 | private long endOffset; 12 | private List messages; 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/Partition.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.List; 6 | 7 | /** 8 | * @author dushixiang 9 | * @date 2021/4/2 20:32 上午 10 | */ 11 | @Data 12 | public class Partition { 13 | private int partition; 14 | private Node leader; 15 | private List isr; 16 | private List replicas; 17 | 
private long beginningOffset; 18 | private long endOffset; 19 | 20 | @Data 21 | public static class Node { 22 | private int id; 23 | private String host; 24 | private int port; 25 | private Long logSize = 0L; 26 | 27 | public Node() { 28 | } 29 | 30 | public Node(int id, String host, int port) { 31 | this.id = id; 32 | this.host = host; 33 | this.port = port; 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/ResetOffset.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import jakarta.validation.constraints.NotBlank; 6 | 7 | /** 8 | * @author dushixiang 9 | * @date 2021/4/5 11:29 上午 10 | */ 11 | @Data 12 | public class ResetOffset { 13 | private int partition; 14 | @NotBlank 15 | private String seek; 16 | private Long offset; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/ServerConfig.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/4/24 3:20 下午 8 | */ 9 | @Data 10 | public class ServerConfig { 11 | private String name; 12 | private String value; 13 | private boolean _default; 14 | private boolean readonly; 15 | private boolean sensitive; 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/Topic.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/3/27 12:03 下午 8 | */ 9 | @Data 10 | public class Topic { 11 | private String clusterId; 12 | private String name; 13 | private Integer 
partitionsCount; 14 | private Integer consumerGroupCount; 15 | private Integer replicaCount; 16 | private Long totalLogSize; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/TopicData.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/6/7 9:56 下午 8 | */ 9 | @Data 10 | public class TopicData { 11 | private int partition; 12 | private String key; 13 | private String value; 14 | } 15 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/TopicForCreate.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import jakarta.validation.constraints.NotBlank; 6 | import jakarta.validation.constraints.Size; 7 | 8 | /** 9 | * @author dushixiang 10 | * @date 2021/4/2 20:11 下午 11 | */ 12 | @Data 13 | public class TopicForCreate { 14 | private String clusterId; 15 | /** 16 | * topic名称 17 | */ 18 | @NotBlank 19 | private String name; 20 | /** 21 | * 分区数量 22 | */ 23 | @Size(min = 1) 24 | private int numPartitions; 25 | /** 26 | * 副本数量 27 | */ 28 | @Size(min = 1) 29 | private short replicationFactor; 30 | } 31 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/TopicInfo.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | import java.util.List; 6 | 7 | /** 8 | * @author dushixiang 9 | * @date 2021/4/10 3:25 下午 10 | */ 11 | @Data 12 | public class TopicInfo { 13 | private String clusterId; 14 | private String name; 15 | private Integer replicaCount; 16 | private Long totalLogSize; 17 | 
private List partitions; 18 | 19 | @Data 20 | public static class Partition { 21 | private int partition; 22 | private long beginningOffset; 23 | private long endOffset; 24 | 25 | public Partition(int partition, long beginningOffset, long endOffset) { 26 | this.partition = partition; 27 | this.beginningOffset = beginningOffset; 28 | this.endOffset = endOffset; 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/service/dto/TopicOffset.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.service.dto; 2 | 3 | import lombok.Data; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/3/27 7:07 下午 8 | */ 9 | @Data 10 | public class TopicOffset { 11 | private String topic; 12 | private int partition; 13 | private Long consumerOffset; 14 | private Long beginningOffset; 15 | private Long endOffset; 16 | private String groupId; 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/util/ID.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.util; 2 | 3 | import java.util.UUID; 4 | 5 | /** 6 | * @author dushixiang 7 | * @date 2021/3/27 11:52 上午 8 | */ 9 | public final class ID { 10 | public static String uuid() { 11 | return UUID.randomUUID().toString(); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/util/Json.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.util; 2 | 3 | import com.fasterxml.jackson.core.type.TypeReference; 4 | import com.fasterxml.jackson.databind.DeserializationFeature; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import lombok.SneakyThrows; 7 | 8 | import java.util.List; 9 | 10 | public class Json { 11 | 12 | @SneakyThrows 
13 | public static String toJsonString(Object o) { 14 | ObjectMapper objectMapper = new ObjectMapper(); 15 | return objectMapper.writeValueAsString(o); 16 | } 17 | 18 | @SneakyThrows 19 | public static T toJavaObject(String json, Class klass) { 20 | ObjectMapper objectMapper = new ObjectMapper(); 21 | objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); 22 | return objectMapper.readValue(json, klass); 23 | } 24 | 25 | @SneakyThrows 26 | public static List toJavaArray(String json) { 27 | ObjectMapper objectMapper = new ObjectMapper(); 28 | objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); 29 | return objectMapper.readValue(json, new TypeReference<>() {}); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/util/Networks.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.util; 2 | 3 | import java.io.IOException; 4 | import java.net.InetAddress; 5 | import java.net.InetSocketAddress; 6 | import java.net.Socket; 7 | 8 | public class Networks { 9 | 10 | public static boolean isHostReachable(String host, int timeout) { 11 | try { 12 | return InetAddress.getByName(host).isReachable(timeout); 13 | } catch (IOException ignored) { 14 | 15 | } 16 | return false; 17 | } 18 | 19 | public static boolean isHostConnected(String host, int port, int timeout) { 20 | try (Socket socket = new Socket()) { 21 | socket.connect(new InetSocketAddress(host, port), timeout); 22 | // InetAddress localAddress = socket.getLocalAddress(); 23 | // String hostName = localAddress.getHostName(); 24 | return true; 25 | } catch (Exception e) { 26 | return false; 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/util/Sign.java: -------------------------------------------------------------------------------- 1 | package 
cn.typesafe.km.util; 2 | 3 | import org.springframework.util.DigestUtils; 4 | 5 | import java.util.Arrays; 6 | 7 | /** 8 | * @author dushixiang 9 | * @date 2021/3/27 14:05 10 | */ 11 | public final class Sign { 12 | 13 | public static String sign(String... input) { 14 | Arrays.sort(input); 15 | return DigestUtils.md5DigestAsHex(String.join("-", input).getBytes()); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/main/java/cn/typesafe/km/util/Web.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km.util; 2 | 3 | import org.springframework.web.context.request.RequestContextHolder; 4 | import org.springframework.web.context.request.ServletRequestAttributes; 5 | 6 | import jakarta.servlet.ServletContext; 7 | import jakarta.servlet.http.HttpServletRequest; 8 | import java.util.Objects; 9 | 10 | public class Web { 11 | 12 | public static HttpServletRequest getRequest() { 13 | return ((ServletRequestAttributes) Objects.requireNonNull(RequestContextHolder.getRequestAttributes())).getRequest(); 14 | } 15 | 16 | public static ServletContext getServletContext() { 17 | return getRequest().getServletContext(); 18 | } 19 | 20 | public static String getToken() { 21 | return getRequest().getHeader("X-Auth-Token"); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | datasource: 3 | url: jdbc:sqlite:data/kafka-map.db 4 | driver-class-name: org.sqlite.JDBC 5 | jpa: 6 | hibernate: 7 | ddl-auto: update 8 | show-sql: true 9 | properties: 10 | hibernate: 11 | dialect: org.hibernate.community.dialect.SQLiteDialect 12 | defer-datasource-initialization: true 13 | sql: 14 | init: 15 | mode: always 16 | server: 17 | port: 8080 18 | 19 | default: 20 | # 初始化安装时的账号 21 | username: admin 22 | # 初始化安装时的密码 
23 | password: admin -------------------------------------------------------------------------------- /src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 11 | 13 | 14 | 16 | 17 | 18 | 19 | ${CONSOLE_LOG_PATTERN} 20 | utf8 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | ERROR 30 | ACCEPT 31 | DENY 32 | 33 | 34 | 35 | ${LOG_HOME}/error.%d{yyyy-MM-dd}.%i.log 36 | 37 | 100MB 38 | 30 39 | 40 | 41 | 42 | [%d{yyyy-MM-dd HH:mm:ss.SSS}] [%thread] [%-5level] [%logger{40}:%line] - %msg%n 43 | 44 | 45 | 46 | 47 | 48 | 49 | INFO 50 | ACCEPT 51 | DENY 52 | 53 | 54 | 55 | ${LOG_HOME}/info.%d{yyyy-MM-dd}.%i.log 56 | 57 | 30 58 | 100MB 59 | 60 | 61 | 62 | [%d{yyyy-MM-dd HH:mm:ss.SSS}] [%thread] [%-5level] [%logger{40}:%line] - %msg%n 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | ${LOG_HOME}/total.%d{yyyy-MM-dd}.%i.log 71 | 72 | 30 73 | 100MB 74 | 75 | 76 | 77 | [%d{yyyy-MM-dd HH:mm:ss.SSS}] [%thread] [%-5level] [%logger{40}:%line] - %msg%n 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | -------------------------------------------------------------------------------- /src/test/java/cn/typesafe/km/DelayQueueTest.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km; 2 | 3 | import com.fasterxml.jackson.core.JsonProcessingException; 4 | import com.fasterxml.jackson.databind.JsonNode; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import lombok.extern.slf4j.Slf4j; 7 | import org.apache.kafka.clients.consumer.*; 8 | import org.apache.kafka.clients.producer.KafkaProducer; 9 | import org.apache.kafka.clients.producer.ProducerConfig; 10 | import org.apache.kafka.clients.producer.ProducerRecord; 11 | import org.apache.kafka.common.TopicPartition; 12 | import org.apache.kafka.common.serialization.StringDeserializer; 13 | import org.apache.kafka.common.serialization.StringSerializer; 14 | import 
org.junit.jupiter.api.BeforeEach; 15 | import org.junit.jupiter.api.Test; 16 | import org.springframework.boot.test.context.SpringBootTest; 17 | 18 | import java.time.Duration; 19 | import java.util.*; 20 | import java.util.concurrent.ExecutionException; 21 | 22 | @Slf4j 23 | @SpringBootTest 24 | public class DelayQueueTest { 25 | 26 | private KafkaConsumer consumer; 27 | private KafkaProducer producer; 28 | private volatile Boolean exit = false; 29 | private final Object lock = new Object(); 30 | private final String servers = ""; 31 | 32 | @BeforeEach 33 | void initConsumer() { 34 | Properties props = new Properties(); 35 | props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers); 36 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "d"); 37 | props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); 38 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); 39 | props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed"); 40 | props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "5000"); 41 | consumer = new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer()); 42 | } 43 | 44 | @BeforeEach 45 | void initProducer() { 46 | Properties props = new Properties(); 47 | props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers); 48 | props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 49 | props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); 50 | producer = new KafkaProducer<>(props); 51 | } 52 | 53 | @Test 54 | void testDelayQueue() throws JsonProcessingException, InterruptedException { 55 | String topic = "delay-minutes-1"; 56 | List topics = Collections.singletonList(topic); 57 | consumer.subscribe(topics); 58 | 59 | Timer timer = new Timer(); 60 | timer.schedule(new TimerTask() { 61 | @Override 62 | public void run() { 63 | synchronized (lock) { 64 | consumer.resume(consumer.paused()); 65 | lock.notify(); 66 | } 67 | } 68 | }, 0, 1000); 69 | 70 | do { 
71 | 72 | synchronized (lock) { 73 | ConsumerRecords consumerRecords = consumer.poll(Duration.ofMillis(200)); 74 | 75 | if (consumerRecords.isEmpty()) { 76 | lock.wait(); 77 | continue; 78 | } 79 | 80 | boolean timed = false; 81 | for (ConsumerRecord consumerRecord : consumerRecords) { 82 | long timestamp = consumerRecord.timestamp(); 83 | TopicPartition topicPartition = new TopicPartition(consumerRecord.topic(), consumerRecord.partition()); 84 | if (timestamp + 60 * 1000 < System.currentTimeMillis()) { 85 | 86 | String value = consumerRecord.value(); 87 | ObjectMapper objectMapper = new ObjectMapper(); 88 | JsonNode jsonNode = objectMapper.readTree(value); 89 | JsonNode jsonNodeTopic = jsonNode.get("topic"); 90 | 91 | String appTopic = null, appKey = null, appValue = null; 92 | 93 | if (jsonNodeTopic != null) { 94 | appTopic = jsonNodeTopic.asText(); 95 | } 96 | if (appTopic == null) { 97 | continue; 98 | } 99 | JsonNode jsonNodeKey = jsonNode.get("key"); 100 | if (jsonNodeKey != null) { 101 | appKey = jsonNode.asText(); 102 | } 103 | 104 | JsonNode jsonNodeValue = jsonNode.get("value"); 105 | if (jsonNodeValue != null) { 106 | appValue = jsonNodeValue.asText(); 107 | } 108 | // send to application topic 109 | ProducerRecord producerRecord = new ProducerRecord<>(appTopic, appKey, appValue); 110 | try { 111 | producer.send(producerRecord).get(); 112 | // success. 
commit message 113 | OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(consumerRecord.offset() + 1); 114 | HashMap metadataHashMap = new HashMap<>(); 115 | metadataHashMap.put(topicPartition, offsetAndMetadata); 116 | consumer.commitSync(metadataHashMap); 117 | } catch (ExecutionException e) { 118 | consumer.pause(Collections.singletonList(topicPartition)); 119 | consumer.seek(topicPartition, consumerRecord.offset()); 120 | timed = true; 121 | break; 122 | } 123 | } else { 124 | consumer.pause(Collections.singletonList(topicPartition)); 125 | consumer.seek(topicPartition, consumerRecord.offset()); 126 | timed = true; 127 | break; 128 | } 129 | } 130 | 131 | if (timed) { 132 | lock.wait(); 133 | } 134 | } 135 | } while (!exit); 136 | 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/test/java/cn/typesafe/km/LogTest.java: -------------------------------------------------------------------------------- 1 | package cn.typesafe.km; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | import org.junit.jupiter.api.Test; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | @Slf4j 9 | public class LogTest { 10 | 11 | String poc = "${jndi:ldap://127.0.0.1:80/Object}"; 12 | 13 | private static final Logger logger = LoggerFactory.getLogger(LogTest.class); 14 | 15 | static { 16 | System.setProperty("com.sun.jndi.ldap.object.trustURLCodebase", "true"); 17 | } 18 | 19 | @Test 20 | public void test0() { 21 | log.error(poc); 22 | } 23 | 24 | @Test 25 | public void test1() { 26 | logger.error(poc); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /web/.env: -------------------------------------------------------------------------------- 1 | REACT_APP_ENV=production -------------------------------------------------------------------------------- /web/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 
| logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | KafkaMap 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /web/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "preview": "vite preview" 10 | }, 11 | "dependencies": { 12 | "@ant-design/icons": "5.1.4", 13 | "@ant-design/pro-components": "2.6.8", 14 | "antd": "5.7.3", 15 | "axios": "^0.21.1", 16 | "dayjs": "^1.10.4", 17 | "qs": "^6.10.1", 18 | "react": "^18.2.0", 19 | "react-dom": "^18.2.0", 20 | "react-json-view": "^1.21.3", 21 | "react-query": "^3.39.2", 22 | "react-router-dom": "^6.4.3" 23 | }, 24 | "devDependencies": { 25 | "@types/react": "^18.0.24", 26 | "@types/react-dom": "^18.0.8", 27 | "@vitejs/plugin-react": "^2.2.0", 28 | "react-intl": "^5.17.2", 29 | "vite": "^3.2.3" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /web/src/App.css: -------------------------------------------------------------------------------- 1 | .header { 2 | padding: 0; 3 | } 4 | 5 | .ant-layout-header { 6 | padding-inline: 0 !important; 7 | } 8 | 9 | .km-header { 10 | color: white; 11 | width: 80%; 12 | margin: 0 auto; 13 | position: relative; 14 | display: flex; 15 | align-items: center; 16 | height: 100%; 17 | } 18 | 19 | .km-header-logo { 20 | font-size: 18px; 21 | font-weight: 500; 22 | cursor: pointer; 23 | color: white; 24 | } 25 | 26 | .km-header-right { 27 | text-align: right; 28 | height: 100%; 29 | margin: 0 8px; 30 | } 31 | 32 | .km-header-right-item { 33 | cursor: pointer; 34 | display: inline-flex; 35 | align-items: center; 36 | justify-content: center; 37 | font-size: 16px; 38 | vertical-align: middle; 39 | height: 100%; 40 | } 41 | 42 | .km-container { 43 | width: 80%; 44 | margin: 0 auto; 45 | } 46 | 47 | .kd-content { 48 | margin-top: 20px; 49 | padding: 16px; 50 | 
background-color: white; 51 | border-radius: 4px; 52 | } 53 | 54 | .kd-page-header { 55 | background-color: white; 56 | margin-top: 20px; 57 | border-radius: 4px; 58 | } -------------------------------------------------------------------------------- /web/src/App.jsx: -------------------------------------------------------------------------------- 1 | import React, {Component} from 'react'; 2 | import './App.css'; 3 | import 'antd/dist/reset.css'; 4 | import {Button, ConfigProvider, Dropdown, Layout, Menu, Tooltip} from 'antd'; 5 | import {Link, Outlet} from 'react-router-dom'; 6 | import {NT_PACKAGE} from "./utils/utils.jsx"; 7 | import zhCN from 'antd/locale/zh_CN'; 8 | import enUS from 'antd/locale/en_US'; 9 | import zh_CN from './locales/zh_CN'; 10 | import en_US from './locales/en_US'; 11 | import dayjs from "dayjs"; 12 | import relativeTime from "dayjs/plugin/relativeTime"; 13 | import 'dayjs/locale/zh-cn'; 14 | import {FormattedMessage, IntlProvider} from 'react-intl'; 15 | import request from "./common/request"; 16 | import { 17 | GithubOutlined 18 | } from '@ant-design/icons'; 19 | 20 | const {Header, Content, Footer} = Layout; 21 | 22 | dayjs.extend(relativeTime); 23 | 24 | class App extends Component { 25 | 26 | state = { 27 | package: NT_PACKAGE(), 28 | locale: 'en-us', 29 | info: { 30 | username: '' 31 | } 32 | } 33 | 34 | componentDidMount() { 35 | let locale = localStorage.getItem('locale'); 36 | if (!locale) { 37 | locale = 'en-us'; 38 | } 39 | dayjs.locale(locale); 40 | this.setState({ 41 | locale: locale 42 | }) 43 | this.loadUserInfo(); 44 | } 45 | 46 | setLocale = (locale) => { 47 | localStorage.setItem('locale', locale); 48 | window.location.reload(); 49 | } 50 | 51 | getAntDesignLocale = (locale) => { 52 | switch (locale) { 53 | case 'en-us': 54 | return enUS; 55 | case 'zh-cn': 56 | return zhCN; 57 | default: 58 | return undefined; 59 | } 60 | } 61 | 62 | loadUserInfo = async () => { 63 | let info = await request.get('/info'); 64 | 
this.setState({ 65 | info: info 66 | }) 67 | } 68 | 69 | logout = async () => { 70 | await request.post('/logout'); 71 | window.location.reload(); 72 | } 73 | 74 | render() { 75 | 76 | const langItems = [ 77 | { 78 | label: { 79 | this.setLocale('zh-cn'); 80 | }}> 81 | 简体中文 82 | , 83 | key: 'zh-cn' 84 | }, 85 | { 86 | label: { 87 | this.setLocale('en-us'); 88 | }}> 89 | English 90 | , 91 | key: 'en-us' 92 | }, 93 | ]; 94 | 95 | const infoItems = [ 96 | { 97 | label: 98 | 99 | , 100 | key: 'info' 101 | }, 102 | { 103 | label: { 104 | this.logout(); 105 | }}> 106 | 107 | , 108 | key: 'logout' 109 | }, 110 | ]; 111 | 112 | let messages = {} 113 | messages['en-us'] = en_US; 114 | messages['zh-cn'] = zh_CN; 115 | 116 | return ( 117 | 118 | 119 | 120 |
121 |
122 |
123 | 124 | Kafka Map 125 | 126 |
127 |
128 | 129 | 130 | {this.state.info.username} 131 | 132 | 133 |
134 |
135 | 136 | 137 | 138 | 146 | 147 | 148 | 149 |
150 | 151 |
152 | 153 | 154 | 159 | 160 | 161 |
162 |
163 |
164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 |
kafka map ©2021 Created by dushixiang 172 | Version:{this.state.package['version']}
173 |
174 |
175 |
176 | 177 | ); 178 | } 179 | 180 | } 181 | 182 | export default App; 183 | -------------------------------------------------------------------------------- /web/src/assets/react.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /web/src/common/env.js: -------------------------------------------------------------------------------- 1 | function env() { 2 | if (import.meta.env.MODE === 'development') { 3 | // 本地开发环境 4 | return { 5 | server: '//127.0.0.1:8080', 6 | wsServer: 'ws://127.0.0.1:8080', 7 | prefix: '', 8 | } 9 | } else { 10 | // 生产环境 11 | let wsPrefix; 12 | if (window.location.protocol === 'https:') { 13 | wsPrefix = 'wss:' 14 | } else { 15 | wsPrefix = 'ws:' 16 | } 17 | return { 18 | server: '', 19 | wsServer: wsPrefix + window.location.host, 20 | prefix: window.location.protocol + '//' + window.location.host, 21 | } 22 | } 23 | } 24 | export default env(); 25 | 26 | export const server = env().server; 27 | export const wsServer = env().wsServer; 28 | export const prefix = env().prefix; -------------------------------------------------------------------------------- /web/src/common/request.js: -------------------------------------------------------------------------------- 1 | import axios from 'axios' 2 | import {server} from "./env"; 3 | import {message} from 'antd'; 4 | import {getHeaders} from "../utils/utils.jsx"; 5 | 6 | // 测试地址 7 | // axios.defaults.baseURL = server; 8 | // 线上地址 9 | axios.defaults.baseURL = server; 10 | 11 | const handleError = (error) => { 12 | console.log(error) 13 | if ("Network Error" === error.toString()) { 14 | message.error('网络异常'); 15 | return false; 16 | } 17 | if (error.response !== undefined && error.response.status === 401) { 18 | window.location.href = '#/login'; 19 | console.log('') 20 | return false; 21 | } 22 | 23 | if (error.response !== undefined) { 24 | let data = error.response.data; 25 | 
message.error(`${data.message}`, 10); 26 | return false; 27 | } 28 | return true; 29 | }; 30 | 31 | const handleResult = (result) => { 32 | if (result['code'] === 401) { 33 | window.location.href = '#/login'; 34 | return false; 35 | } 36 | return true; 37 | } 38 | 39 | const request = { 40 | 41 | get: function (url) { 42 | const headers = getHeaders(); 43 | 44 | return new Promise((resolve, reject) => { 45 | axios.get(url, {headers: headers}) 46 | .then((response) => { 47 | if (!handleResult(response.data)) { 48 | return; 49 | } 50 | resolve(response.data); 51 | }) 52 | .catch((error) => { 53 | handleError(error); 54 | reject(error); 55 | }); 56 | }) 57 | }, 58 | 59 | post: function (url, params) { 60 | 61 | const headers = getHeaders(); 62 | 63 | return new Promise((resolve, reject) => { 64 | axios.post(url, params, {headers: headers}) 65 | .then((response) => { 66 | if (!handleResult(response.data)) { 67 | return; 68 | } 69 | resolve(response.data); 70 | }) 71 | .catch((error) => { 72 | handleError(error); 73 | reject(error); 74 | }); 75 | }) 76 | }, 77 | 78 | put: function (url, params) { 79 | 80 | const headers = getHeaders(); 81 | 82 | return new Promise((resolve, reject) => { 83 | axios.put(url, params, {headers: headers}) 84 | .then((response) => { 85 | if (!handleResult(response.data)) { 86 | return; 87 | } 88 | resolve(response.data); 89 | }) 90 | .catch((error) => { 91 | handleError(error); 92 | reject(error); 93 | }); 94 | }) 95 | }, 96 | 97 | delete: function (url) { 98 | const headers = getHeaders(); 99 | 100 | return new Promise((resolve, reject) => { 101 | axios.delete(url, {headers: headers}) 102 | .then((response) => { 103 | if (!handleResult(response.data)) { 104 | return; 105 | } 106 | resolve(response.data); 107 | }) 108 | .catch((error) => { 109 | handleError(error); 110 | reject(error); 111 | }); 112 | }) 113 | }, 114 | 115 | patch: function (url, params) { 116 | const headers = getHeaders(); 117 | 118 | return new Promise((resolve, reject) => 
{ 119 | axios.patch(url, params, {headers: headers}) 120 | .then((response) => { 121 | if (!handleResult(response.data)) { 122 | return; 123 | } 124 | resolve(response.data); 125 | }) 126 | .catch((error) => { 127 | handleError(error); 128 | reject(error); 129 | }); 130 | }) 131 | }, 132 | }; 133 | export default request -------------------------------------------------------------------------------- /web/src/components/Broker.jsx: -------------------------------------------------------------------------------- 1 | import React, {Component} from 'react'; 2 | import request from "../common/request"; 3 | import {Button, Col, Drawer, Row, Table, Tooltip, Typography} from "antd"; 4 | import {FormattedMessage} from "react-intl"; 5 | import withRouter from "../hook/withRouter.jsx"; 6 | import {PageHeader} from "@ant-design/pro-components"; 7 | import BrokerConfig from "./BrokerConfig.jsx"; 8 | 9 | const {Title} = Typography; 10 | 11 | class Broker extends Component { 12 | 13 | state = { 14 | loading: false, 15 | items: [], 16 | clusterId: '', 17 | configOpen: false, 18 | selectBrokerId: '' 19 | } 20 | 21 | componentDidMount() { 22 | let urlParams = new URLSearchParams(this.props.location.search); 23 | let clusterId = urlParams.get('clusterId'); 24 | let clusterName = urlParams.get('clusterName'); 25 | this.setState({ 26 | clusterName: clusterName, 27 | clusterId: clusterId, 28 | }) 29 | this.loadItems(clusterId); 30 | } 31 | 32 | async loadItems(clusterId) { 33 | this.setState({ 34 | loading: true 35 | }) 36 | let items = await request.get(`/brokers?clusterId=${clusterId}`); 37 | this.setState({ 38 | items: items, 39 | loading: false 40 | }) 41 | } 42 | 43 | render() { 44 | 45 | const columns = [{ 46 | title: 'ID', 47 | dataIndex: 'id', 48 | key: 'id' 49 | }, { 50 | title: 'Host', 51 | dataIndex: 'host', 52 | key: 'host', 53 | defaultSortOrder: 'ascend', 54 | }, { 55 | title: 'Port', 56 | dataIndex: 'port', 57 | key: 'port', 58 | }, { 59 | title: 'Partitions as Leader', 
60 | dataIndex: 'leaderPartitions', 61 | key: 'leaderPartitions', 62 | render: (leaderPartitions, record, index) => { 63 | return 64 | 65 | ; 66 | } 67 | }, { 68 | title: 'Partitions as Follower', 69 | dataIndex: 'followerPartitions', 70 | key: 'followerPartitions', 71 | render: (followerPartitions, record, index) => { 72 | return 73 | 74 | ; 75 | } 76 | },{ 77 | title: , 78 | key: 'action', 79 | render: (text, record, index) => { 80 | return ( 81 |
82 | 90 |
91 | ) 92 | }, 93 | }]; 94 | 95 | return ( 96 |
97 |
98 | { 101 | this.props.navigate(-1); 102 | }} 103 | title={this.state.clusterName} 104 | subTitle={} 105 | /> 106 |
107 | 108 |
109 |
110 | 111 | 112 | Broker 113 | 114 | 115 | 116 | 117 | 118 |
119 | 129 | }} 130 | /> 131 | 132 | 133 | } 134 | placement="right" 135 | width={window.innerWidth * 0.8} 136 | destroyOnClose={true} 137 | onClose={() => { 138 | this.setState({ 139 | configOpen: false, 140 | selectBrokerId: '' 141 | }) 142 | }} 143 | open={this.state.configOpen} 144 | > 145 | 149 | 150 | 151 | ); 152 | } 153 | } 154 | 155 | export default withRouter(Broker); -------------------------------------------------------------------------------- /web/src/components/BrokerConfig.jsx: -------------------------------------------------------------------------------- 1 | import React, {useState} from 'react'; 2 | import {Form, Space, Table, Tag, Tooltip, Typography} from "antd"; 3 | import request from "../common/request.js"; 4 | import {FormattedMessage} from "react-intl"; 5 | import {Input} from "antd/lib"; 6 | import {useQuery} from "react-query"; 7 | 8 | const EditableCell = ({ 9 | editing, dataIndex, title, inputType, record, index, children, ...restProps 10 | }) => { 11 | const inputNode = ; 12 | return (); 25 | }; 26 | 27 | const BrokerConfig = ({clusterId, brokerId}) => { 28 | const [form] = Form.useForm(); 29 | const [editingName, setEditingName] = useState(''); 30 | 31 | const isEditing = (record) => record.name === editingName; 32 | 33 | let queryBrokerConfig = useQuery('get-broker-config', 34 | () => { 35 | return request.get(`/brokers/${brokerId}/configs?clusterId=${clusterId}`) 36 | }, { 37 | enabled: brokerId !== undefined && brokerId !== '' 38 | }); 39 | 40 | const edit = (record) => { 41 | form.setFieldsValue({ 42 | ...record, 43 | }); 44 | setEditingName(record.name); 45 | }; 46 | 47 | const cancel = () => { 48 | setEditingName(''); 49 | }; 50 | 51 | const save = async (key) => { 52 | try { 53 | const row = await form.validateFields(); 54 | let params = {}; 55 | params[key] = row['value']; 56 | 57 | await request.put(`/brokers/${brokerId}/configs?clusterId=${clusterId}`, params); 58 | queryBrokerConfig.refetch(); 59 | } finally { 60 | 
setEditingName(''); 61 | } 62 | }; 63 | 64 | const columns = [ 65 | { 66 | title: 'Name', 67 | dataIndex: 'name', 68 | ellipsis: true, 69 | render: (value) => { 70 | return 71 | {value} 72 | 73 | } 74 | }, 75 | { 76 | title: 'Value', 77 | dataIndex: 'value', 78 | editable: true, 79 | ellipsis: true, 80 | render: (value, record) => { 81 | if (record['_default'] === true) { 82 | return <> 83 | default 84 | 85 | {value} 86 | 87 | ; 88 | } else { 89 | return 90 | {value} 91 | ; 92 | } 93 | } 94 | }, 95 | { 96 | title: , dataIndex: 'operation', render: (_, record) => { 97 | const editable = isEditing(record); 98 | if (editable) { 99 | return 100 | save(record.name)}> 101 | 102 | 103 | 104 | 105 | 106 | 107 | } else { 108 | return edit(record)}> 110 | 111 | 112 | } 113 | }, 114 | },]; 115 | const mergedColumns = columns.map((col) => { 116 | if (!col.editable) { 117 | return col; 118 | } 119 | 120 | return { 121 | ...col, onCell: (record) => ({ 122 | record, 123 | inputType: 'text', 124 | dataIndex: col.dataIndex, 125 | title: col.title, 126 | editing: isEditing(record), 127 | }), 128 | }; 129 | }); 130 | 131 | return (
132 |
133 |
13 | {editing ? ( 22 | {inputNode} 23 | ) : (children)} 24 |
147 | 148 | ); 149 | }; 150 | 151 | export default BrokerConfig; -------------------------------------------------------------------------------- /web/src/components/ClusterModal.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import {Form, Input, Modal, Select} from "antd/lib/index"; 3 | import {FormattedMessage} from "react-intl"; 4 | 5 | const {TextArea} = Input; 6 | 7 | const ClusterModal = ({title, handleOk, handleCancel, confirmLoading, model}) => { 8 | 9 | const [form] = Form.useForm(); 10 | 11 | const formItemLayout = { 12 | labelCol: {span: 6}, 13 | wrapperCol: {span: 14}, 14 | }; 15 | 16 | if (model === null || model === undefined) { 17 | model = {} 18 | } 19 | 20 | return ( 21 | 22 | { 27 | form 28 | .validateFields() 29 | .then(values => { 30 | let success = handleOk(values); 31 | if (success === true) { 32 | form.resetFields(); 33 | } 34 | }); 35 | }} 36 | onCancel={handleCancel} 37 | confirmLoading={confirmLoading} 38 | okText={} 39 | cancelText={} 40 | > 41 | 42 |
43 | 44 | 45 | 46 | 47 | } name='name' 48 | rules={[{required: true, message: 'Please enter name'}]}> 49 | 50 | 51 | 52 | { 53 | model['id'] === undefined ? 54 | } name='servers' 55 | rules={[{required: true, message: 'Please enter broker servers'}]}> 56 |