├── .gitignore ├── README.md ├── pom.xml └── src └── main ├── java └── com │ └── github │ └── codingdebugallday │ └── client │ ├── api │ ├── controller │ │ └── v1 │ │ │ ├── ClusterController.java │ │ │ └── UploadJarController.java │ └── dto │ │ ├── ClusterDTO.java │ │ ├── GroupDTO.java │ │ ├── NodeDTO.java │ │ ├── NodeSettingInfo.java │ │ └── UploadJarDTO.java │ ├── app │ └── service │ │ ├── ApiClient.java │ │ ├── ClusterService.java │ │ ├── FlinkApi.java │ │ ├── FlinkCommonService.java │ │ ├── UploadJarService.java │ │ ├── impl │ │ ├── ClusterServiceImpl.java │ │ └── UploadJarServiceImpl.java │ │ ├── jars │ │ └── FlinkJarService.java │ │ ├── jm │ │ └── FlinkJobManagerService.java │ │ ├── jobs │ │ └── FlinkJobService.java │ │ ├── overview │ │ └── ClusterOverviewService.java │ │ └── tm │ │ └── FlinkTaskManagerService.java │ ├── domain │ ├── entity │ │ ├── ApiResult.java │ │ ├── Cluster.java │ │ ├── Node.java │ │ ├── UploadJar.java │ │ ├── jars │ │ │ ├── JarRunRequest.java │ │ │ ├── JarRunResponseBody.java │ │ │ └── JarUploadResponseBody.java │ │ ├── jobs │ │ │ ├── FlinkApiErrorResponse.java │ │ │ ├── JobDetailsInfo.java │ │ │ ├── JobExceptionsInfo.java │ │ │ ├── JobIdsWithStatusOverview.java │ │ │ ├── MultipleJobsDetails.java │ │ │ ├── SavepointInfo.java │ │ │ ├── SavepointTriggerRequestBody.java │ │ │ ├── TriggerResponse.java │ │ │ └── TriggerResponseWithSavepoint.java │ │ ├── overview │ │ │ └── DashboardConfiguration.java │ │ └── tm │ │ │ ├── TaskManagerDetail.java │ │ │ └── TaskManagerInfo.java │ └── repository │ │ ├── ClusterRepository.java │ │ ├── NodeRepository.java │ │ └── UploadJarRepository.java │ └── infra │ ├── autoconfigure │ ├── FlinkApiAutoConfiguration.java │ ├── GlobalExceptionHandlerAutoConfiguration.java │ └── MybatisPlusConfig.java │ ├── constants │ └── FlinkApiConstant.java │ ├── context │ └── FlinkApiContext.java │ ├── converter │ ├── ClusterConvertMapper.java │ ├── ClusterConvertUtil.java │ ├── NodeConvertMapper.java │ └── UploadJarConvertMapper.java │ ├── enums │ └── NodeTypeEnum.java │ ├── exceptions │ ├── FlinkApiCommonException.java │ ├── FlinkCommonException.java │ ├── GlobalExceptionHandler.java │ ├── RestTemplateErrorHandler.java │ └── package-info.java │ ├── handlers │ └── FutureTaskWorker.java │ ├── mapper │ ├── ClusterMapper.java │ ├── NodeMapper.java │ └── UploadJarMapper.java │ ├── repository │ └── impl │ │ ├── ClusterRepositoryImpl.java │ │ ├── NodeRepositoryImpl.java │ │ └── UploadJarRepositoryImpl.java │ └── utils │ ├── ApplicationContextHelper.java │ ├── FlinkApiUtil.java │ ├── FlinkCommonUtil.java │ ├── JSON.java │ ├── Preconditions.java │ ├── RestTemplateUtil.java │ ├── RetryUtil.java │ └── ThreadPoolUtil.java └── resources ├── META-INF └── spring.factories ├── application.yml ├── mapper ├── ClusterMapper.xml ├── NodeMapper.xml └── UploadJarMapper.xml └── sql └── flink_explore_1.0.2.sql /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Java template 3 | # Compiled class file 4 | *.class 5 | 6 | # Log file 7 | *.log 8 | 9 | # BlueJ files 10 | *.ctxt 11 | 12 | # Mobile Tools for Java (J2ME) 13 | .mtj.tmp/ 14 | 15 | # Package Files # 16 | *.jar 17 | *.war 18 | *.nar 19 | *.ear 20 | *.zip 21 | *.tar.gz 22 | *.rar 23 | 24 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 25 | hs_err_pid* 26 | 27 | ### Maven template 28 | target/ 29 | pom.xml.tag 30 | pom.xml.releaseBackup 31 | pom.xml.versionsBackup 32 | 
pom.xml.next 33 | release.properties 34 | dependency-reduced-pom.xml 35 | buildNumber.properties 36 | .mvn/timing.properties 37 | # https://github.com/takari/maven-wrapper#usage-without-binary-jar 38 | .mvn/wrapper/maven-wrapper.jar 39 | 40 | ### Gradle template 41 | .gradle 42 | /build/ 43 | 44 | # Ignore Gradle GUI config 45 | gradle-app.setting 46 | 47 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 48 | !gradle-wrapper.jar 49 | 50 | # Cache of project 51 | .gradletasknamecache 52 | 53 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 54 | # gradle/wrapper/gradle-wrapper.properties 55 | 56 | ### SVN template 57 | .svn/ 58 | 59 | ### Eclipse template 60 | .metadata 61 | bin/ 62 | tmp/ 63 | *.tmp 64 | *.bak 65 | *.swp 66 | *~.nib 67 | local.properties 68 | .settings/ 69 | .loadpath 70 | .recommenders 71 | 72 | # External tool builders 73 | .externalToolBuilders/ 74 | 75 | # Locally stored "Eclipse launch configurations" 76 | *.launch 77 | 78 | # PyDev specific (Python IDE for Eclipse) 79 | *.pydevproject 80 | 81 | # CDT-specific (C/C++ Development Tooling) 82 | .cproject 83 | 84 | # CDT- autotools 85 | .autotools 86 | 87 | # Java annotation processor (APT) 88 | .factorypath 89 | 90 | # PDT-specific (PHP Development Tools) 91 | .buildpath 92 | 93 | # sbteclipse plugin 94 | .target 95 | 96 | # Tern plugin 97 | .tern-project 98 | 99 | # TeXlipse plugin 100 | .texlipse 101 | 102 | # STS (Spring Tool Suite) 103 | .springBeans 104 | 105 | # Code Recommenders 106 | .recommenders/ 107 | 108 | # Annotation Processing 109 | .apt_generated/ 110 | .apt_generated_test/ 111 | 112 | # Scala IDE specific (Scala & Java development for Eclipse) 113 | .cache-main 114 | .scala_dependencies 115 | .worksheet 116 | 117 | ### Redis template 118 | # Ignore redis binary dump (dump.rdb) files 119 | 120 | *.rdb 121 | 122 | ### macOS template 123 | # General 124 | .DS_Store 125 | .AppleDouble 126 | .LSOverride 127 | 128 | # Icon must end with two \r 129 | Icon 130 | 131 | # Thumbnails 132 | ._* 133 | 134 | # Files that might appear in the root of a volume 135 | .DocumentRevisions-V100 136 | .fseventsd 137 | .Spotlight-V100 138 | .TemporaryItems 139 | .Trashes 140 | .VolumeIcon.icns 141 | .com.apple.timemachine.donotpresent 142 | 143 | # Directories potentially created on remote AFP share 144 | .AppleDB 145 | .AppleDesktop 146 | Network Trash Folder 147 | Temporary Items 148 | .apdisk 149 | 150 | ### Example user template template 151 | ### Example user template 152 | 153 | # IntelliJ project files 154 | .idea 155 | *.iml 156 | out 157 | gen 158 | ### Windows template 159 | # Windows thumbnail cache files 160 | Thumbs.db 161 | Thumbs.db:encryptable 162 | ehthumbs.db 163 | ehthumbs_vista.db 164 | 165 | # Dump file 166 | *.stackdump 167 | 168 | # Folder config file 169 | [Dd]esktop.ini 170 | 171 | # Recycle Bin used on file shares 172 | $RECYCLE.BIN/ 173 | 174 | # Windows Installer files 175 | *.cab 176 | *.msi 177 | *.msix 178 | *.msm 179 | *.msp 180 | 181 | # Windows shortcuts 182 | *.lnk 183 | 184 | ### JetBrains template 185 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 186 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 187 | 188 | # User-specific stuff 189 | .idea/**/workspace.xml 190 | .idea/**/tasks.xml 191 | .idea/**/usage.statistics.xml 192 | .idea/**/dictionaries 193 | .idea/**/shelf 194 | 195 | # Generated files 196 | .idea/**/contentModel.xml 197 | 198 | # Sensitive 
or high-churn files 199 | .idea/**/dataSources/ 200 | .idea/**/dataSources.ids 201 | .idea/**/dataSources.local.xml 202 | .idea/**/sqlDataSources.xml 203 | .idea/**/dynamic.xml 204 | .idea/**/uiDesigner.xml 205 | .idea/**/dbnavigator.xml 206 | 207 | # Gradle 208 | .idea/**/gradle.xml 209 | .idea/**/libraries 210 | 211 | # Gradle and Maven with auto-import 212 | # When using Gradle or Maven with auto-import, you should exclude module files, 213 | # since they will be recreated, and may cause churn. Uncomment if using 214 | # auto-import. 215 | # .idea/artifacts 216 | # .idea/compiler.xml 217 | # .idea/modules.xml 218 | # .idea/*.iml 219 | # .idea/modules 220 | # *.iml 221 | # *.ipr 222 | 223 | # CMake 224 | cmake-build-*/ 225 | 226 | # Mongo Explorer plugin 227 | .idea/**/mongoSettings.xml 228 | 229 | # File-based project format 230 | *.iws 231 | 232 | # IntelliJ 233 | out/ 234 | 235 | # mpeltonen/sbt-idea plugin 236 | .idea_modules/ 237 | 238 | # JIRA plugin 239 | atlassian-ide-plugin.xml 240 | 241 | # Cursive Clojure plugin 242 | .idea/replstate.xml 243 | 244 | # Crashlytics plugin (for Android Studio and IntelliJ) 245 | com_crashlytics_export_strings.xml 246 | crashlytics.properties 247 | crashlytics-build.properties 248 | fabric.properties 249 | 250 | # Editor-based Rest Client 251 | .idea/httpRequests 252 | 253 | # Android studio 3.1+ serialized cache file 254 | .idea/caches/build_file_checksums.ser 255 | 256 | src/main/java/com/github/codingdebugallday/client/*Application.java 257 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # flink-api-spring-boot-starter 2 | 3 | - ### A Spring Boot starter built on the Flink REST API: upload/run jars and a range of other operations 4 | 5 | # Quick Start 6 | 7 | 1. Run ```src/main/resources/sql/flink_explore_1.0.2.sql``` 8 | 2. The jar is published to the Maven Central repository, so you can simply declare the dependency 9 | ```xml 10 | <dependency> 11 |     <groupId>com.github.codingdebugallday</groupId> 12 |     <artifactId>flink-api-spring-boot-starter</artifactId> 13 |     <version>1.0.3.RELEASE</version> 14 | </dependency> 15 | ``` 16 | 3. Configure your Spring Boot application as in the sample ```src/main/resources/application.yml``` 17 | 4. 
Create your own Flink cluster; ```com/github/codingdebugallday/client/api/controller/v1/ClusterController.java``` already ships with many built-in endpoints, e.g. (a client-side sketch follows the JSON below) 18 | > url: http://localhost:9527/v1/{tenantId}/cluster 19 | 20 | > method: post 21 | ```json 22 | { 23 | "clusterCode": "hdspdev", 24 | "clusterDesc": "hdspdev", 25 | "jobManagerUrl": "http://hdspdev002:50100", 26 | "username": "root", 27 | "password": "m8rW2EQ0iDCcWlbH", 28 | "jobManagerStandbyUrl": "http://hdspdev001:50100", 29 | "enabledFlag": 1, 30 | "tenantId": 0, 31 | "nodeDTOList": [ 32 | { 33 | "nodeCode": "flink_hdspdev001", 34 | "nodeDesc": "hdspdev001", 35 | "nodeType":"MASTER", 36 | "settingInfo": "{\"host\":\"hdspdev001\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 37 | }, 38 | { 39 | "nodeCode": "flink_hdspdev002", 40 | "nodeDesc": "hdspdev002", 41 | "nodeType":"SLAVE", 42 | "settingInfo": "{\"host\":\"hdspdev002\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 43 | }, 44 | { 45 | "nodeCode": "flink_hdspdev003", 46 | "nodeDesc": "hdspdev003", 47 | "nodeType":"SLAVE", 48 | "settingInfo": "{\"host\":\"hdspdev003\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 49 | }, 50 | { 51 | "nodeCode": "flink_hdspdev004", 52 | "nodeDesc": "hdspdev004", 53 | "nodeType":"SLAVE", 54 | "settingInfo": "{\"host\":\"hdspdev004\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 55 | }, 56 | { 57 | "nodeCode": "flink_hdspdev005", 58 | "nodeDesc": "hdspdev005", 59 | "nodeType":"SLAVE", 60 | "settingInfo": "{\"host\":\"hdspdev005\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 61 | }, 62 | { 63 | "nodeCode": "flink_hdspdev006", 64 | "nodeDesc": "hdspdev006", 65 | "nodeType":"SLAVE", 66 | "settingInfo": "{\"host\":\"hdspdev006\",\"username\":\"root\",\"password\":\"m8rW2EQ0iDCcWlbH\"}" 67 | } 68 | ] 69 | } 70 | ```
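   A hypothetical client-side sketch (not part of the starter) of calling this endpoint with Spring's RestTemplate. The host/port (`localhost:9527`) and `tenantId` 0 are the example values from this README; the class name is made up and the JSON body is shortened to a single node:
```java
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class CreateClusterExample {

    public static void main(String[] args) {
        // Request body as shown above, shortened to one node for brevity
        String clusterJson = "{"
                + "\"clusterCode\": \"hdspdev\","
                + "\"jobManagerUrl\": \"http://hdspdev002:50100\","
                + "\"enabledFlag\": 1,"
                + "\"tenantId\": 0,"
                + "\"nodeDTOList\": [{"
                + "\"nodeCode\": \"flink_hdspdev001\","
                + "\"nodeType\": \"MASTER\","
                + "\"settingInfo\": \"{\\\"host\\\":\\\"hdspdev001\\\"}\""
                + "}]"
                + "}";
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        // POST /v1/{tenantId}/cluster with tenantId 0, as in the example above
        String response = new RestTemplate().postForObject(
                "http://localhost:9527/v1/0/cluster",
                new HttpEntity<>(clusterJson, headers),
                String.class);
        System.out.println(response);
    }
}
```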
71 | 5. Usage 72 | > First obtain the FlinkApiContext, then get a FlinkApi by clusterCode and tenantId; 73 | > the FlinkApi can then invoke the REST APIs, e.g. uploadJar/runJar/jobList 74 | > 75 | > Calls are automatically retried 3 times; if the JobManager master is down, the standby node is used instead 76 | 77 | ```java 78 | @Autowired 79 | private FlinkApiContext flinkApiContext; 80 | 81 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId); 82 | flinkApi.uploadJar(file); 83 | ``` 84 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 5 | 4.0.0 6 | 7 | com.github.codingdebugallday 8 | flink-api-spring-boot-starter 9 | 1.0.4.RELEASE 10 | 11 | 12 | org.springframework.boot 13 | spring-boot-starter-parent 14 | 2.2.4.RELEASE 15 | 16 | 17 | 18 | 19 | UTF-8 20 | 1.8 21 | 1.8 22 | 2.1.2 23 | 3.3.1 24 | 3.0.0 25 | 1.3.1.Final 26 | 1.18.12 27 | 1.10.0 28 | 29 | 30 | flink-api-spring-boot-starter 31 | https://github.com/codingdebugallday/flink-api-spring-boot-starter 32 | flink api for spring boot 33 | 34 | 35 | 36 | The Apache Software License, Version 2.0 37 | http://www.apache.org/licenses/LICENSE-2.0.txt 38 | repo 39 | 40 | 41 | 42 | 43 | https://github.com/codingdebugallday/flink-api-spring-boot-starter 44 | https://github.com/codingdebugallday/flink-api-spring-boot-starter.git 45 | https://github.com/codingdebugallday/flink-api-spring-boot-starter.git 46 | 47 | 48 | 49 | 50 | 51 | abigballofmud 52 | codingdebugallday@163.com 53 | 54 | Developer 55 | 56 | +8 57 | 58 | 59 | 60 | 61 | 62 | com.google.guava 63 | guava 64 | 28.2-jre 65 | 66 | 67 | org.projectlombok 68 | lombok 69 | ${lombok.version} 70 | true 71 | 72 | 73 | 74 | com.github.ulisesbocchio 75 | jasypt-spring-boot-starter 76 | 3.0.2 77 | 78 | 79 | org.springframework.boot 80 | spring-boot-autoconfigure 81 | 82 | 83 | org.springframework.boot 84 | spring-boot-configuration-processor 85 | true 86 | 87 | 88 | com.vaadin.external.google 89 | android-json 90 | 91 | 92 | 93 | 94 | org.springframework.boot 95 | spring-boot-starter-jdbc 96 | 97 | 98 | com.baomidou 99 | mybatis-plus-boot-starter 100 | ${mybatis.plus.version} 101 | 102 | 103 | org.mybatis.spring.boot 104 | mybatis-spring-boot-starter 105 | ${mybatis.spring.starter.version} 106 | 107 | 108 | com.baomidou 109 | dynamic-datasource-spring-boot-starter 110 | ${dynamic.datasource.boot.version} 111 | 112 | 113 | org.springframework.boot 114 | spring-boot-starter-aop 115 | 116 | 117 | org.springframework.boot 118 | spring-boot-starter-web 119 | 120 | 121 | org.springframework.boot 122 | spring-boot-starter-test 123 | test 124 | 125 | 126 | org.junit.vintage 127 | junit-vintage-engine 128 | 129 | 130 | 131 | 132 | 133 | mysql 134 | mysql-connector-java 135 | 5.1.48 136 | 137 | 138 | 139 | javax.persistence 140 | persistence-api 141 | 1.0.2 142 | 143 | 144 | org.mapstruct 145 | mapstruct 146 | ${org.mapstruct.version} 147 | 148 | 149 | com.alibaba 150 | transmittable-thread-local 151 | 2.11.0 152 | 153 | 154 | org.apache.httpcomponents 155 | httpclient 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | ossrh 165 | https://oss.sonatype.org/content/repositories/snapshots 166 | 167 | 168 | ossrh 169 | https://oss.sonatype.org/service/local/staging/deploy/maven2/ 170 | 171 | 172 | 173 | 174 | 175 | 176 | com.github.ulisesbocchio 177 | jasypt-maven-plugin 178 | 3.0.2 179 | 180 | 181 | 182 | org.apache.maven.plugins 183 | maven-javadoc-plugin 184 | 3.2.0 185 | 186 | UTF-8 187 | UTF-8 188 | UTF-8 189 | 190 | 191 | 192 | attach-javadocs 193 | 194 | jar 195 | 196 | 197 | -Xdoclint:none 
199 | 200 | 201 | 202 | 203 | 204 | org.apache.maven.plugins 205 | maven-source-plugin 206 | 3.2.1 207 | 208 | 209 | attach-sources 210 | 211 | jar-no-fork 212 | 213 | 214 | 215 | 216 | 217 | 218 | org.apache.maven.plugins 219 | maven-compiler-plugin 220 | 3.8.1 221 | 222 | 1.8 223 | 1.8 224 | UTF-8 225 | 226 | 227 | org.mapstruct 228 | mapstruct-processor 229 | ${org.mapstruct.version} 230 | 231 | 232 | 233 | org.projectlombok 234 | lombok 235 | ${lombok.version} 236 | 237 | 238 | 239 | 240 | 241 | 242 | org.apache.maven.plugins 243 | maven-gpg-plugin 244 | 1.6 245 | 246 | 247 | sign-artifacts 248 | verify 249 | 250 | sign 251 | 252 | 253 | 254 | 255 | 256 | 257 | org.sonatype.plugins 258 | nexus-staging-maven-plugin 259 | 1.6.8 260 | true 261 | 262 | ossrh 263 | https://oss.sonatype.org/ 264 | true 265 | 266 | 267 | 268 | 269 | org.apache.maven.plugins 270 | maven-release-plugin 271 | 2.5.3 272 | 273 | 274 | 275 | 276 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/controller/v1/ClusterController.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.controller.v1; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | import javax.validation.Valid; 6 | 7 | import com.baomidou.mybatisplus.core.metadata.IPage; 8 | import com.baomidou.mybatisplus.core.metadata.OrderItem; 9 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 10 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 11 | import com.github.codingdebugallday.client.app.service.ClusterService; 12 | import com.github.codingdebugallday.client.domain.entity.Cluster; 13 | import com.github.codingdebugallday.client.domain.entity.jobs.*; 14 | import com.github.codingdebugallday.client.domain.entity.overview.DashboardConfiguration; 15 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail; 16 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo; 17 | import com.github.codingdebugallday.client.domain.repository.ClusterRepository; 18 | import org.springframework.web.bind.annotation.*; 19 | 20 | /** 21 | *
<p> 22 | * REST endpoints for Flink cluster CRUD, plus overview, job, TaskManager and JobManager queries proxied through the Flink REST API 23 | * </p>
24 | * 25 | * @author isacc 2020/03/28 1:14 26 | * @since 1.0 27 | */ 28 | @RestController("flinkClusterController.v1") 29 | @RequestMapping("/v1/{tenantId}/cluster") 30 | public class ClusterController { 31 | 32 | private final ClusterRepository clusterRepository; 33 | private final ClusterService clusterService; 34 | 35 | public ClusterController(ClusterRepository clusterRepository, 36 | ClusterService clusterService) { 37 | this.clusterRepository = clusterRepository; 38 | this.clusterService = clusterService; 39 | } 40 | 41 | @GetMapping 42 | public IPage list(@PathVariable Long tenantId, 43 | ClusterDTO clusterDTO, 44 | Page clusterPage) { 45 | clusterDTO.setTenantId(tenantId); 46 | clusterPage.addOrder(OrderItem.desc(Cluster.FIELD_CLUSTER_ID)); 47 | return clusterRepository.pageAndSortDTO(clusterDTO, clusterPage); 48 | } 49 | 50 | @GetMapping("/{clusterId}") 51 | public ClusterDTO detail(@PathVariable Long tenantId, 52 | @PathVariable Long clusterId) { 53 | return clusterRepository.detail(tenantId, clusterId); 54 | } 55 | 56 | @PostMapping 57 | public ClusterDTO insert(@PathVariable Long tenantId, 58 | @RequestBody @Valid ClusterDTO clusterDTO) { 59 | clusterDTO.setTenantId(tenantId); 60 | return clusterService.insert(clusterDTO); 61 | } 62 | 63 | @PutMapping 64 | public ClusterDTO update(@PathVariable Long tenantId, 65 | @RequestBody @Valid ClusterDTO clusterDTO) { 66 | clusterDTO.setTenantId(tenantId); 67 | return clusterService.update(clusterDTO); 68 | } 69 | 70 | @DeleteMapping 71 | public void delete(@PathVariable Long tenantId, 72 | @RequestBody ClusterDTO clusterDTO) { 73 | clusterDTO.setTenantId(tenantId); 74 | clusterService.delete(clusterDTO); 75 | } 76 | 77 | @GetMapping("/overview/{clusterCode}/config") 78 | public DashboardConfiguration overviewConfig(@PathVariable Long tenantId, 79 | @PathVariable String clusterCode) { 80 | return clusterService.overviewConfig(tenantId, clusterCode); 81 | } 82 | 83 | @GetMapping("/overview/{clusterCode}") 84 | public Map overview(@PathVariable Long tenantId, 85 | @PathVariable String clusterCode) { 86 | return clusterService.overview(tenantId, clusterCode); 87 | } 88 | 89 | @GetMapping("/job/{clusterCode}/overview") 90 | public JobIdsWithStatusOverview jobList(@PathVariable Long tenantId, 91 | @PathVariable String clusterCode) { 92 | return clusterService.jobList(tenantId, clusterCode); 93 | } 94 | 95 | @GetMapping("/job/{clusterCode}/details") 96 | public MultipleJobsDetails jobsDetails(@PathVariable Long tenantId, 97 | @PathVariable String clusterCode) { 98 | return clusterService.jobsDetails(tenantId, clusterCode); 99 | } 100 | 101 | @GetMapping("/job/{clusterCode}/detail") 102 | public JobDetailsInfo jobDetail(@PathVariable Long tenantId, 103 | @PathVariable String clusterCode, 104 | String jobId) { 105 | return clusterService.jobDetail(tenantId, clusterCode, jobId); 106 | } 107 | 108 | @GetMapping("/job/{clusterCode}/yarn-cancel") 109 | public FlinkApiErrorResponse jobYarnCancel(@PathVariable Long tenantId, 110 | @PathVariable String clusterCode, 111 | String jobId) { 112 | return clusterService.jobYarnCancel(tenantId, clusterCode, jobId); 113 | } 114 | 115 | @PostMapping("/job/{clusterCode}/cancel-savepoint") 116 | public TriggerResponseWithSavepoint jobCancelOptionSavepoints(@PathVariable Long tenantId, 117 | @PathVariable String clusterCode, 118 | @RequestBody SavepointTriggerRequestBody savepointTriggerRequestBody) { 119 | return clusterService.jobCancelOptionSavepoints(tenantId, clusterCode, savepointTriggerRequestBody); 120 | 
} 121 | 122 | @GetMapping("/job/{clusterCode}/terminate") 123 | public FlinkApiErrorResponse jobTerminate(@PathVariable Long tenantId, 124 | @PathVariable String clusterCode, 125 | String jobId, 126 | @RequestParam(required = false) String mode) { 127 | return clusterService.jobTerminate(tenantId, clusterCode, jobId, mode); 128 | } 129 | 130 | @GetMapping("/job/{clusterCode}/rescale") 131 | public TriggerResponse jobRescale(@PathVariable Long tenantId, 132 | @PathVariable String clusterCode, 133 | String jobId, 134 | int parallelism) { 135 | return clusterService.jobRescale(tenantId, clusterCode, jobId, parallelism); 136 | } 137 | 138 | @GetMapping("/job/{clusterCode}/exception") 139 | public JobExceptionsInfo jobException(@PathVariable Long tenantId, 140 | @PathVariable String clusterCode, 141 | String jobId, 142 | @RequestParam(required = false) String maxExceptions) { 143 | return clusterService.jobException(tenantId, clusterCode, jobId, maxExceptions); 144 | } 145 | 146 | @GetMapping("/tm-list/{clusterCode}") 147 | public TaskManagerInfo taskMangerList(@PathVariable Long tenantId, 148 | @PathVariable String clusterCode) { 149 | return clusterService.taskMangerList(tenantId, clusterCode); 150 | } 151 | 152 | @GetMapping("/tm-list/{clusterCode}/detail") 153 | public TaskManagerDetail taskManagerDetail(@PathVariable Long tenantId, 154 | @PathVariable String clusterCode, 155 | String tmId) { 156 | return clusterService.taskManagerDetail(tenantId, clusterCode, tmId); 157 | } 158 | 159 | @GetMapping("/tm-list/{clusterCode}/log") 160 | public String taskManagerLog(@PathVariable Long tenantId, 161 | @PathVariable String clusterCode, 162 | String tmId) { 163 | return clusterService.taskManagerLog(tenantId, clusterCode, tmId); 164 | } 165 | 166 | @GetMapping("/tm-list/{clusterCode}/stdout") 167 | public String taskManagerStdout(@PathVariable Long tenantId, 168 | @PathVariable String clusterCode, 169 | String tmId) { 170 | return clusterService.taskManagerStdout(tenantId, clusterCode, tmId); 171 | } 172 | 173 | @GetMapping("/jm/{clusterCode}/config") 174 | public List> jobManagerConfig(@PathVariable Long tenantId, 175 | @PathVariable String clusterCode) { 176 | return clusterService.jobManagerConfig(tenantId, clusterCode); 177 | } 178 | 179 | @GetMapping("/jm/{clusterCode}/log") 180 | public String jobManagerLog(@PathVariable Long tenantId, 181 | @PathVariable String clusterCode) { 182 | return clusterService.jobManagerLog(tenantId, clusterCode); 183 | } 184 | 185 | @GetMapping("/jm/{clusterCode}/stdout") 186 | public String jobManagerStdout(@PathVariable Long tenantId, 187 | @PathVariable String clusterCode) { 188 | return clusterService.jobManagerStdout(tenantId, clusterCode); 189 | } 190 | 191 | } 192 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/controller/v1/UploadJarController.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.controller.v1; 2 | 3 | import com.baomidou.mybatisplus.core.metadata.IPage; 4 | import com.baomidou.mybatisplus.core.metadata.OrderItem; 5 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 6 | import com.github.codingdebugallday.client.api.dto.GroupDTO; 7 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO; 8 | import com.github.codingdebugallday.client.app.service.UploadJarService; 9 | import com.github.codingdebugallday.client.domain.entity.UploadJar; 10 | 
import com.github.codingdebugallday.client.domain.repository.UploadJarRepository; 11 | import org.springframework.validation.annotation.Validated; 12 | import org.springframework.web.bind.annotation.*; 13 | import org.springframework.web.multipart.MultipartFile; 14 | 15 | /** 16 | *
<p> 17 | * REST endpoints for uploading, updating and deleting Flink job jars 18 | * </p>
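 * <p>
 * Upload and update take {@code multipart/form-data} with a JSON part named {@code uploadJarDTO}
 * and a file part named {@code file} (the file part is optional on update).
 * </p>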
19 | * 20 | * @author isacc 2020/4/7 10:12 21 | * @since 1.0 22 | */ 23 | @RestController("flinkUploadJarController.v1") 24 | @RequestMapping("/v1/{tenantId}/upload-jar") 25 | public class UploadJarController { 26 | 27 | private final UploadJarService uploadJarService; 28 | private final UploadJarRepository uploadJarRepository; 29 | 30 | public UploadJarController(UploadJarService uploadJarService, 31 | UploadJarRepository uploadJarRepository) { 32 | this.uploadJarService = uploadJarService; 33 | this.uploadJarRepository = uploadJarRepository; 34 | } 35 | 36 | @GetMapping 37 | public IPage list(@PathVariable Long tenantId, 38 | UploadJarDTO uploadJarDTO, 39 | Page uploadJarPage) { 40 | uploadJarDTO.setTenantId(tenantId); 41 | uploadJarPage.addOrder(OrderItem.desc(UploadJar.FIELD_UPLOAD_JAR_ID)); 42 | return uploadJarRepository.pageAndSortDTO(uploadJarDTO, uploadJarPage); 43 | } 44 | 45 | @GetMapping("/{id}") 46 | public UploadJarDTO detail(@PathVariable Long tenantId, 47 | @PathVariable Long id) { 48 | return uploadJarRepository.detail(tenantId, id); 49 | } 50 | 51 | @PostMapping 52 | public UploadJarDTO upload(@PathVariable Long tenantId, 53 | @RequestPart(value = "uploadJarDTO") @Validated(value = GroupDTO.Insert.class) UploadJarDTO uploadJarDTO, 54 | @RequestPart(value = "file") MultipartFile multipartFile) { 55 | uploadJarDTO.setTenantId(tenantId); 56 | return uploadJarService.upload(uploadJarDTO, multipartFile); 57 | } 58 | 59 | @PutMapping 60 | public UploadJarDTO update(@PathVariable Long tenantId, 61 | @RequestPart(value = "uploadJarDTO") @Validated(value = GroupDTO.Update.class) UploadJarDTO uploadJarDTO, 62 | @RequestPart(value = "file", required = false) MultipartFile multipartFile) { 63 | uploadJarDTO.setTenantId(tenantId); 64 | return uploadJarService.update(uploadJarDTO, multipartFile); 65 | } 66 | 67 | @DeleteMapping 68 | public void delete(@PathVariable Long tenantId, 69 | @RequestBody @Validated(value = GroupDTO.Delete.class) UploadJarDTO uploadJarDTO) { 70 | uploadJarDTO.setTenantId(tenantId); 71 | uploadJarService.delete(uploadJarDTO); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/dto/ClusterDTO.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.dto; 2 | 3 | import java.io.Serializable; 4 | import java.time.LocalDateTime; 5 | import java.util.List; 6 | import java.util.Set; 7 | import javax.persistence.Transient; 8 | import javax.validation.Valid; 9 | import javax.validation.constraints.NotBlank; 10 | 11 | import com.fasterxml.jackson.annotation.JsonInclude; 12 | import lombok.*; 13 | 14 | /** 15 | *
<p> 16 | * DTO describing a Flink cluster: its JobManager URL, optional standby URLs and node list 17 | * </p>
18 | * 19 | * @author isacc 2020/03/25 17:51 20 | * @since 1.0 21 | */ 22 | @Data 23 | @Builder 24 | @NoArgsConstructor 25 | @AllArgsConstructor 26 | @EqualsAndHashCode(callSuper = false) 27 | @JsonInclude(JsonInclude.Include.NON_NULL) 28 | public class ClusterDTO implements Serializable { 29 | 30 | private static final long serialVersionUID = 854464206375410197L; 31 | 32 | public static final String FIELD_CLUSTER_ID = "cluster_id"; 33 | 34 | private Long clusterId; 35 | 36 | @NotBlank 37 | private String clusterCode; 38 | 39 | private String clusterDesc; 40 | @NotBlank 41 | private String jobManagerUrl; 42 | 43 | /** 44 | * 若配置了Ha,这里是备用的jm,逗号分割 45 | */ 46 | private String jobManagerStandbyUrl; 47 | 48 | private Integer enabledFlag; 49 | 50 | private Long tenantId; 51 | private Long objectVersionNumber; 52 | private LocalDateTime creationDate; 53 | private Long createdBy; 54 | private LocalDateTime lastUpdateDate; 55 | private Long lastUpdatedBy; 56 | 57 | //===========other=========== 58 | 59 | @Transient 60 | private Set jobManagerStandbyUrlSet; 61 | @Transient 62 | private String host; 63 | @Transient 64 | private Integer port; 65 | @Transient 66 | @Valid 67 | private List nodeDTOList; 68 | 69 | } 70 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/dto/GroupDTO.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.dto; 2 | 3 | /** 4 | *
<p> 5 | * Bean-validation groups distinguishing insert, update and delete constraints 6 | * </p>
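 * <p>
 * Used with {@code @Validated}, e.g. {@code @Validated(GroupDTO.Insert.class)} on controller
 * parameters, so insert/update/delete can enforce different constraints on the same DTO.
 * </p>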
7 | * 8 | * @author isacc 2020/04/07 16:00 9 | * @since 1.0 10 | */ 11 | public interface GroupDTO { 12 | 13 | /** 14 | * insert分组 15 | */ 16 | interface Insert { 17 | 18 | } 19 | 20 | /** 21 | * update分组 22 | */ 23 | interface Update { 24 | 25 | } 26 | 27 | /** 28 | * delete分组 29 | */ 30 | interface Delete { 31 | 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/dto/NodeDTO.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.dto; 2 | 3 | import java.io.Serializable; 4 | import java.time.LocalDateTime; 5 | import javax.validation.constraints.NotBlank; 6 | 7 | import com.fasterxml.jackson.annotation.JsonInclude; 8 | import lombok.*; 9 | 10 | /** 11 | *
<p> 12 | * DTO describing a cluster node (master/slave) and its connection settingInfo JSON 13 | * </p>
14 | * 15 | * @author isacc 2020/03/25 17:51 16 | * @since 1.0 17 | */ 18 | @Data 19 | @Builder 20 | @NoArgsConstructor 21 | @AllArgsConstructor 22 | @EqualsAndHashCode(callSuper = false) 23 | @JsonInclude(JsonInclude.Include.NON_NULL) 24 | public class NodeDTO implements Serializable { 25 | 26 | private static final long serialVersionUID = -5632592395382234039L; 27 | 28 | public static final String FIELD_NODE_ID = "nodeId"; 29 | 30 | private Long nodeId; 31 | 32 | private String clusterCode; 33 | @NotBlank 34 | private String nodeCode; 35 | /** 36 | * master/slave 37 | */ 38 | @NotBlank 39 | private String nodeType; 40 | 41 | private String nodeDesc; 42 | 43 | /** 44 | * { 45 | * "host": "xxx" 46 | * "username": "xxx", 47 | * "password": "xxx" 48 | * } 49 | */ 50 | @NotBlank 51 | private String settingInfo; 52 | 53 | private Integer enabledFlag; 54 | 55 | private Long tenantId; 56 | private Long objectVersionNumber; 57 | private LocalDateTime creationDate; 58 | private Long createdBy; 59 | private LocalDateTime lastUpdateDate; 60 | private Long lastUpdatedBy; 61 | 62 | } 63 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/dto/NodeSettingInfo.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.dto; 2 | 3 | import com.fasterxml.jackson.annotation.JsonInclude; 4 | import com.fasterxml.jackson.annotation.JsonProperty; 5 | import lombok.*; 6 | 7 | /** 8 | *
<p> 9 | * Connection settings (host, username, password) carried in a node's settingInfo JSON 10 | * </p>
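 * <p>
 * Deserialized from a node's {@code settingInfo} JSON. On first insert, or when
 * {@code changePassword} is true, the password is encrypted with jasypt before being persisted
 * (see ClusterServiceImpl).
 * </p>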
11 | * 12 | * @author isacc 2020/04/02 15:21 13 | * @since 1.0 14 | */ 15 | @Builder 16 | @Data 17 | @EqualsAndHashCode(callSuper = false) 18 | @NoArgsConstructor 19 | @AllArgsConstructor 20 | @JsonInclude(JsonInclude.Include.NON_NULL) 21 | public class NodeSettingInfo { 22 | 23 | private String host; 24 | private String username; 25 | private String password; 26 | /** 27 | * 是否需要修改该节点的密码 28 | */ 29 | @Builder.Default 30 | @JsonProperty(access = JsonProperty.Access.WRITE_ONLY) 31 | private Boolean changePassword = false; 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/api/dto/UploadJarDTO.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.api.dto; 2 | 3 | import java.io.Serializable; 4 | import java.time.LocalDateTime; 5 | import javax.validation.constraints.NotBlank; 6 | import javax.validation.constraints.NotNull; 7 | 8 | import com.fasterxml.jackson.annotation.JsonInclude; 9 | import lombok.*; 10 | 11 | /** 12 | *
<p> 13 | * DTO describing a jar uploaded to a Flink cluster 14 | * </p>
15 | * 16 | * @author isacc 2020/4/7 10:12 17 | * @since 1.0 18 | */ 19 | @Data 20 | @Builder 21 | @NoArgsConstructor 22 | @AllArgsConstructor 23 | @EqualsAndHashCode(callSuper = false) 24 | @JsonInclude(JsonInclude.Include.NON_NULL) 25 | public class UploadJarDTO implements Serializable { 26 | 27 | 28 | private static final long serialVersionUID = 7138278817927514343L; 29 | 30 | public static final String FIELD_UPLOAD_JAR_ID = "UPLOAD_JAR_ID"; 31 | @NotNull(groups = {GroupDTO.Update.class, GroupDTO.Delete.class}) 32 | private Long uploadJarId; 33 | 34 | @NotBlank(groups = GroupDTO.Insert.class) 35 | private String jarCode; 36 | @NotBlank(groups = {GroupDTO.Insert.class, GroupDTO.Update.class, GroupDTO.Delete.class}) 37 | private String clusterCode; 38 | 39 | private String jarDesc; 40 | @NotBlank(groups = GroupDTO.Insert.class) 41 | private String version; 42 | private String entryClass; 43 | 44 | /** 45 | * Whether the jar is provided by the system (pre-uploaded by the platform for platform features) 46 | */ 47 | @NotNull(groups = GroupDTO.Insert.class) 48 | private Integer systemProvided; 49 | 50 | private String filename; 51 | @NotBlank(groups = GroupDTO.Delete.class) 52 | private String jarName; 53 | private String status; 54 | 55 | private Long tenantId; 56 | @NotNull(groups = {GroupDTO.Update.class}) 57 | private Long objectVersionNumber; 58 | private LocalDateTime creationDate; 59 | private Long createdBy; 60 | private LocalDateTime lastUpdateDate; 61 | private Long lastUpdatedBy; 62 | 63 | } 64 | 65 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/ApiClient.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service; 2 | 3 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 4 | 5 | /** 6 | *
<p> 7 | * Holds the ClusterDTO that the Flink API services use to resolve the target JobManager 8 | * </p>
9 | * 10 | * @author isacc 2020/03/26 23:11 11 | * @since 1.0 12 | */ 13 | public class ApiClient { 14 | 15 | private ClusterDTO clusterDTO; 16 | 17 | public ClusterDTO getClusterDTO() { 18 | return clusterDTO; 19 | } 20 | 21 | public void setClusterDTO(ClusterDTO clusterDTO) { 22 | this.clusterDTO = clusterDTO; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/ClusterService.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 7 | import com.github.codingdebugallday.client.domain.entity.jobs.*; 8 | import com.github.codingdebugallday.client.domain.entity.overview.DashboardConfiguration; 9 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail; 10 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo; 11 | 12 | /** 13 | *
<p> 14 | * Application service for cluster CRUD and for querying the Flink REST API 15 | * </p>
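 * <p>
 * Implementations resolve a {@code FlinkApi} from {@code FlinkApiContext} by clusterCode and
 * tenantId and delegate the Flink REST calls to it.
 * </p>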
16 | * 17 | * @author isacc 2020/03/31 12:00 18 | * @since 1.0 19 | */ 20 | public interface ClusterService { 21 | 22 | /** 23 | * 创建集群 24 | * 25 | * @param clusterDTO ClusterDTO 26 | * @return org.abigballofmud.flink.client.api.dto.ClusterDTO 27 | */ 28 | ClusterDTO insert(ClusterDTO clusterDTO); 29 | 30 | /** 31 | * 修改集群 32 | * 33 | * @param clusterDTO ClusterDTO 34 | * @return org.abigballofmud.flink.client.api.dto.ClusterDTO 35 | */ 36 | ClusterDTO update(ClusterDTO clusterDTO); 37 | 38 | /** 39 | * 删除集群 40 | * 41 | * @param clusterDTO ClusterDTO 42 | */ 43 | void delete(ClusterDTO clusterDTO); 44 | 45 | /** 46 | * 查看web ui的粗略信息 47 | * 48 | * @param tenantId 租户id 49 | * @param clusterCode clusterCode 50 | * @return DashboardConfiguration 51 | */ 52 | DashboardConfiguration overviewConfig(Long tenantId, String clusterCode); 53 | 54 | /** 55 | * 查看flink集群的粗略信息 56 | * 57 | * @param tenantId 租户id 58 | * @param clusterCode clusterCode 59 | * @return Map 60 | */ 61 | Map overview(Long tenantId, String clusterCode); 62 | 63 | /** 64 | * 概览flink job列表 65 | * 66 | * @param tenantId 租户id 67 | * @param clusterCode clusterCode 68 | * @return JobIdsWithStatusOverview 69 | */ 70 | JobIdsWithStatusOverview jobList(Long tenantId, String clusterCode); 71 | 72 | /** 73 | * flink job列表详情 74 | * 75 | * @param tenantId 租户id 76 | * @param clusterCode clusterCode 77 | * @return JobIdsWithStatusOverview 78 | */ 79 | MultipleJobsDetails jobsDetails(Long tenantId, String clusterCode); 80 | 81 | /** 82 | * flink job详情 83 | * 84 | * @param tenantId 租户id 85 | * @param clusterCode clusterCode 86 | * @param jobId jobId 87 | * @return JobDetailsInfo 88 | */ 89 | JobDetailsInfo jobDetail(Long tenantId, String clusterCode, String jobId); 90 | 91 | /** 92 | * 使用yarn停止flink job 93 | * 94 | * @param tenantId 租户id 95 | * @param clusterCode clusterCode 96 | * @param jobId jobId 97 | * @return FlinkApiErrorResponse 98 | */ 99 | FlinkApiErrorResponse jobYarnCancel(Long tenantId, String clusterCode, String jobId); 100 | 101 | /** 102 | * 直接停止job 103 | * 104 | * @param tenantId 租户id 105 | * @param clusterCode clusterCode 106 | * @param jobId jobId 107 | * @param mode mode 108 | * @return FlinkApiErrorResponse 109 | */ 110 | FlinkApiErrorResponse jobTerminate(Long tenantId, String clusterCode, String jobId, String mode); 111 | 112 | /** 113 | * 停止flink job并保存savepoint 114 | * 115 | * @param tenantId 租户id 116 | * @param clusterCode clusterCode 117 | * @param savepointTriggerRequestBody SavepointTriggerRequestBody 118 | * @return TriggerResponseWithSavepoint 119 | */ 120 | TriggerResponseWithSavepoint jobCancelOptionSavepoints(Long tenantId, String clusterCode, 121 | SavepointTriggerRequestBody savepointTriggerRequestBody); 122 | 123 | /** 124 | * 重新调节job 125 | * 126 | * @param tenantId 租户id 127 | * @param clusterCode clusterCode 128 | * @param jobId jobId 129 | * @param parallelism parallelism 130 | * @return TriggerResponse 131 | */ 132 | TriggerResponse jobRescale(Long tenantId, String clusterCode, String jobId, int parallelism); 133 | 134 | /** 135 | * Returns the non-recoverable exceptions that have been observed by the job. 136 | * The truncated flag defines whether more exceptions occurred, but are not listed, 137 | * because the response would otherwise get too big. 
138 | * 139 | * @param tenantId 租户id 140 | * @param clusterCode clusterCode 141 | * @param jobId jobId 142 | * @param maxExceptions Comma-separated list of integer values that specifies the upper limit of exceptions to return 143 | * @return JobExceptionsInfo 144 | */ 145 | JobExceptionsInfo jobException(Long tenantId, String clusterCode, String jobId, String maxExceptions); 146 | 147 | /** 148 | * 获取flink taskmanager列表集合 149 | * 150 | * @param tenantId 租户id 151 | * @param clusterCode clusterCode 152 | * @return com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo 153 | */ 154 | TaskManagerInfo taskMangerList(Long tenantId, String clusterCode); 155 | 156 | /** 157 | * 获取flink taskmanager的详情 158 | * 159 | * @param tenantId 租户id 160 | * @param clusterCode clusterCode 161 | * @param tmId taskmanager id 162 | * @return com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail 163 | */ 164 | TaskManagerDetail taskManagerDetail(Long tenantId, String clusterCode, String tmId); 165 | 166 | /** 167 | * 获取flink taskmanager的日志 168 | * 169 | * @param tenantId 租户id 170 | * @param clusterCode clusterCode 171 | * @param tmId taskmanager id 172 | * @return String log 173 | */ 174 | String taskManagerLog(Long tenantId, String clusterCode, String tmId); 175 | 176 | /** 177 | * 获取flink taskmanager的标准输出 178 | * 179 | * @param tenantId 租户id 180 | * @param clusterCode clusterCode 181 | * @param tmId taskmanager id 182 | * @return String stdout 183 | */ 184 | String taskManagerStdout(Long tenantId, String clusterCode, String tmId); 185 | 186 | /** 187 | * 获取flink jobmanager配置信息 188 | * 189 | * @param tenantId 租户id 190 | * @param clusterCode clusterCode 191 | * @return java.util.List> 192 | */ 193 | List> jobManagerConfig(Long tenantId, String clusterCode); 194 | 195 | /** 196 | * 获取flink jobmanager日志 197 | * 198 | * @param tenantId 租户id 199 | * @param clusterCode clusterCode 200 | * @return String log 201 | */ 202 | String jobManagerLog(Long tenantId, String clusterCode); 203 | 204 | /** 205 | * 获取flink jobmanager标准输出 206 | * 207 | * @param tenantId 租户id 208 | * @param clusterCode clusterCode 209 | * @return String log 210 | */ 211 | String jobManagerStdout(Long tenantId, String clusterCode); 212 | 213 | } 214 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/FlinkApi.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service; 2 | 3 | import java.io.File; 4 | import java.util.List; 5 | import java.util.Map; 6 | 7 | import com.github.codingdebugallday.client.app.service.jars.FlinkJarService; 8 | import com.github.codingdebugallday.client.app.service.jm.FlinkJobManagerService; 9 | import com.github.codingdebugallday.client.app.service.jobs.FlinkJobService; 10 | import com.github.codingdebugallday.client.app.service.overview.ClusterOverviewService; 11 | import com.github.codingdebugallday.client.app.service.tm.FlinkTaskManagerService; 12 | import com.github.codingdebugallday.client.domain.entity.jars.JarRunRequest; 13 | import com.github.codingdebugallday.client.domain.entity.jars.JarRunResponseBody; 14 | import com.github.codingdebugallday.client.domain.entity.jars.JarUploadResponseBody; 15 | import com.github.codingdebugallday.client.domain.entity.jobs.*; 16 | import com.github.codingdebugallday.client.domain.entity.overview.DashboardConfiguration; 17 | import 
com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail; 18 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo; 19 | import org.springframework.web.client.RestTemplate; 20 | 21 | /** 22 | *
<p> 23 | * Facade over the Flink REST API: jar upload/run, overview, job, TaskManager and JobManager operations 24 | * </p>
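 * <p>Typical usage (from the README): obtain a FlinkApi from the FlinkApiContext and call the API:</p>
 * <pre>{@code
 * FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
 * JarUploadResponseBody response = flinkApi.uploadJar(file);
 * }</pre>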
25 | * 26 | * @author isacc 2020/03/26 21:55 27 | * @since 1.0 28 | */ 29 | public class FlinkApi { 30 | 31 | private final ApiClient apiClient; 32 | /** 33 | * flink jar 相关 api 34 | */ 35 | private final FlinkJarService flinkJarService; 36 | private final FlinkJobService flinkJobService; 37 | private final FlinkTaskManagerService flinkTaskManagerService; 38 | private final FlinkJobManagerService flinkJobManagerService; 39 | private final ClusterOverviewService clusterOverviewService; 40 | 41 | public FlinkApi(RestTemplate restTemplate) { 42 | this.apiClient = new ApiClient(); 43 | flinkJarService = new FlinkJarService(restTemplate); 44 | flinkJobService = new FlinkJobService(restTemplate); 45 | flinkTaskManagerService = new FlinkTaskManagerService(restTemplate); 46 | flinkJobManagerService = new FlinkJobManagerService(restTemplate); 47 | clusterOverviewService = new ClusterOverviewService(restTemplate); 48 | } 49 | 50 | public ApiClient getApiClient() { 51 | return apiClient; 52 | } 53 | 54 | //======================================================== 55 | //===================flink overview api=================== 56 | //======================================================== 57 | //======================================================== 58 | 59 | /** 60 | * Returns the configuration of the WebUI 61 | * 62 | * @return DashboardConfiguration 63 | */ 64 | public DashboardConfiguration overviewConfig() { 65 | return clusterOverviewService.overviewConfig(apiClient); 66 | } 67 | 68 | /** 69 | * Returns the Flink cluster Overview Info 70 | * 71 | * @return Map 72 | */ 73 | public Map overview() { 74 | return clusterOverviewService.overview(apiClient); 75 | } 76 | 77 | //======================================================== 78 | //===================flink jar api========================= 79 | //======================================================== 80 | //======================================================== 81 | 82 | /** 83 | * upload flink jar 84 | * 85 | * @param file flink jar file 86 | * @return org.abigballofmud.flink.api.domain.jars.JarUploadResponseBody 87 | */ 88 | public JarUploadResponseBody uploadJar(File file) { 89 | return flinkJarService.uploadJar(file, apiClient); 90 | } 91 | 92 | /** 93 | * delete flink jar 94 | * 95 | * @param jarId flink jar file 96 | */ 97 | public void deleteJar(String jarId) { 98 | flinkJarService.deleteJar(jarId, apiClient); 99 | } 100 | 101 | /** 102 | * run flink jar 103 | * 104 | * @param jarRunRequest JarRunRequest 105 | * @return org.abigballofmud.flink.api.domain.jars.JarRunResponseBody 106 | */ 107 | public JarRunResponseBody runJar(JarRunRequest jarRunRequest) { 108 | return flinkJarService.runJar(jarRunRequest, apiClient); 109 | } 110 | 111 | //======================================================== 112 | //===================flink job api========================= 113 | //======================================================== 114 | //======================================================== 115 | 116 | /** 117 | * Returns an overview over all jobs and their current state. 118 | * 119 | * @return JobIdsWithStatusOverview 120 | */ 121 | public JobIdsWithStatusOverview jobList() { 122 | return flinkJobService.jobList(apiClient); 123 | } 124 | 125 | /** 126 | * Returns an overview over all jobs. 127 | * 128 | * @return MultipleJobsDetails 129 | */ 130 | public MultipleJobsDetails jobsDetails() { 131 | return flinkJobService.jobsDetails(apiClient); 132 | } 133 | 134 | /** 135 | * Returns details of a job. 
136 | * 137 | * @param jobId jobId 138 | */ 139 | public JobDetailsInfo jobDetail(String jobId) { 140 | return flinkJobService.jobsDetail(jobId, apiClient); 141 | } 142 | 143 | /** 144 | * cancel job use yarn 145 | * 146 | * @param jobId jobId 147 | * @return if has error return FlinkError 148 | */ 149 | public FlinkApiErrorResponse jobYarnCancel(String jobId) { 150 | return flinkJobService.jobYarnCancel(jobId, apiClient); 151 | } 152 | 153 | /** 154 | * Triggers a savepoint, and optionally cancels the job afterwards. 155 | * This async operation would return a 'triggerid' for further query identifier. 156 | * 157 | * @param savepointTriggerRequestBody SavepointTriggerRequestBody 158 | * @return TriggerResponseWithSavepoint 159 | */ 160 | public TriggerResponseWithSavepoint jobCancelOptionSavepoints(SavepointTriggerRequestBody savepointTriggerRequestBody) { 161 | return flinkJobService.jobCancelOptionSavepoints(savepointTriggerRequestBody, apiClient); 162 | } 163 | 164 | /** 165 | * Terminates a job 166 | * 167 | * @param jobId jobId 168 | * @param mode optional, the termination mode, the only supported value is: "cancel" 169 | * @return com.github.codingdebugallday.client.domain.entity.jobs.FlinkError 170 | */ 171 | public FlinkApiErrorResponse jobTerminate(String jobId, String mode) { 172 | return flinkJobService.jobTerminate(jobId, mode, apiClient); 173 | } 174 | 175 | /** 176 | * Triggers the rescaling of a job. 177 | * This async operation would return a 'triggerid' for further query identifier. 178 | * 179 | * @param jobId jobId 180 | * @param parallelism parallelism 181 | * @return com.github.codingdebugallday.client.domain.entity.jobs.TriggerResponse 182 | */ 183 | public TriggerResponse jobRescale(String jobId, int parallelism) { 184 | return flinkJobService.jobRescale(jobId, parallelism, apiClient); 185 | } 186 | 187 | /** 188 | * Returns the non-recoverable exceptions that have been observed by the job. 189 | * The truncated flag defines whether more exceptions occurred, but are not listed, 190 | * because the response would otherwise get too big. 
191 | * 192 | * @param jobId jobId 193 | * @param maxExceptions Comma-separated list of integer values that specifies the upper limit of exceptions to return 194 | * @return JobExceptionsInfo 195 | */ 196 | public JobExceptionsInfo jobException(String jobId, String maxExceptions) { 197 | return flinkJobService.jobException(jobId, maxExceptions, apiClient); 198 | } 199 | 200 | //======================================================== 201 | //===================flink tm api========================= 202 | //======================================================== 203 | //======================================================== 204 | 205 | public TaskManagerInfo taskMangerList() { 206 | return flinkTaskManagerService.taskMangerList(apiClient); 207 | } 208 | 209 | public TaskManagerDetail taskManagerDetail(String tmId) { 210 | return flinkTaskManagerService.taskManagerDetail(tmId, apiClient); 211 | } 212 | 213 | public String taskManagerLog(String tmId) { 214 | return flinkTaskManagerService.taskManagerLog(tmId, apiClient); 215 | } 216 | 217 | public String taskManagerStdout(String tmId) { 218 | return flinkTaskManagerService.taskManagerStdout(tmId, apiClient); 219 | } 220 | 221 | //======================================================== 222 | //===================flink jm api========================= 223 | //======================================================== 224 | //======================================================== 225 | 226 | public List> jobManagerConfig() { 227 | return flinkJobManagerService.jobManagerConfig(apiClient); 228 | } 229 | 230 | public String jobManagerLog() { 231 | return flinkJobManagerService.jobManagerLog(apiClient); 232 | } 233 | 234 | public String jobManagerStdout() { 235 | return flinkJobManagerService.jobManagerStdout(apiClient); 236 | } 237 | } 238 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/FlinkCommonService.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service; 2 | 3 | import com.github.codingdebugallday.client.infra.utils.RetryUtil; 4 | import lombok.extern.slf4j.Slf4j; 5 | import org.springframework.http.HttpEntity; 6 | import org.springframework.http.HttpMethod; 7 | import org.springframework.http.ResponseEntity; 8 | import org.springframework.lang.Nullable; 9 | import org.springframework.web.client.RestTemplate; 10 | 11 | /** 12 | *
<p> 13 | * Common HTTP helpers wrapping RestTemplate calls with retry 14 | * </p>
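 * <p>
 * Each call goes through {@code RetryUtil.executeWithRetry} with up to 3 attempts and a 1000 ms
 * retry interval before the response body is returned.
 * </p>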
15 | * 16 | * @author isacc 2020/04/09 17:46 17 | * @since 1.0 18 | */ 19 | @Slf4j 20 | public class FlinkCommonService { 21 | 22 | /** 23 | * getForEntity 24 | * 25 | * @param restTemplate RestTemplate 26 | * @param url url 27 | * @param responseType Class 28 | * @param uriVariables Object... uriVariables 29 | * @return T 30 | */ 31 | public <T> T getForEntity(RestTemplate restTemplate, 32 | String url, 33 | Class<T> responseType, 34 | Object... uriVariables) { 35 | return RetryUtil.executeWithRetry(() -> { 36 | ResponseEntity<T> responseEntity = restTemplate.getForEntity( 37 | url, 38 | responseType, 39 | uriVariables); 40 | log.debug("response, status: {}", responseEntity.getStatusCode()); 41 | return responseEntity; 42 | }, 3, 1000L, true).getBody(); 43 | } 44 | 45 | /** 46 | * exchange 47 | * 48 | * @param restTemplate RestTemplate 49 | * @param url url 50 | * @param method HttpMethod 51 | * @param requestEntity HttpEntity 52 | * @param responseType Class 53 | * @param uriVariables Object... uriVariables 54 | * @return T 55 | */ 56 | public <T> T exchange(RestTemplate restTemplate, 57 | String url, 58 | HttpMethod method, 59 | @Nullable HttpEntity<?> requestEntity, 60 | Class<T> responseType, 61 | Object... uriVariables) { 62 | return RetryUtil.executeWithRetry(() -> { 63 | ResponseEntity<T> responseEntity = restTemplate.exchange(url, method, 64 | requestEntity, responseType, uriVariables); 65 | log.debug("response, status: {}", responseEntity.getStatusCode()); 66 | return responseEntity; 67 | }, 3, 1000L, true).getBody(); 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/UploadJarService.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service; 2 | 3 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO; 4 | import org.springframework.web.multipart.MultipartFile; 5 | 6 | /** 7 | *
<p> 8 | * Application service for uploading, updating and deleting Flink jars 9 | * </p>
10 | * 11 | * @author isacc 2020/04/07 10:48 12 | * @since 1.0 13 | */ 14 | public interface UploadJarService { 15 | 16 | /** 17 | * 上传jar 18 | * 19 | * @param uploadJarDTO UploadJarDTO 20 | * @param multipartFile jar file 21 | * @return com.github.codingdebugallday.client.api.dto.UploadJarDTO 22 | */ 23 | UploadJarDTO upload(UploadJarDTO uploadJarDTO, MultipartFile multipartFile); 24 | 25 | /** 26 | * 更新上传的jar 27 | * 28 | * @param uploadJarDTO UploadJarDTO 29 | * @param multipartFile jar file 30 | * @return com.github.codingdebugallday.client.api.dto.UploadJarDTO 31 | */ 32 | UploadJarDTO update(UploadJarDTO uploadJarDTO, MultipartFile multipartFile); 33 | 34 | /** 35 | * 删除之前通过flink api上传的jar 36 | * 37 | * @param uploadJarDTO UploadJarDTO 38 | */ 39 | void delete(UploadJarDTO uploadJarDTO); 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/app/service/impl/ClusterServiceImpl.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.app.service.impl; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | import java.util.Objects; 6 | import java.util.stream.Collectors; 7 | import javax.annotation.Resource; 8 | 9 | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; 10 | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; 11 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 12 | import com.github.codingdebugallday.client.api.dto.NodeDTO; 13 | import com.github.codingdebugallday.client.api.dto.NodeSettingInfo; 14 | import com.github.codingdebugallday.client.app.service.ClusterService; 15 | import com.github.codingdebugallday.client.app.service.FlinkApi; 16 | import com.github.codingdebugallday.client.domain.entity.Cluster; 17 | import com.github.codingdebugallday.client.domain.entity.Node; 18 | import com.github.codingdebugallday.client.domain.entity.jobs.*; 19 | import com.github.codingdebugallday.client.domain.entity.overview.DashboardConfiguration; 20 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail; 21 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo; 22 | import com.github.codingdebugallday.client.domain.repository.ClusterRepository; 23 | import com.github.codingdebugallday.client.infra.context.FlinkApiContext; 24 | import com.github.codingdebugallday.client.infra.converter.ClusterConvertMapper; 25 | import com.github.codingdebugallday.client.infra.converter.NodeConvertMapper; 26 | import com.github.codingdebugallday.client.infra.mapper.ClusterMapper; 27 | import com.github.codingdebugallday.client.infra.mapper.NodeMapper; 28 | import com.github.codingdebugallday.client.infra.utils.JSON; 29 | import org.jasypt.encryption.StringEncryptor; 30 | import org.springframework.stereotype.Service; 31 | import org.springframework.transaction.annotation.Transactional; 32 | 33 | /** 34 | *
<p> 35 | * Default ClusterService implementation that delegates Flink REST calls to a FlinkApi resolved from FlinkApiContext 36 | * </p>
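 * <p>
 * insert/update also maintain the node table (inserting new nodes, removing nodes dropped from
 * the DTO), and update/delete evict the cluster's cached {@code FlinkApi} from
 * {@code FlinkApiContext} so the next call rebuilds it.
 * </p>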
37 | * 38 | * @author isacc 2020/03/31 12:02 39 | * @since 1.0 40 | */ 41 | @Service("flinkClusterService") 42 | public class ClusterServiceImpl extends ServiceImpl implements ClusterService { 43 | 44 | private final ClusterMapper clusterMapper; 45 | private final NodeMapper nodeMapper; 46 | private final FlinkApiContext flinkApiContext; 47 | private final ClusterRepository clusterRepository; 48 | 49 | @Resource 50 | private StringEncryptor jasyptStringEncryptor; 51 | 52 | public ClusterServiceImpl(ClusterMapper clusterMapper, 53 | NodeMapper nodeMapper, 54 | FlinkApiContext flinkApiContext, 55 | ClusterRepository clusterRepository) { 56 | this.clusterMapper = clusterMapper; 57 | this.nodeMapper = nodeMapper; 58 | this.flinkApiContext = flinkApiContext; 59 | this.clusterRepository = clusterRepository; 60 | } 61 | 62 | @Override 63 | @Transactional(rollbackFor = Exception.class) 64 | public ClusterDTO insert(ClusterDTO clusterDTO) { 65 | Cluster cluster = ClusterConvertMapper.INSTANCE.dtoToEntity(clusterDTO); 66 | // 插集群表 67 | clusterMapper.insert(cluster); 68 | // 插节点表 69 | List nodeDTOList = genNodeList(clusterDTO).stream().map(node -> { 70 | nodeMapper.insert(node); 71 | return NodeConvertMapper.INSTANCE.entityToDTO(nodeMapper.selectById(node.getNodeId())); 72 | }).collect(Collectors.toList()); 73 | ClusterDTO dto = ClusterConvertMapper.INSTANCE.entityToDTO(cluster); 74 | dto.setNodeDTOList(nodeDTOList); 75 | return dto; 76 | } 77 | 78 | @Override 79 | @Transactional(rollbackFor = Exception.class) 80 | public ClusterDTO update(ClusterDTO clusterDTO) { 81 | Cluster cluster = ClusterConvertMapper.INSTANCE.dtoToEntity(clusterDTO); 82 | // 更新集群表 83 | clusterMapper.updateById(cluster); 84 | // flinkApiContext还需remove掉 85 | flinkApiContext.remove(clusterDTO.getClusterCode()); 86 | // 更新节点表 87 | // 需要删除的节点 88 | List existNodeList = selectByClusterCode(cluster.getClusterCode(), cluster.getTenantId()).stream() 89 | .map(Node::getNodeId) 90 | .collect(Collectors.toList()); 91 | List nodeList = genNodeList(clusterDTO); 92 | List curNodeList = nodeList.stream() 93 | .filter(node -> node.getNodeId() != null) 94 | .map(Node::getNodeId) 95 | .collect(Collectors.toList()); 96 | // 取差集 就是需要删除的节点 97 | existNodeList.removeAll(curNodeList); 98 | nodeMapper.deleteBatchIds(existNodeList); 99 | // 需要新增和更新 100 | List nodeDTOList = nodeList.stream().map(node -> { 101 | if (Objects.isNull(node.getNodeId())) { 102 | nodeMapper.insert(node); 103 | } else { 104 | nodeMapper.updateById(node); 105 | } 106 | return NodeConvertMapper.INSTANCE.entityToDTO(nodeMapper.selectById(node.getNodeId())); 107 | }).collect(Collectors.toList()); 108 | ClusterDTO dto = ClusterConvertMapper.INSTANCE.entityToDTO(cluster); 109 | dto.setNodeDTOList(nodeDTOList); 110 | return dto; 111 | } 112 | 113 | @Override 114 | @Transactional(rollbackFor = Exception.class) 115 | public void delete(ClusterDTO clusterDTO) { 116 | clusterRepository.delete(clusterDTO); 117 | // flinkApiContext还需remove掉 118 | flinkApiContext.remove(clusterDTO.getClusterCode()); 119 | } 120 | 121 | @Override 122 | public DashboardConfiguration overviewConfig(Long tenantId, String clusterCode) { 123 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId); 124 | return flinkApi.overviewConfig(); 125 | } 126 | 127 | @Override 128 | public Map overview(Long tenantId, String clusterCode) { 129 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId); 130 | return flinkApi.overview(); 131 | } 132 | 133 | @Override 134 | public JobIdsWithStatusOverview 
135 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
136 | return flinkApi.jobList();
137 | }
138 |
139 | @Override
140 | public MultipleJobsDetails jobsDetails(Long tenantId, String clusterCode) {
141 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
142 | return flinkApi.jobsDetails();
143 | }
144 |
145 | @Override
146 | public JobDetailsInfo jobDetail(Long tenantId, String clusterCode, String jobId) {
147 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
148 | return flinkApi.jobDetail(jobId);
149 | }
150 |
151 | @Override
152 | public FlinkApiErrorResponse jobYarnCancel(Long tenantId, String clusterCode, String jobId) {
153 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
154 | return flinkApi.jobYarnCancel(jobId);
155 | }
156 |
157 | @Override
158 | public FlinkApiErrorResponse jobTerminate(Long tenantId, String clusterCode, String jobId, String mode) {
159 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
160 | return flinkApi.jobTerminate(jobId, mode);
161 | }
162 |
163 | @Override
164 | public TriggerResponseWithSavepoint jobCancelOptionSavepoints(Long tenantId, String clusterCode,
165 | SavepointTriggerRequestBody savepointTriggerRequestBody) {
166 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
167 | return flinkApi.jobCancelOptionSavepoints(savepointTriggerRequestBody);
168 | }
169 |
170 | @Override
171 | public TriggerResponse jobRescale(Long tenantId, String clusterCode, String jobId, int parallelism) {
172 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
173 | return flinkApi.jobRescale(jobId, parallelism);
174 | }
175 |
176 | @Override
177 | public JobExceptionsInfo jobException(Long tenantId, String clusterCode, String jobId, String maxExceptions) {
178 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
179 | return flinkApi.jobException(jobId, maxExceptions);
180 | }
181 |
182 | @Override
183 | public TaskManagerInfo taskMangerList(Long tenantId, String clusterCode) {
184 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
185 | return flinkApi.taskMangerList();
186 | }
187 |
188 | @Override
189 | public TaskManagerDetail taskManagerDetail(Long tenantId, String clusterCode, String tmId) {
190 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
191 | return flinkApi.taskManagerDetail(tmId);
192 | }
193 |
194 | @Override
195 | public String taskManagerLog(Long tenantId, String clusterCode, String tmId) {
196 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
197 | return flinkApi.taskManagerLog(tmId);
198 | }
199 |
200 | @Override
201 | public String taskManagerStdout(Long tenantId, String clusterCode, String tmId) {
202 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
203 | return flinkApi.taskManagerStdout(tmId);
204 | }
205 |
206 | @Override
207 | public List<Map<String, String>> jobManagerConfig(Long tenantId, String clusterCode) {
208 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
209 | return flinkApi.jobManagerConfig();
210 | }
211 |
212 | @Override
213 | public String jobManagerLog(Long tenantId, String clusterCode) {
214 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
215 | return flinkApi.jobManagerLog();
216 | }
217 |
218 | @Override
219 | public String jobManagerStdout(Long tenantId, String clusterCode) {
220 | FlinkApi flinkApi = flinkApiContext.get(clusterCode, tenantId);
221 | return flinkApi.jobManagerStdout();
222 | }
223 |
224 | private List<Node> genNodeList(ClusterDTO clusterDTO) {
225 | return clusterDTO.getNodeDTOList().stream().map(nodeDTO -> {
226 | nodeDTO.setClusterCode(clusterDTO.getClusterCode());
227 | nodeDTO.setTenantId(clusterDTO.getTenantId());
228 | NodeSettingInfo nodeSettingInfo = JSON.toObj(nodeDTO.getSettingInfo(), NodeSettingInfo.class);
229 | // encrypt the password on first insert, or when this node's password is being changed
230 | if (Objects.isNull(nodeDTO.getNodeId()) ||
231 | Boolean.TRUE.equals(nodeSettingInfo.getChangePassword())) {
232 | nodeSettingInfo.setPassword(jasyptStringEncryptor.encrypt(nodeSettingInfo.getPassword()));
233 | }
234 | nodeDTO.setSettingInfo(JSON.toJson(nodeSettingInfo));
235 | return NodeConvertMapper.INSTANCE.dtoToEntity(nodeDTO);
236 | }).collect(Collectors.toList());
237 | }
238 |
239 | public List<Node> selectByClusterCode(String clusterCode, Long tenantId) {
240 | QueryWrapper<Node> queryWrapper = new QueryWrapper<>();
241 | queryWrapper.eq(Node.FIELD_CLUSTER_CODE, clusterCode);
242 | queryWrapper.eq(Node.FIELD_TENANT_ID, tenantId);
243 | return nodeMapper.selectList(queryWrapper);
244 | }
245 |
246 | }
247 |
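A minimal caller-side sketch for the service above, assuming only a Spring context that exposes the "flinkClusterService" bean; the tenant id and cluster code are illustrative values, not part of the repository:

    @Resource
    private ClusterService clusterService;

    public void printClusterOverview() {
        // resolves (and caches) a FlinkApi for the cluster, then calls the Flink REST overview endpoints
        DashboardConfiguration config = clusterService.overviewConfig(0L, "demo_cluster");
        Map<String, Object> overview = clusterService.overview(0L, "demo_cluster");
        System.out.println(config.getFlinkVersion() + " -> " + overview);
    }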

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/impl/UploadJarServiceImpl.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.impl;
2 |
3 | import java.util.Optional;
4 | import java.util.concurrent.CompletableFuture;
5 | import java.util.concurrent.ExecutorService;
6 | import javax.annotation.Resource;
7 |
8 | import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
9 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO;
10 | import com.github.codingdebugallday.client.app.service.FlinkApi;
11 | import com.github.codingdebugallday.client.app.service.UploadJarService;
12 | import com.github.codingdebugallday.client.domain.entity.UploadJar;
13 | import com.github.codingdebugallday.client.domain.entity.jars.JarUploadResponseBody;
14 | import com.github.codingdebugallday.client.infra.context.FlinkApiContext;
15 | import com.github.codingdebugallday.client.infra.converter.UploadJarConvertMapper;
16 | import com.github.codingdebugallday.client.infra.mapper.UploadJarMapper;
17 | import com.github.codingdebugallday.client.infra.utils.FlinkCommonUtil;
18 | import com.github.codingdebugallday.client.infra.utils.ThreadPoolUtil;
19 | import lombok.extern.slf4j.Slf4j;
20 | import org.springframework.stereotype.Service;
21 | import org.springframework.transaction.annotation.Transactional;
22 | import org.springframework.util.Assert;
23 | import org.springframework.web.multipart.MultipartFile;
24 |
25 | /**
26 | *
27 | * Upload-jar service implementation: persists jar metadata and uploads the file to the Flink cluster asynchronously
28 | *
29 | *
30 | * @author isacc 2020/04/07 11:02
31 | * @since 1.0
32 | */
33 | @Service("flinkUploadJarService")
34 | @Slf4j
35 | public class UploadJarServiceImpl extends ServiceImpl<UploadJarMapper, UploadJar> implements UploadJarService {
36 |
37 | private final ExecutorService executorService = ThreadPoolUtil.getExecutorService();
38 |
39 | @Resource
40 | private FlinkApiContext flinkApiContext;
41 |
42 | @Override
43 | @Transactional(rollbackFor = Exception.class)
44 | public UploadJarDTO update(UploadJarDTO uploadJarDTO, MultipartFile multipartFile) {
45 | // update the metadata row
46 | UploadJar uploadJar = UploadJarConvertMapper.INSTANCE.dtoToEntity(uploadJarDTO);
47 | updateById(uploadJar);
48 | // does the jar file itself need to be replaced?
49 | Optional.ofNullable(multipartFile).ifPresent(jarFile -> {
50 | Assert.notNull(uploadJarDTO.getFilename(), "overwrite jar filename cannot be null");
51 | FlinkApi flinkApi = flinkApiContext.get(uploadJarDTO.getClusterCode(), uploadJarDTO.getTenantId());
52 | // delete the previously uploaded jar first
53 | flinkApi.deleteJar(uploadJar.getFilename());
54 | // re-upload and write the result back
55 | uploadJarAndUpdateAsync(flinkApi, uploadJar, multipartFile);
56 | });
57 | return UploadJarConvertMapper.INSTANCE.entityToDTO(uploadJar);
58 | }
59 |
60 | @Override
61 | @Transactional(rollbackFor = Exception.class)
62 | public void delete(UploadJarDTO uploadJarDTO) {
63 | // delete the jar on the flink cluster
64 | FlinkApi flinkApi = flinkApiContext.get(uploadJarDTO.getClusterCode(), uploadJarDTO.getTenantId());
65 | flinkApi.deleteJar(uploadJarDTO.getJarName());
66 | // delete the metadata row
67 | UploadJar uploadJar = UploadJarConvertMapper.INSTANCE.dtoToEntity(uploadJarDTO);
68 | this.removeById(uploadJar.getUploadJarId());
69 | }
70 |
71 | @Override
72 | @Transactional(rollbackFor = Exception.class)
73 | public UploadJarDTO upload(UploadJarDTO uploadJarDTO, MultipartFile multipartFile) {
74 | // insert the metadata row
75 | UploadJar uploadJar = UploadJarConvertMapper.INSTANCE.dtoToEntity(uploadJarDTO);
76 | save(uploadJar);
77 | // call the rest api to upload the jar
78 | FlinkApi flinkApi = flinkApiContext.get(uploadJarDTO.getClusterCode(), uploadJarDTO.getTenantId());
79 | uploadJarAndUpdateAsync(flinkApi, uploadJar, multipartFile);
80 | return UploadJarConvertMapper.INSTANCE.entityToDTO(uploadJar);
81 | }
82 |
83 | private void uploadJarAndUpdateAsync(FlinkApi flinkApi, UploadJar uploadJar, MultipartFile multipartFile) {
84 | // call the rest api to upload the jar
85 | CompletableFuture<JarUploadResponseBody> completableFuture = CompletableFuture.supplyAsync(() ->
86 | flinkApi.uploadJar(FlinkCommonUtil.multiPartFileToFile(multipartFile)), executorService);
87 | completableFuture.thenAccept(t -> {
88 | // write the upload result back (thenAccept only runs on success, so t is never null here)
89 | log.debug("jar upload response: {}", t);
90 | // update with optimistic locking
91 | UploadJar entity = getById(uploadJar.getUploadJarId());
92 | String filename = t.getFilename();
93 | entity.setFilename(filename);
94 | entity.setStatus(t.getStatus());
95 | entity.setJarName(filename.substring(filename.lastIndexOf('/') + 1));
96 | this.updateById(entity);
97 | }).exceptionally(e -> {
98 | log.error("jar upload error", e);
99 | return null;
100 | });
101 | }
102 |
103 | }
104 |
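Note that upload(...) above is asynchronous: the metadata row is saved and returned immediately, and the CompletableFuture callback fills in filename/status later. A sketch of how a caller might observe the final state; getUploadJarId() on the DTO is assumed to mirror the entity field, and "success" comes from the JarUploadResponseBody Javadoc example:

    UploadJarDTO saved = uploadJarService.upload(uploadJarDTO, multipartFile);
    // filename/status may still be null right after the call returns
    UploadJar row = uploadJarService.getById(saved.getUploadJarId());
    boolean done = row != null && "success".equals(row.getStatus());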
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/jars/FlinkJarService.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.jars;
2 |
3 | import java.io.File;
4 | import java.util.Objects;
5 |
6 | import com.github.codingdebugallday.client.api.dto.ClusterDTO;
7 | import com.github.codingdebugallday.client.app.service.ApiClient;
8 | import com.github.codingdebugallday.client.app.service.FlinkCommonService;
9 | import com.github.codingdebugallday.client.domain.entity.jars.JarRunRequest;
10 | import com.github.codingdebugallday.client.domain.entity.jars.JarRunResponseBody;
11 | import com.github.codingdebugallday.client.domain.entity.jars.JarUploadResponseBody;
12 | import com.github.codingdebugallday.client.infra.constants.FlinkApiConstant;
13 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException;
14 | import com.github.codingdebugallday.client.infra.utils.JSON;
15 | import com.github.codingdebugallday.client.infra.utils.RestTemplateUtil;
16 | import lombok.extern.slf4j.Slf4j;
17 | import org.springframework.core.io.FileSystemResource;
18 | import org.springframework.http.HttpEntity;
19 | import org.springframework.http.HttpMethod;
20 | import org.springframework.http.HttpStatus;
21 | import org.springframework.util.Assert;
22 | import org.springframework.util.LinkedMultiValueMap;
23 | import org.springframework.util.MultiValueMap;
24 | import org.springframework.util.StringUtils;
25 | import org.springframework.web.client.RestTemplate;
26 |
27 | /**
28 | *
29 | * Wrapper around Flink's /jars REST endpoints (upload, run, delete) with standby-JobManager failover
30 | *
31 | *
32 | * @author isacc 2020/03/26 22:42
33 | * @since 1.0
34 | */
35 | @Slf4j
36 | public class FlinkJarService extends FlinkCommonService {
37 |
38 | private final RestTemplate restTemplate;
39 |
40 | public FlinkJarService(RestTemplate restTemplate) {
41 | this.restTemplate = restTemplate;
42 | }
43 |
44 | private boolean checkApiClient(ApiClient apiClient) {
45 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
46 | return !Objects.isNull(clusterDTO) &&
47 | !StringUtils.isEmpty(clusterDTO.getJobManagerUrl());
48 | }
49 |
50 | public JarRunResponseBody runJar(JarRunRequest jarRunRequest, ApiClient apiClient) {
51 | HttpEntity<String> requestEntity =
52 | new HttpEntity<>((JSON.toJson(jarRunRequest)), RestTemplateUtil.applicationJsonHeaders());
53 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
54 | try {
55 | return exchange(restTemplate,
56 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jars.RUN_JAR,
57 | HttpMethod.POST, requestEntity, JarRunResponseBody.class,
58 | jarRunRequest.getJarId());
59 | } catch (Exception e) {
60 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
61 | try {
62 | return exchange(restTemplate, url + FlinkApiConstant.Jars.RUN_JAR,
63 | HttpMethod.POST, requestEntity, JarRunResponseBody.class,
64 | jarRunRequest.getJarId());
65 | } catch (Exception ex) {
66 | // ignore
67 | }
68 | }
69 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jar.run");
70 | }
71 | }
72 |
73 |
74 | /**
75 | * upload a flink jar
76 | *
77 | * @param file jar file
78 | * @param apiClient ApiClient
79 | * @return org.abigballofmud.flink.api.domain.jars.JarUploadResponseBody
80 | */
81 | public JarUploadResponseBody uploadJar(File file, ApiClient apiClient) {
82 | // validate the flink cluster settings carried by the apiClient
83 | Assert.isTrue(checkApiClient(apiClient), "Please check the flink jobManagerUrl and uploadJarPath are configured");
84 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
85 | MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(1);
86 | body.add("file", new FileSystemResource(file));
87 | HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body, RestTemplateUtil.applicationMultiDataHeaders());
88 | try {
89 | // try the primary JobManager url first; after three failed attempts fall back to the standby urls
90 | return exchange(restTemplate,
91 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jars.UPLOAD_JAR,
92 | HttpMethod.POST, requestEntity, JarUploadResponseBody.class);
93 | } catch (Exception e) {
94 | // if HA is configured, use the standby urls here
95 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
96 | try {
97 | return exchange(restTemplate,
98 | url + FlinkApiConstant.Jars.UPLOAD_JAR,
99 | HttpMethod.POST, requestEntity, JarUploadResponseBody.class);
100 | } catch (Exception ex) {
101 | // ignore
102 | }
103 | }
104 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jar.upload");
105 | }
106 | }
107 |
108 | public void deleteJar(String jarId, ApiClient apiClient) {
109 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
110 | MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(1);
111 | HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body);
112 | try {
113 | // try the primary JobManager url first; after three failed attempts fall back to the standby urls
114 | exchange(restTemplate,
115 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jars.DELETE_JAR,
116 | HttpMethod.DELETE, requestEntity, String.class, jarId);
117 | } catch (Exception e) {
118 | // if HA is configured, use the standby urls here
119 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
120 | try {
121 | exchange(restTemplate, url + FlinkApiConstant.Jars.DELETE_JAR,
122 | HttpMethod.DELETE, requestEntity, String.class, jarId);
123 | return;
124 | } catch (Exception ex) {
125 | // ignore
126 | }
127 | }
128 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jar.delete");
129 | }
130 | }
131 | }
132 |
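FlinkJarService above always tries the primary JobManager first and only then walks the standby url set, so callers never deal with HA failover themselves. A direct-use sketch, assuming an ApiClient already built for the target cluster; the file path is illustrative, the entry class comes from the JarRunRequest Javadoc example, and a plain new RestTemplate() is used only for brevity (the starter normally wires one with its own error handler):

    FlinkJarService jarService = new FlinkJarService(new RestTemplate());
    JarUploadResponseBody uploaded = jarService.uploadJar(new File("/tmp/WordCount.jar"), apiClient);
    // Flink expects the jar id, i.e. the last path segment of the returned filename
    String jarId = uploaded.getFilename().substring(uploaded.getFilename().lastIndexOf('/') + 1);
    JarRunRequest request = JarRunRequest.builder()
            .entryClass("org.apache.flink.streaming.examples.wordcount.WordCount")
            .parallelism(1)
            .jarId(jarId)
            .build();
    JarRunResponseBody run = jarService.runJar(request, apiClient);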

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/jm/FlinkJobManagerService.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.jm;
2 |
3 | import java.util.List;
4 | import java.util.Map;
5 |
6 | import com.github.codingdebugallday.client.api.dto.ClusterDTO;
7 | import com.github.codingdebugallday.client.app.service.ApiClient;
8 | import com.github.codingdebugallday.client.app.service.FlinkCommonService;
9 | import com.github.codingdebugallday.client.infra.constants.FlinkApiConstant;
10 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException;
11 | import lombok.extern.slf4j.Slf4j;
12 | import org.springframework.http.HttpStatus;
13 | import org.springframework.web.client.RestTemplate;
14 |
15 | /**
16 | *
17 | * Wrapper around Flink's JobManager REST endpoints (config, log, stdout) with standby failover
18 | *
19 | *
20 | * @author isacc 2020/04/10 15:36
21 | * @since 1.0
22 | */
23 | @Slf4j
24 | public class FlinkJobManagerService extends FlinkCommonService {
25 |
26 | private final RestTemplate restTemplate;
27 |
28 | public FlinkJobManagerService(RestTemplate restTemplate) {
29 | this.restTemplate = restTemplate;
30 | }
31 |
32 | @SuppressWarnings("unchecked")
33 | public List<Map<String, String>> jobManagerConfig(ApiClient apiClient) {
34 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
35 | try {
36 | return getForEntity(restTemplate,
37 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.JobManager.JM_CONFIG,
38 | List.class);
39 | } catch (Exception e) {
40 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
41 | try {
42 | return getForEntity(restTemplate,
43 | url + FlinkApiConstant.JobManager.JM_CONFIG,
44 | List.class);
45 | } catch (Exception ex) {
46 | // ignore
47 | }
48 | }
49 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jm.config");
50 | }
51 | }
52 |
53 | public String jobManagerLog(ApiClient apiClient) {
54 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
55 | try {
56 | return getForEntity(restTemplate,
57 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.JobManager.JM_LOG,
58 | String.class);
59 | } catch (Exception e) {
60 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
61 | try {
62 | return getForEntity(restTemplate,
63 | url + FlinkApiConstant.JobManager.JM_LOG,
64 | String.class);
65 | } catch (Exception ex) {
66 | // ignore
67 | }
68 | }
69 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jm.log");
70 | }
71 | }
72 |
73 | public String jobManagerStdout(ApiClient apiClient) {
74 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
75 | try {
76 | return getForEntity(restTemplate,
77 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.JobManager.JM_STDOUT,
78 | String.class);
79 | } catch (Exception e) {
80 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
81 | try {
82 | return getForEntity(restTemplate,
83 | url + FlinkApiConstant.JobManager.JM_STDOUT,
84 | String.class);
85 | } catch (Exception ex) {
86 | // ignore
87 | }
88 | }
89 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jm.stdout");
90 | }
91 | }
92 | }
93 |
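Flink's GET /jobmanager/config returns a list of single {"key": ..., "value": ...} entries, which is why jobManagerConfig above is typed List<Map<String, String>>. A sketch of flattening it into one lookup map; "rest.port" is just an example key, and the usual java.util.stream imports are assumed:

    List<Map<String, String>> raw = jobManagerService.jobManagerConfig(apiClient);
    Map<String, String> config = raw.stream()
            .collect(Collectors.toMap(e -> e.get("key"), e -> e.get("value")));
    String restPort = config.get("rest.port");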
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/jobs/FlinkJobService.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.jobs;
2 |
3 | import java.util.Optional;
4 | import java.util.concurrent.TimeUnit;
5 |
6 | import com.github.codingdebugallday.client.api.dto.ClusterDTO;
7 | import com.github.codingdebugallday.client.app.service.ApiClient;
8 | import com.github.codingdebugallday.client.app.service.FlinkCommonService;
9 | import com.github.codingdebugallday.client.domain.entity.jobs.*;
10 | import com.github.codingdebugallday.client.infra.constants.FlinkApiConstant;
11 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException;
12 | import com.github.codingdebugallday.client.infra.utils.JSON;
13 | import com.github.codingdebugallday.client.infra.utils.RestTemplateUtil;
14 | import lombok.extern.slf4j.Slf4j;
15 | import org.springframework.http.HttpEntity;
16 | import org.springframework.http.HttpMethod;
17 | import org.springframework.http.HttpStatus;
18 | import org.springframework.util.CollectionUtils;
19 | import org.springframework.util.LinkedMultiValueMap;
20 | import org.springframework.util.MultiValueMap;
21 | import org.springframework.util.StringUtils;
22 | import org.springframework.web.client.RestTemplate;
23 |
24 | /**
25 | *
26 | * Wrapper around Flink's /jobs REST endpoints (list, details, cancel, savepoint, rescale, exceptions) with standby failover
27 | *
28 | *
29 | * @author isacc 2020/04/08 11:35
30 | * @since 1.0
31 | */
32 | @Slf4j
33 | public class FlinkJobService extends FlinkCommonService {
34 |
35 | private static final String STATUS_COMPLETED = "COMPLETED";
36 |
37 | private final RestTemplate restTemplate;
38 |
39 | public FlinkJobService(RestTemplate restTemplate) {
40 | this.restTemplate = restTemplate;
41 | }
42 |
43 | public JobIdsWithStatusOverview jobList(ApiClient apiClient) {
44 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
45 | try {
46 | return getForEntity(restTemplate,
47 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_LIST,
48 | JobIdsWithStatusOverview.class);
49 | } catch (Exception e) {
50 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
51 | try {
52 | return getForEntity(restTemplate,
53 | url + FlinkApiConstant.Jobs.JOB_LIST,
54 | JobIdsWithStatusOverview.class);
55 | } catch (Exception ex) {
56 | // ignore
57 | }
58 | }
59 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.list");
60 | }
61 | }
62 |
63 | public MultipleJobsDetails jobsDetails(ApiClient apiClient) {
64 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
65 | try {
66 | return getForEntity(restTemplate,
67 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_OVERVIEW,
68 | MultipleJobsDetails.class);
69 | } catch (Exception e) {
70 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
71 | try {
72 | return getForEntity(restTemplate,
73 | url + FlinkApiConstant.Jobs.JOB_OVERVIEW,
74 | MultipleJobsDetails.class);
75 | } catch (Exception ex) {
76 | // ignore
77 | }
78 | }
79 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jobs.details");
80 | }
81 | }
82 |
83 | public JobDetailsInfo jobsDetail(String jobId, ApiClient apiClient) {
84 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
85 | try {
86 | return getForEntity(restTemplate,
87 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_DETAIL,
88 | JobDetailsInfo.class, jobId);
89 | } catch (Exception e) {
90 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
91 | try {
92 | return getForEntity(restTemplate,
93 | url + FlinkApiConstant.Jobs.JOB_DETAIL,
94 | JobDetailsInfo.class, jobId);
95 | } catch (Exception ex) {
96 | // ignore
97 | }
98 | }
99 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.detail");
100 | }
101 | }
102 |
103 | public FlinkApiErrorResponse jobYarnCancel(String jobId, ApiClient apiClient) {
104 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
105 | try {
106 | return getForEntity(restTemplate,
107 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_YARN_CANCEL,
108 | FlinkApiErrorResponse.class, jobId);
109 | } catch (Exception e) {
110 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
111 | try {
112 | return getForEntity(restTemplate,
113 | url + FlinkApiConstant.Jobs.JOB_YARN_CANCEL,
114 | FlinkApiErrorResponse.class, jobId);
115 | } catch (Exception ex) {
116 | // ignore
117 | }
118 | }
119 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.yarn.cancel");
120 | }
121 | }
122 |
123 | public TriggerResponseWithSavepoint jobCancelOptionSavepoints(SavepointTriggerRequestBody savepointTriggerRequestBody,
124 | ApiClient apiClient) {
125 | HttpEntity<String> requestEntity =
126 | new HttpEntity<>((JSON.toJson(savepointTriggerRequestBody)), RestTemplateUtil.applicationJsonHeaders());
127 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
128 | TriggerResponse triggerResponse;
129 | SavepointInfo savepointInfo = null;
130 | try {
131 | triggerResponse = exchange(restTemplate,
132 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_CANCEL_WITH_SAVEPOINTS,
133 | HttpMethod.POST, requestEntity, TriggerResponse.class, savepointTriggerRequestBody.getJobId());
134 | if (CollectionUtils.isEmpty(triggerResponse.getErrors())) {
135 | // only stop once the returned status is COMPLETED, otherwise keep polling
136 | savepointInfo = fetchSavepoint(
137 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_SAVEPOINT_STATUS,
138 | savepointTriggerRequestBody, triggerResponse);
139 | }
140 | return TriggerResponseWithSavepoint.builder()
141 | .savepointInfo(savepointInfo)
142 | .triggerResponse(triggerResponse).build();
143 | } catch (Exception e) {
144 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
145 | try {
146 | triggerResponse = exchange(restTemplate,
147 | url + FlinkApiConstant.Jobs.JOB_CANCEL_WITH_SAVEPOINTS,
148 | HttpMethod.POST, requestEntity, TriggerResponse.class, savepointTriggerRequestBody.getJobId());
149 | if (CollectionUtils.isEmpty(triggerResponse.getErrors())) {
150 | // only stop once the returned status is COMPLETED, otherwise keep polling
151 | savepointInfo = fetchSavepoint(
152 | url + FlinkApiConstant.Jobs.JOB_SAVEPOINT_STATUS,
153 | savepointTriggerRequestBody, triggerResponse);
154 | }
155 | return TriggerResponseWithSavepoint.builder()
156 | .savepointInfo(savepointInfo)
157 | .triggerResponse(triggerResponse).build();
158 | } catch (Exception ex) {
159 | // ignore
160 | }
161 | }
162 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.jar.cancel.option.savepoint");
163 | }
164 | }
165 |
166 | private SavepointInfo fetchSavepoint(String uri,
167 | SavepointTriggerRequestBody savepointTriggerRequestBody,
168 | TriggerResponse triggerResponse) {
169 | SavepointInfo savepointInfo = getForEntity(restTemplate,
170 | uri,
171 | SavepointInfo.class,
172 | savepointTriggerRequestBody.getJobId(),
173 | triggerResponse.getRequestId());
174 | // only stop once the returned status is COMPLETED, otherwise keep polling
175 | while (savepointInfo != null &&
176 | !STATUS_COMPLETED.equals(savepointInfo.getStatus().getId())) {
177 | // small delay so we do not flood the API with requests
178 | try {
179 | TimeUnit.MILLISECONDS.sleep(100L);
180 | } catch (InterruptedException e) {
181 | Thread.currentThread().interrupt();
182 | }
183 | savepointInfo = fetchSavepoint(uri, savepointTriggerRequestBody, triggerResponse);
184 | }
185 | return savepointInfo;
186 | }
187 |
188 | public FlinkApiErrorResponse jobTerminate(String jobId, String mode, ApiClient apiClient) {
189 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
190 | MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(1);
191 | HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body);
192 | try {
193 | return exchange(restTemplate,
194 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_TERMINATE,
195 | HttpMethod.PATCH, requestEntity, FlinkApiErrorResponse.class,
196 | jobId, Optional.ofNullable(mode).orElse("cancel"));
197 | } catch (Exception e) {
198 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
199 | try {
200 | return exchange(restTemplate,
201 | url + FlinkApiConstant.Jobs.JOB_TERMINATE,
202 | HttpMethod.PATCH, requestEntity, FlinkApiErrorResponse.class,
203 | jobId, Optional.ofNullable(mode).orElse("cancel"));
204 | } catch (Exception ex) {
205 | // ignore
206 | }
207 | }
208 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.terminate");
209 | }
210 | }
211 |
212 | public TriggerResponse jobRescale(String jobId, int parallelism, ApiClient apiClient) {
213 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
214 | MultiValueMap<String, Object> body = new LinkedMultiValueMap<>(1);
215 | HttpEntity<MultiValueMap<String, Object>> requestEntity = new HttpEntity<>(body);
216 | try {
217 | return exchange(restTemplate,
218 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Jobs.JOB_RESCALING,
219 | HttpMethod.PATCH, requestEntity, TriggerResponse.class,
220 | jobId, parallelism);
221 | } catch (Exception e) {
222 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
223 | try {
224 | return exchange(restTemplate,
225 | url + FlinkApiConstant.Jobs.JOB_RESCALING,
226 | HttpMethod.PATCH, requestEntity, TriggerResponse.class,
227 | jobId, parallelism);
228 | } catch (Exception ex) {
229 | // ignore
230 | }
231 | }
232 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.rescaling");
233 | }
234 | }
235 |
236 | public JobExceptionsInfo jobException(String jobId, String maxExceptions, ApiClient apiClient) {
237 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
238 | String extraUrl;
239 | if (StringUtils.isEmpty(maxExceptions)) {
240 | extraUrl = FlinkApiConstant.Jobs.JOB_EXCEPTIONS;
241 | } else {
242 | extraUrl = String.format("%s?maxExceptions=%s", FlinkApiConstant.Jobs.JOB_EXCEPTIONS, maxExceptions);
243 | }
244 | try {
245 | return getForEntity(restTemplate,
246 | clusterDTO.getJobManagerUrl() + extraUrl,
247 | JobExceptionsInfo.class, jobId);
248 | } catch (Exception e) {
249 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
250 | try {
251 | return getForEntity(restTemplate,
252 | url + extraUrl,
253 | JobExceptionsInfo.class, jobId);
254 | } catch (Exception ex) {
255 | // ignore
256 | }
257 | }
258 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.job.exception");
259 | }
260 | }
261 | }
262 |
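jobCancelOptionSavepoints above wraps Flink's two-step protocol: POST the savepoint trigger, then poll the status endpoint until it reports COMPLETED (fetchSavepoint sleeps 100 ms between polls). A caller-side sketch with illustrative values; the job id is the sample id from the JarRunResponseBody Javadoc:

    SavepointTriggerRequestBody body = SavepointTriggerRequestBody.builder()
            .targetDirectory("hdfs:///flink/savepoints")
            .cancelJob(true)
            .jobId("c1fdcb57789886ac0b2c52a67d0083cc")
            .build();
    TriggerResponseWithSavepoint result = jobService.jobCancelOptionSavepoints(body, apiClient);
    // savepointInfo is null when the trigger itself reported errors
    String location = result.getSavepointInfo().getOperation().getLocation();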

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/overview/ClusterOverviewService.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.overview;
2 |
3 | import java.util.Map;
4 |
5 | import com.github.codingdebugallday.client.api.dto.ClusterDTO;
6 | import com.github.codingdebugallday.client.app.service.ApiClient;
7 | import com.github.codingdebugallday.client.app.service.FlinkCommonService;
8 | import com.github.codingdebugallday.client.domain.entity.overview.DashboardConfiguration;
9 | import com.github.codingdebugallday.client.infra.constants.FlinkApiConstant;
10 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException;
11 | import lombok.extern.slf4j.Slf4j;
12 | import org.springframework.http.HttpStatus;
13 | import org.springframework.web.client.RestTemplate;
14 |
15 | /**
16 | *
17 | * flink Cluster Overview
18 | *
19 | *
20 | * @author isacc 2020/5/9 11:27
21 | * @since 1.0
22 | */
23 | @Slf4j
24 | public class ClusterOverviewService extends FlinkCommonService {
25 |
26 | private final RestTemplate restTemplate;
27 |
28 | public ClusterOverviewService(RestTemplate restTemplate) {
29 | this.restTemplate = restTemplate;
30 | }
31 |
32 | public DashboardConfiguration overviewConfig(ApiClient apiClient) {
33 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
34 | try {
35 | return getForEntity(restTemplate,
36 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Overview.CONFIG,
37 | DashboardConfiguration.class);
38 | } catch (Exception e) {
39 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
40 | try {
41 | return getForEntity(restTemplate,
42 | url + FlinkApiConstant.Overview.CONFIG,
43 | DashboardConfiguration.class);
44 | } catch (Exception ex) {
45 | // ignore
46 | }
47 | }
48 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.overview.config");
49 | }
50 | }
51 |
52 | @SuppressWarnings("unchecked")
53 | public Map<String, Object> overview(ApiClient apiClient) {
54 | ClusterDTO clusterDTO = apiClient.getClusterDTO();
55 | try {
56 | return getForEntity(restTemplate,
57 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.Overview.VIEW,
58 | Map.class);
59 | } catch (Exception e) {
60 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) {
61 | try {
62 | return getForEntity(restTemplate,
63 | url + FlinkApiConstant.Overview.VIEW,
64 | Map.class);
65 | } catch (Exception ex) {
66 | // ignore
67 | }
68 | }
69 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.overview");
70 | }
71 | }
72 |
73 | }
74 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/app/service/tm/FlinkTaskManagerService.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.app.service.tm;
2 |
3 | import com.github.codingdebugallday.client.api.dto.ClusterDTO;
4 | import com.github.codingdebugallday.client.app.service.ApiClient;
5 | import com.github.codingdebugallday.client.app.service.FlinkCommonService;
6 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerDetail;
7 | import com.github.codingdebugallday.client.domain.entity.tm.TaskManagerInfo;
8 | import com.github.codingdebugallday.client.infra.constants.FlinkApiConstant;
9 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException;
10 | import lombok.extern.slf4j.Slf4j;
11 | import org.springframework.http.HttpStatus;
12 | import org.springframework.web.client.RestTemplate;
13 |
14 | /**
15 | *

16 | * Wrapper around Flink's TaskManager REST endpoints (list, detail, log, stdout) with standby failover
17 | *

18 | * 19 | * @author isacc 2020/04/09 17:35 20 | * @since 1.0 21 | */ 22 | @Slf4j 23 | public class FlinkTaskManagerService extends FlinkCommonService { 24 | 25 | private final RestTemplate restTemplate; 26 | 27 | public FlinkTaskManagerService(RestTemplate restTemplate) { 28 | this.restTemplate = restTemplate; 29 | } 30 | 31 | public TaskManagerInfo taskMangerList(ApiClient apiClient) { 32 | ClusterDTO clusterDTO = apiClient.getClusterDTO(); 33 | try { 34 | return getForEntity(restTemplate, clusterDTO.getJobManagerUrl() + FlinkApiConstant.TaskManager.TM_LIST, 35 | TaskManagerInfo.class); 36 | } catch (Exception e) { 37 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) { 38 | try { 39 | return getForEntity(restTemplate, url + FlinkApiConstant.TaskManager.TM_LIST, 40 | TaskManagerInfo.class); 41 | } catch (Exception ex) { 42 | // ignore 43 | } 44 | } 45 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.tm.list"); 46 | } 47 | } 48 | 49 | public TaskManagerDetail taskManagerDetail(String tmId, ApiClient apiClient) { 50 | ClusterDTO clusterDTO = apiClient.getClusterDTO(); 51 | try { 52 | return getForEntity(restTemplate, 53 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.TaskManager.TM_DETAIL, 54 | TaskManagerDetail.class, tmId); 55 | } catch (Exception e) { 56 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) { 57 | try { 58 | return getForEntity(restTemplate, 59 | url + FlinkApiConstant.TaskManager.TM_DETAIL, 60 | TaskManagerDetail.class, tmId); 61 | } catch (Exception ex) { 62 | // ignore 63 | } 64 | } 65 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.tm.detail"); 66 | } 67 | } 68 | 69 | public String taskManagerLog(String tmId, ApiClient apiClient) { 70 | ClusterDTO clusterDTO = apiClient.getClusterDTO(); 71 | try { 72 | return getForEntity(restTemplate, 73 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.TaskManager.TM_LOG, 74 | String.class, tmId); 75 | } catch (Exception e) { 76 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) { 77 | try { 78 | return getForEntity(restTemplate, 79 | url + FlinkApiConstant.TaskManager.TM_LOG, 80 | String.class, tmId); 81 | } catch (Exception ex) { 82 | // ignore 83 | } 84 | } 85 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.tm.log"); 86 | } 87 | } 88 | 89 | public String taskManagerStdout(String tmId, ApiClient apiClient) { 90 | ClusterDTO clusterDTO = apiClient.getClusterDTO(); 91 | try { 92 | return getForEntity(restTemplate, 93 | clusterDTO.getJobManagerUrl() + FlinkApiConstant.TaskManager.TM_STDOUT, 94 | String.class, tmId); 95 | } catch (Exception e) { 96 | for (String url : clusterDTO.getJobManagerStandbyUrlSet()) { 97 | try { 98 | return getForEntity(restTemplate, 99 | url + FlinkApiConstant.TaskManager.TM_STDOUT, 100 | String.class, tmId); 101 | } catch (Exception ex) { 102 | // ignore 103 | } 104 | } 105 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), "error.flink.tm.stdout"); 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/ApiResult.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | *

7 | * Generic REST response wrapper carrying failed/code/message/data
8 | *

9 | *
10 | * @author abigballofmud 2019/11/21 14:36
11 | * @since 1.0
12 | */
13 | public class ApiResult<T> implements Serializable {
14 | private static final long serialVersionUID = -8365417054147857986L;
15 |
16 | private Boolean failed;
17 |
18 | /**
19 | * "type": "warn"
20 | */
21 | private String type;
22 |
23 | /**
24 | * -1: error handled by the global exception handler
25 | * 0: success
26 | * positive values: business error codes
27 | */
28 | private Integer code;
29 |
30 | /**
31 | * message text
32 | */
33 | private String message;
34 |
35 | /**
36 | * payload
37 | */
38 | private transient T data;
39 |
40 | private ApiResult() {
41 | }
42 |
43 | private ApiResult(Boolean failed, Integer code, String message, T data, String type) {
44 | this.failed = failed;
45 | this.code = code;
46 | this.message = message;
47 | this.data = data;
48 | this.type = type;
49 | }
50 |
51 | /**
52 | * success
53 | *
54 | * @param data payload
55 | * @param <T> payload type
56 | * @return ApiResult
57 | */
58 | public static <T> ApiResult<T> ok(T data) {
59 | return new ApiResult<>(false, 0, null, data, null);
60 | }
61 |
62 | /**
63 | * success
64 | *
65 | * @param data payload
66 | * @param <T> payload type
67 | * @return ApiResult
68 | */
69 | public static <T> ApiResult<T> success(T data) {
70 | return new ApiResult<>(false, 200, null, data, null);
71 | }
72 |
73 | /**
74 | * failure
75 | *
76 | * @param code business error code
77 | * @param message message text
78 | * @return ApiResult
79 | */
80 | public static <T> ApiResult<T> fail(Integer code, String message) {
81 | return new ApiResult<>(true, code, message, null, "error");
82 | }
83 |
84 | /**
85 | * failure
86 | *
87 | * @param data payload
88 | * @param code business error code
89 | * @param message message text
90 | * @return ApiResult
91 | */
92 | public static <T> ApiResult<T> fail(T data, Integer code, String message) {
93 | return new ApiResult<>(true, code, message, data, "error");
94 | }
95 |
96 | public Integer getCode() {
97 | return code;
98 | }
99 |
100 | public void setCode(Integer code) {
101 | this.code = code;
102 | }
103 |
104 | public String getMessage() {
105 | return message;
106 | }
107 |
108 | public void setMessage(String message) {
109 | this.message = message;
110 | }
111 |
112 | public T getData() {
113 | return data;
114 | }
115 |
116 | public void setData(T data) {
117 | this.data = data;
118 | }
119 |
120 | public Boolean getFailed() {
121 | return failed;
122 | }
123 |
124 | public void setFailed(Boolean failed) {
125 | this.failed = failed;
126 | }
127 |
128 | public String getType() {
129 | return type;
130 | }
131 |
132 | public void setType(String type) {
133 | this.type = type;
134 | }
135 | }
136 |
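A sketch of how a controller might wrap results with ApiResult above; the DTO value and error message are illustrative:

    ApiResult<ClusterDTO> ok = ApiResult.ok(clusterDTO);              // failed=false, code=0
    ApiResult<Void> err = ApiResult.fail(-1, "error.cluster.get");    // failed=true, type="error"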

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/Cluster.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity;
2 |
3 | import java.io.Serializable;
4 | import java.time.LocalDateTime;
5 | import javax.validation.constraints.NotBlank;
6 |
7 | import com.baomidou.mybatisplus.annotation.IdType;
8 | import com.baomidou.mybatisplus.annotation.TableId;
9 | import com.baomidou.mybatisplus.annotation.TableName;
10 | import com.baomidou.mybatisplus.annotation.Version;
11 | import com.fasterxml.jackson.annotation.JsonInclude;
12 | import lombok.*;
13 |
14 | /**
15 | *
16 | * Flink cluster entity (table flink_cluster)
17 | *
18 | *
19 | * @author isacc 2020/03/25 17:51
20 | * @since 1.0
21 | */
22 | @Data
23 | @Builder
24 | @NoArgsConstructor
25 | @AllArgsConstructor
26 | @EqualsAndHashCode(callSuper = false)
27 | @JsonInclude(JsonInclude.Include.NON_NULL)
28 | @TableName(value = "flink_cluster")
29 | public class Cluster implements Serializable {
30 |
31 | private static final long serialVersionUID = 3479540350836927408L;
32 |
33 | public static final String FIELD_CLUSTER_ID = "cluster_id";
34 | public static final String FIELD_TENANT_ID = "tenant_id";
35 | public static final String FIELD_CLUSTER_CODE = "cluster_code";
36 | public static final String FIELD_JOB_MANAGER_URL = "job_manager_url";
37 |
38 | @TableId(type = IdType.AUTO)
39 | private Long clusterId;
40 |
41 | @NotBlank
42 | private String clusterCode;
43 |
44 | private String clusterDesc;
45 | @NotBlank
46 | private String jobManagerUrl;
47 |
48 | /**
49 | * standby JobManager urls when HA is configured, comma separated (e.g. http://jm1:8081,http://jm2:8081)
50 | */
51 | private String jobManagerStandbyUrl;
52 |
53 | private Integer enabledFlag;
54 |
55 | private Long tenantId;
56 | @Version
57 | private Long objectVersionNumber;
58 | private LocalDateTime creationDate;
59 | private Long createdBy;
60 | private LocalDateTime lastUpdateDate;
61 | private Long lastUpdatedBy;
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/Node.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity;
2 |
3 | import java.io.Serializable;
4 | import java.time.LocalDateTime;
5 | import javax.validation.constraints.NotBlank;
6 |
7 | import com.baomidou.mybatisplus.annotation.IdType;
8 | import com.baomidou.mybatisplus.annotation.TableId;
9 | import com.baomidou.mybatisplus.annotation.TableName;
10 | import com.baomidou.mybatisplus.annotation.Version;
11 | import com.fasterxml.jackson.annotation.JsonInclude;
12 | import lombok.*;
13 |
14 | /**
15 | *

16 | * Flink cluster node entity (table flink_node)
17 | *

18 | * 19 | * @author isacc 2020/03/25 17:51 20 | * @since 1.0 21 | */ 22 | @Data 23 | @Builder 24 | @NoArgsConstructor 25 | @AllArgsConstructor 26 | @EqualsAndHashCode(callSuper = false) 27 | @JsonInclude(JsonInclude.Include.NON_NULL) 28 | @TableName(value = "flink_node") 29 | public class Node implements Serializable { 30 | 31 | private static final long serialVersionUID = 1208427498280311922L; 32 | 33 | public static final String FIELD_NODE_ID = "node_id"; 34 | public static final String FIELD_CLUSTER_CODE = "cluster_code"; 35 | public static final String FIELD_NODE_TYPE = "node_type"; 36 | public static final String FIELD_TENANT_ID = "tenant_id"; 37 | 38 | @TableId(type = IdType.AUTO) 39 | private Long nodeId; 40 | 41 | private String clusterCode; 42 | 43 | private String nodeCode; 44 | private String nodeType; 45 | 46 | private String nodeDesc; 47 | 48 | private String settingInfo; 49 | 50 | private Integer enabledFlag; 51 | 52 | private Long tenantId; 53 | @Version 54 | private Long objectVersionNumber; 55 | private LocalDateTime creationDate; 56 | private Long createdBy; 57 | private LocalDateTime lastUpdateDate; 58 | private Long lastUpdatedBy; 59 | 60 | } 61 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/UploadJar.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity; 2 | 3 | import java.io.Serializable; 4 | import java.time.LocalDateTime; 5 | 6 | import com.baomidou.mybatisplus.annotation.IdType; 7 | import com.baomidou.mybatisplus.annotation.TableId; 8 | import com.baomidou.mybatisplus.annotation.TableName; 9 | import com.baomidou.mybatisplus.annotation.Version; 10 | import com.fasterxml.jackson.annotation.JsonInclude; 11 | import lombok.*; 12 | 13 | /** 14 | *

15 | * Uploaded-jar entity (table flink_upload_jar)
16 | *

17 | *
18 | * @author isacc 2020/4/7 10:12
19 | * @since 1.0
20 | */
21 | @Data
22 | @Builder
23 | @NoArgsConstructor
24 | @AllArgsConstructor
25 | @EqualsAndHashCode(callSuper = false)
26 | @TableName(value = "flink_upload_jar")
27 | @JsonInclude(JsonInclude.Include.NON_NULL)
28 | public class UploadJar implements Serializable {
29 |
30 | private static final long serialVersionUID = 5997028378766205793L;
31 |
32 | public static final String FIELD_UPLOAD_JAR_ID = "UPLOAD_JAR_ID";
33 | public static final String FIELD_TENANT_ID = "tenant_id";
34 | public static final String FIELD_CLUSTER_CODE = "cluster_code";
35 | public static final String FIELD_JAR_CODE = "jar_code";
36 | public static final String FIELD_SYSTEM_PROVIDED = "system_provided";
37 | public static final String FIELD_FILENAME = "filename";
38 |
39 | @TableId(type = IdType.AUTO)
40 | private Long uploadJarId;
41 |
42 | private String jarCode;
43 | private String clusterCode;
44 |
45 | private String jarDesc;
46 | private String version;
47 | private String entryClass;
48 |
49 | /**
50 | * whether this jar is provided by the platform itself (pre-uploaded and used as a built-in platform feature)
51 | */
52 | private Integer systemProvided;
53 |
54 | private String filename;
55 | private String jarName;
56 | private String status;
57 |
58 | private Long tenantId;
59 | @Version
60 | private Long objectVersionNumber;
61 | private LocalDateTime creationDate;
62 | private Long createdBy;
63 | private LocalDateTime lastUpdateDate;
64 | private Long lastUpdatedBy;
65 |
66 | }
67 |
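The jarId field on JarRunRequest below is marked WRITE_ONLY, so it is used to build the request url but never serialized into the JSON payload, and NON_NULL means unset fields are omitted. A quick serialization sketch using this starter's JSON helper; the jar name reuses the sample value from the JarUploadResponseBody Javadoc:

    JarRunRequest request = JarRunRequest.builder()
            .entryClass("org.apache.flink.streaming.examples.wordcount.WordCount")
            .parallelism(1)
            .jarId("184ffd19-0280-4aa8-8a64-fd5bc91a36e0_WordCount.jar")
            .build();
    String payload = JSON.toJson(request);
    // payload: {"entryClass":"org.apache.flink.streaming.examples.wordcount.WordCount","parallelism":1}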

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jars/JarRunRequest.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jars;
2 |
3 | import java.util.List;
4 | import javax.validation.constraints.NotBlank;
5 |
6 | import com.fasterxml.jackson.annotation.JsonInclude;
7 | import com.fasterxml.jackson.annotation.JsonProperty;
8 | import lombok.*;
9 |
10 | /**
11 | *
12 | * Request body for POST /jars/:jarid/run
13 | *
14 | *
15 | * @author isacc 2020/03/27 14:05
16 | * @since 1.0
17 | */
18 | @NoArgsConstructor
19 | @AllArgsConstructor
20 | @EqualsAndHashCode(callSuper = false)
21 | @Data
22 | @Builder
23 | @JsonInclude(JsonInclude.Include.NON_NULL)
24 | public class JarRunRequest {
25 |
26 | /**
27 | * entryClass : org.apache.flink.streaming.examples.wordcount.WordCount
28 | * parallelism : 1
29 | * programArg :
30 | * savepointPath :
31 | * allowNonRestoredState : true
32 | */
33 | @NotBlank
34 | private String entryClass;
35 | private Integer parallelism;
36 | private List<String> programArgsList;
37 | private String savepointPath;
38 | private Boolean allowNonRestoredState;
39 | @JsonProperty(access = JsonProperty.Access.WRITE_ONLY)
40 | private String jarId;
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jars/JarRunResponseBody.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jars;
2 |
3 | import java.util.List;
4 |
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | /**
9 | *

10 | * Response body for POST /jars/:jarid/run
11 | *

12 | *
13 | * @author isacc 2020/03/27 14:04
14 | * @since 1.0
15 | */
16 | @NoArgsConstructor
17 | @Data
18 | public class JarRunResponseBody {
19 |
20 |
21 | /**
22 | * jobid : c1fdcb57789886ac0b2c52a67d0083cc
23 | */
24 |
25 | private String jobid;
26 | private List<String> errors;
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jars/JarUploadResponseBody.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jars;
2 |
3 | import java.util.List;
4 |
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | /**
9 | *

10 | * Response body for POST /jars/upload
11 | *

12 | *
13 | * @author isacc 2020/03/26 22:47
14 | * @since 1.0
15 | */
16 | @NoArgsConstructor
17 | @Data
18 | public class JarUploadResponseBody {
19 |
20 |
21 | /**
22 | * filename : /data/flink/upload_jars/flink-web-upload/184ffd19-0280-4aa8-8a64-fd5bc91a36e0_WordCount.jar
23 | * status : success
24 | */
25 | private String filename;
26 | private String status;
27 | private List<String> errors;
28 | }
29 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/FlinkApiErrorResponse.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import java.util.List;
4 |
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | /**
9 | *

10 | * Use this class for endpoints that generally return no payload (errors only)
11 | *

12 | *
13 | * @author isacc 2020/04/08 17:43
14 | * @since 1.0
15 | */
16 | @NoArgsConstructor
17 | @Data
18 | public class FlinkApiErrorResponse {
19 |
20 | private List<String> errors;
21 | }
22 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/JobExceptionsInfo.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import java.util.List;
4 |
5 | import com.fasterxml.jackson.annotation.JsonProperty;
6 | import lombok.Data;
7 | import lombok.NoArgsConstructor;
8 |
9 | /**
10 | *

11 | * Response body for GET /jobs/:jobid/exceptions
12 | *

13 | *
14 | * @author isacc 2020/04/09 16:39
15 | * @since 1.0
16 | */
17 | @NoArgsConstructor
18 | @Data
19 | public class JobExceptionsInfo {
20 |
21 | public static final String FIELD_NAME_ROOT_EXCEPTION = "root-exception";
22 | public static final String FIELD_NAME_TIMESTAMP = "timestamp";
23 | public static final String FIELD_NAME_ALL_EXCEPTIONS = "all-exceptions";
24 | public static final String FIELD_NAME_TRUNCATED = "truncated";
25 |
26 | @JsonProperty(FIELD_NAME_ROOT_EXCEPTION)
27 | private String rootException;
28 |
29 | @JsonProperty(FIELD_NAME_TIMESTAMP)
30 | private Long rootTimestamp;
31 |
32 | @JsonProperty(FIELD_NAME_ALL_EXCEPTIONS)
33 | private List<ExecutionExceptionInfo> allExceptions;
34 |
35 | @JsonProperty(FIELD_NAME_TRUNCATED)
36 | private boolean truncated;
37 |
38 | @NoArgsConstructor
39 | @Data
40 | public static class ExecutionExceptionInfo {
41 | public static final String FIELD_NAME_EXCEPTION = "exception";
42 | public static final String FIELD_NAME_TASK = "task";
43 | public static final String FIELD_NAME_LOCATION = "location";
44 | public static final String FIELD_NAME_TIMESTAMP = "timestamp";
45 |
46 | @JsonProperty(FIELD_NAME_EXCEPTION)
47 | private String exception;
48 |
49 | @JsonProperty(FIELD_NAME_TASK)
50 | private String task;
51 |
52 | @JsonProperty(FIELD_NAME_LOCATION)
53 | private String location;
54 |
55 | @JsonProperty(FIELD_NAME_TIMESTAMP)
56 | private long timestamp;
57 | }
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/JobIdsWithStatusOverview.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import java.util.List;
4 |
5 | import lombok.Data;
6 | import lombok.NoArgsConstructor;
7 |
8 | /**
9 | *

10 | * Response body for GET /jobs (job ids with their current status)
11 | *

12 | *
13 | * @author isacc 2020/04/08 11:49
14 | * @since 1.0
15 | */
16 | @NoArgsConstructor
17 | @Data
18 | public class JobIdsWithStatusOverview {
19 |
20 |
21 | private List<JobIdWithStatus> jobs;
22 | private List<String> errors;
23 |
24 | @NoArgsConstructor
25 | @Data
26 | public static class JobIdWithStatus {
27 | /**
28 | * id : 017575ebda37f5f38f24e7bd024b7f7a
29 | * status : RUNNING
30 | */
31 |
32 | private String id;
33 | private String status;
34 | }
35 | }
36 |
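A small sketch of consuming JobIdsWithStatusOverview above, for example collecting the ids of all RUNNING jobs ("RUNNING" is the sample status from the Javadoc; stream imports assumed):

    List<String> runningIds = overview.getJobs().stream()
            .filter(j -> "RUNNING".equals(j.getStatus()))
            .map(JobIdsWithStatusOverview.JobIdWithStatus::getId)
            .collect(Collectors.toList());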

--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/MultipleJobsDetails.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import java.util.List;
4 |
5 | import com.fasterxml.jackson.annotation.JsonAlias;
6 | import lombok.Data;
7 | import lombok.NoArgsConstructor;
8 |
9 | /**
10 | *
11 | * Response body for GET /jobs/overview
12 | *
13 | *
14 | * @author isacc 2020/04/08 15:17
15 | * @since 1.0
16 | */
17 | @NoArgsConstructor
18 | @Data
19 | public class MultipleJobsDetails {
20 |
21 |
22 | private List<JobDetail> jobs;
23 | private List<String> errors;
24 |
25 | @NoArgsConstructor
26 | @Data
27 | public static class JobDetail {
28 | /**
29 | * jid : 017575ebda37f5f38f24e7bd024b7f7a
30 | * name : SQL Job
31 | * state : RUNNING
32 | * start-time : 1586315540096
33 | * end-time : -1
34 | * duration : 14590420
35 | * last-modification : 1586315543790
36 | * tasks : {"total":2,"created":0,"scheduled":0,"deploying":0,"running":2,"finished":0,"canceling":0,"canceled":0,"failed":0,"reconciling":0}
37 | */
38 |
39 | private String jid;
40 | private String name;
41 | private String state;
42 | @JsonAlias("start-time")
43 | private long startTime;
44 | @JsonAlias("end-time")
45 | private long endTime;
46 | private long duration;
47 | @JsonAlias("last-modification")
48 | private long lastModification;
49 | private Tasks tasks;
50 |
51 | @NoArgsConstructor
52 | @Data
53 | public static class Tasks {
54 | /**
55 | * total : 2
56 | * created : 0
57 | * scheduled : 0
58 | * deploying : 0
59 | * running : 2
60 | * finished : 0
61 | * canceling : 0
62 | * canceled : 0
63 | * failed : 0
64 | * reconciling : 0
65 | */
66 |
67 | private int total;
68 | private int created;
69 | private int scheduled;
70 | private int deploying;
71 | private int running;
72 | private int finished;
73 | private int canceling;
74 | private int canceled;
75 | private int failed;
76 | private int reconciling;
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/SavepointInfo.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import lombok.Data;
5 | import lombok.NoArgsConstructor;
6 |
7 | /**
8 | *

9 | * SavepointInfo 10 | *

11 | * 12 | * @author isacc 2020/5/15 16:42 13 | * @since 1.0 14 | */ 15 | @NoArgsConstructor 16 | @Data 17 | @JsonInclude(JsonInclude.Include.NON_NULL) 18 | public class SavepointInfo { 19 | 20 | 21 | /** 22 | * status : {"id":"COMPLETED"} 23 | * operation : {"location":"file:/E:/myGitCode/flink-api-spring-boot-starter/savepoint1/savepoint-c3d2ad-aff370603065"} 24 | */ 25 | 26 | private StatusBean status; 27 | private OperationBean operation; 28 | 29 | @NoArgsConstructor 30 | @Data 31 | public static class StatusBean { 32 | /** 33 | * id : COMPLETED 34 | */ 35 | 36 | private String id; 37 | } 38 | 39 | @NoArgsConstructor 40 | @Data 41 | public static class OperationBean { 42 | /** 43 | * location : file:/E:/myGitCode/flink-api-spring-boot-starter/savepoint1/savepoint-c3d2ad-aff370603065 44 | */ 45 | 46 | private String location; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/SavepointTriggerRequestBody.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity.jobs; 2 | 3 | import com.fasterxml.jackson.annotation.JsonAlias; 4 | import com.fasterxml.jackson.annotation.JsonInclude; 5 | import com.fasterxml.jackson.annotation.JsonProperty; 6 | import lombok.*; 7 | 8 | /** 9 | *

10 | * Request body for triggering a savepoint, optionally cancelling the job
11 | *

12 | * 13 | * @author isacc 2020/04/08 17:55 14 | * @since 1.0 15 | */ 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @EqualsAndHashCode(callSuper = false) 19 | @Data 20 | @Builder 21 | @JsonInclude(JsonInclude.Include.NON_NULL) 22 | public class SavepointTriggerRequestBody { 23 | 24 | @JsonAlias({"targetDirectory"}) 25 | @JsonProperty("target-directory") 26 | private String targetDirectory; 27 | @JsonAlias({"cancelJob"}) 28 | @JsonProperty("cancel-job") 29 | @Builder.Default 30 | private Boolean cancelJob = false; 31 | @JsonProperty(access = JsonProperty.Access.WRITE_ONLY) 32 | private String jobId; 33 | } 34 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/TriggerResponse.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity.jobs; 2 | 3 | import java.util.List; 4 | 5 | import com.fasterxml.jackson.annotation.JsonAlias; 6 | import com.fasterxml.jackson.annotation.JsonInclude; 7 | import lombok.Data; 8 | import lombok.NoArgsConstructor; 9 | 10 | /** 11 | *

12 | * Response body for asynchronous trigger operations; carries the request id used for status polling
13 | *

14 | *
15 | * @author isacc 2020/04/08 18:20
16 | * @since 1.0
17 | */
18 | @NoArgsConstructor
19 | @Data
20 | @JsonInclude(JsonInclude.Include.NON_NULL)
21 | public class TriggerResponse {
22 |
23 | private List<String> errors;
24 |
25 | /**
26 | * request-id : 09378564653658d04a3a21766d6054ff
27 | */
28 | @JsonAlias("request-id")
29 | private String requestId;
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/java/com/github/codingdebugallday/client/domain/entity/jobs/TriggerResponseWithSavepoint.java:
--------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.domain.entity.jobs;
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude;
4 | import lombok.*;
5 |
6 | /**
7 | *
8 | * TriggerResponseWithSavepoint 9 | *
10 | * 11 | * @author isacc 2020/5/15 16:41 12 | * @since 1.0 13 | */ 14 | @Data 15 | @Builder 16 | @NoArgsConstructor 17 | @AllArgsConstructor 18 | @EqualsAndHashCode(callSuper = false) 19 | @JsonInclude(JsonInclude.Include.NON_NULL) 20 | public class TriggerResponseWithSavepoint { 21 | 22 | private TriggerResponse triggerResponse; 23 | 24 | private SavepointInfo savepointInfo; 25 | 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/overview/DashboardConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity.overview; 2 | 3 | import com.fasterxml.jackson.annotation.JsonAlias; 4 | import lombok.Data; 5 | import lombok.NoArgsConstructor; 6 | 7 | /** 8 | *
9 | * ClusterConfigInfo 10 | *
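 * A minimal deserialization sketch (the JSON is abbreviated from the sample in
 * the class body; readValue may throw a checked exception, elided here):
 * <pre>{@code
 * DashboardConfiguration conf = new ObjectMapper().readValue(
 *         "{\"refresh-interval\":3000,\"flink-version\":\"1.10.0\"}",
 *         DashboardConfiguration.class);
 * // conf.getRefreshInterval() == 3000 via @JsonAlias("refresh-interval")
 * }</pre>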
11 | * 12 | * @author isacc 2020/5/9 11:37 13 | * @since 1.0 14 | */ 15 | @NoArgsConstructor 16 | @Data 17 | public class DashboardConfiguration { 18 | 19 | 20 | /** 21 | * refresh-interval : 3000 22 | * timezone-name : China Time 23 | * timezone-offset : 28800000 24 | * flink-version : 1.10.0 25 | * flink-revision : aa4eb8f @ 07.02.2020 @ 19:18:19 CET 26 | * features : {"web-submit":true} 27 | */ 28 | @JsonAlias("refresh-interval") 29 | private int refreshInterval; 30 | @JsonAlias("timezone-name") 31 | private String timezoneName; 32 | @JsonAlias("timezone-offset") 33 | private int timezoneOffset; 34 | @JsonAlias("flink-version") 35 | private String flinkVersion; 36 | @JsonAlias("flink-revision") 37 | private String flinkRevision; 38 | private FeaturesBean features; 39 | 40 | @NoArgsConstructor 41 | @Data 42 | public static class FeaturesBean { 43 | /** 44 | * web-submit : true 45 | */ 46 | @JsonAlias("web-submit") 47 | private boolean webSubmit; 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/tm/TaskManagerDetail.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity.tm; 2 | 3 | import java.util.List; 4 | 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/04/10 14:56 14 | * @since 1.0 15 | */ 16 | @NoArgsConstructor 17 | @Data 18 | public class TaskManagerDetail { 19 | 20 | 21 | /** 22 | * id : 136083a20c8d0dae2182320fc972179a 23 | * path : akka.tcp://flink@192.168.12.246:40418/user/taskmanager_0 24 | * dataPort : 34598 25 | * timeSinceLastHeartbeat : 1586501033738 26 | * slotsNumber : 3 27 | * freeSlots : 2 28 | * hardware : {"cpuCores":4,"physicalMemory":16825131008,"freeMemory":1749024768,"managedMemory":1505922928} 29 | * metrics : {"heapUsed":542676184,"heapCommitted":1749024768,"heapMax":1749024768,"nonHeapUsed":108611256,"nonHeapCommitted":112218112,"nonHeapMax":444596224,"directCount":11512,"directUsed":379639749,"directMax":379639747,"mappedCount":0,"mappedUsed":0,"mappedMax":0,"memorySegmentsAvailable":11479,"memorySegmentsTotal":11489,"garbageCollectors":[{"name":"G1_Young_Generation","count":47,"time":8794},{"name":"G1_Old_Generation","count":0,"time":0}]} 30 | */ 31 | 32 | private String id; 33 | private String path; 34 | private int dataPort; 35 | private long timeSinceLastHeartbeat; 36 | private int slotsNumber; 37 | private int freeSlots; 38 | private HardwareBean hardware; 39 | private MetricsBean metrics; 40 | 41 | @NoArgsConstructor 42 | @Data 43 | public static class HardwareBean { 44 | /** 45 | * cpuCores : 4 46 | * physicalMemory : 16825131008 47 | * freeMemory : 1749024768 48 | * managedMemory : 1505922928 49 | */ 50 | 51 | private int cpuCores; 52 | private long physicalMemory; 53 | private long freeMemory; 54 | private long managedMemory; 55 | } 56 | 57 | @NoArgsConstructor 58 | @Data 59 | public static class MetricsBean { 60 | /** 61 | * heapUsed : 542676184 62 | * heapCommitted : 1749024768 63 | * heapMax : 1749024768 64 | * nonHeapUsed : 108611256 65 | * nonHeapCommitted : 112218112 66 | * nonHeapMax : 444596224 67 | * directCount : 11512 68 | * directUsed : 379639749 69 | * directMax : 379639747 70 | * mappedCount : 0 71 | * mappedUsed : 0 72 | * mappedMax : 0 73 | * memorySegmentsAvailable : 11479 74 | * memorySegmentsTotal : 11489 75 | * garbageCollectors : [{"name":"G1_Young_Generation","count":47,"time":8794},{"name":"G1_Old_Generation","count":0,"time":0}] 76 | */ 77 | 78 | private long heapUsed; 79 | private long heapCommitted; 80 | private long heapMax; 81 | private long nonHeapUsed; 82 | private long nonHeapCommitted; 83 | private long nonHeapMax; 84 | private int directCount; 85 | private long directUsed; 86 | private long directMax; 87 | private int mappedCount; 88 | private long mappedUsed; 89 | private long mappedMax; 90 | private int memorySegmentsAvailable; 91 | private int memorySegmentsTotal; 92 | private List<GarbageCollectorsBean> garbageCollectors; 93 | 94 | @NoArgsConstructor 95 | @Data 96 | public static class GarbageCollectorsBean { 97 | /** 98 | * name : G1_Young_Generation 99 | * count : 47 100 | * time : 8794 101 | */ 102 | 103 | private String name; 104 | private int count; 105 | private int time; 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/entity/tm/TaskManagerInfo.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.entity.tm; 2 | 3 | import java.util.List; 4 | 5 | import lombok.Data; 6 | import lombok.NoArgsConstructor; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/04/09 17:41 14 | * @since 1.0 15 | */ 16 | @NoArgsConstructor 17 | @Data 18 | public class TaskManagerInfo { 19 | 20 | 21 | private List<TaskmanagersBean> taskmanagers; 22 | 23 | @NoArgsConstructor 24 | @Data 25 | public static class TaskmanagersBean { 26 | /** 27 | * id : 136083a20c8d0dae2182320fc972179a 28 | * path : akka.tcp://flink@192.168.12.246:40418/user/taskmanager_0 29 | * dataPort : 34598 30 | * timeSinceLastHeartbeat : 1586424961277 31 | * slotsNumber : 3 32 | * freeSlots : 2 33 | * hardware : {"cpuCores":4,"physicalMemory":16825131008,"freeMemory":1749024768,"managedMemory":1505922928} 34 | */ 35 | 36 | private String id; 37 | private String path; 38 | private int dataPort; 39 | private long timeSinceLastHeartbeat; 40 | private int slotsNumber; 41 | private int freeSlots; 42 | private HardwareBean hardware; 43 | 44 | @NoArgsConstructor 45 | @Data 46 | public static class HardwareBean { 47 | /** 48 | * cpuCores : 4 49 | * physicalMemory : 16825131008 50 | * freeMemory : 1749024768 51 | * managedMemory : 1505922928 52 | */ 53 | 54 | private int cpuCores; 55 | private long physicalMemory; 56 | private long freeMemory; 57 | private long managedMemory; 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/repository/ClusterRepository.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.repository; 2 | 3 | import com.baomidou.mybatisplus.core.metadata.IPage; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 6 | import com.github.codingdebugallday.client.domain.entity.Cluster; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/03/28 2:20 14 | * @since 1.0 15 | */ 16 | public interface ClusterRepository { 17 | 18 | /** 19 | * Paged, conditional query of flink clusters 20 | * 21 | * @param clusterDTO ClusterDTO 22 | * @param clusterPage Page 23 | * @return com.baomidou.mybatisplus.core.metadata.IPage 24 | */ 25 | IPage<ClusterDTO> pageAndSortDTO(ClusterDTO clusterDTO, Page<Cluster> clusterPage); 26 | 27 | /** 28 | * Conditional delete of flink clusters 29 | * 30 | * @param clusterDTO ClusterDTO 31 | */ 32 | void delete(ClusterDTO clusterDTO); 33 | 34 | /** 35 | * Fetch flink cluster detail 36 | * 37 | * @param tenantId tenant id 38 | * @param clusterId flink clusterId 39 | * @return com.github.codingdebugallday.client.api.dto.ClusterDTO 40 | */ 41 | ClusterDTO detail(Long tenantId, Long clusterId); 42 | 43 | /** 44 | * Fetch the single record matching clusterCode 45 | * 46 | * @param clusterCode clusterCode 47 | * @param tenantId tenant id 48 | * @return com.github.codingdebugallday.client.api.dto.ClusterDTO 49 | */ 50 | ClusterDTO selectOne(String clusterCode, Long tenantId); 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/repository/NodeRepository.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.repository; 2 | 3 | import java.util.List; 4 | 5 | import com.github.codingdebugallday.client.api.dto.NodeDTO; 6 | 7 | /** 8 | *
9 | * description 10 | *
11 | * 12 | * @author isacc 2020/03/28 2:20 13 | * @since 1.0 14 | */ 15 | public interface NodeRepository { 16 | 17 | /** 18 | * Fetch the flink nodes of a cluster by clusterCode 19 | * 20 | * @param clusterCode clusterCode 21 | * @param tenantId tenant id 22 | * @return java.util.List<com.github.codingdebugallday.client.api.dto.NodeDTO> 23 | */ 24 | List<NodeDTO> selectByClusterCode(String clusterCode, Long tenantId); 25 | } 26 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/domain/repository/UploadJarRepository.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.domain.repository; 2 | 3 | import com.baomidou.mybatisplus.core.metadata.IPage; 4 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 5 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO; 6 | import com.github.codingdebugallday.client.domain.entity.UploadJar; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/04/07 10:45 14 | * @since 1.0 15 | */ 16 | public interface UploadJarRepository { 17 | 18 | /** 19 | * Paged, conditional query of uploaded flink jars 20 | * 21 | * @param uploadJarDTO UploadJarDTO 22 | * @param uploadJarPage Page 23 | * @return com.baomidou.mybatisplus.core.metadata.IPage 24 | */ 25 | IPage<UploadJarDTO> pageAndSortDTO(UploadJarDTO uploadJarDTO, Page<UploadJar> uploadJarPage); 26 | 27 | /** 28 | * Fetch detail of an uploaded jar 29 | * 30 | * @param tenantId tenant id 31 | * @param uploadJarId id 32 | * @return com.github.codingdebugallday.client.api.dto.UploadJarDTO 33 | */ 34 | UploadJarDTO detail(Long tenantId, Long uploadJarId); 35 | 36 | /** 37 | * Find the latest version of the jar with the given code under the given cluster 38 | * 39 | * @param jarCode jarCode 40 | * @param clusterCode clusterCode 41 | * @param tenantId tenant id 42 | * @return com.github.codingdebugallday.client.api.dto.UploadJarDTO 43 | */ 44 | UploadJarDTO findMaxVersionJarByCode(String jarCode, String clusterCode, Long tenantId); 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/autoconfigure/FlinkApiAutoConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.autoconfigure; 2 | 3 | import com.github.codingdebugallday.client.domain.repository.ClusterRepository; 4 | import com.github.codingdebugallday.client.domain.repository.NodeRepository; 5 | import com.github.codingdebugallday.client.infra.context.FlinkApiContext; 6 | import com.github.codingdebugallday.client.infra.exceptions.RestTemplateErrorHandler; 7 | import org.mybatis.spring.annotation.MapperScan; 8 | import org.springframework.beans.factory.annotation.Qualifier; 9 | import org.springframework.context.annotation.Bean; 10 | import org.springframework.context.annotation.ComponentScan; 11 | import org.springframework.context.annotation.Configuration; 12 | import org.springframework.http.client.ClientHttpRequestFactory; 13 | import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; 14 | import org.springframework.web.client.RestTemplate; 15 | 16 | /** 17 | *
18 | * description 19 | *
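 * This configuration is activated through META-INF/spring.factories (under
 * src/main/resources); a typical entry would look like the following sketch,
 * which is illustrative rather than copied from the actual file:
 * <pre>{@code
 * org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
 *   com.github.codingdebugallday.client.infra.autoconfigure.FlinkApiAutoConfiguration
 * }</pre>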
20 | * 21 | * @author isacc 2020/03/26 21:28 22 | * @since 1.0 23 | */ 24 | @Configuration 25 | @ComponentScan("com.github.codingdebugallday.client") 26 | @MapperScan({ 27 | "com.github.codingdebugallday.client.**.mapper" 28 | }) 29 | public class FlinkApiAutoConfiguration { 30 | 31 | @Bean 32 | public FlinkApiContext flinkApiContext(@Qualifier("flinkRestTemplate") RestTemplate restTemplate, 33 | ClusterRepository clusterRepository, 34 | NodeRepository nodeRepository) { 35 | return new FlinkApiContext(restTemplate, clusterRepository, nodeRepository); 36 | } 37 | 38 | @Bean("flinkRestTemplate") 39 | public RestTemplate restTemplate(ClientHttpRequestFactory clientHttpRequestFactory) { 40 | RestTemplate restTemplate = new RestTemplate(clientHttpRequestFactory); 41 | restTemplate.setErrorHandler(new RestTemplateErrorHandler()); 42 | return restTemplate; 43 | } 44 | 45 | @Bean 46 | public ClientHttpRequestFactory clientHttpRequestFactory() { 47 | HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory(); 48 | factory.setReadTimeout(5000); 49 | factory.setConnectTimeout(15000); 50 | return factory; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/autoconfigure/GlobalExceptionHandlerAutoConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.autoconfigure; 2 | 3 | import com.github.codingdebugallday.client.infra.exceptions.GlobalExceptionHandler; 4 | import org.springframework.context.annotation.Bean; 5 | import org.springframework.context.annotation.Configuration; 6 | 7 | /** 8 | *
9 | * Global exception handling configuration 10 | *
11 | * 12 | * @author abigballofmud 2019/11/21 14:51 13 | * @since 1.0 14 | */ 15 | @Configuration 16 | public class GlobalExceptionHandlerAutoConfiguration { 17 | 18 | @Bean("flinkGlobalExceptionHandler") 19 | public GlobalExceptionHandler globalExceptionHandler() { 20 | return new GlobalExceptionHandler(); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/autoconfigure/MybatisPlusConfig.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.autoconfigure; 2 | 3 | import com.baomidou.mybatisplus.extension.plugins.OptimisticLockerInterceptor; 4 | import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; 5 | import com.baomidou.mybatisplus.extension.plugins.pagination.optimize.JsqlParserCountOptimize; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | import org.springframework.transaction.annotation.EnableTransactionManagement; 9 | 10 | /** 11 | *
12 | * description 13 | *
14 | * 15 | * @author isacc 2020/03/28 1:59 16 | * @since 1.0 17 | */ 18 | @EnableTransactionManagement 19 | @Configuration 20 | public class MybatisPlusConfig { 21 | 22 | @Bean 23 | public PaginationInterceptor paginationInterceptor() { 24 | PaginationInterceptor paginationInterceptor = new PaginationInterceptor(); 25 | // when the requested page exceeds the last page: true falls back to the first page, false serves the request as-is (default false) 26 | // paginationInterceptor.setOverflow(false); 27 | // maximum rows per page, default 500, -1 means unlimited 28 | // paginationInterceptor.setLimit(500); 29 | // enable the count-sql join optimization (only applies to some left joins) 30 | paginationInterceptor.setCountSqlParser(new JsqlParserCountOptimize(true)); 31 | return paginationInterceptor; 32 | } 33 | 34 | @Bean 35 | public OptimisticLockerInterceptor optimisticLockerInterceptor() { 36 | return new OptimisticLockerInterceptor(); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/constants/FlinkApiConstant.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.constants; 2 | 3 | /** 4 | *
5 | * description 6 | *
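 * The {1}, {2} tokens are URI template variables; callers are expected to
 * expand them, e.g. positionally via RestTemplate (a sketch; the base url and
 * jobId are illustrative):
 * <pre>{@code
 * String url = "http://localhost:8081" + FlinkApiConstant.Jobs.JOB_DETAIL; // .../v1/jobs/{1}
 * String detail = restTemplate.getForObject(url, String.class, jobId);
 * }</pre>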
7 | * 8 | * @author isacc 2020/03/27 1:11 9 | * @since 1.0 10 | */ 11 | public final class FlinkApiConstant { 12 | 13 | private FlinkApiConstant() { 14 | } 15 | 16 | public static final class Jars { 17 | private Jars() { 18 | } 19 | 20 | /** 21 | * flink jar upload url 22 | */ 23 | public static final String UPLOAD_JAR = "/v1/jars/upload"; 24 | /** 25 | * flink jar delete url 26 | */ 27 | public static final String DELETE_JAR = "/v1/jars/{1}"; 28 | /** 29 | * running a jar previously uploaded via '/jars/upload' 30 | */ 31 | public static final String RUN_JAR = "/v1/jars/{1}/run"; 32 | } 33 | 34 | public static final class Overview { 35 | private Overview() { 36 | } 37 | /** 38 | * flink overview url 39 | */ 40 | public static final String CONFIG = "/v1/config"; 41 | public static final String VIEW = "/v1/overview"; 42 | } 43 | 44 | public static final class Jobs { 45 | private Jobs() { 46 | } 47 | /** 48 | * flink job url 49 | */ 50 | public static final String JOB_LIST = "/v1/jobs"; 51 | public static final String JOB_OVERVIEW = "/v1/jobs/overview"; 52 | public static final String JOB_DETAIL = "/v1/jobs/{1}"; 53 | public static final String JOB_YARN_CANCEL = "/v1/jobs/{1}/yarn-cancel"; 54 | public static final String JOB_CANCEL_WITH_SAVEPOINTS = "/v1/jobs/{1}/savepoints"; 55 | public static final String JOB_SAVEPOINT_STATUS = "/v1/jobs/{1}/savepoints/{2}"; 56 | public static final String JOB_TERMINATE = "/v1/jobs/{1}?mode={2}"; 57 | public static final String JOB_RESCALING = "/v1/jobs/{1}/rescaling?parallelism={2}"; 58 | public static final String JOB_EXCEPTIONS = "/v1/jobs/{1}/exceptions"; 59 | } 60 | 61 | public static final class TaskManager { 62 | private TaskManager() { 63 | } 64 | /** 65 | * flink taskmanager url 66 | */ 67 | public static final String TM_LIST = "/v1/taskmanagers"; 68 | public static final String TM_DETAIL = "/v1/taskmanagers/{1}"; 69 | public static final String TM_LOG = "/v1/taskmanagers/{1}/log"; 70 | public static final String TM_STDOUT = "/v1/taskmanagers/{1}/stdout"; 71 | 72 | } 73 | 74 | public static final class JobManager { 75 | private JobManager() { 76 | } 77 | /** 78 | * flink jobmanager url 79 | */ 80 | public static final String JM_CONFIG = "/v1/jobmanager/config"; 81 | public static final String JM_LOG = "/jobmanager/log"; 82 | public static final String JM_STDOUT = "/jobmanager/stdout"; 83 | 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/context/FlinkApiContext.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.context; 2 | 3 | import java.util.Map; 4 | import java.util.Objects; 5 | import java.util.Optional; 6 | import java.util.concurrent.ConcurrentHashMap; 7 | 8 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 9 | import com.github.codingdebugallday.client.app.service.FlinkApi; 10 | import com.github.codingdebugallday.client.domain.repository.ClusterRepository; 11 | import com.github.codingdebugallday.client.domain.repository.NodeRepository; 12 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException; 13 | import org.springframework.http.HttpStatus; 14 | import org.springframework.web.client.RestTemplate; 15 | 16 | /** 17 | *
18 | * description 19 | *
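 * A minimal usage sketch (the cluster code and tenant id are illustrative):
 * <pre>{@code
 * // returns the cached FlinkApi for the cluster, creating it on first access
 * FlinkApi api = flinkApiContext.get("demo-cluster", 0L);
 * }</pre>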
20 | * 21 | * @author isacc 2020/03/28 0:37 22 | * @since 1.0 23 | */ 24 | public class FlinkApiContext { 25 | 26 | private final Map<String, FlinkApi> flinkApiMap = new ConcurrentHashMap<>(16); 27 | 28 | private final RestTemplate flinkRestTemplate; 29 | 30 | private final ClusterRepository clusterRepository; 31 | private final NodeRepository nodeRepository; 32 | 33 | public FlinkApiContext(RestTemplate flinkRestTemplate, 34 | ClusterRepository clusterRepository, 35 | NodeRepository nodeRepository) { 36 | this.flinkRestTemplate = flinkRestTemplate; 37 | this.clusterRepository = clusterRepository; 38 | this.nodeRepository = nodeRepository; 39 | } 40 | 41 | public FlinkApi get(String clusterCode, Long tenantId) { 42 | if (Objects.isNull(flinkApiMap.get(clusterCode))) { 43 | FlinkApi flinkApi = new FlinkApi(flinkRestTemplate); 44 | ClusterDTO clusterDTO = 45 | Optional.ofNullable(clusterRepository.selectOne(clusterCode, tenantId)) 46 | .orElseThrow(() -> new FlinkApiCommonException(HttpStatus.BAD_REQUEST.value(), 47 | "no flink cluster of code[" + clusterCode + "], please check your clusterCode!")); 48 | clusterDTO.setNodeDTOList(nodeRepository.selectByClusterCode(clusterCode, tenantId)); 49 | flinkApi.getApiClient().setClusterDTO(clusterDTO); 50 | flinkApiMap.put(clusterCode, flinkApi); 51 | return flinkApi; 52 | } 53 | return flinkApiMap.get(clusterCode); 54 | } 55 | 56 | /** 57 | * After a flink cluster is deleted, its cached api must be removed here 58 | * 59 | * @param clusterCode clusterCode 60 | */ 61 | public void remove(String clusterCode) { 62 | flinkApiMap.remove(clusterCode); 63 | } 64 | 65 | 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/converter/ClusterConvertMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.converter; 2 | 3 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 4 | import com.github.codingdebugallday.client.domain.entity.Cluster; 5 | import org.mapstruct.Mapper; 6 | import org.mapstruct.Mapping; 7 | import org.mapstruct.factory.Mappers; 8 | 9 | /** 10 | *
11 | * description 12 | *
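 * A minimal usage sketch:
 * <pre>{@code
 * ClusterDTO dto = ClusterConvertMapper.INSTANCE.entityToDTO(cluster);
 * // host/port/jobManagerStandbyUrlSet are derived via ClusterConvertUtil
 * }</pre>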
13 | * 14 | * @author isacc 2020/03/31 12:12 15 | * @since 1.0 16 | */ 17 | @Mapper 18 | public interface ClusterConvertMapper { 19 | 20 | ClusterConvertMapper INSTANCE = Mappers.getMapper(ClusterConvertMapper.class); 21 | 22 | /** 23 | * entityToDTO 24 | * 25 | * @param cluster Cluster 26 | * @return com.github.codingdebugallday.client.api.dto.ClusterDTO 27 | */ 28 | @Mapping(target = "jobManagerStandbyUrlSet", 29 | expression = "java(com.github.codingdebugallday.client.infra.converter.ClusterConvertUtil.standbyUrlToSet(cluster.getJobManagerStandbyUrl()))") 30 | @Mapping(target = "host", 31 | expression = "java(com.github.codingdebugallday.client.infra.converter.ClusterConvertUtil.getJmHost(cluster.getJobManagerUrl()))") 32 | @Mapping(target = "port", 33 | expression = "java(com.github.codingdebugallday.client.infra.converter.ClusterConvertUtil.getJmPort(cluster.getJobManagerUrl()))") 34 | ClusterDTO entityToDTO(Cluster cluster); 35 | 36 | /** 37 | * dtoToEntity 38 | * 39 | * @param clusterDTO ClusterDTO 40 | * @return com.github.codingdebugallday.client.domain.entity.Cluster 41 | */ 42 | Cluster dtoToEntity(ClusterDTO clusterDTO); 43 | 44 | } 45 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/converter/ClusterConvertUtil.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.converter; 2 | 3 | import java.util.Set; 4 | import java.util.regex.Matcher; 5 | import java.util.regex.Pattern; 6 | import java.util.stream.Collectors; 7 | import java.util.stream.Stream; 8 | 9 | /** 10 | *
11 | * description 12 | *
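 * A minimal usage sketch (the url is illustrative):
 * <pre>{@code
 * String host = ClusterConvertUtil.getJmHost("http://127.0.0.1:8081"); // "127.0.0.1"
 * Integer port = ClusterConvertUtil.getJmPort("http://127.0.0.1:8081"); // 8081
 * }</pre>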
13 | * 14 | * @author isacc 2020/04/01 10:27 15 | * @since 1.0 16 | */ 17 | public class ClusterConvertUtil { 18 | 19 | private static final Pattern JM_URL_REGEX = Pattern.compile("http://(.*?):(\d+)"); 20 | 21 | private ClusterConvertUtil() { 22 | throw new IllegalStateException("util class"); 23 | } 24 | 25 | public static Set<String> standbyUrlToSet(String jmStandbyUrl) { 26 | return Stream.of(jmStandbyUrl.split(";")).collect(Collectors.toSet()); 27 | } 28 | 29 | public static String getJmHost(String jmUrl) { 30 | Matcher matcher = JM_URL_REGEX.matcher(jmUrl); 31 | if (matcher.find()) { 32 | return matcher.group(1); 33 | } 34 | throw new IllegalStateException("flink job manager url does not match http://{ip}:{port}"); 35 | } 36 | 37 | public static Integer getJmPort(String jmUrl) { 38 | Matcher matcher = JM_URL_REGEX.matcher(jmUrl); 39 | if (matcher.find()) { 40 | return Integer.parseInt(matcher.group(2)); 41 | } 42 | throw new IllegalStateException("flink job manager url does not match http://{ip}:{port}"); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/converter/NodeConvertMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.converter; 2 | 3 | import com.github.codingdebugallday.client.api.dto.NodeDTO; 4 | import com.github.codingdebugallday.client.domain.entity.Node; 5 | import org.mapstruct.Mapper; 6 | import org.mapstruct.factory.Mappers; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/03/31 12:12 14 | * @since 1.0 15 | */ 16 | @Mapper 17 | public interface NodeConvertMapper { 18 | 19 | NodeConvertMapper INSTANCE = Mappers.getMapper(NodeConvertMapper.class); 20 | 21 | /** 22 | * entityToDTO 23 | * 24 | * @param node Node 25 | * @return com.github.codingdebugallday.client.api.dto.NodeDTO 26 | */ 27 | NodeDTO entityToDTO(Node node); 28 | 29 | /** 30 | * dtoToEntity 31 | * 32 | * @param nodeDTO NodeDTO 33 | * @return com.github.codingdebugallday.client.domain.entity.Node 34 | */ 35 | Node dtoToEntity(NodeDTO nodeDTO); 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/converter/UploadJarConvertMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.converter; 2 | 3 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO; 4 | import com.github.codingdebugallday.client.domain.entity.UploadJar; 5 | import org.mapstruct.Mapper; 6 | import org.mapstruct.factory.Mappers; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/03/31 12:12 14 | * @since 1.0 15 | */ 16 | @Mapper 17 | public interface UploadJarConvertMapper { 18 | 19 | UploadJarConvertMapper INSTANCE = Mappers.getMapper(UploadJarConvertMapper.class); 20 | 21 | /** 22 | * entityToDTO 23 | * 24 | * @param uploadJar UploadJar 25 | * @return com.github.codingdebugallday.client.api.dto.UploadJarDTO 26 | */ 27 | UploadJarDTO entityToDTO(UploadJar uploadJar); 28 | 29 | /** 30 | * dtoToEntity 31 | * 32 | * @param uploadJarDTO UploadJarDTO 33 | * @return com.github.codingdebugallday.client.domain.entity.UploadJar 34 | */ 35 | UploadJar dtoToEntity(UploadJarDTO uploadJarDTO); 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/enums/NodeTypeEnum.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.enums; 2 | 3 | /** 4 | *
5 | * description 6 | *
7 | * 8 | * @author isacc 2020/04/03 17:20 9 | * @since 1.0 10 | */ 11 | public enum NodeTypeEnum { 12 | 13 | /** 14 | * master 15 | */ 16 | MASTER, 17 | /** 18 | * slave 19 | */ 20 | SLAVE; 21 | } 22 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/exceptions/FlinkApiCommonException.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.exceptions; 2 | 3 | /** 4 | *
5 | * description 6 | *
7 | * 8 | * @author isacc 2020/03/25 21:10 9 | * @since 1.0 10 | */ 11 | public class FlinkApiCommonException extends RuntimeException { 12 | private static final long serialVersionUID = -7003755589740016882L; 13 | 14 | private final Integer code; 15 | 16 | public FlinkApiCommonException(Integer code, String msg) { 17 | super(msg); 18 | this.code = code; 19 | } 20 | 21 | public FlinkApiCommonException(Integer code, String msg, Throwable throwable) { 22 | super(msg, throwable); 23 | this.code = code; 24 | } 25 | 26 | public Integer getCode() { 27 | return code; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/exceptions/FlinkCommonException.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.exceptions; 2 | 3 | /** 4 | *
5 | * description 6 | *
7 | * 8 | * @author isacc 2020/03/27 16:31 9 | * @since 1.0 10 | */ 11 | public class FlinkCommonException extends RuntimeException { 12 | 13 | private static final long serialVersionUID = -6167077238093684100L; 14 | 15 | public FlinkCommonException(String msg) { 16 | super(msg); 17 | } 18 | 19 | public FlinkCommonException(String msg, Throwable throwable) { 20 | super(msg, throwable); 21 | } 22 | 23 | public FlinkCommonException(Throwable throwable) { 24 | super(throwable); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/exceptions/GlobalExceptionHandler.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.exceptions; 2 | 3 | import com.github.codingdebugallday.client.domain.entity.ApiResult; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.core.annotation.Order; 7 | import org.springframework.http.HttpStatus; 8 | import org.springframework.web.bind.annotation.ExceptionHandler; 9 | import org.springframework.web.bind.annotation.RestControllerAdvice; 10 | 11 | /** 12 | *
13 | * description 14 | *
15 | * 16 | * @author abigballofmud 2019/11/21 14:49 17 | * @since 1.0 18 | */ 19 | @Order(100) 20 | @RestControllerAdvice 21 | public class GlobalExceptionHandler { 22 | 23 | private static final Logger LOG = LoggerFactory.getLogger(GlobalExceptionHandler.class); 24 | 25 | @ExceptionHandler(FlinkApiCommonException.class) 26 | public ApiResult handleFlinkApiCommonException(FlinkApiCommonException e) { 27 | LOG.warn("Handle FlinkApiCommonException", e); 28 | Integer code = e.getCode(); 29 | String msg = e.getMessage(); 30 | return ApiResult.fail(code, msg); 31 | } 32 | 33 | @ExceptionHandler(FlinkCommonException.class) 34 | public ApiResult handleFlinkCommonException(FlinkCommonException e) { 35 | LOG.warn("Handle FlinkCommonException", e); 36 | String msg = e.getMessage(); 37 | return ApiResult.fail(HttpStatus.INTERNAL_SERVER_ERROR.value(), msg); 38 | } 39 | 40 | @ExceptionHandler(Exception.class) 41 | public ApiResult handleException(Exception e) { 42 | LOG.warn("Handle Exception", e); 43 | String msg = e.getMessage(); 44 | return ApiResult.fail(-1, msg); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/exceptions/RestTemplateErrorHandler.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.exceptions; 2 | 3 | import java.io.IOException; 4 | 5 | import org.springframework.http.client.ClientHttpResponse; 6 | import org.springframework.web.client.ResponseErrorHandler; 7 | 8 | /** 9 | *
10 | * description 11 | *
12 | * 13 | * @author isacc 2020/3/26 21:40 14 | * @since 1.0 15 | */ 16 | public class RestTemplateErrorHandler implements ResponseErrorHandler { 17 | 18 | @Override 19 | public boolean hasError(ClientHttpResponse response) throws IOException { 20 | return true; 21 | } 22 | 23 | @Override 24 | public void handleError(ClientHttpResponse response) throws IOException { 25 | // ignore do nothing 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/exceptions/package-info.java: -------------------------------------------------------------------------------- 1 | @NonNullApi 2 | package com.github.codingdebugallday.client.infra.exceptions; 3 | 4 | import org.springframework.lang.NonNullApi; -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/handlers/FutureTaskWorker.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.handlers; 2 | 3 | import java.util.List; 4 | import java.util.concurrent.CompletableFuture; 5 | import java.util.function.Function; 6 | 7 | import lombok.AllArgsConstructor; 8 | import lombok.Data; 9 | 10 | /** 11 | *
12 | * description 13 | *
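 * A minimal usage sketch (the task list and the process method are illustrative):
 * <pre>{@code
 * FutureTaskWorker<String> worker = new FutureTaskWorker<>(
 *         Arrays.asList("a", "b"),
 *         s -> CompletableFuture.runAsync(() -> process(s)));
 * worker.getAllCompletableFuture().join(); // waits until every task completes
 * }</pre>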
14 | * 15 | * @author isacc 2020/04/02 21:54 16 | * @since 1.0 17 | */ 18 | @Data 19 | @AllArgsConstructor 20 | public class FutureTaskWorker<T> { 21 | 22 | /** 23 | * Tasks to be executed asynchronously 24 | */ 25 | private List<T> taskList; 26 | 27 | /** 28 | * Function applied to each task 29 | */ 30 | private Function<T, CompletableFuture<?>> workFunction; 31 | 32 | public CompletableFuture<Void> getAllCompletableFuture() { 33 | return CompletableFuture.allOf(taskList.stream().map(workFunction).toArray(CompletableFuture[]::new)); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/mapper/ClusterMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.github.codingdebugallday.client.domain.entity.Cluster; 5 | 6 | /** 7 | *
8 | * description 9 | *
10 | * 11 | * @author isacc 2020/03/28 2:29 12 | * @since 1.0 13 | */ 14 | public interface ClusterMapper extends BaseMapper { 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/mapper/NodeMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.github.codingdebugallday.client.domain.entity.Node; 5 | 6 | /** 7 | *
8 | * description 9 | *
10 | * 11 | * @author isacc 2020/03/28 2:29 12 | * @since 1.0 13 | */ 14 | public interface NodeMapper extends BaseMapper { 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/mapper/UploadJarMapper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.mapper; 2 | 3 | import com.baomidou.mybatisplus.core.mapper.BaseMapper; 4 | import com.github.codingdebugallday.client.domain.entity.UploadJar; 5 | 6 | /** 7 | *
8 | * description 9 | *
10 | * 11 | * @author isacc 2020/03/28 2:29 12 | * @since 1.0 13 | */ 14 | public interface UploadJarMapper extends BaseMapper { 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/repository/impl/ClusterRepositoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.repository.impl; 2 | 3 | import java.util.Optional; 4 | import java.util.stream.Collectors; 5 | 6 | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; 7 | import com.baomidou.mybatisplus.core.metadata.IPage; 8 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 9 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 10 | import com.github.codingdebugallday.client.domain.entity.Cluster; 11 | import com.github.codingdebugallday.client.domain.repository.ClusterRepository; 12 | import com.github.codingdebugallday.client.infra.converter.ClusterConvertMapper; 13 | import com.github.codingdebugallday.client.infra.mapper.ClusterMapper; 14 | import org.springframework.beans.BeanUtils; 15 | import org.springframework.stereotype.Component; 16 | 17 | /** 18 | *
19 | * description 20 | *
21 | * 22 | * @author isacc 2020/03/28 2:24 23 | * @since 1.0 24 | */ 25 | @Component 26 | public class ClusterRepositoryImpl implements ClusterRepository { 27 | 28 | private final ClusterMapper clusterMapper; 29 | 30 | public ClusterRepositoryImpl(ClusterMapper clusterMapper) { 31 | this.clusterMapper = clusterMapper; 32 | } 33 | 34 | @Override 35 | public IPage<ClusterDTO> pageAndSortDTO(ClusterDTO clusterDTO, Page<Cluster> clusterPage) { 36 | final QueryWrapper<Cluster> queryWrapper = this.commonQueryWrapper(clusterDTO); 37 | Page<Cluster> entityPage = clusterMapper.selectPage(clusterPage, queryWrapper); 38 | final Page<ClusterDTO> dtoPage = new Page<>(); 39 | BeanUtils.copyProperties(entityPage, dtoPage); 40 | dtoPage.setRecords(entityPage.getRecords().stream() 41 | .map(ClusterConvertMapper.INSTANCE::entityToDTO) 42 | .collect(Collectors.toList())); 43 | return dtoPage; 44 | } 45 | 46 | @Override 47 | public void delete(ClusterDTO clusterDTO) { 48 | clusterMapper.delete(commonQueryWrapper(clusterDTO)); 49 | } 50 | 51 | @Override 52 | public ClusterDTO detail(Long tenantId, Long clusterId) { 53 | Cluster cluster = clusterMapper.selectOne(detailWrapper(tenantId, clusterId)); 54 | return ClusterConvertMapper.INSTANCE.entityToDTO(cluster); 55 | } 56 | 57 | @Override 58 | public ClusterDTO selectOne(String clusterCode, Long tenantId) { 59 | QueryWrapper<Cluster> queryWrapper = new QueryWrapper<>(); 60 | queryWrapper.eq(Cluster.FIELD_CLUSTER_CODE, clusterCode); 61 | queryWrapper.eq(Cluster.FIELD_TENANT_ID, tenantId); 62 | Cluster cluster = clusterMapper.selectOne(queryWrapper); 63 | return ClusterConvertMapper.INSTANCE.entityToDTO(cluster); 64 | } 65 | 66 | private QueryWrapper<Cluster> detailWrapper(Long tenantId, Long clusterId) { 67 | final QueryWrapper<Cluster> queryWrapper = new QueryWrapper<>(); 68 | queryWrapper.eq(Cluster.FIELD_TENANT_ID, tenantId); 69 | queryWrapper.eq(Cluster.FIELD_CLUSTER_ID, clusterId); 70 | return queryWrapper; 71 | } 72 | 73 | private QueryWrapper<Cluster> commonQueryWrapper(ClusterDTO clusterDTO) { 74 | final QueryWrapper<Cluster> queryWrapper = new QueryWrapper<>(); 75 | Optional.ofNullable(clusterDTO.getClusterCode()) 76 | .ifPresent(s -> queryWrapper.or().like(Cluster.FIELD_CLUSTER_CODE, s)); 77 | Optional.ofNullable(clusterDTO.getJobManagerUrl()) 78 | .ifPresent(s -> queryWrapper.or().like(Cluster.FIELD_JOB_MANAGER_URL, s)); 79 | queryWrapper.eq(Cluster.FIELD_TENANT_ID, clusterDTO.getTenantId()); 80 | return queryWrapper; 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/repository/impl/NodeRepositoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.repository.impl; 2 | 3 | import java.util.List; 4 | import java.util.stream.Collectors; 5 | 6 | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; 7 | import com.github.codingdebugallday.client.api.dto.NodeDTO; 8 | import com.github.codingdebugallday.client.domain.entity.Node; 9 | import com.github.codingdebugallday.client.domain.repository.NodeRepository; 10 | import com.github.codingdebugallday.client.infra.converter.NodeConvertMapper; 11 | import com.github.codingdebugallday.client.infra.mapper.NodeMapper; 12 | import org.springframework.stereotype.Component; 13 | 14 | /** 15 | *
16 | * description 17 | *
18 | * 19 | * @author isacc 2020/03/28 2:24 20 | * @since 1.0 21 | */ 22 | @Component 23 | public class NodeRepositoryImpl implements NodeRepository { 24 | 25 | private final NodeMapper nodeMapper; 26 | 27 | public NodeRepositoryImpl(NodeMapper nodeMapper) { 28 | this.nodeMapper = nodeMapper; 29 | } 30 | 31 | @Override 32 | public List<NodeDTO> selectByClusterCode(String clusterCode, Long tenantId) { 33 | QueryWrapper<Node> queryWrapper = new QueryWrapper<>(); 34 | queryWrapper.eq(Node.FIELD_CLUSTER_CODE, clusterCode); 35 | queryWrapper.eq(Node.FIELD_TENANT_ID, tenantId); 36 | return nodeMapper.selectList(queryWrapper).stream() 37 | .map(NodeConvertMapper.INSTANCE::entityToDTO) 38 | .collect(Collectors.toList()); 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/repository/impl/UploadJarRepositoryImpl.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.repository.impl; 2 | 3 | import java.util.Optional; 4 | import java.util.stream.Collectors; 5 | 6 | import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; 7 | import com.baomidou.mybatisplus.core.metadata.IPage; 8 | import com.baomidou.mybatisplus.extension.plugins.pagination.Page; 9 | import com.github.codingdebugallday.client.api.dto.UploadJarDTO; 10 | import com.github.codingdebugallday.client.domain.entity.UploadJar; 11 | import com.github.codingdebugallday.client.domain.repository.UploadJarRepository; 12 | import com.github.codingdebugallday.client.infra.converter.UploadJarConvertMapper; 13 | import com.github.codingdebugallday.client.infra.exceptions.FlinkCommonException; 14 | import com.github.codingdebugallday.client.infra.mapper.UploadJarMapper; 15 | import org.springframework.beans.BeanUtils; 16 | import org.springframework.stereotype.Component; 17 | 18 | /** 19 | *
20 | * description 21 | *
22 | * @author isacc 2020/04/07 10:45 23 | * @since 1.0 24 | */ 25 | @Component 26 | public class UploadJarRepositoryImpl implements UploadJarRepository { 27 | 28 | private final UploadJarMapper uploadJarMapper; 29 | 30 | public UploadJarRepositoryImpl(UploadJarMapper uploadJarMapper) { 31 | this.uploadJarMapper = uploadJarMapper; 32 | } 33 | 34 | @Override 35 | public IPage<UploadJarDTO> pageAndSortDTO(UploadJarDTO uploadJarDTO, Page<UploadJar> uploadJarPage) { 36 | final QueryWrapper<UploadJar> queryWrapper = this.commonQueryWrapper(uploadJarDTO); 37 | Page<UploadJar> entityPage = uploadJarMapper.selectPage(uploadJarPage, queryWrapper); 38 | final Page<UploadJarDTO> dtoPage = new Page<>(); 39 | BeanUtils.copyProperties(entityPage, dtoPage); 40 | dtoPage.setRecords(entityPage.getRecords().stream() 41 | .map(UploadJarConvertMapper.INSTANCE::entityToDTO) 42 | .collect(Collectors.toList())); 43 | return dtoPage; 44 | } 45 | 46 | @Override 47 | public UploadJarDTO detail(Long tenantId, Long uploadJarId) { 48 | UploadJar uploadJar = uploadJarMapper.selectOne(detailWrapper(tenantId, uploadJarId)); 49 | return UploadJarConvertMapper.INSTANCE.entityToDTO(uploadJar); 50 | } 51 | 52 | @Override 53 | public UploadJarDTO findMaxVersionJarByCode(String jarCode, String clusterCode, Long tenantId) { 54 | final QueryWrapper<UploadJar> queryWrapper = new QueryWrapper<>(); 55 | queryWrapper.eq(UploadJar.FIELD_TENANT_ID, tenantId); 56 | queryWrapper.eq(UploadJar.FIELD_CLUSTER_CODE, clusterCode); 57 | queryWrapper.eq(UploadJar.FIELD_JAR_CODE, jarCode); 58 | Optional<UploadJar> max = uploadJarMapper.selectList(queryWrapper).stream() 59 | .max((o1, o2) -> o1.getVersion().compareToIgnoreCase(o2.getVersion())); 60 | if (!max.isPresent()) { 61 | throw new FlinkCommonException("can not find upload jar"); 62 | } 63 | return UploadJarConvertMapper.INSTANCE.entityToDTO(max.get()); 64 | } 65 | 66 | private QueryWrapper<UploadJar> detailWrapper(Long tenantId, Long uploadJarId) { 67 | final QueryWrapper<UploadJar> queryWrapper = new QueryWrapper<>(); 68 | queryWrapper.eq(UploadJar.FIELD_TENANT_ID, tenantId); 69 | queryWrapper.eq(UploadJar.FIELD_UPLOAD_JAR_ID, uploadJarId); 70 | return queryWrapper; 71 | } 72 | 73 | private QueryWrapper<UploadJar> commonQueryWrapper(UploadJarDTO uploadJarDTO) { 74 | final QueryWrapper<UploadJar> queryWrapper = new QueryWrapper<>(); 75 | Optional.ofNullable(uploadJarDTO.getClusterCode()) 76 | .ifPresent(s -> queryWrapper.or().like(UploadJar.FIELD_CLUSTER_CODE, s)); 77 | Optional.ofNullable(uploadJarDTO.getJarCode()) 78 | .ifPresent(s -> queryWrapper.or().like(UploadJar.FIELD_JAR_CODE, s)); 79 | Optional.ofNullable(uploadJarDTO.getSystemProvided()) 80 | .ifPresent(s -> queryWrapper.or().eq(UploadJar.FIELD_SYSTEM_PROVIDED, s)); 81 | Optional.ofNullable(uploadJarDTO.getFilename()) 82 | .ifPresent(s -> queryWrapper.or().like(UploadJar.FIELD_FILENAME, s)); 83 | queryWrapper.eq(UploadJar.FIELD_TENANT_ID, uploadJarDTO.getTenantId()); 84 | return queryWrapper; 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/ApplicationContextHelper.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import java.lang.reflect.Method; 4 | import java.util.concurrent.ScheduledExecutorService; 5 | import java.util.concurrent.ScheduledThreadPoolExecutor; 6 | import java.util.concurrent.TimeUnit; 7 | 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | import org.springframework.beans.factory.support.DefaultListableBeanFactory; 11 | import org.springframework.context.ApplicationContext; 12 | import org.springframework.context.ApplicationContextAware; 13 | import org.springframework.context.support.AbstractRefreshableApplicationContext; 14 | import org.springframework.context.support.GenericApplicationContext; 15 | import org.springframework.lang.NonNull; 16 | import org.springframework.stereotype.Component; 17 | 18 | /** 19 | *
20 | * description 21 | *
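 * A minimal usage sketch of the static-setter variant (SomeUtil and its
 * setObjectMapper method are illustrative, not part of this project):
 * <pre>{@code
 * // injects the ObjectMapper bean into SomeUtil.setObjectMapper(ObjectMapper)
 * ApplicationContextHelper.asyncStaticSetter(ObjectMapper.class, SomeUtil.class, "setObjectMapper");
 * }</pre>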
22 | * @author isacc 2020/03/25 21:05 23 | * @since 1.0 24 | */ 25 | 26 | @Component 27 | public class ApplicationContextHelper implements ApplicationContextAware { 28 | 29 | private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationContextHelper.class); 30 | 31 | private static DefaultListableBeanFactory springFactory; 32 | 33 | private static ApplicationContext context; 34 | 35 | @Override 36 | public void setApplicationContext(@NonNull ApplicationContext applicationContext) { 37 | ApplicationContextHelper.setContext(applicationContext); 38 | if (applicationContext instanceof AbstractRefreshableApplicationContext) { 39 | AbstractRefreshableApplicationContext springContext = 40 | (AbstractRefreshableApplicationContext) applicationContext; 41 | ApplicationContextHelper.setFactory((DefaultListableBeanFactory) springContext.getBeanFactory()); 42 | } else if (applicationContext instanceof GenericApplicationContext) { 43 | GenericApplicationContext springContext = (GenericApplicationContext) applicationContext; 44 | ApplicationContextHelper.setFactory(springContext.getDefaultListableBeanFactory()); 45 | } 46 | } 47 | 48 | private static void setContext(ApplicationContext applicationContext) { 49 | ApplicationContextHelper.context = applicationContext; 50 | } 51 | 52 | private static void setFactory(DefaultListableBeanFactory springFactory) { 53 | ApplicationContextHelper.springFactory = springFactory; 54 | } 55 | 56 | public static DefaultListableBeanFactory getSpringFactory() { 57 | return springFactory; 58 | } 59 | 60 | public static ApplicationContext getContext() { 61 | return context; 62 | } 63 | 64 | /** 65 | * Asynchronously fetches a bean from the context and injects it into the target object; useful for beans that must be wired during startup. 66 | * For instance-method injection. 67 | * 68 | * @param type bean type 69 | * @param target target object 70 | * @param setterMethod name of a setter on target taking a single parameter of the given type 71 | * @param <T> type 72 | */ 73 | public static <T> void asyncInstanceSetter(Class<T> type, Object target, String setterMethod) { 74 | if (ApplicationContextHelper.getContext() != null) { 75 | try { 76 | Method method = target.getClass().getDeclaredMethod(setterMethod, type); 77 | method.setAccessible(true); 78 | method.invoke(target, ApplicationContextHelper.getContext().getBean(type)); 79 | } catch (Exception e) { 80 | throw new IllegalStateException(e); 81 | } 82 | LOGGER.info("setter {} to {} success.", type.getName(), target.getClass().getName()); 83 | return; 84 | } 85 | ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, "sync-setter")); 86 | executorService.scheduleAtFixedRate(() -> { 87 | if (ApplicationContextHelper.getContext() != null) { 88 | try { 89 | Method method = target.getClass().getDeclaredMethod(setterMethod, type); 90 | method.setAccessible(true); 91 | method.invoke(target, ApplicationContextHelper.getContext().getBean(type)); 92 | LOGGER.info("setter {} to {} success.", type.getName(), target.getClass().getName()); 93 | } catch (Exception e) { 94 | LOGGER.error("setter {} to {} failure.", type.getName(), target.getClass().getName(), e); 95 | } 96 | executorService.shutdown(); 97 | } 98 | }, 0, 1, TimeUnit.SECONDS); 99 | } 100 | 101 | /** 102 | * Asynchronously fetches a bean from the context and injects it into the target class; useful for beans that must be wired during startup. 103 | * For static-method injection. 104 | * 105 | * @param type bean type 106 | * @param target target class 107 | * @param setterMethod name of a static setter on target taking a single parameter of the given type 108 | */ 109 | public static void asyncStaticSetter(Class<?> type, Class<?> target, String setterMethod) { 110 | if (ApplicationContextHelper.getContext() != null) { 111 | try { 112 | Method method = target.getDeclaredMethod(setterMethod, type); 113 | method.setAccessible(true); 114 | method.invoke(null, ApplicationContextHelper.getContext().getBean(type)); 115 | } catch (Exception e) { 116 | throw new IllegalStateException(e); 117 | } 118 | LOGGER.info("setter {} to {} success.", type.getName(), target.getName()); 119 | return; 120 | } 121 | ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1, r -> new Thread(r, "sync-setter")); 122 | executorService.scheduleAtFixedRate(() -> { 123 | if (ApplicationContextHelper.getContext() != null) { 124 | try { 125 | Method method = target.getDeclaredMethod(setterMethod, type); 126 | method.setAccessible(true); 127 | method.invoke(null, ApplicationContextHelper.getContext().getBean(type)); 128 | LOGGER.info("setter {} to {} success.", type.getName(), target.getName()); 129 | } catch (Exception e) { 130 | LOGGER.error("setter {} to {} failure.", type.getName(), target.getName(), e); 131 | } 132 | executorService.shutdown(); 133 | } 134 | }, 0, 1, TimeUnit.SECONDS); 135 | } 136 | 137 | } 138 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/FlinkApiUtil.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import com.github.codingdebugallday.client.api.dto.ClusterDTO; 4 | import com.github.codingdebugallday.client.app.service.ApiClient; 5 | 6 | /** 7 | *
8 | * description 9 | *
10 | * 11 | * @author isacc 2020/03/28 0:58 12 | * @since 1.0 13 | */ 14 | public class FlinkApiUtil { 15 | 16 | private FlinkApiUtil() { 17 | throw new IllegalStateException("util class"); 18 | } 19 | 20 | /** 21 | * Checks whether the FlinkCluster on the ApiClient has been configured: 22 | * clusterCode and jobManagerUrl must not be null 23 | * 24 | * @param apiClient ApiClient 25 | * @return boolean whether it is configured 26 | */ 27 | public static boolean checkApiClient(ApiClient apiClient) { 28 | ClusterDTO clusterDTO = apiClient.getClusterDTO(); 29 | // guard against a missing cluster before dereferencing it 30 | if (clusterDTO == null) { 31 | return false; 32 | } 33 | return Preconditions.checkAllNotNull(clusterDTO.getClusterCode(), 34 | clusterDTO.getJobManagerUrl()); 35 | } 36 | 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/FlinkCommonUtil.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.Objects; 6 | 7 | import com.github.codingdebugallday.client.infra.exceptions.FlinkCommonException; 8 | import org.springframework.util.Assert; 9 | import org.springframework.util.FileCopyUtils; 10 | import org.springframework.web.multipart.MultipartFile; 11 | 12 | /** 13 | *
14 | * description 15 | *
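 * A minimal usage sketch (MockMultipartFile comes from spring-test and is used
 * here purely for illustration):
 * <pre>{@code
 * MultipartFile upload = new MockMultipartFile("file", "job.jar", null, bytes);
 * File jar = FlinkCommonUtil.multiPartFileToFile(upload); // written to ./job.jar
 * }</pre>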
16 | * 17 | * @author isacc 2020/04/07 11:18 18 | * @since 1.0 19 | */ 20 | public class FlinkCommonUtil { 21 | 22 | private FlinkCommonUtil() { 23 | throw new IllegalStateException("util class"); 24 | } 25 | 26 | public static File multiPartFileToFile(MultipartFile multipartFile) { 27 | Assert.notNull(multipartFile, "multipartFile must not be null"); 28 | try { 29 | File toFile = new File(Objects.requireNonNull(multipartFile.getOriginalFilename())); 30 | byte[] bytes = FileCopyUtils.copyToByteArray(multipartFile.getInputStream()); 31 | FileCopyUtils.copy(bytes, toFile); 32 | return toFile; 33 | } catch (IOException e) { 34 | throw new FlinkCommonException("multiPartFileToFile error", e); 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/JSON.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.List; 6 | 7 | import com.fasterxml.jackson.databind.ObjectMapper; 8 | import com.fasterxml.jackson.databind.type.CollectionType; 9 | import com.github.codingdebugallday.client.infra.exceptions.FlinkCommonException; 10 | 11 | /** 12 | *
13 | * description 14 | *
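 * A minimal usage sketch; note the backing ObjectMapper is looked up from the
 * Spring context, so these helpers are only safe once the context is ready:
 * <pre>{@code
 * String json = JSON.toJson(clusterDTO);
 * ClusterDTO back = JSON.toObj(json, ClusterDTO.class);
 * }</pre>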
15 | * 16 | * @author isacc 2020/03/25 21:09 17 | * @since 1.0 18 | */ 19 | public class JSON { 20 | 21 | private JSON() throws IllegalAccessException { 22 | throw new IllegalAccessException("util class"); 23 | } 24 | 25 | private static ObjectMapper objectMapper; 26 | 27 | static { 28 | objectMapper = ApplicationContextHelper.getContext().getBean(ObjectMapper.class); 29 | } 30 | 31 | public static T toObj(String json, Class clazz) { 32 | try { 33 | return objectMapper.readValue(json, clazz); 34 | } catch (IOException e) { 35 | throw new FlinkCommonException("error.jackson.read", e); 36 | } 37 | } 38 | 39 | public static List toArray(String json, Class clazz) { 40 | try { 41 | CollectionType type = objectMapper.getTypeFactory().constructCollectionType(ArrayList.class, clazz); 42 | return objectMapper.readValue(json, type); 43 | } catch (IOException e) { 44 | throw new FlinkCommonException("error.jackson.read", e); 45 | } 46 | } 47 | 48 | public static String toJson(T obj) { 49 | try { 50 | return objectMapper.writeValueAsString(obj); 51 | } catch (IOException e) { 52 | throw new FlinkCommonException("error.jackson.write", e); 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/Preconditions.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import java.util.Objects; 4 | import java.util.stream.Stream; 5 | 6 | import com.github.codingdebugallday.client.infra.exceptions.FlinkApiCommonException; 7 | import org.springframework.http.HttpStatus; 8 | 9 | /** 10 | *
11 | * description 12 | *
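 * A minimal usage sketch:
 * <pre>{@code
 * ApiClient client = Preconditions.checkNotNull(apiClient, "apiClient must not be null");
 * }</pre>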
13 | * 14 | * @author abigballofmud 2019/11/22 11:49 15 | * @since 1.0 16 | */ 17 | public final class Preconditions { 18 | 19 | private Preconditions() { 20 | throw new IllegalStateException("util class!"); 21 | } 22 | 23 | public static <T> T checkNotNull(T reference, String errorMessage) { 24 | if (Objects.isNull(reference)) { 25 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), errorMessage); 26 | } else { 27 | return reference; 28 | } 29 | } 30 | 31 | public static <T> void checkNull(T reference, String errorMessage) { 32 | if (Objects.nonNull(reference)) { 33 | throw new FlinkApiCommonException(HttpStatus.INTERNAL_SERVER_ERROR.value(), errorMessage); 34 | } 35 | } 36 | 37 | @SafeVarargs 38 | public static <T> boolean checkAllNotNull(T... reference) { 39 | return Stream.of(reference).noneMatch(Objects::isNull); 40 | } 41 | 42 | @SafeVarargs 43 | public static <T> boolean checkAnyNotNull(T... reference) { 44 | return Stream.of(reference).anyMatch(Objects::nonNull); 45 | } 46 | 47 | @SafeVarargs 48 | public static <T> boolean checkAllNull(T... reference) { 49 | return Stream.of(reference).allMatch(Objects::isNull); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/RestTemplateUtil.java: -------------------------------------------------------------------------------- 1 | package com.github.codingdebugallday.client.infra.utils; 2 | 3 | import org.springframework.http.HttpHeaders; 4 | import org.springframework.http.MediaType; 5 | 6 | /** 7 | *

-------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/RestTemplateUtil.java: --------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.infra.utils;
2 | 
3 | import org.springframework.http.HttpHeaders;
4 | import org.springframework.http.MediaType;
5 | 
6 | /**
7 |  * <p>
8 |  * description
9 |  * </p>
10 |  *
11 |  * @author isacc 2020/3/26 23:28
12 |  * @since 1.0
13 |  */
14 | public final class RestTemplateUtil {
15 | 
16 |     private RestTemplateUtil() {
17 |         throw new IllegalStateException("util class");
18 |     }
19 | 
20 |     /**
21 |      * Builds JSON request headers
22 |      *
23 |      * @return org.springframework.http.HttpHeaders
24 |      */
25 |     public static HttpHeaders applicationJsonHeaders() {
26 |         HttpHeaders httpHeaders = new HttpHeaders();
27 |         httpHeaders.setContentType(MediaType.APPLICATION_JSON);
28 |         return httpHeaders;
29 |     }
30 | 
31 |     /**
32 |      * Builds multipart/form-data request headers
33 |      *
34 |      * @return org.springframework.http.HttpHeaders
35 |      */
36 |     public static HttpHeaders applicationMultiDataHeaders() {
37 |         HttpHeaders httpHeaders = new HttpHeaders();
38 |         httpHeaders.setContentType(MediaType.MULTIPART_FORM_DATA);
39 |         return httpHeaders;
40 |     }
41 | 
42 | }
43 | 
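A usage sketch, not part of the repository: the headers are meant for RestTemplate calls against the Flink REST API; the JobManager address, job id, and request body below are placeholders (the savepoint endpoint shape follows Flink's REST API).

    import org.springframework.http.HttpEntity;
    import org.springframework.web.client.RestTemplate;
    import com.github.codingdebugallday.client.infra.utils.RestTemplateUtil;

    public class RestTemplateUtilExample {
        public static void main(String[] args) {
            RestTemplate restTemplate = new RestTemplate();
            String url = "http://localhost:8081/jobs/abc123/savepoints";
            HttpEntity<String> entity = new HttpEntity<>(
                    "{\"cancel-job\": false}", RestTemplateUtil.applicationJsonHeaders());
            System.out.println(restTemplate.postForObject(url, entity, String.class));
        }
    }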

-------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/RetryUtil.java: --------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.infra.utils;
2 | 
3 | import java.util.List;
4 | import java.util.concurrent.*;
5 | 
6 | import com.github.codingdebugallday.client.infra.exceptions.FlinkCommonException;
7 | import com.google.common.util.concurrent.ThreadFactoryBuilder;
8 | import org.slf4j.Logger;
9 | import org.slf4j.LoggerFactory;
10 | 
11 | /**
12 |  * <p>
13 |  * Retry utility
14 |  * </p>
15 |  *
16 |  * @author isacc 2020/3/30 21:29
17 |  * @since 1.0
18 |  */
19 | public final class RetryUtil {
20 | 
21 |     private RetryUtil() {
22 |         throw new IllegalStateException("util class");
23 |     }
24 | 
25 |     private static final Logger LOG = LoggerFactory.getLogger(RetryUtil.class);
26 | 
27 |     private static final long MAX_SLEEP_MILLISECOND = 256 * 1000L;
28 | 
29 |     /**
30 |      * Retries a callable a bounded number of times.
31 |      *
32 |      * @param callable               the actual logic
33 |      * @param retryTimes             maximum number of attempts (>1)
34 |      * @param sleepTimeInMilliSecond how long to sleep after a failed attempt before retrying
35 |      * @param exponential            whether the sleep time grows exponentially
36 |      * @param <T>                    result type
37 |      * @return the callable's result, after retries if needed
38 |      */
39 |     public static <T> T executeWithRetry(Callable<T> callable,
40 |                                          int retryTimes,
41 |                                          long sleepTimeInMilliSecond,
42 |                                          boolean exponential) {
43 |         Retry retry = new Retry();
44 |         return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
45 |     }
46 | 
47 |     /**
48 |      * Retries a callable a bounded number of times.
49 |      *
50 |      * @param callable                the actual logic
51 |      * @param retryTimes              maximum number of attempts (>1)
52 |      * @param sleepTimeInMilliSecond  how long to sleep after a failed attempt before retrying
53 |      * @param exponential             whether the sleep time grows exponentially
54 |      * @param <T>                     result type
55 |      * @param retryExceptionClassList retry only when one of these exception types is thrown
56 |      * @return the callable's result, after retries if needed
57 |      */
58 |     public static <T> T executeWithRetry(Callable<T> callable,
59 |                                          int retryTimes,
60 |                                          long sleepTimeInMilliSecond,
61 |                                          boolean exponential,
62 |                                          List<Class<?>> retryExceptionClassList) {
63 |         Retry retry = new Retry();
64 |         return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, retryExceptionClassList);
65 |     }
66 | 
67 |     /**
68 |      * Executes on an external thread and retries. Each attempt must finish within timeoutMs, otherwise it counts as a failure.
69 |      * The thread pool for the async work is supplied by the caller, who controls how widely it is shared (e.g. HttpClientUtil shares one pool).
70 |      * <p>
71 |      * Limitation: the worker thread can only be interrupted while it is blocking.
72 |      *
73 |      * @param callable               the actual logic
74 |      * @param retryTimes             maximum number of attempts (>1)
75 |      * @param sleepTimeInMilliSecond how long to sleep after a failed attempt before retrying
76 |      * @param exponential            whether the sleep time grows exponentially
77 |      * @param timeoutMs              timeout for each attempt, in milliseconds
78 |      * @param executor               thread pool that runs the async attempts
79 |      * @param <T>                    result type
80 |      * @return the callable's result, after retries if needed
81 |      */
82 |     public static <T> T asyncExecuteWithRetry(Callable<T> callable,
83 |                                               int retryTimes,
84 |                                               long sleepTimeInMilliSecond,
85 |                                               boolean exponential,
86 |                                               long timeoutMs,
87 |                                               ThreadPoolExecutor executor) {
88 |         Retry retry = new AsyncRetry(timeoutMs, executor);
89 |         return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
90 |     }
91 | 
92 |     /**
93 |      * Creates the thread pool for async execution. Characteristics:
94 |      * core size 0: no threads, and no cost, until first use.
95 |      * max size 5: at most five threads.
96 |      * 60s keep-alive: threads idle for more than 60 seconds are reclaimed.
97 |      * SynchronousQueue: tasks never queue; a submit succeeds only if a thread is free, otherwise RejectedExecutionException.
98 |      *
99 |      * @return the thread pool
100 |      */
101 |     public static ThreadPoolExecutor createThreadPoolExecutor() {
102 |         ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
103 |                 .setNameFormat("flink-api-pool-%d").build();
104 |         return new ThreadPoolExecutor(0, 5,
105 |                 60L, TimeUnit.SECONDS,
106 |                 new SynchronousQueue<>(), namedThreadFactory);
107 |     }
108 | 
109 | 
110 |     private static class Retry {
111 | 
112 |         public <T> T doRetry(Callable<T> callable, int retryTimes,
113 |                              long sleepTimeInMilliSecond, boolean exponential,
114 |                              List<Class<?>> retryExceptionClassList) {
115 |             if (null == callable) {
116 |                 throw new IllegalArgumentException("Programming error: callable must not be null!");
117 |             }
118 |             if (retryTimes < 1) {
119 |                 throw new IllegalArgumentException(String.format(
120 |                         "Programming error: retryTimes [%d] must not be less than 1!", retryTimes));
121 |             }
122 |             Throwable t = null;
123 |             for (int i = 0; i < retryTimes; i++) {
124 |                 try {
125 |                     return call(callable);
126 |                 } catch (Exception e) {
127 |                     t = e;
128 |                     if (i == 0) {
129 |                         LOG.error(String.format("Exception when calling callable, message: %s", t.getMessage()), t);
130 |                     }
131 |                     doNeedRetry(retryExceptionClassList, e);
132 |                     doSleep(i, retryTimes, sleepTimeInMilliSecond, exponential, e);
133 |                 }
134 |             }
135 |             throw new FlinkCommonException(t);
136 |         }
137 | 
138 |         protected <T> T call(Callable<T> callable) throws Exception {
139 |             return callable.call();
140 |         }
141 | 
142 |         private static void doNeedRetry(List<Class<?>> retryExceptionClassList,
143 |                                         Exception e) {
144 |             if (null != retryExceptionClassList && !retryExceptionClassList.isEmpty()) {
145 |                 boolean needRetry = false;
146 |                 for (Class<?> eachExceptionClass : retryExceptionClassList) {
147 |                     if (eachExceptionClass == e.getClass()) {
148 |                         needRetry = true;
149 |                         break;
150 |                     }
151 |                 }
152 |                 if (!needRetry) {
153 |                     throw new FlinkCommonException(e);
154 |                 }
155 |             }
156 |         }
157 | 
158 |         private static void doSleep(int i,
159 |                                     int retryTimes,
160 |                                     long sleepTimeInMilliSecond,
161 |                                     boolean exponential,
162 |                                     Exception e) {
163 |             if (i + 1 < retryTimes && sleepTimeInMilliSecond > 0) {
164 |                 long startTime = System.currentTimeMillis();
165 |                 long timeToSleep;
166 |                 if (exponential) {
167 |                     timeToSleep = sleepTimeInMilliSecond * (long) Math.pow(2, i);
168 |                 } else {
169 |                     timeToSleep = sleepTimeInMilliSecond;
170 |                 }
171 |                 if (timeToSleep >= MAX_SLEEP_MILLISECOND) {
172 |                     timeToSleep = MAX_SLEEP_MILLISECOND;
173 |                 }
174 |                 try {
175 |                     Thread.sleep(timeToSleep);
176 |                 } catch (InterruptedException ignored) {
177 |                     Thread.currentThread().interrupt();
178 |                 }
179 |                 long realTimeSleep = System.currentTimeMillis() - startTime;
180 |                 LOG.error("Exception when calling callable, " +
181 |                         "about to start retry attempt {}. " +
182 |                         "Planned wait for this retry: [{}] ms, " +
183 |                         "actual wait: [{}] ms, " +
184 |                         "exception message: [{}]"
185 |                         , i + 1, timeToSleep, realTimeSleep, e.getMessage());
186 |             }
187 |         }
188 |     }
189 | 
190 | 
191 |     private static class AsyncRetry extends Retry {
192 | 
193 |         private final long timeoutMs;
194 |         private final ThreadPoolExecutor executor;
195 | 
196 |         public AsyncRetry(long timeoutMs, ThreadPoolExecutor executor) {
197 |             this.timeoutMs = timeoutMs;
198 |             this.executor = executor;
199 |         }
200 | 

201 |         /**
202 |          * Submits the task to the supplied thread pool and waits for it.
203 |          * <p>
204 |          * future.get() waits up to the given number of milliseconds; if the task finishes within the timeout, it returns normally.
205 |          * If an exception is thrown (execution timeout, execution failure, or cancel/interrupt from another thread), it is logged and rethrown.
206 |          * In both the normal and the failure path, the task is checked for completion and cancelled if it is still running;
207 |          * cancel(true) means the thread is interrupted even while the task is executing.
208 |          *
209 |          * @param callable the actual logic
210 |          * @param <T>      result type
211 |          * @return T
212 |          */
213 |         @Override
214 |         protected <T> T call(Callable<T> callable) {
215 |             Future<T> future = executor.submit(callable);
216 |             Throwable t;
217 |             try {
218 |                 return future.get(timeoutMs, TimeUnit.MILLISECONDS);
219 |             } catch (Exception e) {
220 |                 t = e;
221 |                 LOG.warn("Try once failed", e);
222 |                 throw new FlinkCommonException("Try once failed", t);
223 |             } finally {
224 |                 if (!future.isDone()) {
225 |                     future.cancel(true);
226 |                     LOG.warn("Try once task not done, cancel it, active count: {}", executor.getActiveCount());
227 |                 }
228 |             }
229 |         }
230 |     }
231 | 
232 | }
233 | 
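A usage sketch, not part of the repository: the synchronous variant retries in the calling thread, while the async variant additionally caps each attempt with a timeout on a caller-supplied pool (createThreadPoolExecutor is a natural fit). The JobManager URL is a placeholder.

    import java.util.concurrent.ThreadPoolExecutor;
    import org.springframework.web.client.RestTemplate;
    import com.github.codingdebugallday.client.infra.utils.RetryUtil;

    public class RetryUtilExample {
        public static void main(String[] args) {
            RestTemplate restTemplate = new RestTemplate();
            String url = "http://localhost:8081/overview";

            // Up to 3 attempts; exponential back-off sleeps 1s, then 2s, between failures.
            String overview = RetryUtil.executeWithRetry(
                    () -> restTemplate.getForObject(url, String.class),
                    3, 1000L, true);

            // Same retry policy, but each attempt runs on the pool and is cancelled after 5s.
            ThreadPoolExecutor pool = RetryUtil.createThreadPoolExecutor();
            String async = RetryUtil.asyncExecuteWithRetry(
                    () -> restTemplate.getForObject(url, String.class),
                    3, 1000L, true, 5000L, pool);

            System.out.println(overview + " / " + async);
        }
    }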

-------------------------------------------------------------------------------- /src/main/java/com/github/codingdebugallday/client/infra/utils/ThreadPoolUtil.java: --------------------------------------------------------------------------------
1 | package com.github.codingdebugallday.client.infra.utils;
2 | 
3 | import java.util.Objects;
4 | import java.util.concurrent.*;
5 | 
6 | import com.alibaba.ttl.threadpool.TtlExecutors;
7 | import com.google.common.util.concurrent.ThreadFactoryBuilder;
8 | 
9 | /**
10 |  * <p>
11 |  * Thread pools must not be created via Executors; use ThreadPoolExecutor directly, which makes the pool's runtime rules explicit and avoids the risk of resource exhaustion. The problems with the Executors factory methods:
12 |  * 1) newFixedThreadPool and newSingleThreadExecutor:
13 |  *    the queued requests may pile up and consume a huge amount of memory, even OOM.
14 |  * 2) newCachedThreadPool and newScheduledThreadPool:
15 |  *    the maximum thread count is Integer.MAX_VALUE, so a very large number of threads may be created, even OOM.
16 |  * </p>
17 |  *
18 |  * @author abigballofmud 2019/11/21 17:37
19 |  * @since 1.0
20 |  */
21 | public class ThreadPoolUtil {
22 | 
23 |     private static volatile ExecutorService executorService;
24 | 
25 |     private ThreadPoolUtil() {
26 |         throw new IllegalStateException("Utility class");
27 |     }
28 | 
29 |     /**
30 |      * The guava way
31 |      */
32 |     public static ExecutorService getExecutorService() {
33 |         if (Objects.isNull(executorService)) {
34 |             synchronized (ThreadPoolUtil.class) {
35 |                 /*
36 |                  * Uses Google's guava ThreadFactoryBuilder.
37 |                  * ThreadPoolExecutor parameters:
38 |                  * 1. corePoolSize    resident core pool size
39 |                  * 2. maximumPoolSize maximum thread count, must be >= 1
40 |                  * 3. keepAliveTime   how long surplus idle threads survive
41 |                  * 4. TimeUnit        time unit
42 |                  * 5. BlockingQueue   task queue
43 |                  * 6. ThreadFactory   factory used to create threads
44 |                  * 7. RejectedExecutionHandler rejection policy when the queue is full and the worker count has reached the maximum
45 |                  */
46 |                 if (Objects.isNull(executorService)) {
47 |                     ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat("flink-pool-%d").build();
48 |                     executorService = new ThreadPoolExecutor(8,
49 |                             8,
50 |                             1L,
51 |                             TimeUnit.MINUTES,
52 |                             new LinkedBlockingQueue<>(4), namedThreadFactory, new ThreadPoolExecutor.CallerRunsPolicy());
53 |                 }
54 |             }
55 |         }
56 |         return TtlExecutors.getTtlExecutorService(executorService);
57 |     }
58 | 
59 | }
60 | 
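A usage sketch, not part of the repository: getExecutorService lazily builds one shared pool (8 core / 8 max threads, a bounded queue of 4, CallerRunsPolicy) and wraps it with TtlExecutors so TransmittableThreadLocal values propagate into pooled threads; under saturation, extra submissions run on the caller's thread rather than being rejected.

    import java.util.concurrent.ExecutorService;
    import com.github.codingdebugallday.client.infra.utils.ThreadPoolUtil;

    public class ThreadPoolUtilExample {
        public static void main(String[] args) {
            ExecutorService pool = ThreadPoolUtil.getExecutorService();
            pool.execute(() -> System.out.println(Thread.currentThread().getName())); // e.g. flink-pool-0
        }
    }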

-------------------------------------------------------------------------------- /src/main/resources/META-INF/spring.factories: --------------------------------------------------------------------------------
1 | org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
2 | com.github.codingdebugallday.client.infra.autoconfigure.FlinkApiAutoConfiguration,\
3 | com.github.codingdebugallday.client.infra.autoconfigure.GlobalExceptionHandlerAutoConfiguration,\
4 | com.github.codingdebugallday.client.infra.autoconfigure.MybatisPlusConfig
-------------------------------------------------------------------------------- /src/main/resources/application.yml: --------------------------------------------------------------------------------
1 | server:
2 |   port: 9528
3 | spring:
4 |   datasource:
5 |     dynamic:
6 |       primary: master # default datasource or datasource group; defaults to master
7 |       datasource:
8 |         master:
9 |           username: root
10 |           password: tse@9527
11 |           driver-class-name: com.mysql.jdbc.Driver
12 |           url: jdbc:mysql://localhost:3306/flink_explore?useUnicode=true&characterEncoding=utf-8&useSSL=false
13 |           hikari:
14 |             min-idle: 10
15 |             max-pool-size: 15
16 |             is-auto-commit: true
17 |             idle-timeout: 30000
18 |             pool-name: ExpendHikariCP
19 |             max-lifetime: 1800000
20 |             connection-timeout: 30000
21 |             connection-test-query: SELECT 1
22 | 
23 | mybatis:
24 |   mapperLocations: classpath*:/mapper/*.xml
25 |   configuration:
26 |     mapUnderscoreToCamelCase: true
27 | 
28 | jasypt:
29 |   encryptor:
30 |     password: ${JASYPT_ENCRYPTOR_PASSWORD:hUyhdphl2gXcBDEH5dy}
31 | 
32 | logging:
33 |   level:
34 |     com.github.codingdebugallday.client: debug
35 |     com.baomidou.dynamic: debug
36 |   file:
37 |     name: logs/flink_client.log
-------------------------------------------------------------------------------- /src/main/resources/mapper/ClusterMapper.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 |
-------------------------------------------------------------------------------- /src/main/resources/mapper/NodeMapper.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 |
-------------------------------------------------------------------------------- /src/main/resources/mapper/UploadJarMapper.xml: --------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 |
37 |     ${prefix}.jar_desc,
38 |     ${prefix}.jar_code,
39 |     ${prefix}.version,
40 |     ${prefix}.system_provided,
41 |     ${prefix}.entry_class,
42 |     ${prefix}.filename,
43 |     ${prefix}.jar_name,
44 |     ${prefix}.status
45 | 46 | 47 |
-------------------------------------------------------------------------------- /src/main/resources/sql/flink_explore_1.0.2.sql: --------------------------------------------------------------------------------
1 | /*
2 |  Navicat Premium Data Transfer
3 | 
4 |  Source Server         : local_mysql5.7.26
5 |  Source Server Type    : MySQL
6 |  Source Server Version : 50726
7 |  Source Host           : localhost:3306
8 |  Source Schema         : flink_explore
9 | 
10 |  Target Server Type    : MySQL
11 |  Target Server Version : 50726
12 |  File Encoding         : 65001
13 | 
14 |  Date: 08/05/2020 17:33:32
15 | */
16 | 
17 | SET NAMES utf8mb4;
18 | SET FOREIGN_KEY_CHECKS = 0;
19 | 
20 | -- ----------------------------
21 | -- Table structure for flink_cluster
22 | -- ----------------------------
23 | DROP TABLE IF EXISTS `flink_cluster`;
24 | CREATE TABLE `flink_cluster` (
25 |   `cluster_id` bigint(20) NOT NULL AUTO_INCREMENT,
26 |   `cluster_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
27 |   `cluster_desc` varchar(120) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL,
28 |   `job_manager_url` varchar(120) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
29 |   `job_manager_standby_url` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'comma separated',
30 |   `enabled_flag` tinyint(1) NULL DEFAULT 1,
31 |   `tenant_id` bigint(20) NULL DEFAULT 0 COMMENT 'tenant ID',
32 |   `object_version_number` bigint(20) NOT NULL DEFAULT 1 COMMENT 'version number',
33 |   `creation_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
34 |   `created_by` int(11) NOT NULL DEFAULT -1,
35 |   `last_updated_by` int(11) NOT NULL DEFAULT -1,
36 |   `last_update_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
37 |   PRIMARY KEY (`cluster_id`) USING BTREE
38 | ) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin ROW_FORMAT = Dynamic;
39 | 
40 | -- ----------------------------
41 | -- Table structure for flink_node
42 | -- ----------------------------
43 | DROP TABLE IF EXISTS `flink_node`;
44 | CREATE TABLE `flink_node` (
45 |   `node_id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'table ID, primary key, referenced by other tables as a foreign key',
46 |   `cluster_code` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'flink cluster code',
47 |   `node_type` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'master/slave',
48 |   `node_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'unique node code',
49 |   `node_desc` varchar(127) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'node description',
50 |   `setting_info` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'node setting info',
51 |   `tenant_id` bigint(20) NOT NULL DEFAULT 0 COMMENT 'tenant ID',
52 |   `enabled_flag` tinyint(1) NOT NULL DEFAULT 1 COMMENT 'enabled by default',
53 |   `object_version_number` bigint(20) NOT NULL DEFAULT 1 COMMENT 'row version number, used for optimistic locking',
54 |   `creation_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
55 |   `created_by` int(11) NOT NULL DEFAULT -1,
56 |   `last_updated_by` int(11) NOT NULL DEFAULT -1,
57 |   `last_update_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
58 |   PRIMARY KEY (`node_id`) USING BTREE,
59 |   UNIQUE INDEX `index_node_code_u1`(`node_code`, `tenant_id`) USING BTREE
60 | ) ENGINE = InnoDB AUTO_INCREMENT = 4 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin COMMENT = 'stream processing node table' ROW_FORMAT = Dynamic;
61 | 
62 | -- ----------------------------
63 | -- Table structure for flink_sql_job
64 | -- ----------------------------
65 | DROP TABLE IF EXISTS `flink_sql_job`;
66 | CREATE TABLE `flink_sql_job` (
67 |   `job_id` bigint(20) NOT NULL AUTO_INCREMENT,
68 |   `job_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'unique job code',
69 |   `cluster_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
70 |   `sql_upload_path` varchar(120) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'upload path of the sql file',
71 |   `content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'sql content',
72 |   `setting_info` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'extra settings for running the sql job, in json',
73 |   `job_status` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'status of the sql execution',
74 |   `errors` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'error log of the sql job',
75 |   `flink_job_id` varchar(60) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'job id returned by flink after the run',
76 |   `exec_jar_id` bigint(20) NULL DEFAULT NULL COMMENT 'defaults to the latest flink sql jar, a specific flink sql jar may also be set; updated asynchronously on execution',
77 |   `savepoint_path` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'savepoint path',
78 |   `tenant_id` bigint(20) NULL DEFAULT 0 COMMENT 'tenant ID',
79 |   `object_version_number` bigint(20) NOT NULL DEFAULT 1 COMMENT 'version number',
80 |   `creation_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
81 |   `created_by` int(11) NOT NULL DEFAULT -1,
82 |   `last_updated_by` int(11) NOT NULL DEFAULT -1,
83 |   `last_update_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
84 |   PRIMARY KEY (`job_id`) USING BTREE,
85 |   UNIQUE INDEX `index_job_code_u1`(`job_code`, `tenant_id`) USING BTREE
86 | ) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin ROW_FORMAT = Dynamic;
87 | 
88 | -- ----------------------------
89 | -- Table structure for flink_udf
90 | -- ----------------------------
91 | DROP TABLE IF EXISTS `flink_udf`;
92 | CREATE TABLE `flink_udf` (
93 |   `udf_id` bigint(20) NOT NULL AUTO_INCREMENT,
94 |   `udf_name` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'udf name',
95 |   `udf_desc` varchar(60) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'udf description',
96 |   `udf_type` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'jar/code',
97 |   `udf_jar_path` varchar(127) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'upload path of the udf jar',
98 |   `content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL COMMENT 'udf code, or the udf class name inside the jar',
99 |   `cluster_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'flink cluster code',
100 |   `udf_status` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'execution is async, so this column serves as the callback',
101 |   `tenant_id` bigint(20) NULL DEFAULT 0 COMMENT 'tenant ID',
102 |   `object_version_number` bigint(20) NOT NULL DEFAULT 1 COMMENT 'version number',
103 |   `creation_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
104 |   `created_by` int(11) NOT NULL DEFAULT -1,
105 |   `last_updated_by` int(11) NOT NULL DEFAULT -1,
106 |   `last_update_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
107 |   PRIMARY KEY (`udf_id`) USING BTREE,
108 |   UNIQUE INDEX `index_udf_name_u1`(`udf_name`, `tenant_id`) USING BTREE
109 | ) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin ROW_FORMAT = Dynamic;
110 | 
111 | -- ----------------------------
112 | -- Table structure for flink_upload_jar
113 | -- ----------------------------
114 | DROP TABLE IF EXISTS `flink_upload_jar`;
115 | CREATE TABLE `flink_upload_jar` (
116 |   `upload_jar_id` bigint(20) NOT NULL AUTO_INCREMENT,
117 |   `jar_desc` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL,
118 |   `jar_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'jar code; together with version it forms the unique key',
119 |   `version` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'jar version; together with jar_code it forms the unique key',
120 |   `cluster_code` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
121 |   `system_provided` tinyint(1) NOT NULL DEFAULT 0 COMMENT 'whether provided by the system (jars pre-uploaded by the platform for platform features)',
122 |   `entry_class` varchar(120) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'default entry class',
123 |   `filename` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'returned after upload; used as the jar_id for flink run jar',
124 |   `jar_name` varchar(120) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'the last segment of the filename returned after upload',
125 |   `status` varchar(30) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT 'returned after upload',
126 |   `tenant_id` bigint(20) NOT NULL DEFAULT 0 COMMENT 'tenant ID',
127 |   `object_version_number` bigint(20) NOT NULL DEFAULT 1 COMMENT 'row version number, used for optimistic locking',
128 |   `creation_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
129 |   `created_by` int(11) NOT NULL DEFAULT -1,
130 |   `last_updated_by` int(11) NOT NULL DEFAULT -1,
131 |   `last_update_date` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0),
132 |   PRIMARY KEY (`upload_jar_id`) USING BTREE,
133 |   UNIQUE INDEX `index_jar_u1`(`jar_code`, `version`, `tenant_id`) USING BTREE
134 | ) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_bin ROW_FORMAT = Dynamic;
135 | 
136 | SET FOREIGN_KEY_CHECKS = 1;
137 | 
--------------------------------------------------------------------------------