├── .gitignore ├── LICENSE.txt ├── README.md ├── pom.xml └── src ├── main ├── java │ └── com │ │ └── bluebreezecf │ │ └── tools │ │ └── sparkjobserver │ │ └── api │ │ ├── ISparkJobServerClient.java │ │ ├── ISparkJobServerClientConstants.java │ │ ├── SparkJobBaseInfo.java │ │ ├── SparkJobConfig.java │ │ ├── SparkJobInfo.java │ │ ├── SparkJobJarInfo.java │ │ ├── SparkJobResult.java │ │ ├── SparkJobServerClientException.java │ │ ├── SparkJobServerClientFactory.java │ │ └── SparkJobServerClientImpl.java └── resources │ └── log4j.properties └── test ├── java └── com │ └── bluebreezecf │ └── tools │ └── sparkjobserver │ └── api │ └── SparkJobServiceClientImplTest.java └── resources ├── input-SparkJobServiceClientImpTest.json └── job-server-tests-2.11-0.8.0-SNAPSHOT.jar /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .classpath 3 | .project 4 | .settings 5 | .idea/ 6 | spark-job-server-client.iml 7 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by the 14 | copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all other 17 | entities that control, are controlled by, or are under common control 18 | with that entity. 
For the purposes of this definition, "control" means 19 | (i) the power, direct or indirect, to cause the direction or management 20 | of such entity, whether by contract or otherwise, or (ii) ownership 21 | of fifty percent (50%) or more of the outstanding shares, or (iii) 22 | beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity exercising 25 | permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation source, 29 | and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical transformation 32 | or translation of a Source form, including but not limited to compiled 33 | object code, generated documentation, and conversions to other media 34 | types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a copyright 38 | notice that is included in or attached to the work (an example is provided 39 | in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object form, 42 | that is based on (or derived from) the Work and for which the editorial 43 | revisions, annotations, elaborations, or other modifications represent, 44 | as a whole, an original work of authorship. For the purposes of this 45 | License, Derivative Works shall not include works that remain separable 46 | from, or merely link (or bind by name) to the interfaces of, the Work 47 | and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including the 50 | original version of the Work and any modifications or additions to 51 | that Work or Derivative Works thereof, that is intentionally submitted 52 | to Licensor for inclusion in the Work by the copyright owner or by an 53 | individual or Legal Entity authorized to submit on behalf of the copyright 54 | owner. For the purposes of this definition, "submitted" means any form of 55 | electronic, verbal, or written communication sent to the Licensor or its 56 | representatives, including but not limited to communication on electronic 57 | mailing lists, source code control systems, and issue tracking systems 58 | that are managed by, or on behalf of, the Licensor for the purpose of 59 | discussing and improving the Work, but excluding communication that is 60 | conspicuously marked or otherwise designated in writing by the copyright 61 | owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 68 | Subject to the terms and conditions of this License, each Contributor 69 | hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, 70 | royalty-free, irrevocable copyright license to reproduce, prepare 71 | Derivative Works of, publicly display, publicly perform, sublicense, and 72 | distribute the Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
75 | Subject to the terms and conditions of this License, each Contributor 76 | hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, 77 | royalty- free, irrevocable (except as stated in this section) patent 78 | license to make, have made, use, offer to sell, sell, import, and 79 | otherwise transfer the Work, where such license applies only to those 80 | patent claims licensable by such Contributor that are necessarily 81 | infringed by their Contribution(s) alone or by combination of 82 | their Contribution(s) with the Work to which such Contribution(s) 83 | was submitted. If You institute patent litigation against any entity 84 | (including a cross-claim or counterclaim in a lawsuit) alleging that the 85 | Work or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses granted 87 | to You under this License for that Work shall terminate as of the date 88 | such litigation is filed. 89 | 90 | 4. Redistribution. 91 | You may reproduce and distribute copies of the Work or Derivative Works 92 | thereof in any medium, with or without modifications, and in Source or 93 | Object form, provided that You meet the following conditions: 94 | 95 | a. You must give any other recipients of the Work or Derivative Works 96 | a copy of this License; and 97 | 98 | b. You must cause any modified files to carry prominent notices stating 99 | that You changed the files; and 100 | 101 | c. You must retain, in the Source form of any Derivative Works that 102 | You distribute, all copyright, patent, trademark, and attribution 103 | notices from the Source form of the Work, excluding those notices 104 | that do not pertain to any part of the Derivative Works; and 105 | 106 | d. 
If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one of 111 | the following places: within a NOTICE text file distributed as part 112 | of the Derivative Works; within the Source form or documentation, 113 | if provided along with the Derivative Works; or, within a display 114 | generated by the Derivative Works, if and wherever such third-party 115 | notices normally appear. The contents of the NOTICE file are for 116 | informational purposes only and do not modify the License. You 117 | may add Your own attribution notices within Derivative Works that 118 | You distribute, alongside or as an addendum to the NOTICE text 119 | from the Work, provided that such additional attribution notices 120 | cannot be construed as modifying the License. You may add Your own 121 | copyright statement to Your modifications and may provide additional 122 | or different license terms and conditions for use, reproduction, or 123 | distribution of Your modifications, or for any such Derivative Works 124 | as a whole, provided Your use, reproduction, and distribution of the 125 | Work otherwise complies with the conditions stated in this License. 126 | 127 | 5. Submission of Contributions. 128 | Unless You explicitly state otherwise, any Contribution intentionally 129 | submitted for inclusion in the Work by You to the Licensor shall be 130 | under the terms and conditions of this License, without any additional 131 | terms or conditions. Notwithstanding the above, nothing herein shall 132 | supersede or modify the terms of any separate license agreement you may 133 | have executed with Licensor regarding such Contributions. 134 | 135 | 6. Trademarks. 
136 | This License does not grant permission to use the trade names, trademarks, 137 | service marks, or product names of the Licensor, except as required for 138 | reasonable and customary use in describing the origin of the Work and 139 | reproducing the content of the NOTICE file. 140 | 141 | 7. Disclaimer of Warranty. 142 | Unless required by applicable law or agreed to in writing, Licensor 143 | provides the Work (and each Contributor provides its Contributions) on 144 | an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 145 | express or implied, including, without limitation, any warranties or 146 | conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR 147 | A PARTICULAR PURPOSE. You are solely responsible for determining the 148 | appropriateness of using or redistributing the Work and assume any risks 149 | associated with Your exercise of permissions under this License. 150 | 151 | 8. Limitation of Liability. 152 | In no event and under no legal theory, whether in tort (including 153 | negligence), contract, or otherwise, unless required by applicable law 154 | (such as deliberate and grossly negligent acts) or agreed to in writing, 155 | shall any Contributor be liable to You for damages, including any direct, 156 | indirect, special, incidental, or consequential damages of any character 157 | arising as a result of this License or out of the use or inability to 158 | use the Work (including but not limited to damages for loss of goodwill, 159 | work stoppage, computer failure or malfunction, or any and all other 160 | commercial damages or losses), even if such Contributor has been advised 161 | of the possibility of such damages. 162 | 163 | 9. Accepting Warranty or Additional Liability. 
164 | While redistributing the Work or Derivative Works thereof, You may 165 | choose to offer, and charge a fee for, acceptance of support, warranty, 166 | indemnity, or other liability obligations and/or rights consistent with 167 | this License. However, in accepting such obligations, You may act only 168 | on Your own behalf and on Your sole responsibility, not on behalf of 169 | any other Contributor, and only if You agree to indemnify, defend, and 170 | hold each Contributor harmless for any liability incurred by, or claims 171 | asserted against, such Contributor by reason of your accepting any such 172 | warranty or additional liability. 173 | 174 | END OF TERMS AND CONDITIONS -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Spark-Job-Server-Client 2 | 3 | ### Background 4 | People usually use curl or HUE to upload a jar and run a Spark job in Spark Job Server. 5 | But the Spark Job Server officially only provides the REST APIs to upload a job jar and 6 | run a job; it doesn't offer a client library in any language. 7 | 8 | Now there is another option to communicate with Spark Job Server in Java, that is Spark-Job-Server-Client, the Java client of the Spark Job Server implementing the arranged REST APIs. 9 | 10 | Spark-Job-Server-Client is an open-source program of **com.bluebreezecf** under Apache License v2. It aims to make it easier for Java applications to use Spark. 11 | 12 | ### How to compile 13 | You can execute the following commands to compile this client: 14 | ```shell 15 | git clone https://github.com/bluebreezecf/SparkJobServerClient.git 16 | cd SparkJobServerClient 17 | mvn clean package 18 | ``` 19 | Then you can find `spark-job-server-client-1.0.0.jar` in SparkJobServerClient/target; it is the main jar of spark-job-server-client.
Besides, `spark-job-server-client-1.0.0-sources.jar`is the java source jar, and `spark-job-server-client-1.0.0-javadoc.jar` is the java doc api jar. 20 | 21 | ### How to set dependency 22 | Except the DIY way of getting the latest codes and compiling it, now the real and original **spark-job-server** has been published to [http://repo1.maven.org/maven2/](http://repo1.maven.org/maven2/com/bluebreezecf/spark-job-server-client/). So everyone, who has set the url as one of the remote maven repository, can download or get it via `mvn package` after setting the following dependency into your pom.xml. 23 | ```xml 24 | 25 | com.bluebreezecf 26 | spark-job-server-client 27 | 1.0.0 28 | 29 | ``` 30 | One can find all the releases of `spark-job-server-client` via [search.maven.org](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.bluebreezecf%22%20AND%20a%3A%22spark-job-server-client%22`). 31 | 32 | ### How to use 33 | The following sample codes shows how to use spark-job-server-client: 34 | 35 | ```java 36 | import java.io.File; 37 | import java.util.HashMap; 38 | import java.util.List; 39 | import java.util.Map; 40 | 41 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClient; 42 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClientConstants; 43 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobConfig; 44 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobInfo; 45 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobJarInfo; 46 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobResult; 47 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobServerClientException; 48 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobServerClientFactory; 49 | 50 | /** 51 | * A sample shows how to use spark-job-server-client. 
52 | * 53 | * @author bluebreezecf 54 | * @since 2014-09-16 55 | * 56 | */ 57 | public class SparkJobServerClientTest { 58 | 59 | public static void main(String[] args) { 60 | ISparkJobServerClient client = null; 61 | try { 62 | client = SparkJobServerClientFactory.getInstance().createSparkJobServerClient("http://localhost:8090/"); 63 | //GET /jars 64 | List jarInfos = client.getJars(); 65 | for (SparkJobJarInfo jarInfo: jarInfos) { 66 | System.out.println(jarInfo.toString()); 67 | } 68 | 69 | //POST /jars/ 70 | client.uploadSparkJobJar(new File("d:\\spark-examples_2.10-1.0.2.jar"), "spark-test"); 71 | 72 | //GET /contexts 73 | List contexts = client.getContexts(); 74 | System.out.println("Current contexts:"); 75 | for (String cxt: contexts) { 76 | System.out.println(cxt); 77 | } 78 | 79 | //POST /contexts/--Create context with name ctxTest and null parameter 80 | client.createContext("ctxTest", null); 81 | //POST /contexts/--Create context with parameters 82 | Map params = new HashMap(); 83 | params.put(ISparkJobServerClientConstants.PARAM_MEM_PER_NODE, "512m"); 84 | params.put(ISparkJobServerClientConstants.PARAM_NUM_CPU_CORES, "10"); 85 | client.createContext("cxtTest2", params); 86 | 87 | //DELETE /contexts/ 88 | client.deleteContext("ctxTest"); 89 | 90 | //GET /jobs 91 | List jobInfos = client.getJobs(); 92 | System.out.println("Current jobs:"); 93 | for (SparkJobInfo jobInfo: jobInfos) { 94 | System.out.println(jobInfo); 95 | } 96 | 97 | //Post /jobs---Create a new job 98 | params.put(ISparkJobServerClientConstants.PARAM_APP_NAME, "spark-test"); 99 | params.put(ISparkJobServerClientConstants.PARAM_CLASS_PATH, "spark.jobserver.WordCountExample"); 100 | //1.start a spark job asynchronously and just get the status information 101 | SparkJobResult result = client.startJob("input.string= fdsafd dfsf blullkfdsoflaw fsdfs", params); 102 | System.out.println(result); 103 | 104 | //2.start a spark job synchronously and wait until the result 105 | 
params.put(ISparkJobServerClientConstants.PARAM_CONTEXT, "cxtTest2"); 106 | params.put(ISparkJobServerClientConstants.PARAM_SYNC, "true"); 107 | result = client.startJob("input.string= fdsafd dfsf blullkfdsoflaw fsdffdsfsfs", params); 108 | System.out.println(result); 109 | 110 | //GET /jobs/---Gets the result or status of a specific job 111 | result = client.getJobResult("fdsfsfdfwfef"); 112 | System.out.println(result); 113 | 114 | //GET /jobs//config - Gets the job configuration 115 | SparkJobConfig jobConfig = client.getConfig("fdsfsfdfwfef"); 116 | System.out.println(jobConfig); 117 | } catch (SparkJobServerClientException e1) { 118 | e1.printStackTrace(); 119 | } catch (Exception e) { 120 | e.printStackTrace(); 121 | } 122 | } 123 | } 124 | ``` 125 | ### How to contribute 126 | Anyone interested in this program can do the following things: 127 | 1. `Fork` it to your own git repository. 128 | 2. Create a new branch for your feature via `git checkout -b your-new-feature`. 129 | 3. Add or modify new codes. 130 | 4. Commit the modifications through `git commit -am 'add your new feature'`. 131 | 5. Push the new branch by `git push origin your-new-feature`. 132 | 6. Create a new `pull request`. 
133 | 134 | Any questions and discussions can be added in [SparkJobServerClient/issues] (https://github.com/bluebreezecf/SparkJobServerClient/issues) 135 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4.0.0 3 | com.bluebreezecf 4 | spark-job-server-client 5 | 1.0.0 6 | ${project.groupId}:${project.artifactId} 7 | Java Lib for Spark Job Server Rest API 8 | https://github.com/bluebreezecf/SparkJobServerClient 9 | 10 | 11 | The Apache Software License, Version 2.0 12 | http://www.apache.org/licenses/LICENSE-2.0.txt 13 | repo 14 | 15 | 16 | 17 | 18 | bluebreezecf 19 | Jiawei Shi 20 | shijianwei2020@163.com 21 | 22 | 23 | 24 | scm:git:git://github.com/bluebreezecf/SparkJobServerClient 25 | scm:git:git://github.com/bluebreezecf/SparkJobServerClient 26 | https://github.com/bluebreezecf/SparkJobServerClient/tree/master 27 | 28 | 29 | UTF-8 30 | 31 | 32 | 33 | log4j 34 | log4j 35 | 1.2.14 36 | 37 | 38 | org.apache.httpcomponents 39 | httpclient 40 | 4.5 41 | 42 | 43 | org.apache.httpcomponents 44 | httpcore 45 | 4.4.1 46 | 47 | 48 | net.sf.json-lib 49 | json-lib 50 | 2.4 51 | jdk15 52 | 53 | 54 | commons-io 55 | commons-io 56 | 2.5 57 | 58 | 59 | commons-beanutils 60 | commons-beanutils 61 | 1.8.0 62 | 63 | 64 | commons-codec 65 | commons-codec 66 | 1.6 67 | 68 | 69 | commons-collections 70 | commons-collections 71 | 3.2.2 72 | 73 | 74 | commons-lang 75 | commons-lang 76 | 2.5 77 | 78 | 79 | commons-logging 80 | commons-logging 81 | 1.1.1 82 | 83 | 84 | net.sf.ezmorph 85 | ezmorph 86 | 1.0.6 87 | 88 | 89 | org.hamcrest 90 | hamcrest-all 91 | 1.3 92 | test 93 | 94 | 95 | junit 96 | junit 97 | 4.12 98 | test 99 | 100 | 101 | 102 | 103 | ossrh 104 | https://oss.sonatype.org/content/repositories/snapshots 105 | 106 | 107 | ossrh 108 | https://oss.sonatype.org/service/local/staging/deploy/maven2/ 109 | 110 | 111 | 112 | 113 | 114 | 
org.sonatype.plugins 115 | nexus-staging-maven-plugin 116 | 1.6.7 117 | true 118 | 119 | ossrh 120 | https://oss.sonatype.org/ 121 | true 122 | 123 | 124 | 125 | org.apache.maven.plugins 126 | maven-release-plugin 127 | 2.5.3 128 | 129 | true 130 | false 131 | release 132 | deploy 133 | 134 | 135 | 136 | org.apache.maven.plugins 137 | maven-source-plugin 138 | 3.0.1 139 | 140 | 141 | attach-sources 142 | 143 | jar 144 | 145 | 146 | 147 | 148 | 149 | org.apache.maven.plugins 150 | maven-javadoc-plugin 151 | 2.10.4 152 | 153 | 154 | attach-javadocs 155 | 156 | jar 157 | 158 | 159 | 160 | 161 | 162 | org.apache.maven.plugins 163 | maven-surefire-plugin 164 | 2.17 165 | 166 | true 167 | 168 | 169 | 170 | org.apache.maven.plugins 171 | maven-gpg-plugin 172 | 1.6 173 | 174 | 175 | sign-artifacts 176 | verify 177 | 178 | sign 179 | 180 | 181 | 182 | 183 | 184 | 185 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/ISparkJobServerClient.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | import java.io.File; 20 | import java.io.InputStream; 21 | import java.util.List; 22 | import java.util.Map; 23 | 24 | /** 25 | * A client implements all the Rest APIs described by the 26 | * Spark Job Server (https://github.com/ooyala/spark-jobserver). 27 | * 28 | * @author bluebreezecf 29 | * @since 2014-09-07 30 | */ 31 | public interface ISparkJobServerClient { 32 | /** 33 | * Lists all the information of jars for potential jobs to be running 34 | * in the Spark Cluster behind the Spark Job Server. 35 | * 36 | *

37 | * This method implements the Rest API 'GET /jars' of the 38 | * Spark Job Server. 39 | * 40 | * @return a list containing information of Spark Job jars 41 | * @throws SparkJobServerClientException error occurs when trying to get 42 | * information of spark job jars 43 | */ 44 | List getJars() throws SparkJobServerClientException; 45 | 46 | /** 47 | * Uploads a jar containing spark job to the Spark Job Server under 48 | * the given application name. 49 | * 50 | *

51 | * This method implements the Rest API 'POST /jars/<appName>' 52 | * of the Spark Job Server. 53 | * 54 | * @param jarData the instance of InputStream contains the 55 | * contents of the target jar file to be uploaded 56 | * @param appName the application name under which the related Spark Job 57 | * is about to run, meanwhile the application name also be the alias 58 | * name of the uploaded jar file. 59 | * @return true if the operation of uploading is successful, false otherwise 60 | * @throws SparkJobServerClientException if the given parameter jarData or 61 | * appName is null, or error occurs when uploading the related spark job 62 | * jar 63 | */ 64 | boolean uploadSparkJobJar(InputStream jarData, String appName) 65 | throws SparkJobServerClientException; 66 | 67 | /** 68 | * Uploads a jar containing spark job to the Spark Job Server under 69 | * the given application name. 70 | * 71 | *

72 | * This method implements the Rest API 'POST /jars/<appName>' 73 | * of the Spark Job Server. 74 | * 75 | * @param jarFile the jar file 76 | * @param appName the application name under which the related Spark Job 77 | * is about to run, meanwhile the application name also be the alias 78 | * name of the uploaded jar file. 79 | * @return true if the operation of uploading is successful, false otherwise 80 | * @throws SparkJobServerClientException if the given parameter jarData or 81 | * appName is null, or error occurs when uploading the related spark job 82 | * jar 83 | */ 84 | boolean uploadSparkJobJar(File jarFile, String appName) 85 | throws SparkJobServerClientException; 86 | 87 | /** 88 | * Lists all the contexts available in the Spark Job Server. 89 | * 90 | *

91 | * This method implements the Rest API 'GET /contexts ' 92 | * of the Spark Job Server. 93 | * 94 | * @return a list containing names of current contexts 95 | * @throws SparkJobServerClientException error occurs when trying to get 96 | * information of contexts 97 | */ 98 | List getContexts() throws SparkJobServerClientException; 99 | 100 | /** 101 | * Creates a new context in the Spark Job Server with the given context name. 102 | * 103 | *

104 | * This method implements the Rest API 'POST /contexts/<name>' 105 | * of the Spark Job Server. 106 | * 107 | * @param contextName the name of the new context to be created, it should be not null 108 | * and should begin with letter. 109 | * @param params a map containing the key-value pairs appended to appoint the context 110 | * settings if there is a need to configure the new created context, or null indicates 111 | * the new context with the default configuration 112 | * @return true if the operation of creating is successful, false it failed to create 113 | * the context because a context with the same name already exists 114 | * @throws SparkJobServerClientException when the given contextName is null or empty string, 115 | * or I/O error occurs while trying to create context in spark job server. 116 | */ 117 | boolean createContext(String contextName, Map params) throws SparkJobServerClientException; 118 | 119 | /** 120 | * Delete a context with the given name in the Spark Job Server. 121 | * All the jobs running in it will be stopped consequently. 122 | * 123 | *

124 | * This method implements the Rest API 'DELETE /contexts/<name>' 125 | * of the Spark Job Server. 126 | * 127 | * @param contextName the name of the target context to be deleted, it should be not null 128 | * and should begin with letter. 129 | * @return true if the operation of the deleting is successful, false otherwise 130 | * @throws SparkJobServerClientException when the given contextName is null or empty string, 131 | * or I/O error occurs while trying to delete context in spark job server. 132 | */ 133 | boolean deleteContext(String contextName) throws SparkJobServerClientException; 134 | 135 | /** 136 | * Lists the last N jobs in the Spark Job Server. 137 | * 138 | *

139 | * This method implements the Rest API 'GET /jobs' of the Spark 140 | * Job Server. 141 | * 142 | * @return a list containing information of the jobs 143 | * @throws SparkJobServerClientException error occurs when trying to get 144 | * information of jobs 145 | */ 146 | List getJobs() throws SparkJobServerClientException; 147 | 148 | /** 149 | * Lists the last N jobs in the Spark Job Server for the specified job status. 150 | * 151 | *

152 | * This method implements the Rest API 'GET /jobs?status=(RUNNING|ERROR|FINISHED| 153 | * STARTED|OK)' of the Spark 154 | * Job Server. 155 | *

156 | * @param jobStatus RUNNING OK ERROR FINISHED STARTED 157 | * @return a list containing information of the jobs for specified status 158 | * @throws SparkJobServerClientException error occurs when trying to get 159 | * information of jobs 160 | */ 161 | List getJobsByStatus(String jobStatus) throws SparkJobServerClientException; 162 | 163 | /** 164 | * Start a new job with the given parameters. 165 | * 166 | *

167 | * This method implements the Rest API 'POST /jobs' of the Spark 168 | * Job Server. 169 | * 170 | * @param data contains the the data processed by the target job. 171 | *

172 | * If it is null, it means the target spark job doesn't needs any data set 173 | * in the job configuration. 174 | *

175 | * If it is not null, the format should be like a key-value pair, such as 176 | * dataKey=dataValue, what the dataKey is determined by the 177 | * one used in the target spark job main class which is assigned by 178 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH. 179 | * @param params a non-null map containing parameters to start the job. 180 | * the key should be the following ones: 181 | * i. ISparkJobServerClientConstants.PARAM_APP_NAME, necessary 182 | * one and should be one of the existing name in the calling of GET /jars. 183 | * That means the appName is the alias name of the uploaded spark job jars. 184 | * 185 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH, necessary one 186 | * 187 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT, optional one 188 | * 189 | * iv.ISparkJobServerClientConstants.PARAM_SYNC, optional one 190 | * 191 | * @return the corresponding job status or job result 192 | * @throws SparkJobServerClientException the given parameters exist null or empty value, 193 | * or I/O error occurs when trying to start the new job 194 | */ 195 | SparkJobResult startJob(String data, Map params) throws SparkJobServerClientException; 196 | 197 | /** 198 | * Start a new job with the given parameters. 199 | * 200 | *

201 | * This method implements the Rest API 'POST /jobs' of the Spark 202 | * Job Server. 203 | * 204 | * @param dataFile contains the the data processed by the target job. 205 | *

206 | * If it is null, it means the target spark job doesn't needs any data set 207 | * in the job configuration. 208 | *

209 | * If it is not null, the format should be Typesafe Config Style, such as 210 | * json, properties file etc. See http://github.com/typesafehub/config 211 | * what the keys in the file are determined by the 212 | * one used in the target spark job main class which is assigned by 213 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH. 214 | * @param params a non-null map containing parameters to start the job. 215 | * the key should be the following ones: 216 | * i. ISparkJobServerClientConstants.PARAM_APP_NAME, necessary 217 | * one and should be one of the existing name in the calling of GET /jars. 218 | * That means the appName is the alias name of the uploaded spark job jars. 219 | * 220 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH, necessary one 221 | * 222 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT, optional one 223 | * 224 | * iv.ISparkJobServerClientConstants.PARAM_SYNC, optional one 225 | * 226 | * @return the corresponding job status or job result 227 | * @throws SparkJobServerClientException the given parameters exist null or empty value, 228 | * or I/O error occurs when trying to start the new job 229 | */ 230 | SparkJobResult startJob(File dataFile, Map params) throws SparkJobServerClientException; 231 | 232 | /** 233 | * Start a new job with the given parameters. 234 | * 235 | *

236 | * This method implements the Rest API 'POST /jobs' of the Spark 237 | * Job Server. 238 | * 239 | * @param dataFileStream contains the the data processed by the target job. 240 | *

241 | * If it is null, it means the target spark job doesn't needs any data set 242 | * in the job configuration. 243 | *

244 | * If it is not null, the format should be Typesafe Config Style, such as 245 | * json, properties file etc. See http://github.com/typesafehub/config 246 | * what the keys in the file are determined by the 247 | * one used in the target spark job main class which is assigned by 248 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH. 249 | * @param params a non-null map containing parameters to start the job. 250 | * the key should be the following ones: 251 | * i. ISparkJobServerClientConstants.PARAM_APP_NAME, necessary 252 | * one and should be one of the existing name in the calling of GET /jars. 253 | * That means the appName is the alias name of the uploaded spark job jars. 254 | * 255 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH, necessary one 256 | * 257 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT, optional one 258 | * 259 | * iv.ISparkJobServerClientConstants.PARAM_SYNC, optional one 260 | * 261 | * @return the corresponding job status or job result 262 | * @throws SparkJobServerClientException the given parameters exist null or empty value, 263 | * or I/O error occurs when trying to start the new job 264 | */ 265 | SparkJobResult startJob(InputStream dataFileStream, Map params) throws SparkJobServerClientException; 266 | 267 | /** 268 | * Gets the result or status of a specific job in the Spark Job Server. 269 | * 270 | *

271 | * This method implements the Rest API 'GET /jobs/<jobId>' 272 | * of the Spark Job Server. 273 | * 274 | * @param jobId the id of the target job 275 | * @return the corresponding SparkJobResult instance if the job 276 | * with the given jobId exists, or null if there is no corresponding job or 277 | * the target job has no result. 278 | * @throws SparkJobServerClientException error occurs when trying to get 279 | * information of the target job 280 | */ 281 | SparkJobResult getJobResult(String jobId) throws SparkJobServerClientException; 282 | 283 | /** 284 | * Gets the job configuration of a specific job. 285 | * 286 | *

287 | * This method implements the Rest API 'GET /jobs/<jobId>/config' 288 | * of the Spark Job Server. 289 | * 290 | * @param jobId the id of the target job 291 | * @return the corresponding SparkJobConfig instance if the job 292 | * with the given jobId exists, or null if there is no corresponding job in 293 | * the spark job server. 294 | * @throws SparkJobServerClientException error occurs when trying to get 295 | * information of the target job configuration 296 | */ 297 | SparkJobConfig getConfig(String jobId) throws SparkJobServerClientException; 298 | 299 | /** 300 | * Kill the specified job 301 | * 302 | *

303 | * This method implements the Rest API 'DELETE /jobs/<jobId>' of the Spark 304 | * Job Server. 305 | * 306 | * @param jobId the id of the target job. 307 | * 308 | * @return the corresponding killed job status or killed job result 309 | * @throws SparkJobServerClientException if failed to kill a job, 310 | * or I/O error occurs when trying to kill existing job 311 | */ 312 | boolean killJob(String jobId) throws SparkJobServerClientException; 313 | 314 | } 315 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/ISparkJobServerClientConstants.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | /** 20 | * Defines the constants used in public for 21 | * the spark job server client. 22 | * 23 | * @author bluebreezecf 24 | * @since 2014-09-11 25 | * 26 | */ 27 | public interface ISparkJobServerClientConstants { 28 | 29 | /** 30 | * Parameter used to create job context, indicates the executor 31 | * instances for current spark job context. 32 | *

/* ISparkJobServerClientConstants — reconstructed from a garbled text dump;
 * the partial interface header above is an extraction artifact superseded
 * by this self-contained definition. */

/**
 * Defines the constants used publicly by the Spark Job Server client:
 * the parameter keys accepted by {@code ISparkJobServerClient} methods
 * such as {@code startJob(...)}.
 *
 * @author bluebreezecf
 * @since 2014-09-11
 */
public interface ISparkJobServerClientConstants {

    /**
     * Parameter used to create a job context; indicates the number of
     * executor instances for the current Spark job context.
     * <p>
     * Its value is an integer.
     * <p>
     * NOTE(review): the original javadoc claimed this key is equivalent to
     * {@link #PARAM_SPARK_CORES_MAX}, but {@code spark.executor.instances}
     * controls the executor count while {@code spark.cores.max} caps the
     * total cores — related, not equivalent. Verify intended usage.
     */
    String PARAM_NUM_EXECUTORS = "spark.executor.instances";

    /**
     * Parameter used to create a job context; indicates the number of cpu
     * cores for the current Spark job context.
     * <p>
     * Its value is an integer.
     * <p>
     * It is equivalent to {@link #PARAM_SPARK_CORES_MAX}.
     */
    String PARAM_NUM_CPU_CORES = "num-cpu-cores";

    /**
     * Parameter used to create a job context; indicates the number of cpu
     * cores for the current Spark job context.
     * <p>
     * Its value is an integer.
     * <p>
     * It is equivalent to {@link #PARAM_NUM_CPU_CORES}.
     */
    String PARAM_SPARK_CORES_MAX = "spark.cores.max";

    /**
     * Parameter used to create a job context; indicates the amount of
     * memory for each Spark job executor.
     * <p>
     * Its value has the form 'xxxm', such as 512m, where xxx is an integer
     * and m means MB.
     * <p>
     * It is equivalent to {@link #PARAM_SPARK_EXECUTOR_MEMORY}.
     */
    String PARAM_MEM_PER_NODE = "mem-per-node";

    /**
     * Parameter used to create a job context; indicates the amount of
     * memory for each Spark job executor.
     * <p>
     * Its value has the form 'xxxm', such as 512m, where xxx is an integer
     * and m means MB.
     * <p>
     * It is equivalent to {@link #PARAM_MEM_PER_NODE}.
     */
    String PARAM_SPARK_EXECUTOR_MEMORY = "spark.executor.memory";

    /**
     * Necessary parameter used to assign an application name when trying
     * to start a new job. It should be one of the existing names returned
     * by the calling of GET /jars, i.e. the alias name of an uploaded
     * spark job jar.
     */
    String PARAM_APP_NAME = "appName";

    /**
     * Necessary parameter holding a fully qualified class name, such as
     * 'spark.jobserver.WordCountExample', used to assign the spark job
     * main class when trying to start a new job.
     */
    String PARAM_CLASS_PATH = "classPath";

    /**
     * Optional parameter naming an existing context (one listed by the
     * calling of GET /contexts) in which the newly created job will run.
     * <p>
     * When the new job should run synchronously and return its result,
     * this becomes a necessary parameter.
     */
    String PARAM_CONTEXT = "context";

    /**
     * Optional parameter used to create contexts using a context factory.
     */
    String PARAM_CONTEXT_TYPE = "context-factory";

    /**
     * Optional parameter, "true" or "false", indicating that the client
     * will wait for the created job to finish and fetch its result.
     * <p>
     * When the new job should run synchronously and return its result,
     * this becomes a necessary parameter.
     */
    String PARAM_SYNC = "sync";

    /**
     * Optional parameter: the number of seconds the client should wait
     * before timing out. Increasing this value may be required when using
     * sync on long jobs.
     * <p>
     * Its value is an integer.
     */
    String PARAM_TIMEOUT = "timeout";
}
/* SparkJobBaseInfo — reconstructed from a garbled text dump; the
 * license/javadoc fragment above is an extraction artifact. */

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * Acts as a base class holding the attributes shared by
 * {@code SparkJobInfo} and {@code SparkJobResult}, both of which mirror
 * json payloads returned by the Spark Job Server REST API.
 *
 * @author bluebreezecf
 * @since 2014-09-15
 */
class SparkJobBaseInfo {

    /** Placeholder rendered by toString() implementations when a field is unset. */
    static final String INFO_EMPTY_VALUE = "empty value";

    /**
     * Status value in a global job information (a SparkJobInfo instance)
     * or a job result/status information (a SparkJobResult instance).
     */
    static final String INFO_STATUS_ERROR = "ERROR";

    /** Status value in a global job information (a SparkJobInfo instance). */
    static final String INFO_STATUS_FINISHED = "FINISHED";

    /** Status value in a job status/result information (a SparkJobResult instance). */
    static final String INFO_STATUS_OK = "OK";

    /** Status value in a job status/result information (a SparkJobResult instance). */
    static final String INFO_STATUS_STARTED = "STARTED";

    /** Status value in a job status/result information (a SparkJobResult instance). */
    static final String INFO_STATUS_RUNNING = "RUNNING";

    /** Key of status information in the Spark Job Server's json response. */
    static final String INFO_KEY_STATUS = "status";

    /** Key of result information in the Spark Job Server's json response. */
    static final String INFO_KEY_RESULT = "result";

    /**
     * Key of message information of a result item in the Spark Job Server's
     * json response; presents the global description of the error.
     */
    static final String INFO_KEY_RESULT_MESSAGE = "message";

    /**
     * Key of error class information of a result item in the Spark Job
     * Server's json response; indicates the error class of the message.
     */
    static final String INFO_KEY_RESULT_ERROR_CLASS = "errorClass";

    /**
     * Key of stack information of a result item in the Spark Job Server's
     * json response; carries the java/scala exception stack.
     */
    static final String INFO_KEY_RESULT_STACK = "stack";

    /**
     * Key of context information of a result item in the Spark Job Server's
     * json response; its value is the context name.
     */
    static final String INFO_KEY_CONTEXT = "context";

    /**
     * Key of job id information of a result item in the Spark Job Server's
     * json response; identifies the target spark job.
     */
    static final String INFO_KEY_JOB_ID = "jobId";

    // Typed (was raw Set/HashSet): statuses meaning the job runs asynchronously.
    static final Set<String> ASYNC_STATUS =
            new HashSet<String>(Arrays.asList(INFO_STATUS_STARTED, INFO_STATUS_RUNNING));
    // Typed (was raw Set/HashSet): statuses meaning the job completed successfully.
    static final Set<String> COMPLETED =
            new HashSet<String>(Arrays.asList(INFO_STATUS_FINISHED, INFO_STATUS_OK));

    private String status;
    private String message;
    private String errorClass;
    private String[] stack;
    private String context;
    private String jobId;
    // Raw contents of the server response; populated by subclasses.
    protected String contents;

    public String getStatus() {
        return status;
    }
    void setStatus(String status) {
        this.status = status;
    }

    public String getMessage() {
        return message;
    }
    void setMessage(String message) {
        this.message = message;
    }

    public String getErrorClass() {
        return errorClass;
    }
    void setErrorClass(String errorClass) {
        this.errorClass = errorClass;
    }

    public String[] getStack() {
        return stack;
    }
    void setStack(String[] stack) {
        this.stack = stack;
    }

    public String getContext() {
        return context;
    }
    void setContext(String context) {
        this.context = context;
    }

    public String getJobId() {
        return jobId;
    }
    void setJobId(String jobId) {
        this.jobId = jobId;
    }
}
/*
 * Copyright 2014-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Presents the information of a spark job configuration, when calling
 * GET /jobs/&lt;jobId&gt;/config against a spark job server.
 * <p>
 * An application holding this SparkJobConfig instance should use
 * getConfigs() and parse the values itself.
 *
 * @author bluebreezecf
 * @since 2014-09-11
 */
public class SparkJobConfig {

    // Backing store for configuration items (was a raw HashMap).
    private Map<String, Object> configs = new HashMap<String, Object>();

    /**
     * Adds one configuration item; package-private because it is populated
     * by the client implementation while parsing the server response.
     *
     * @param key the configuration key
     * @param value the configuration value
     */
    void putConfigItem(String key, Object value) {
        this.configs.put(key, value);
    }

    /**
     * Gets all the configuration items.
     *
     * @return a defensive copy of the key-value pairs of configuration
     *         items; mutating the returned map does not affect this object
     */
    public Map<String, Object> getConfigs() {
        return new HashMap<String, Object>(this.configs);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        // StringBuilder instead of StringBuffer: no synchronization needed here.
        StringBuilder buff = new StringBuilder("SparkJobConfig\n{\n");
        Set<Entry<String, Object>> items = configs.entrySet();
        for (Entry<String, Object> item : items) {
            // append(Object) tolerates null values, unlike value.toString().
            buff.append("  ").append(item.getKey()).append(": ")
                .append(item.getValue()).append("\n");
        }
        buff.append("}");
        return buff.toString();
    }
}
15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | 20 | /** 21 | * Presents the information of spark job, when calling 22 | * GET /jobs to a spark job server. 23 | * 24 | * @author bluebreezecf 25 | * @since 2014-09-11 26 | * 27 | */ 28 | public class SparkJobInfo extends SparkJobBaseInfo { 29 | 30 | /** 31 | * Key of duration information in the Spark Job Server's json response. 32 | *

33 | * The value shows the execution time of the target spark job. 34 | */ 35 | static final String INFO_KEY_DURATION = "duration"; 36 | 37 | /** 38 | * Key of classPath information in the Spark Job Server's json response. 39 | *

40 | * The value shows the spark job main class which extends class of SparkJob. 41 | */ 42 | static final String INFO_KEY_CLASSPATH = "classPath"; 43 | 44 | /** 45 | * Key of startTime information in the Spark Job Server's json response. 46 | *

47 | * The value shows start time of the target spark job. 48 | */ 49 | static final String INFO_KEY_START_TIME = "startTime"; 50 | 51 | /** 52 | * Id of Context information in the Spark Job Server's json response. 53 | *

54 | * The value shows Unique Identification for the context of the target spark job. 55 | */ 56 | static final String INFO_CONTEXT_ID = "contextId"; 57 | 58 | private String duration; 59 | private String classPath; 60 | private String startTime; 61 | private String contextId; 62 | 63 | 64 | public String getDuration() { 65 | return duration; 66 | } 67 | void setDuration(String duration) { 68 | this.duration = duration; 69 | } 70 | public String getClassPath() { 71 | return classPath; 72 | } 73 | void setClassPath(String classPath) { 74 | this.classPath = classPath; 75 | } 76 | public String getStartTime() { 77 | return startTime; 78 | } 79 | void setStartTime(String startTime) { 80 | this.startTime = startTime; 81 | } 82 | public String getContextId() { 83 | return contextId; 84 | } 85 | void setContextId(String contextId) { 86 | this.contextId = contextId; 87 | } 88 | 89 | /** 90 | * {@inheritDoc} 91 | */ 92 | @Override 93 | public String toString() { 94 | StringBuffer buff = new StringBuffer("SparkJobInfo"); 95 | buff.append("{\n"); 96 | buff.append(" ").append(INFO_KEY_DURATION).append(": ") 97 | .append(this.getDuration() != null ? this.getDuration() : INFO_EMPTY_VALUE).append("\n") 98 | .append(" ").append(INFO_KEY_CLASSPATH).append(": ") 99 | .append(this.getClassPath() != null ? this.getClassPath() : INFO_EMPTY_VALUE).append("\n") 100 | .append(" ").append(INFO_KEY_START_TIME).append(": ") 101 | .append(this.getStartTime() != null ? this.getStartTime() : INFO_EMPTY_VALUE).append("\n") 102 | .append(" ").append(INFO_KEY_CONTEXT).append(": ") 103 | .append(this.getContext() != null ? this.getContext() : INFO_EMPTY_VALUE).append("\n") 104 | .append(" ").append(INFO_KEY_JOB_ID).append(": ") 105 | .append(this.getJobId() != null ? this.getJobId() : INFO_EMPTY_VALUE).append("\n") 106 | .append(" ").append(INFO_KEY_STATUS).append(": ") 107 | .append(this.getStatus() != null ? 
this.getStatus() : INFO_EMPTY_VALUE).append("\n") 108 | .append(" ").append(INFO_CONTEXT_ID).append(": ") 109 | .append(this.getContextId() != null ? this.getContextId() : INFO_EMPTY_VALUE).append("\n"); 110 | if (this.getMessage() != null) { 111 | buff.append(" ").append(INFO_KEY_RESULT).append(": {\n") 112 | .append(" ").append(INFO_KEY_RESULT_MESSAGE).append(": ").append(getMessage()).append("\n"); 113 | } 114 | if (this.getErrorClass() != null) { 115 | buff.append(" ").append(INFO_KEY_RESULT_ERROR_CLASS).append(": ").append(getErrorClass()).append("\n"); 116 | } 117 | if (this.getStack() != null) { 118 | buff.append(" ").append(INFO_KEY_RESULT_STACK).append(": ["); 119 | for (String stackItem : getStack()) { 120 | buff.append(" ").append(stackItem).append(",\n"); 121 | } 122 | buff.append(" ]"); 123 | } 124 | 125 | buff.append("}"); 126 | return buff.toString(); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobJarInfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | /** 20 | * Presents the information of spark job jar files, when 21 | * calling GET /jars to a spark job server. 
/* SparkJobJarInfo — reconstructed from a garbled text dump; the javadoc
 * fragment above is an extraction artifact superseded by this definition. */

/**
 * Presents the information of spark job jar files, when calling
 * GET /jars against a spark job server.
 *
 * @author bluebreezecf
 * @since 2014-09-11
 */
public class SparkJobJarInfo {
    // Placeholder rendered by toString() when a field is unset.
    private static final String INFO_EMPTY_VALUE = "empty value";
    // Json keys of the server response this bean mirrors.
    static final String INFO_KEY_JAR_NAME = "jarName";
    static final String INFO_KEY_UPLOADED_TIME = "uploadedTime";

    private String jarName;
    private String uploadedTime;

    /**
     * @return the alias name of the uploaded spark job jar, or null if unset
     */
    public String getJarName() {
        return jarName;
    }
    public void setJarName(String jarName) {
        this.jarName = jarName;
    }

    /**
     * @return the upload time reported by the server, or null if unset
     */
    public String getUploadedTime() {
        return uploadedTime;
    }
    public void setUploadedTime(String uploadedTime) {
        this.uploadedTime = uploadedTime;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        // StringBuilder instead of StringBuffer: no synchronization needed.
        StringBuilder buff = new StringBuilder("SparkJobJarInfo{\n");
        buff.append("  ").append(INFO_KEY_JAR_NAME).append(": ")
            .append(this.getJarName() != null ? this.getJarName() : INFO_EMPTY_VALUE).append(",\n");
        buff.append("  ").append(INFO_KEY_UPLOADED_TIME).append(": ")
            .append(this.getUploadedTime() != null ? this.getUploadedTime() : INFO_EMPTY_VALUE).append('\n');
        buff.append("}");
        return buff.toString();
    }
}
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | import java.util.HashMap; 20 | import java.util.Map; 21 | 22 | /** 23 | * Presents the information of spark job result, when calling 24 | * GET /jobs/<jobId> to a spark job server. 25 | * 26 | * @author bluebreezecf 27 | * @since 2014-09-15 28 | * 29 | */ 30 | public class SparkJobResult extends SparkJobBaseInfo { 31 | 32 | private String result; 33 | private Map extendAttributes = new HashMap(); 34 | 35 | SparkJobResult(String contents, String jobId) { 36 | this.contents = contents; 37 | setJobId(jobId); 38 | } 39 | 40 | SparkJobResult(String contents) { 41 | this(contents, null); 42 | } 43 | 44 | public String getResult() { 45 | return result; 46 | } 47 | 48 | void setResult(String result) { 49 | this.result = result; 50 | } 51 | 52 | void putExtendAttribute(String key, Object value) { 53 | this.extendAttributes.put(key, value); 54 | } 55 | 56 | public Map getExtendAttributes() { 57 | return new HashMap(this.extendAttributes); 58 | } 59 | 60 | /** 61 | * {@inheritDoc} 62 | */ 63 | @Override 64 | public String toString() { 65 | StringBuffer buff = new StringBuffer("SparkJobResult\n"); 66 | buff.append(contents); 67 | return buff.toString(); 68 | } 69 | 70 | /** 71 | * Judges current SparkJobResult instance represents the 72 | * status information of a asynchronous running spark job or not. 
73 | * 74 | * @return true indicates it contains asynchronous running status of a 75 | * spark job, false otherwise 76 | */ 77 | public boolean containsAsynStatus() { 78 | return SparkJobBaseInfo.ASYNC_STATUS.contains(getStatus()); 79 | } 80 | 81 | /** 82 | * Judges the queried target job doesn't exist or not. 83 | * 84 | * @return true indicates the related job doesn't exist, false otherwise 85 | */ 86 | public boolean jobNotExists() { 87 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(getStatus()) 88 | && getResult() != null && getResult().contains("No such job ID"); 89 | } 90 | 91 | /** 92 | * Judges current SparkJobResult instance contains 93 | * error information of a failed spark job or not. 94 | * 95 | * @return true indicates it contains error message, false otherwise 96 | */ 97 | public boolean containsErrorInfo() { 98 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(getStatus()) 99 | && getMessage() != null; 100 | } 101 | 102 | /** 103 | * Judges current SparkJobResult instance contains 104 | * custom-defined extend attributes of result or not 105 | * 106 | * @return true indicates it contains custom-defined extend attributes, false otherwise 107 | */ 108 | public boolean containsExtendAttributes() { 109 | return !extendAttributes.isEmpty(); 110 | } 111 | 112 | // /** 113 | // * Sets the status information of the asynchronous running job. 114 | // * 115 | // * @param buff the existing contents 116 | // */ 117 | // private void setAsynStatusInfo(StringBuffer buff) { 118 | // if (buff != null) { 119 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT).append(": {\n") 120 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_JOB_ID).append(": ").append(getJobId()).append(",\n") 121 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_CONTEXT).append(": ").append(getContext()).append(",\n") 122 | // .append(" }\n"); 123 | // } 124 | // } 125 | // 126 | // /** 127 | // * Sets the information for non-existence job. 
128 | // * 129 | // * @param buff the existing contents 130 | // */ 131 | // private void setNotExistsInfo(StringBuffer buff) { 132 | // if (buff != null) { 133 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT).append(": ") 134 | // .append(getResult()).append(",\n"); 135 | // } 136 | // } 137 | // 138 | // /** 139 | // * Sets the error information of the target failed job. 140 | // * 141 | // * @param buff the existing contents 142 | // */ 143 | // private void setErrorInfo(StringBuffer buff) { 144 | // if (buff != null) { 145 | // buff.append(" ").append(SparkJobBaseInfo.INFO_STATUS_ERROR).append(": {\n") 146 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_MESSAGE).append(": ").append(getMessage()).append(",\n"); 147 | // if (getErrorClass() != null) { 148 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_ERROR_CLASS).append(": ") 149 | // .append(getErrorClass()).append(",\n"); 150 | // } 151 | // if (getStack() != null && getStack().length > 0) { 152 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_STACK).append(": ["); 153 | // for (String stackItem : getStack()) { 154 | // buff.append(stackItem).append(", "); 155 | // } 156 | // buff.append("]\n"); 157 | // } 158 | // } 159 | // } 160 | // 161 | // /** 162 | // * Sets the information of the extend attributes. 
163 | // * 164 | // * @param buff the existing contents 165 | // */ 166 | // private void setExtendAttributesInfo(StringBuffer buff) { 167 | // if (buff != null) { 168 | // Set> contents = extendAttributes.entrySet(); 169 | // for (Entry item : contents) { 170 | // buff.append(" ").append(item.getKey()).append(": ") 171 | // .append(item.getValue().toString()).append(",\n"); 172 | // } 173 | // } 174 | // } 175 | } 176 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServerClientException.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | /** 20 | * The exception indicates errors occurs when using instance 21 | * of ISparkJobServerClient. 22 | * 23 | * @author bluebreezecf 24 | * @since 2014-09-07 25 | * 26 | */ 27 | public class SparkJobServerClientException extends Exception { 28 | 29 | private static final long serialVersionUID = -5065403696198358625L; 30 | 31 | /** 32 | * Constructs a new SparkJobServerClientException instance 33 | * with the specified detail message. The cause is not initialized, and 34 | * may subsequently be initialized by a call to initCause. 
35 | * 36 | * @param message the detail message. The detail message is saved for 37 | * later retrieval by the getMessage() method. 38 | */ 39 | public SparkJobServerClientException(String message) { 40 | super(message); 41 | } 42 | 43 | /** 44 | * Constructs a new SparkJobServerClientException instance 45 | * with the specified detail message and cause. 46 | * 47 | * @param message the detail message (which is saved for later retrieval 48 | * by the {@link #getMessage()} method). 49 | * @param cause the cause (which is saved for later retrieval by the 50 | * {@link #getCause()} method). (A null value is 51 | * permitted, and indicates that the cause is nonexistent or 52 | * unknown.) 53 | */ 54 | public SparkJobServerClientException(String message, Throwable cause) { 55 | super(message, cause); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServerClientFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | import java.net.MalformedURLException; 20 | import java.net.URL; 21 | import java.util.Map; 22 | import java.util.concurrent.ConcurrentHashMap; 23 | 24 | import org.apache.log4j.Logger; 25 | 26 | /** 27 | * The factory is responsible for creating instance of ISparkJobServerClient 28 | * to communicate with the Spark Job Server with the arranged rest apis. 29 | * 30 | * @author bluebreezecf 31 | * @since 2014-09-07 32 | * 33 | */ 34 | public final class SparkJobServerClientFactory { 35 | private static final SparkJobServerClientFactory INSTANCE = new SparkJobServerClientFactory(); 36 | 37 | private static Logger logger = Logger.getLogger(SparkJobServerClientFactory.class); 38 | 39 | private static Map jobServerClientCache 40 | = new ConcurrentHashMap(); 41 | 42 | /** 43 | * The default constructor of SparkJobServerClientFactory. 44 | */ 45 | private SparkJobServerClientFactory() { 46 | } 47 | 48 | /** 49 | * Gets the unique instance of SparkJobServerClientFactory. 50 | * @return the instance of SparkJobServerClientFactory 51 | */ 52 | public static SparkJobServerClientFactory getInstance() { 53 | return INSTANCE; 54 | } 55 | 56 | /** 57 | * Creates an instance of ISparkJobServerClient with the given url. 
58 | * 59 | * @param url the url of the target Spark Job Server 60 | * @return the corresponding ISparkJobServerClient instance 61 | * @throws SparkJobServerClientException error occurs when trying to create the 62 | * target spark job server client 63 | */ 64 | public ISparkJobServerClient createSparkJobServerClient(String url) 65 | throws SparkJobServerClientException { 66 | if (!isValidUrl(url)) { 67 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client."); 68 | } 69 | String sparkJobServerUrl = url.trim(); 70 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(sparkJobServerUrl); 71 | if (null == sparkJobServerClient) { 72 | sparkJobServerClient = new SparkJobServerClientImpl(url); 73 | jobServerClientCache.put(url, sparkJobServerClient); 74 | } 75 | return sparkJobServerClient; 76 | } 77 | 78 | /** 79 | * Creates an instance of ISparkJobServerClient with the given url, username and password. 80 | * 81 | * @param url the url of the target Spark Job Server 82 | * @param jobServerUsername the username for authentication of target Spark Job Server 83 | * @param jobServerPassword the password for authentication of the target Spark Job Server 84 | * @return the corresponding ISparkJobServerClient instance 85 | * @throws SparkJobServerClientException error occurs when trying to create the 86 | * target spark job server client 87 | */ 88 | public ISparkJobServerClient createSparkJobServerClient(String url, String jobServerUsername, 89 | String jobServerPassword) throws SparkJobServerClientException { 90 | if (!isValidUrl(url)) { 91 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client."); 92 | } 93 | if (jobServerUsername == null || jobServerUsername.isEmpty()) { 94 | throw new SparkJobServerClientException("Invalid username can't be null or empty."); 95 | } 96 | String sparkJobServerUrl = url.trim(); 97 | jobServerUsername = 
jobServerUsername.trim(); 98 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(sparkJobServerUrl + "_@_" + jobServerUsername + "_@_" + jobServerPassword); 99 | if (null == sparkJobServerClient) { 100 | sparkJobServerClient = new SparkJobServerClientImpl(url, jobServerUsername, jobServerPassword); 101 | jobServerClientCache.put(sparkJobServerUrl + "_@_" + jobServerUsername + "_@_" + jobServerPassword, sparkJobServerClient); 102 | } 103 | return sparkJobServerClient; 104 | } 105 | 106 | /** 107 | * Creates an instance of ISparkJobServerClient with the given 108 | * url, username and password and Http Connection, Request, Socket Timeouts 109 | * 110 | * @param url the url of the target Spark Job Server 111 | * @param jobServerUsername the username for authentication of target Spark Job Server 112 | * @param jobServerPassword the password for authentication of the target Spark Job Server 113 | * @param connectionTimeOut 114 | * @param connectionReqTimeOut 115 | * @param socketTimeOut 116 | * @return the corresponding ISparkJobServerClient instance 117 | * @throws SparkJobServerClientException error occurs when trying to create the 118 | * target spark job server client 119 | */ 120 | public ISparkJobServerClient createSparkJobServerClient(String url, String jobServerUsername, 121 | String jobServerPassword, Integer connectionTimeOut, 122 | Integer connectionReqTimeOut, 123 | Integer socketTimeOut) throws SparkJobServerClientException { 124 | if (!isValidUrl(url)) { 125 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client."); 126 | } 127 | if (jobServerUsername == null || jobServerUsername.isEmpty()) { 128 | throw new SparkJobServerClientException("Invalid username can't be null or empty."); 129 | } 130 | String sparkJobServerUrl = url.trim(); 131 | jobServerUsername = jobServerUsername.trim(); 132 | String cacheKey = sparkJobServerUrl + "_@_" + jobServerUsername 133 | + "_@_" + jobServerPassword 
+ "_@_" + connectionTimeOut + "_@_" + connectionReqTimeOut + 134 | "_@_" + socketTimeOut; 135 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(cacheKey); 136 | if (null == sparkJobServerClient) { 137 | sparkJobServerClient = new SparkJobServerClientImpl(url, jobServerUsername, jobServerPassword, 138 | connectionTimeOut, connectionReqTimeOut, socketTimeOut); 139 | jobServerClientCache.put(cacheKey, sparkJobServerClient); 140 | } 141 | return sparkJobServerClient; 142 | } 143 | 144 | /** 145 | * Checks the given url is valid or not. 146 | * 147 | * @param url the url to be checked 148 | * @return true if it is valid, false otherwise 149 | */ 150 | private boolean isValidUrl(String url) { 151 | if (url == null || url.trim().length() <= 0) { 152 | logger.error("The given url is null or empty."); 153 | return false; 154 | } 155 | try { 156 | new URL(url); 157 | } catch (MalformedURLException me) { 158 | StringBuffer buff = new StringBuffer("The given url "); 159 | buff.append(url).append(" is invalid."); 160 | logger.error(buff.toString(), me); 161 | } 162 | return true; 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServerClientImpl.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.bluebreezecf.tools.sparkjobserver.api;

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.Logger;


/**
 * The default client implementation of ISparkJobServerClient.
 * With the specific rest api, it can provide abilities to submit and manage
 * Apache Spark jobs, jars, and job contexts in the Spark Job Server.
57 | * 58 | * @author bluebreezecf 59 | * @since 2014-09-07 60 | * 61 | */ 62 | class SparkJobServerClientImpl implements ISparkJobServerClient { 63 | private static Logger logger = Logger.getLogger(SparkJobServerClientImpl.class); 64 | private static final int BUFFER_SIZE = 512 * 1024; 65 | private static final int DEFAULT_CONNECTION_TIMEOUT=60000; 66 | private static final int DEFAULT_SOCKET_TIMEOUT=60000; 67 | private static final int DEFAULT_REQUEST_TIMEOUT=60000; 68 | private String jobServerUrl; 69 | private String jobServerUsername; 70 | private String jobServerPassword; 71 | private Integer connectionTimeOut; 72 | private Integer connectionReqTimeOut; 73 | private Integer socketTimeOut; 74 | private static List INFO_JOBS_STATUS = Arrays.asList("OK", "STARTED", "RUNNING", "ACCEPTED", "ERROR"); 75 | 76 | /** 77 | * Constructs an instance of SparkJobServerClientImpl 78 | * with the given spark job server url. 79 | * 80 | * @param jobServerUrl a url pointing to a existing spark job server 81 | */ 82 | SparkJobServerClientImpl(String jobServerUrl) { 83 | if (!jobServerUrl.endsWith("/")) { 84 | jobServerUrl = jobServerUrl + "/"; 85 | } 86 | this.jobServerUrl = jobServerUrl; 87 | this.socketTimeOut = DEFAULT_SOCKET_TIMEOUT; 88 | this.connectionReqTimeOut = DEFAULT_REQUEST_TIMEOUT; 89 | this.connectionTimeOut = DEFAULT_CONNECTION_TIMEOUT; 90 | } 91 | 92 | /** 93 | * Constructs an instance of SparkJobServerClientImpl 94 | * with the given spark job server url. 
95 | * 96 | * @param jobServerUrl a url pointing to a existing spark job server 97 | * @param connectionTimeOut TimeOut in milliseconds for to establish Connection 98 | * @param connectionReqTimeOut TimeOut in milliseconds for a connection Request 99 | * @param socketTimeOut Timeout in milliseconds for Socket to transfer data 100 | */ 101 | SparkJobServerClientImpl(String jobServerUrl, Integer connectionTimeOut, Integer connectionReqTimeOut, Integer socketTimeOut) { 102 | if (!jobServerUrl.endsWith("/")) { 103 | jobServerUrl = jobServerUrl + "/"; 104 | } 105 | this.jobServerUrl = jobServerUrl; 106 | this.socketTimeOut = socketTimeOut == null ? DEFAULT_SOCKET_TIMEOUT : socketTimeOut; 107 | this.connectionReqTimeOut = connectionReqTimeOut == null ? DEFAULT_REQUEST_TIMEOUT : connectionReqTimeOut; 108 | this.connectionTimeOut = connectionTimeOut == null ? DEFAULT_CONNECTION_TIMEOUT : connectionTimeOut; 109 | } 110 | 111 | /** 112 | * Constructs an instance of SparkJobServerClientImpl 113 | * with the given spark job server url, username and password. 114 | * 115 | * @param jobServerUrl a url pointing to a existing spark job server 116 | * @param jobServerUsername a username to a existing spark job server 117 | * @param jobServerPassword a password to a existing spark job server 118 | */ 119 | SparkJobServerClientImpl(String jobServerUrl, String jobServerUsername, 120 | String jobServerPassword) { 121 | this(jobServerUrl); 122 | this.jobServerUsername = jobServerUsername; 123 | this.jobServerPassword = jobServerPassword; 124 | } 125 | 126 | /** 127 | * Constructs an instance of SparkJobServerClientImpl 128 | * with the given spark job server url, username and password with HTTP TimeOuts. 
129 | * 130 | * @param jobServerUrl a url pointing to a existing spark job server 131 | * @param jobServerUsername a username to a existing spark job server 132 | * @param jobServerPassword a password to a existing spark job server 133 | * @param connectionTimeOut TimeOut in milliseconds for to establish Connection 134 | * @param connectionReqTimeOut TimeOut in milliseconds for a connection Request 135 | * @param socketTimeOut Timeout in milliseconds for Socket to transfer data 136 | */ 137 | SparkJobServerClientImpl(String jobServerUrl, String jobServerUsername, 138 | String jobServerPassword, Integer connectionTimeOut, Integer connectionReqTimeOut, Integer socketTimeOut) { 139 | this(jobServerUrl, connectionTimeOut, connectionReqTimeOut, socketTimeOut); 140 | this.jobServerUsername = jobServerUsername; 141 | this.jobServerPassword = jobServerPassword; 142 | } 143 | 144 | /** 145 | * Constructs an instance of RequestConfig 146 | * with configured timeouts for the connection. 147 | */ 148 | private RequestConfig getRequestConfig() { 149 | RequestConfig.Builder requestConfig = RequestConfig.custom(); 150 | requestConfig.setConnectTimeout(this.connectionTimeOut); 151 | requestConfig.setConnectionRequestTimeout(this.connectionReqTimeOut); 152 | requestConfig.setSocketTimeout(this.socketTimeOut); 153 | return requestConfig.build(); 154 | } 155 | 156 | /** 157 | * Set Authorization Header in the given HttpRequestBase. 
158 | * 159 | * @param requestBase HttpRequestBase for the specified endPoint URL 160 | */ 161 | private void setAuthorization(HttpRequestBase requestBase) { 162 | String authHeader = getBasicAuthHeader(); 163 | if (authHeader != null) { 164 | requestBase.setHeader("Authorization", authHeader); 165 | } 166 | } 167 | 168 | /** 169 | * {@inheritDoc} 170 | */ 171 | public List getJars() 172 | throws SparkJobServerClientException { 173 | List sparkJobJarInfos = new ArrayList<>(); 174 | final CloseableHttpClient httpClient = buildClient(); 175 | try { 176 | HttpGet getMethod = new HttpGet(jobServerUrl + "jars"); 177 | getMethod.setConfig(getRequestConfig()); 178 | setAuthorization(getMethod); 179 | HttpResponse response = httpClient.execute(getMethod); 180 | int statusCode = response.getStatusLine().getStatusCode(); 181 | String resContent = getResponseContent(response.getEntity()); 182 | if (statusCode == HttpStatus.SC_OK) { 183 | JSONObject jsonObj = JSONObject.fromObject(resContent); 184 | Iterator keyIter = jsonObj.keys(); 185 | while (keyIter.hasNext()) { 186 | String jarName = (String)keyIter.next(); 187 | String uploadedTime = (String)jsonObj.get(jarName); 188 | SparkJobJarInfo sparkJobJarInfo = new SparkJobJarInfo(); 189 | sparkJobJarInfo.setJarName(jarName); 190 | sparkJobJarInfo.setUploadedTime(uploadedTime); 191 | sparkJobJarInfos.add(sparkJobJarInfo); 192 | } 193 | } else { 194 | logError(statusCode, resContent, true); 195 | } 196 | } catch (Exception e) { 197 | processException("Error occurs when trying to get information of jars:", e); 198 | } finally { 199 | close(httpClient); 200 | } 201 | return sparkJobJarInfos; 202 | } 203 | 204 | /** 205 | * {@inheritDoc} 206 | */ 207 | public boolean uploadSparkJobJar(InputStream jarData, String appName) 208 | throws SparkJobServerClientException { 209 | if (jarData == null || appName == null || appName.trim().length() == 0) { 210 | throw new SparkJobServerClientException("Invalid parameters."); 211 | } 212 | 
HttpPost postMethod = new HttpPost(jobServerUrl + "jars/" + appName); 213 | postMethod.setConfig(getRequestConfig()); 214 | setAuthorization(postMethod); 215 | final CloseableHttpClient httpClient = buildClient(); 216 | try { 217 | ByteArrayEntity entity = new ByteArrayEntity(IOUtils.toByteArray(jarData)); 218 | postMethod.setEntity(entity); 219 | entity.setContentType("application/java-archive"); 220 | HttpResponse response = httpClient.execute(postMethod); 221 | int statusCode = response.getStatusLine().getStatusCode(); 222 | getResponseContent(response.getEntity()); 223 | if (statusCode == HttpStatus.SC_OK) { 224 | return true; 225 | } 226 | } catch (Exception e) { 227 | logger.error("Error occurs when uploading spark job jars:", e); 228 | } finally { 229 | close(httpClient); 230 | closeStream(jarData); 231 | } 232 | return false; 233 | } 234 | 235 | /** 236 | * {@inheritDoc} 237 | */ 238 | public boolean uploadSparkJobJar(File jarFile, String appName) 239 | throws SparkJobServerClientException { 240 | if (jarFile == null 241 | || !jarFile.getName().endsWith(".jar") 242 | || !jarFile.getName().endsWith(".jar.original") 243 | || appName == null 244 | || appName.trim().length() == 0) 245 | { 246 | throw new SparkJobServerClientException("Invalid parameters."); 247 | } 248 | InputStream jarIn = null; 249 | try { 250 | jarIn = new FileInputStream(jarFile); 251 | } catch (FileNotFoundException fnfe) { 252 | String errorMsg = "Error occurs when getting stream of the given jar file"; 253 | logger.error(errorMsg, fnfe); 254 | throw new SparkJobServerClientException(errorMsg, fnfe); 255 | } 256 | return uploadSparkJobJar(jarIn, appName); 257 | } 258 | 259 | /** 260 | * {@inheritDoc} 261 | */ 262 | public List getContexts() throws SparkJobServerClientException { 263 | List contexts = new ArrayList(); 264 | final CloseableHttpClient httpClient = buildClient(); 265 | try { 266 | HttpGet getMethod = new HttpGet(jobServerUrl + "contexts"); 267 | 
getMethod.setConfig(getRequestConfig()); 268 | setAuthorization(getMethod); 269 | HttpResponse response = httpClient.execute(getMethod); 270 | int statusCode = response.getStatusLine().getStatusCode(); 271 | String resContent = getResponseContent(response.getEntity()); 272 | if (statusCode == HttpStatus.SC_OK) { 273 | JSONArray jsonArray = JSONArray.fromObject(resContent); 274 | Iterator iter = jsonArray.iterator(); 275 | while (iter.hasNext()) { 276 | contexts.add((String)iter.next()); 277 | } 278 | } else { 279 | logError(statusCode, resContent, true); 280 | } 281 | } catch (Exception e) { 282 | processException("Error occurs when trying to get information of contexts:", e); 283 | } finally { 284 | close(httpClient); 285 | } 286 | return contexts; 287 | } 288 | 289 | /** 290 | * {@inheritDoc} 291 | */ 292 | public boolean createContext(String contextName, Map params) 293 | throws SparkJobServerClientException { 294 | final CloseableHttpClient httpClient = buildClient(); 295 | try { 296 | //TODO add a check for the validation of contextName naming 297 | if (!isNotEmpty(contextName)) { 298 | throw new SparkJobServerClientException("The given contextName is null or empty."); 299 | } 300 | StringBuffer postUrlBuff = new StringBuffer(jobServerUrl); 301 | postUrlBuff.append("contexts/").append(contextName); 302 | if (params != null && !params.isEmpty()) { 303 | postUrlBuff.append('?'); 304 | int num = params.size(); 305 | for (String key : params.keySet()) { 306 | postUrlBuff.append(key).append('=').append(params.get(key)); 307 | num--; 308 | if (num > 0) { 309 | postUrlBuff.append('&'); 310 | } 311 | } 312 | 313 | } 314 | HttpPost postMethod = new HttpPost(postUrlBuff.toString()); 315 | postMethod.setConfig(getRequestConfig()); 316 | setAuthorization(postMethod); 317 | HttpResponse response = httpClient.execute(postMethod); 318 | int statusCode = response.getStatusLine().getStatusCode(); 319 | String resContent = getResponseContent(response.getEntity()); 320 | if 
(statusCode == HttpStatus.SC_OK) { 321 | return true; 322 | } else { 323 | logError(statusCode, resContent, false); 324 | } 325 | } catch (Exception e) { 326 | processException("Error occurs when trying to create a context:", e); 327 | } finally { 328 | close(httpClient); 329 | } 330 | return false; 331 | } 332 | 333 | /** 334 | * {@inheritDoc} 335 | */ 336 | public boolean deleteContext(String contextName) 337 | throws SparkJobServerClientException { 338 | final CloseableHttpClient httpClient = buildClient(); 339 | try { 340 | //TODO add a check for the validation of contextName naming 341 | if (!isNotEmpty(contextName)) { 342 | throw new SparkJobServerClientException("The given contextName is null or empty."); 343 | } 344 | StringBuffer postUrlBuff = new StringBuffer(jobServerUrl); 345 | postUrlBuff.append("contexts/").append(contextName); 346 | 347 | HttpDelete deleteMethod = new HttpDelete(postUrlBuff.toString()); 348 | deleteMethod.setConfig(getRequestConfig()); 349 | setAuthorization(deleteMethod); 350 | HttpResponse response = httpClient.execute(deleteMethod); 351 | int statusCode = response.getStatusLine().getStatusCode(); 352 | String resContent = getResponseContent(response.getEntity()); 353 | if (statusCode == HttpStatus.SC_OK) { 354 | return true; 355 | } else { 356 | logError(statusCode, resContent, false); 357 | } 358 | } catch (Exception e) { 359 | processException("Error occurs when trying to delete the target context:", e); 360 | } finally { 361 | close(httpClient); 362 | } 363 | return false; 364 | } 365 | 366 | /** 367 | * {@inheritDoc} 368 | */ 369 | public List getJobs() throws SparkJobServerClientException { 370 | List sparkJobInfos = new ArrayList(); 371 | final CloseableHttpClient httpClient = buildClient(); 372 | try { 373 | HttpGet getMethod = new HttpGet(jobServerUrl + "jobs"); 374 | getMethod.setConfig(getRequestConfig()); 375 | setAuthorization(getMethod); 376 | HttpResponse response = httpClient.execute(getMethod); 377 | int statusCode 
= response.getStatusLine().getStatusCode(); 378 | String resContent = getResponseContent(response.getEntity()); 379 | if (statusCode == HttpStatus.SC_OK) { 380 | JSONArray jsonArray = JSONArray.fromObject(resContent); 381 | Iterator iter = jsonArray.iterator(); 382 | while (iter.hasNext()) { 383 | JSONObject jsonObj = (JSONObject)iter.next(); 384 | SparkJobInfo jobInfo = createSparkJobInfo(jsonObj); 385 | sparkJobInfos.add(jobInfo); 386 | } 387 | } else { 388 | logError(statusCode, resContent, true); 389 | } 390 | } catch (Exception e) { 391 | processException("Error occurs when trying to get information of jobs:", e); 392 | } finally { 393 | close(httpClient); 394 | } 395 | return sparkJobInfos; 396 | } 397 | 398 | /** 399 | * {@inheritDoc} 400 | */ 401 | public List getJobsByStatus(String jobStatus) throws SparkJobServerClientException { 402 | if (!INFO_JOBS_STATUS.contains(jobStatus.toUpperCase())) { 403 | throw new SparkJobServerClientException("Invalid Job Status " + 404 | jobStatus + ". 
Supported Job Status : " + 405 | INFO_JOBS_STATUS.toString()); 406 | } 407 | List sparkJobInfos = new ArrayList(); 408 | final CloseableHttpClient httpClient = buildClient(); 409 | try { 410 | HttpGet getMethod = new HttpGet(jobServerUrl + "jobs?status=" + jobStatus); 411 | getMethod.setConfig(getRequestConfig()); 412 | setAuthorization(getMethod); 413 | HttpResponse response = httpClient.execute(getMethod); 414 | int statusCode = response.getStatusLine().getStatusCode(); 415 | String resContent = getResponseContent(response.getEntity()); 416 | if (statusCode == HttpStatus.SC_OK) { 417 | JSONArray jsonArray = JSONArray.fromObject(resContent); 418 | Iterator iter = jsonArray.iterator(); 419 | while (iter.hasNext()) { 420 | JSONObject jsonObj = (JSONObject)iter.next(); 421 | SparkJobInfo jobInfo = createSparkJobInfo(jsonObj); 422 | sparkJobInfos.add(jobInfo); 423 | } 424 | } else { 425 | logError(statusCode, resContent, true); 426 | } 427 | } catch (Exception e) { 428 | processException("Error occurs when trying to get information of jobs:", e); 429 | } finally { 430 | close(httpClient); 431 | } 432 | return sparkJobInfos; 433 | } 434 | 435 | /** 436 | * Constructs an instance of SparkJobInfo 437 | * from the given spark job JSON. 
438 | * 439 | * @param jsonObj spark job json returned by spark job server 440 | */ 441 | private SparkJobInfo createSparkJobInfo(JSONObject jsonObj) { 442 | SparkJobInfo toReturn = new SparkJobInfo(); 443 | toReturn.setDuration(jsonObj.getString(SparkJobInfo.INFO_KEY_DURATION)); 444 | toReturn.setClassPath(jsonObj.getString(SparkJobInfo.INFO_KEY_CLASSPATH)); 445 | toReturn.setStartTime(jsonObj.getString(SparkJobInfo.INFO_KEY_START_TIME)); 446 | toReturn.setContext(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_CONTEXT)); 447 | toReturn.setStatus(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS)); 448 | toReturn.setJobId(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_JOB_ID)); 449 | toReturn.setContextId(jsonObj.getString(SparkJobInfo.INFO_CONTEXT_ID)); 450 | setErrorDetails(SparkJobBaseInfo.INFO_KEY_RESULT, jsonObj, toReturn); 451 | return toReturn; 452 | } 453 | 454 | /** 455 | * {@inheritDoc} 456 | */ 457 | public SparkJobResult startJob(String data, Map params) throws SparkJobServerClientException { 458 | final CloseableHttpClient httpClient = buildClient(); 459 | try { 460 | if (params == null || params.isEmpty()) { 461 | throw new SparkJobServerClientException("The given params is null or empty."); 462 | } 463 | if (params.containsKey(ISparkJobServerClientConstants.PARAM_APP_NAME) && 464 | params.containsKey(ISparkJobServerClientConstants.PARAM_CLASS_PATH)) { 465 | StringBuffer postUrlBuff = new StringBuffer(jobServerUrl); 466 | postUrlBuff.append("jobs?"); 467 | int num = params.size(); 468 | for (String key : params.keySet()) { 469 | postUrlBuff.append(key).append('=').append(params.get(key)); 470 | num--; 471 | if (num > 0) { 472 | postUrlBuff.append('&'); 473 | } 474 | } 475 | HttpPost postMethod = new HttpPost(postUrlBuff.toString()); 476 | postMethod.setConfig(getRequestConfig()); 477 | setAuthorization(postMethod); 478 | if (data != null) { 479 | StringEntity strEntity = new StringEntity(data); 480 | strEntity.setContentEncoding("UTF-8"); 481 | 
strEntity.setContentType("text/plain"); 482 | postMethod.setEntity(strEntity); 483 | } 484 | 485 | HttpResponse response = httpClient.execute(postMethod); 486 | String resContent = getResponseContent(response.getEntity()); 487 | int statusCode = response.getStatusLine().getStatusCode(); 488 | if (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_ACCEPTED) { 489 | return parseResult(resContent); 490 | } else { 491 | logError(statusCode, resContent, true); 492 | } 493 | } else { 494 | throw new SparkJobServerClientException("The given params should contains appName and classPath"); 495 | } 496 | } catch (Exception e) { 497 | processException("Error occurs when trying to start a new job:", e); 498 | } finally { 499 | close(httpClient); 500 | } 501 | return null; 502 | } 503 | 504 | /** 505 | * {@inheritDoc} 506 | */ 507 | public SparkJobResult startJob(InputStream dataFileStream, Map params) throws SparkJobServerClientException { 508 | BufferedReader br = null; 509 | try { 510 | br = new BufferedReader(new InputStreamReader(dataFileStream)); 511 | String data = br.lines().collect(Collectors.joining(System.lineSeparator())); 512 | return startJob(data, params); 513 | } catch (Exception e) { 514 | processException("Error occurs when reading inputstream:", e); 515 | } finally { 516 | closeStream(br); 517 | } 518 | return null; 519 | } 520 | 521 | /** 522 | * {@inheritDoc} 523 | */ 524 | public SparkJobResult startJob(File dataFile, Map params) throws SparkJobServerClientException { 525 | InputStream dataFileStream = null; 526 | try { 527 | dataFileStream = new FileInputStream(dataFile); 528 | return startJob(dataFileStream, params); 529 | } catch (Exception e) { 530 | processException("Error occurs when reading file:", e); 531 | } finally { 532 | closeStream(dataFileStream); 533 | } 534 | return null; 535 | } 536 | 537 | /** 538 | * {@inheritDoc} 539 | */ 540 | public SparkJobResult getJobResult(String jobId) throws SparkJobServerClientException { 541 | final 
CloseableHttpClient httpClient = buildClient(); 542 | try { 543 | if (!isNotEmpty(jobId)) { 544 | throw new SparkJobServerClientException("The given jobId is null or empty."); 545 | } 546 | HttpGet getMethod = new HttpGet(jobServerUrl + "jobs/" + jobId); 547 | getMethod.setConfig(getRequestConfig()); 548 | setAuthorization(getMethod); 549 | HttpResponse response = httpClient.execute(getMethod); 550 | String resContent = getResponseContent(response.getEntity()); 551 | int statusCode = response.getStatusLine().getStatusCode(); 552 | if (statusCode == HttpStatus.SC_OK) { 553 | final SparkJobResult jobResult = parseResult(resContent); 554 | jobResult.setJobId(jobId); 555 | return jobResult; 556 | } else if (statusCode == HttpStatus.SC_NOT_FOUND) { 557 | return new SparkJobResult(resContent, jobId); 558 | } else { 559 | logError(statusCode, resContent, true); 560 | } 561 | } catch (Exception e) { 562 | processException("Error occurs when trying to get information of the target job:", e); 563 | } finally { 564 | close(httpClient); 565 | } 566 | return null; 567 | } 568 | 569 | /** 570 | * {@inheritDoc} 571 | */ 572 | public SparkJobConfig getConfig(String jobId) throws SparkJobServerClientException { 573 | final CloseableHttpClient httpClient = buildClient(); 574 | try { 575 | if (!isNotEmpty(jobId)) { 576 | throw new SparkJobServerClientException("The given jobId is null or empty."); 577 | } 578 | HttpGet getMethod = new HttpGet(jobServerUrl + "jobs/" + jobId + "/config"); 579 | getMethod.setConfig(getRequestConfig()); 580 | setAuthorization(getMethod); 581 | HttpResponse response = httpClient.execute(getMethod); 582 | String resContent = getResponseContent(response.getEntity()); 583 | JSONObject jsonObj = JSONObject.fromObject(resContent); 584 | SparkJobConfig jobConfg = new SparkJobConfig(); 585 | Iterator keyIter = jsonObj.keys(); 586 | while (keyIter.hasNext()) { 587 | String key = (String)keyIter.next(); 588 | jobConfg.putConfigItem(key, jsonObj.get(key)); 589 | } 
590 | return jobConfg; 591 | } catch (Exception e) { 592 | processException("Error occurs when trying to get information of the target job config:", e); 593 | } finally { 594 | close(httpClient); 595 | } 596 | return null; 597 | } 598 | 599 | @Override 600 | public boolean killJob(String jobId) throws SparkJobServerClientException { 601 | /*ISparkJobServerClient client = SparkJobServerClientFactory.getInstance().createSparkJobServerClient(this.jobServerUrl); 602 | */ 603 | final CloseableHttpClient httpClient = buildClient(); 604 | try { 605 | if (!isNotEmpty(jobId)) { 606 | throw new SparkJobServerClientException("The JobId cannot Null or empty."); 607 | } 608 | 609 | HttpDelete deleteMethod = new HttpDelete(this.jobServerUrl + "jobs/" + jobId); 610 | deleteMethod.setConfig(getRequestConfig()); 611 | setAuthorization(deleteMethod); 612 | HttpResponse response = httpClient.execute(deleteMethod); 613 | int statusCode = response.getStatusLine().getStatusCode(); 614 | String resContent = getResponseContent(response.getEntity()); 615 | if (statusCode == HttpStatus.SC_OK) { 616 | return true; 617 | } else { 618 | throw new SparkJobServerClientException("Error while killing the Job. Status code : "+statusCode+" Response : "+resContent); 619 | } 620 | } catch (Exception e) { 621 | processException("Error occured when trying to delete the target job:Message:"+e.getMessage(), e); 622 | } finally { 623 | close(httpClient); 624 | } 625 | return false; 626 | } 627 | 628 | /** 629 | * Gets the contents of the http response from the given HttpEntity 630 | * instance. 
631 | * 632 | * @param entity the HttpEntity instance holding the http response content 633 | * @return the corresponding response content 634 | */ 635 | protected String getResponseContent(HttpEntity entity) { 636 | byte[] buff = new byte[BUFFER_SIZE]; 637 | StringBuffer contents = new StringBuffer(); 638 | InputStream in = null; 639 | try { 640 | in = entity.getContent(); 641 | BufferedInputStream bis = new BufferedInputStream(in); 642 | int readBytes = 0; 643 | while ((readBytes = bis.read(buff)) != -1) { 644 | contents.append(new String(buff, 0, readBytes)); 645 | } 646 | } catch (Exception e) { 647 | logger.error("Error occurs when trying to reading response", e); 648 | } finally { 649 | closeStream(in); 650 | } 651 | return contents.toString().trim(); 652 | } 653 | 654 | /** 655 | * Closes the given stream. 656 | * 657 | * @param stream the input/output stream to be closed 658 | */ 659 | protected void closeStream(Closeable stream) { 660 | if (stream != null) { 661 | try { 662 | stream.close(); 663 | } catch (IOException ioe) { 664 | logger.error("Error occurs when trying to close the stream:", ioe); 665 | } 666 | } else { 667 | logger.error("The given stream is null"); 668 | } 669 | } 670 | 671 | /** 672 | * Handles the given exception with specific error message, and 673 | * generates a corresponding SparkJobServerClientException. 674 | * 675 | * @param errorMsg the corresponding error message 676 | * @param e the exception to be handled 677 | * @throws SparkJobServerClientException the corresponding transformed 678 | * SparkJobServerClientException instance 679 | */ 680 | protected void processException(String errorMsg, Exception e) throws SparkJobServerClientException { 681 | if (e instanceof SparkJobServerClientException) { 682 | throw (SparkJobServerClientException)e; 683 | } 684 | logger.error(errorMsg, e); 685 | throw new SparkJobServerClientException(errorMsg, e); 686 | } 687 | 688 | /** 689 | * Judges the given string value is not empty or not. 
690 | * 691 | * @param value the string value to be checked 692 | * @return true indicates it is not empty, false otherwise 693 | */ 694 | protected boolean isNotEmpty(String value) { 695 | return value != null && !value.isEmpty(); 696 | } 697 | 698 | /** 699 | * Logs the response information when the status is not 200 OK, 700 | * and throws an instance of SparkJobServerClientException. 701 | * 702 | * @param errorStatusCode error status code 703 | * @param msg the message to indicates the status, it can be null 704 | * @param throwable true indicates throws an instance of SparkJobServerClientException 705 | * with corresponding error message, false means only log the error message. 706 | * @throws SparkJobServerClientException containing the corresponding error message 707 | */ 708 | private void logError(int errorStatusCode, String msg, boolean throwable) throws SparkJobServerClientException { 709 | StringBuffer msgBuff = new StringBuffer("Spark Job Server "); 710 | msgBuff.append(jobServerUrl).append(" response ").append(errorStatusCode); 711 | if (null != msg) { 712 | msgBuff.append(" ").append(msg); 713 | } 714 | String errorMsg = msgBuff.toString(); 715 | logger.error(errorMsg); 716 | if (throwable) { 717 | throw new SparkJobServerClientException(errorMsg); 718 | } 719 | } 720 | 721 | /** 722 | * Sets the information of the error details. 
723 | * 724 | * @param key the key contains the error details 725 | * @param parnetJsonObj the parent JSONObject instance 726 | */ 727 | private void setErrorDetails(String key, JSONObject parnetJsonObj, SparkJobBaseInfo jobErrorInfo) { 728 | if (parnetJsonObj.containsKey(key)) { 729 | JSONObject resultJson = parnetJsonObj.getJSONObject(key); 730 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_MESSAGE)) { 731 | jobErrorInfo.setMessage(resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_MESSAGE)); 732 | } 733 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_ERROR_CLASS)) { 734 | jobErrorInfo.setErrorClass(resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_ERROR_CLASS)); 735 | } 736 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_STACK)) { 737 | if (resultJson.get(SparkJobInfo.INFO_KEY_RESULT_STACK) instanceof String) { 738 | String[] stack = { resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_STACK) }; 739 | jobErrorInfo.setStack(stack); 740 | 741 | } else { 742 | JSONArray stackJsonArray = resultJson.getJSONArray(SparkJobInfo.INFO_KEY_RESULT_STACK); 743 | String[] stack = new String[stackJsonArray.size()]; 744 | for (int i = 0; i < stackJsonArray.size(); i++) { 745 | stack[i] = stackJsonArray.optString(i); 746 | } 747 | jobErrorInfo.setStack(stack); 748 | } 749 | } 750 | } 751 | } 752 | 753 | /** 754 | * Generates an instance of SparkJobResult according to the given contents. 
755 | * 756 | * @param resContent the content of a http response 757 | * @return the corresponding SparkJobResult instance 758 | * @throws Exception error occurs when parsing the http response content 759 | */ 760 | private SparkJobResult parseResult(String resContent) throws Exception { 761 | JSONObject jsonObj = JSONObject.fromObject(resContent); 762 | SparkJobResult jobResult = new SparkJobResult(resContent); 763 | boolean completed = false; 764 | if(jsonObj.has(SparkJobBaseInfo.INFO_KEY_STATUS)) { 765 | jobResult.setStatus(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS)); 766 | if (SparkJobBaseInfo.COMPLETED.contains(jobResult.getStatus())) { 767 | completed = true; 768 | } 769 | } else { 770 | completed = true; 771 | } 772 | if (completed) { 773 | //Job finished with results 774 | jobResult.setResult(jsonObj.get(SparkJobBaseInfo.INFO_KEY_RESULT).toString()); 775 | } else if (containsAsynjobStatus(jsonObj)) { 776 | //asynchronously started job only with status information 777 | setAsynjobStatus(jobResult, jsonObj); 778 | } else if (containsErrorInfo(jsonObj)) { 779 | String errorKey = null; 780 | if (jsonObj.containsKey(SparkJobBaseInfo.INFO_STATUS_ERROR)) { 781 | errorKey = SparkJobBaseInfo.INFO_STATUS_ERROR; 782 | } else if (jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_RESULT)) { 783 | errorKey = SparkJobBaseInfo.INFO_KEY_RESULT; 784 | } 785 | //Job failed with error details 786 | setErrorDetails(errorKey, jsonObj, jobResult); 787 | } else { 788 | //Other unknown kind of value needs application to parse itself 789 | Iterator keyIter = jsonObj.keys(); 790 | while (keyIter.hasNext()) { 791 | String key = (String)keyIter.next(); 792 | if (SparkJobInfo.INFO_KEY_STATUS.equals(key)) { 793 | continue; 794 | } 795 | jobResult.putExtendAttribute(key, jsonObj.get(key)); 796 | } 797 | } 798 | return jobResult; 799 | } 800 | 801 | /** 802 | * Judges the given json object contains the error information of a 803 | * spark job or not. 
804 | * 805 | * @param jsonObj the JSONObject instance to be checked. 806 | * @return true if it contains the error information, false otherwise 807 | */ 808 | private boolean containsErrorInfo(JSONObject jsonObj) { 809 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS)); 810 | } 811 | 812 | 813 | /** 814 | * Judges the given json object contains the status information of a asynchronous 815 | * started spark job or not. 816 | * 817 | * @param jsonObj the JSONObject instance to be checked. 818 | * @return true if it contains the status information of a asynchronous 819 | * started spark job, false otherwise 820 | */ 821 | private boolean containsAsynjobStatus(JSONObject jsonObj) { 822 | return jsonObj != null && jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_STATUS) 823 | && SparkJobBaseInfo.INFO_STATUS_STARTED.equals(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS)) 824 | && jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_RESULT); 825 | } 826 | 827 | /** 828 | * Sets the status information of a asynchronous started spark job to the given 829 | * job result instance. 
830 | * 831 | * @param jobResult the SparkJobResult instance to be set the status information 832 | * @param jsonObj the JSONObject instance holds the status information 833 | */ 834 | private void setAsynjobStatus(SparkJobResult jobResult, JSONObject jsonObj) { 835 | JSONObject resultJsonObj = jsonObj.getJSONObject(SparkJobBaseInfo.INFO_KEY_RESULT); 836 | jobResult.setContext(resultJsonObj.getString(SparkJobBaseInfo.INFO_KEY_CONTEXT)); 837 | jobResult.setJobId(resultJsonObj.getString(SparkJobBaseInfo.INFO_KEY_JOB_ID)); 838 | } 839 | 840 | private CloseableHttpClient buildClient() { 841 | return HttpClientBuilder.create().build(); 842 | } 843 | 844 | private void close(final CloseableHttpClient client) { 845 | try { 846 | client.close(); 847 | } catch (final IOException e) { 848 | logger.error("could not close client" , e); 849 | } 850 | } 851 | 852 | /** 853 | * Gets the Basic Auth Header Value for Spark Job Server 854 | * The Value is 'Basic <username>:<password> with base64 encoding 855 | */ 856 | private String getBasicAuthHeader(){ 857 | if (isNotEmpty(this.jobServerUsername) && this.jobServerPassword != null){ 858 | return "Basic " + new String(Base64.getEncoder().encode((this.jobServerUsername + ":" + this.jobServerPassword).getBytes())); 859 | } 860 | return null; 861 | } 862 | } 863 | -------------------------------------------------------------------------------- /src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | log4j.rootLogger=ERROR, Console 2 | 3 | #Console 4 | log4j.appender.Console=org.apache.log4j.ConsoleAppender 5 | log4j.appender.Console.layout=org.apache.log4j.PatternLayout 6 | log4j.appender.Console.layout.ConversionPattern=%d [%t] %-5p [%c] - %m%n 7 | 8 | ###DEBUG,ERROR 9 | log4j.logger.java.sql.ResultSet=ERROR 10 | log4j.logger.org.apache=ERROR 11 | ###DEBUG,ERROR 12 | log4j.logger.java.sql.Connection=ERROR 13 | 14 | log4j.logger.java.sql.Statement=ERROR 15 | 
log4j.logger.java.sql.PreparedStatement=ERROR -------------------------------------------------------------------------------- /src/test/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServiceClientImplTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2014-2022 the original author or authors. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package com.bluebreezecf.tools.sparkjobserver.api; 18 | 19 | import org.junit.After; 20 | import org.junit.Before; 21 | import org.junit.Test; 22 | 23 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClient; 24 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClientConstants; 25 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobResult; 26 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobServerClientFactory; 27 | 28 | import java.io.File; 29 | import java.io.InputStream; 30 | import java.util.HashMap; 31 | import java.util.Map; 32 | import java.util.concurrent.TimeUnit; 33 | 34 | import static org.hamcrest.MatcherAssert.assertThat; 35 | import static org.hamcrest.Matchers.*; 36 | import static org.hamcrest.Matchers.is; 37 | 38 | /** 39 | * a test class for SparkJobServerClientImpl 40 | * @author yatshash 41 | * @since 2017-03-08 42 | */ 43 | public class SparkJobServiceClientImplTest { 44 | private static final String defaultSparkJobHost = 
"54.178.178.219"; 45 | private static final String defaultSparkJobPort = "8090"; 46 | private static String endpoint = String.format("http://%s:%s/", defaultSparkJobHost, defaultSparkJobPort); 47 | private ISparkJobServerClient client; 48 | private static final long POOLING_TIME_SEC = 1; 49 | 50 | @Before 51 | public void setUp() throws Exception { 52 | client = SparkJobServerClientFactory 53 | .getInstance() 54 | .createSparkJobServerClient(endpoint); 55 | } 56 | 57 | @After 58 | public void tearDown() throws Exception { 59 | 60 | } 61 | 62 | /** 63 | * test runJob with File resource 64 | * Warning: This test require deleting jar after test. 65 | * @throws Exception 66 | */ 67 | @Test 68 | public void testRunJobWithFile() throws Exception { 69 | InputStream jarFileStream = ClassLoader.getSystemResourceAsStream("./job-server-tests-2.11-0.8.0-SNAPSHOT.jar"); 70 | File inputData = new File(ClassLoader.getSystemResource("input-SparkJobServiceClientImpTest.json").toURI()); 71 | 72 | String appName = "runjob-with-file-test"; 73 | boolean isUploaded = client.uploadSparkJobJar(jarFileStream, appName); 74 | 75 | assertThat(isUploaded, is(true)); 76 | 77 | Map params = new HashMap(); 78 | params.put(ISparkJobServerClientConstants.PARAM_APP_NAME, appName); 79 | params.put(ISparkJobServerClientConstants.PARAM_CLASS_PATH, "spark.jobserver.WordCountExample"); 80 | 81 | 82 | SparkJobResult result = client.startJob(inputData, params); 83 | String status = result.getStatus(); 84 | 85 | assertThat(status, anyOf(is("STARTED"), is("FINISHED"))); 86 | 87 | String jobId; 88 | if (status.equals(SparkJobResult.INFO_STATUS_FINISHED)) 89 | { 90 | jobId = result.getJobId(); 91 | } else{ 92 | jobId = (String) result.getExtendAttributes().get("jobId"); 93 | } 94 | 95 | while (!result.getStatus().equals(SparkJobResult.INFO_STATUS_FINISHED) 96 | && !result.getStatus().equals(SparkJobResult.INFO_STATUS_ERROR)){ 97 | TimeUnit.SECONDS.sleep(POOLING_TIME_SEC); 98 | result = 
client.getJobResult(jobId); 99 | } 100 | 101 | assertThat(result.getResult(), is("{\"fdsafd\":1,\"a\":4,\"b\":1,\"dfsf\":1,\"c\":1}")); 102 | } 103 | 104 | 105 | /** 106 | * Warning: This test require deleting jar after test. 107 | * @throws Exception 108 | */ 109 | @Test 110 | public void testUploadJar() throws Exception { 111 | InputStream jarFileStream = ClassLoader.getSystemResourceAsStream("./job-server-tests-2.11-0.8.0-SNAPSHOT.jar"); 112 | 113 | String appName = "upload-jar-test"; 114 | boolean isUploaded = client.uploadSparkJobJar(jarFileStream, appName); 115 | 116 | assertThat(isUploaded, is(true)); 117 | 118 | Map params = new HashMap(); 119 | params.put(ISparkJobServerClientConstants.PARAM_APP_NAME, appName); 120 | params.put(ISparkJobServerClientConstants.PARAM_CLASS_PATH, "spark.jobserver.WordCountExample"); 121 | 122 | SparkJobResult result = client.startJob("input.string= fdsafd dfsf a b c a a a ", params); 123 | assertThat(result.getStatus(), anyOf(is("STARTED"), is("FINISHED"))); 124 | } 125 | 126 | } -------------------------------------------------------------------------------- /src/test/resources/input-SparkJobServiceClientImpTest.json: -------------------------------------------------------------------------------- 1 | { 2 | "input" : { 3 | "string" : "fdsafd dfsf a b c a a a" 4 | } 5 | } -------------------------------------------------------------------------------- /src/test/resources/job-server-tests-2.11-0.8.0-SNAPSHOT.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bluebreezecf/SparkJobServerClient/46f2d8d50ae406cbf93855eb99b88e0fe9003454/src/test/resources/job-server-tests-2.11-0.8.0-SNAPSHOT.jar --------------------------------------------------------------------------------