37 | * This method implements the Rest API
51 | * This method implements the Rest API
72 | * This method implements the Rest API
91 | * This method implements the Rest API
104 | * This method implements the Rest API
124 | * This method implements the Rest API
139 | * This method implements the Rest API
152 | * This method implements the Rest API
167 | * This method implements the Rest API
172 | * If it is null, it means the target spark job doesn't need any data set
173 | * in the job configuration.
174 | *
175 | * If it is not null, the format should be like a key-value pair, such as
176 | *
201 | * This method implements the Rest API
206 | * If it is null, it means the target spark job doesn't need any data set
207 | * in the job configuration.
208 | *
209 | * If it is not null, the format should be Typesafe Config Style, such as
210 | * json, properties file etc. See http://github.com/typesafehub/config
211 | * what the keys in the file are determined by the
212 | * one used in the target spark job main class which is assigned by
213 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH.
214 | * @param params a non-null map containing parameters to start the job.
215 | * the key should be the following ones:
216 | * i.
236 | * This method implements the Rest API
241 | * If it is null, it means the target spark job doesn't need any data set
242 | * in the job configuration.
243 | *
244 | * If it is not null, the format should be Typesafe Config Style, such as
245 | * json, properties file etc. See http://github.com/typesafehub/config
246 | * what the keys in the file are determined by the
247 | * one used in the target spark job main class which is assigned by
248 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH.
249 | * @param params a non-null map containing parameters to start the job.
250 | * the key should be the following ones:
251 | * i.
271 | * This method implements the Rest API
287 | * This method implements the Rest API
303 | * This method implements the Rest API
33 | * Its value is an integer.
34 | *
35 | * It is equivalent to
43 | * Its value is an integer.
44 | *
45 | * It is equivalent to
53 | * Its value is an integer.
54 | *
55 | * It is equivalent to
63 | * Its value is 'xxxm' such as 512m, where xxx is an integer, and m means MB.
64 | *
65 | * It is equivalent to
73 | * Its value is 'xxxm' such as 512m, where xxx is an integer, and m means MB.
74 | *
75 | * It is equivalent to
98 | * When the client wants the newly started job to run synchronously and to
99 | * get the result, it becomes a necessary parameter.
100 | */
101 | String PARAM_CONTEXT = "context";
102 |
103 | /**
104 | * It's an optional parameter which is used to create contexts using context
105 | * factory.
106 | */
107 | String PARAM_CONTEXT_TYPE = "context-factory";
108 |
109 | /**
110 | * It's an optional parameter which could be "true" or "false",
111 | * which is used to identify the client will wait for created
112 | * job 's finish and get the results.
113 | *
114 | *
115 | * When the client wants the newly started job to run synchronously and to
116 | * get the result, it becomes a necessary parameter.
117 | */
118 | String PARAM_SYNC = "sync";
119 |
120 | /**
121 | * It's an optional parameter which is the number of seconds the client
122 | * should wait before timing out. Increasing this value may be
123 | * required when using sync on long jobs.
124 | *
125 | * Its value is an integer.
126 | *
76 | * It presents the global description of the error
77 | */
78 | static final String INFO_KEY_RESULT_MESSAGE = "message";
79 |
80 | /**
81 | * Key of error class information of a result item in the Spark Job Server's json response.
82 | *
83 | * It indicates the error class of current error message
84 | */
85 | static final String INFO_KEY_RESULT_ERROR_CLASS = "errorClass";
86 |
87 | /**
88 | * Key of stack class information of a result item in the Spark Job Server's json response.
89 | *
90 | * It shows the information of java/scala exception stack
91 | */
92 | static final String INFO_KEY_RESULT_STACK = "stack";
93 |
94 | /**
95 | * Key of context information of a result item in the Spark Job Server's json response.
96 | *
97 | * It's the context name.
98 | */
99 | static final String INFO_KEY_CONTEXT = "context";
100 |
101 | /**
102 | * Key of context information of a result item in the Spark Job Server's json response.
103 | *
104 | * It shows the job id of the target spark job
105 | */
106 | static final String INFO_KEY_JOB_ID = "jobId";
107 |
108 | static final Set
30 | * The application used this
33 | * The value shows the execution time of the target spark job.
34 | */
35 | static final String INFO_KEY_DURATION = "duration";
36 |
37 | /**
38 | * Key of classPath information in the Spark Job Server's json response.
39 | *
40 | * The value shows the spark job main class which extends class of
47 | * The value shows start time of the target spark job.
48 | */
49 | static final String INFO_KEY_START_TIME = "startTime";
50 |
51 | /**
52 | * Id of Context information in the Spark Job Server's json response.
53 | *
54 | * The value shows Unique Identification for the context of the target spark job.
55 | */
56 | static final String INFO_CONTEXT_ID = "contextId";
57 |
58 | private String duration;
59 | private String classPath;
60 | private String startTime;
61 | private String contextId;
62 |
63 |
64 | public String getDuration() {
65 | return duration;
66 | }
67 | void setDuration(String duration) {
68 | this.duration = duration;
69 | }
70 | public String getClassPath() {
71 | return classPath;
72 | }
73 | void setClassPath(String classPath) {
74 | this.classPath = classPath;
75 | }
76 | public String getStartTime() {
77 | return startTime;
78 | }
79 | void setStartTime(String startTime) {
80 | this.startTime = startTime;
81 | }
82 | public String getContextId() {
83 | return contextId;
84 | }
85 | void setContextId(String contextId) {
86 | this.contextId = contextId;
87 | }
88 |
89 | /**
90 | * {@inheritDoc}
91 | */
92 | @Override
93 | public String toString() {
94 | StringBuffer buff = new StringBuffer("SparkJobInfo");
95 | buff.append("{\n");
96 | buff.append(" ").append(INFO_KEY_DURATION).append(": ")
97 | .append(this.getDuration() != null ? this.getDuration() : INFO_EMPTY_VALUE).append("\n")
98 | .append(" ").append(INFO_KEY_CLASSPATH).append(": ")
99 | .append(this.getClassPath() != null ? this.getClassPath() : INFO_EMPTY_VALUE).append("\n")
100 | .append(" ").append(INFO_KEY_START_TIME).append(": ")
101 | .append(this.getStartTime() != null ? this.getStartTime() : INFO_EMPTY_VALUE).append("\n")
102 | .append(" ").append(INFO_KEY_CONTEXT).append(": ")
103 | .append(this.getContext() != null ? this.getContext() : INFO_EMPTY_VALUE).append("\n")
104 | .append(" ").append(INFO_KEY_JOB_ID).append(": ")
105 | .append(this.getJobId() != null ? this.getJobId() : INFO_EMPTY_VALUE).append("\n")
106 | .append(" ").append(INFO_KEY_STATUS).append(": ")
107 | .append(this.getStatus() != null ? this.getStatus() : INFO_EMPTY_VALUE).append("\n")
108 | .append(" ").append(INFO_CONTEXT_ID).append(": ")
109 | .append(this.getContextId() != null ? this.getContextId() : INFO_EMPTY_VALUE).append("\n");
110 | if (this.getMessage() != null) {
111 | buff.append(" ").append(INFO_KEY_RESULT).append(": {\n")
112 | .append(" ").append(INFO_KEY_RESULT_MESSAGE).append(": ").append(getMessage()).append("\n");
113 | }
114 | if (this.getErrorClass() != null) {
115 | buff.append(" ").append(INFO_KEY_RESULT_ERROR_CLASS).append(": ").append(getErrorClass()).append("\n");
116 | }
117 | if (this.getStack() != null) {
118 | buff.append(" ").append(INFO_KEY_RESULT_STACK).append(": [");
119 | for (String stackItem : getStack()) {
120 | buff.append(" ").append(stackItem).append(",\n");
121 | }
122 | buff.append(" ]");
123 | }
124 |
125 | buff.append("}");
126 | return buff.toString();
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobJarInfo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
19 | /**
20 | * Presents the information of spark job jar files, when
21 | * calling 'GET /jars'
of the
38 | * Spark Job Server.
39 | *
40 | * @return a list containing information of Spark Job jars
41 | * @throws SparkJobServerClientException error occurs when trying to get
42 | * information of spark job jars
43 | */
44 | List'POST /jars/<appName>'
52 | * of the Spark Job Server.
53 | *
54 | * @param jarData the instance of InputStream
contains the
55 | * contents of the target jar file to be uploaded
56 | * @param appName the application name under which the related Spark Job
57 | * is about to run, meanwhile the application name also be the alias
58 | * name of the uploaded jar file.
59 | * @return true if the operation of uploading is successful, false otherwise
60 | * @throws SparkJobServerClientException if the given parameter jarData or
61 | * appName is null, or error occurs when uploading the related spark job
62 | * jar
63 | */
64 | boolean uploadSparkJobJar(InputStream jarData, String appName)
65 | throws SparkJobServerClientException;
66 |
67 | /**
68 | * Uploads a jar containing spark job to the Spark Job Server under
69 | * the given application name.
70 | *
71 | * 'POST /jars/<appName>'
73 | * of the Spark Job Server.
74 | *
75 | * @param jarFile the jar file
76 | * @param appName the application name under which the related Spark Job
77 | * is about to run, meanwhile the application name also be the alias
78 | * name of the uploaded jar file.
79 | * @return true if the operation of uploading is successful, false otherwise
80 | * @throws SparkJobServerClientException if the given parameter jarData or
81 | * appName is null, or error occurs when uploading the related spark job
82 | * jar
83 | */
84 | boolean uploadSparkJobJar(File jarFile, String appName)
85 | throws SparkJobServerClientException;
86 |
87 | /**
88 | * Lists all the contexts available in the Spark Job Server.
89 | *
90 | * 'GET /contexts '
92 | * of the Spark Job Server.
93 | *
94 | * @return a list containing names of current contexts
95 | * @throws SparkJobServerClientException error occurs when trying to get
96 | * information of contexts
97 | */
98 | List'POST /contexts/<name>'
105 | * of the Spark Job Server.
106 | *
107 | * @param contextName the name of the new context to be created, it should be not null
108 | * and should begin with letter.
109 | * @param params a map containing the key-value pairs appended to appoint the context
110 | * settings if there is a need to configure the new created context, or null indicates
111 | * the new context with the default configuration
112 | * @return true if the operation of creating is successful, false it failed to create
113 | * the context because a context with the same name already exists
114 | * @throws SparkJobServerClientException when the given contextName is null or empty string,
115 | * or I/O error occurs while trying to create context in spark job server.
116 | */
117 | boolean createContext(String contextName, Map'DELETE /contexts/<name>'
125 | * of the Spark Job Server.
126 | *
127 | * @param contextName the name of the target context to be deleted, it should be not null
128 | * and should begin with letter.
129 | * @return true if the operation of the deleting is successful, false otherwise
130 | * @throws SparkJobServerClientException when the given contextName is null or empty string,
131 | * or I/O error occurs while trying to delete context in spark job server.
132 | */
133 | boolean deleteContext(String contextName) throws SparkJobServerClientException;
134 |
135 | /**
136 | * Lists the last N jobs in the Spark Job Server.
137 | *
138 | * 'GET /jobs'
of the Spark
140 | * Job Server.
141 | *
142 | * @return a list containing information of the jobs
143 | * @throws SparkJobServerClientException error occurs when trying to get
144 | * information of jobs
145 | */
146 | List'GET /jobs?status=(RUNNING|ERROR|FINISHED|
153 | * STARTED|OK)'
of the Spark
154 | * Job Server.
155 | *'POST /jobs'
of the Spark
168 | * Job Server.
169 | *
170 | * @param data contains the the data processed by the target job.
171 | * dataKey=dataValue
, what the dataKey is determined by the
177 | * one used in the target spark job main class which is assigned by
178 | * ISparkJobServerClientConstants.PARAM_CLASS_PATH.
179 | * @param params a non-null map containing parameters to start the job.
180 | * the key should be the following ones:
181 | * i. ISparkJobServerClientConstants.PARAM_APP_NAME
, necessary
182 | * one and should be one of the existing name in the calling of GET /jars
.
183 | * That means the appName is the alias name of the uploaded spark job jars.
184 | *
185 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH
, necessary one
186 | *
187 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT
, optional one
188 | *
189 | * iv.ISparkJobServerClientConstants.PARAM_SYNC
, optional one
190 | *
191 | * @return the corresponding job status or job result
192 | * @throws SparkJobServerClientException the given parameters exist null or empty value,
193 | * or I/O error occurs when trying to start the new job
194 | */
195 | SparkJobResult startJob(String data, Map'POST /jobs'
of the Spark
202 | * Job Server.
203 | *
204 | * @param dataFile contains the the data processed by the target job.
205 | * ISparkJobServerClientConstants.PARAM_APP_NAME
, necessary
217 | * one and should be one of the existing name in the calling of GET /jars
.
218 | * That means the appName is the alias name of the uploaded spark job jars.
219 | *
220 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH
, necessary one
221 | *
222 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT
, optional one
223 | *
224 | * iv.ISparkJobServerClientConstants.PARAM_SYNC
, optional one
225 | *
226 | * @return the corresponding job status or job result
227 | * @throws SparkJobServerClientException the given parameters exist null or empty value,
228 | * or I/O error occurs when trying to start the new job
229 | */
230 | SparkJobResult startJob(File dataFile, Map'POST /jobs'
of the Spark
237 | * Job Server.
238 | *
239 | * @param dataFileStream contains the the data processed by the target job.
240 | * ISparkJobServerClientConstants.PARAM_APP_NAME
, necessary
252 | * one and should be one of the existing name in the calling of GET /jars
.
253 | * That means the appName is the alias name of the uploaded spark job jars.
254 | *
255 | * ii.ISparkJobServerClientConstants.PARAM_CLASS_PATH
, necessary one
256 | *
257 | * iii.ISparkJobServerClientConstants.PARAM_CONTEXT
, optional one
258 | *
259 | * iv.ISparkJobServerClientConstants.PARAM_SYNC
, optional one
260 | *
261 | * @return the corresponding job status or job result
262 | * @throws SparkJobServerClientException the given parameters exist null or empty value,
263 | * or I/O error occurs when trying to start the new job
264 | */
265 | SparkJobResult startJob(InputStream dataFileStream, Map'GET /jobs/<jobId>'
272 | * of the Spark Job Server.
273 | *
274 | * @param jobId the id of the target job
275 | * @return the corresponding SparkJobResult
instance if the job
276 | * with the given jobId exists, or null if there is no corresponding job or
277 | * the target job has no result.
278 | * @throws SparkJobServerClientException error occurs when trying to get
279 | * information of the target job
280 | */
281 | SparkJobResult getJobResult(String jobId) throws SparkJobServerClientException;
282 |
283 | /**
284 | * Gets the job configuration of a specific job.
285 | *
286 | * 'GET /jobs/<jobId>/config'
288 | * of the Spark Job Server.
289 | *
290 | * @param jobId the id of the target job
291 | * @return the corresponding SparkJobConfig
instance if the job
292 | * with the given jobId exists, or null if there is no corresponding job in
293 | * the spark job server.
294 | * @throws SparkJobServerClientException error occurs when trying to get
295 | * information of the target job configuration
296 | */
297 | SparkJobConfig getConfig(String jobId) throws SparkJobServerClientException;
298 |
299 | /**
300 | * Kill the specified job
301 | *
302 | * 'DELETE /jobs/<jobId>'
of the Spark
304 | * Job Server.
305 | *
306 | * @param jobId the id of the target job.
307 | *
308 | * @return the corresponding killed job status or killed job result
309 | * @throws SparkJobServerClientException if failed to kill a job,
310 | * or I/O error occurs when trying to kill existing job
311 | */
312 | boolean killJob(String jobId) throws SparkJobServerClientException;
313 |
314 | }
315 |
--------------------------------------------------------------------------------
/src/main/java/com/bluebreezecf/tools/sparkjobserver/api/ISparkJobServerClientConstants.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
19 | /**
20 | * Defines the constants used in public for
21 | * the spark job server client.
22 | *
23 | * @author bluebreezecf
24 | * @since 2014-09-11
25 | *
26 | */
27 | public interface ISparkJobServerClientConstants {
28 |
29 | /**
30 | * Parameter used to create job context, indicates the executor
31 | * instances for current spark job context.
32 | * PARAM_SPARK_CORES_MAX
.
36 | */
37 | String PARAM_NUM_EXECUTORS = "spark.executor.instances";
38 |
39 | /**
40 | * Parameter used to create job context, indicates the number of cpu cores
41 | * for current spark job context.
42 | * PARAM_SPARK_CORES_MAX
.
46 | */
47 | String PARAM_NUM_CPU_CORES = "num-cpu-cores";
48 |
49 | /**
50 | * Parameter used to create job context, indicates the number of cpu cores
51 | * for current spark job context.
52 | * PARAM_NUM_CPU_CORES
.
56 | */
57 | String PARAM_SPARK_CORES_MAX = "spark.cores.max";
58 |
59 | /**
60 | * Parameter used to create job context, indicates the amount of memory
61 | * for each spark job executor.
62 | * PARAM_SPARK_EXECUTOR_MEMORY
.
66 | */
67 | String PARAM_MEM_PER_NODE = "mem-per-node";
68 |
69 | /**
70 | * Parameter used to create job context, indicates the amount of memory
71 | * for each spark job executor.
72 | * PARAM_MEM_PER_NODE
.
76 | */
77 | String PARAM_SPARK_EXECUTOR_MEMORY = "spark.executor.memory";
78 |
79 | /**
80 | * It's a necessary parameter used to assign a application name when
81 | * trying to start a new job.
82 | */
83 | String PARAM_APP_NAME = "appName";
84 |
85 | /**
86 | * It's a necessary parameter which is a full qualified class name such as
87 | * 'spark.jobserver.WorkCountExample'
, used to assign a spark
88 | * job main class when trying to start a new job.
89 | */
90 | String PARAM_CLASS_PATH = "classPath";
91 |
92 | /**
93 | * It's an optional parameter which is an existing context in the calling of
94 | * GET /contexts
, used to assign a context in which the new
95 | * created job will run.
96 | *
97 | * SparkJobInfo
25 | * and SparkJobResult
.
26 | *
27 | * @author bluebreezecf
28 | * @since 2014-09-15
29 | *
30 | */
31 | class SparkJobBaseInfo {
32 | static final String INFO_EMPTY_VALUE = "empty value";
33 |
34 | /**
35 | * Status value in a global job information (a SparkJobInfo
36 | * instance) or a job result/status information
37 | * (a SparkJobResult
instance)
38 | */
39 | static final String INFO_STATUS_ERROR = "ERROR";
40 |
41 | /**
42 | * Status value in a global job information (a SparkJobInfo
43 | * instance)
44 | */
45 | static final String INFO_STATUS_FINISHED = "FINISHED";
46 |
47 | /**
48 | * Status value in a job status/result information (a SparkJobResult
49 | * instance)
50 | */
51 | static final String INFO_STATUS_OK = "OK";
52 |
53 | /**
54 | * Status value in a job status/result information (a SparkJobResult
instance)
55 | */
56 | static final String INFO_STATUS_STARTED = "STARTED";
57 |
58 | /**
59 | * Status value in a job status/result information (a SparkJobResult
instance)
60 | */
61 | static final String INFO_STATUS_RUNNING = "RUNNING";
62 |
63 | /**
64 | * Key of status information in the Spark Job Server's json response.
65 | */
66 | static final String INFO_KEY_STATUS = "status";
67 |
68 | /**
69 | * Key of result information in the Spark Job Server's json response.
70 | */
71 | static final String INFO_KEY_RESULT = "result";
72 |
73 | /**
74 | * Key of message information of a result item in the Spark Job Server's json response.
75 | * GET /jobs/<jobId>/config
to a
27 | * spark job server.
28 | *
29 | * SparkJobConfig
instance
31 | * should use the getConfigs()
and parse the values
32 | * itself.
33 | *
34 | * @author bluebreezecf
35 | * @since 2014-09-11
36 | *
37 | */
38 | public class SparkJobConfig {
39 | private MapGET /jobs
to a spark job server.
23 | *
24 | * @author bluebreezecf
25 | * @since 2014-09-11
26 | *
27 | */
28 | public class SparkJobInfo extends SparkJobBaseInfo {
29 |
30 | /**
31 | * Key of duration information in the Spark Job Server's json response.
32 | * SparkJob
.
41 | */
42 | static final String INFO_KEY_CLASSPATH = "classPath";
43 |
44 | /**
45 | * Key of startTime information in the Spark Job Server's json response.
46 | * GET /jars
to a spark job server.
22 | *
23 | * @author bluebreezecf
24 | * @since 2014-09-11
25 | *
26 | */
27 | public class SparkJobJarInfo {
28 | private static final String INFO_EMPTY_VALUE = "empty value";
29 | static final String INFO_KEY_JAR_NAME = "jarName";
30 | static final String INFO_KEY_UPLOADED_TIME = "uploadedTime";
31 |
32 | private String jarName;
33 | private String uploadedTime;
34 | public String getJarName() {
35 | return jarName;
36 | }
37 | public void setJarName(String jarName) {
38 | this.jarName = jarName;
39 | }
40 | public String getUploadedTime() {
41 | return uploadedTime;
42 | }
43 | public void setUploadedTime(String uploadedTime) {
44 | this.uploadedTime = uploadedTime;
45 | }
46 |
47 | /**
48 | * {@inheritDoc}
49 | */
50 | @Override
51 | public String toString() {
52 | StringBuffer buff = new StringBuffer("SparkJobJarInfo{\n");
53 | buff.append(" ").append(INFO_KEY_JAR_NAME).append(": ")
54 | .append(this.getJarName() != null ? this.getJarName() : INFO_EMPTY_VALUE).append(",\n");
55 | buff.append(" ").append(INFO_KEY_UPLOADED_TIME).append(": ")
56 | .append(this.getUploadedTime() != null ? this.getUploadedTime() : INFO_EMPTY_VALUE).append('\n');
57 | buff.append("}");
58 | return buff.toString();
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobResult.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
19 | import java.util.HashMap;
20 | import java.util.Map;
21 |
22 | /**
23 | * Presents the information of spark job result, when calling
24 | * GET /jobs/<jobId>
to a spark job server.
25 | *
26 | * @author bluebreezecf
27 | * @since 2014-09-15
28 | *
29 | */
30 | public class SparkJobResult extends SparkJobBaseInfo {
31 |
32 | private String result;
33 | private MapSparkJobResult
instance represents the
72 | * status information of a asynchronous running spark job or not.
73 | *
74 | * @return true indicates it contains asynchronous running status of a
75 | * spark job, false otherwise
76 | */
77 | public boolean containsAsynStatus() {
78 | return SparkJobBaseInfo.ASYNC_STATUS.contains(getStatus());
79 | }
80 |
81 | /**
82 | * Judges the queried target job doesn't exist or not.
83 | *
84 | * @return true indicates the related job doesn't exist, false otherwise
85 | */
86 | public boolean jobNotExists() {
87 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(getStatus())
88 | && getResult() != null && getResult().contains("No such job ID");
89 | }
90 |
91 | /**
92 | * Judges current SparkJobResult
instance contains
93 | * error information of a failed spark job or not.
94 | *
95 | * @return true indicates it contains error message, false otherwise
96 | */
97 | public boolean containsErrorInfo() {
98 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(getStatus())
99 | && getMessage() != null;
100 | }
101 |
102 | /**
103 | * Judges current SparkJobResult
instance contains
104 | * custom-defined extend attributes of result or not
105 | *
106 | * @return true indicates it contains custom-defined extend attributes, false otherwise
107 | */
108 | public boolean containsExtendAttributes() {
109 | return !extendAttributes.isEmpty();
110 | }
111 |
112 | // /**
113 | // * Sets the status information of the asynchronous running job.
114 | // *
115 | // * @param buff the existing contents
116 | // */
117 | // private void setAsynStatusInfo(StringBuffer buff) {
118 | // if (buff != null) {
119 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT).append(": {\n")
120 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_JOB_ID).append(": ").append(getJobId()).append(",\n")
121 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_CONTEXT).append(": ").append(getContext()).append(",\n")
122 | // .append(" }\n");
123 | // }
124 | // }
125 | //
126 | // /**
127 | // * Sets the information for non-existence job.
128 | // *
129 | // * @param buff the existing contents
130 | // */
131 | // private void setNotExistsInfo(StringBuffer buff) {
132 | // if (buff != null) {
133 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT).append(": ")
134 | // .append(getResult()).append(",\n");
135 | // }
136 | // }
137 | //
138 | // /**
139 | // * Sets the error information of the target failed job.
140 | // *
141 | // * @param buff the existing contents
142 | // */
143 | // private void setErrorInfo(StringBuffer buff) {
144 | // if (buff != null) {
145 | // buff.append(" ").append(SparkJobBaseInfo.INFO_STATUS_ERROR).append(": {\n")
146 | // .append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_MESSAGE).append(": ").append(getMessage()).append(",\n");
147 | // if (getErrorClass() != null) {
148 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_ERROR_CLASS).append(": ")
149 | // .append(getErrorClass()).append(",\n");
150 | // }
151 | // if (getStack() != null && getStack().length > 0) {
152 | // buff.append(" ").append(SparkJobBaseInfo.INFO_KEY_RESULT_STACK).append(": [");
153 | // for (String stackItem : getStack()) {
154 | // buff.append(stackItem).append(", ");
155 | // }
156 | // buff.append("]\n");
157 | // }
158 | // }
159 | // }
160 | //
161 | // /**
162 | // * Sets the information of the extend attributes.
163 | // *
164 | // * @param buff the existing contents
165 | // */
166 | // private void setExtendAttributesInfo(StringBuffer buff) {
167 | // if (buff != null) {
168 | // SetISparkJobServerClient
.
22 | *
23 | * @author bluebreezecf
24 | * @since 2014-09-07
25 | *
26 | */
27 | public class SparkJobServerClientException extends Exception {
28 |
29 | private static final long serialVersionUID = -5065403696198358625L;
30 |
31 | /**
32 | * Constructs a new SparkJobServerClientException
instance
33 | * with the specified detail message. The cause is not initialized, and
34 | * may subsequently be initialized by a call to initCause.
35 | *
36 | * @param message the detail message. The detail message is saved for
37 | * later retrieval by the getMessage() method.
38 | */
39 | public SparkJobServerClientException(String message) {
40 | super(message);
41 | }
42 |
43 | /**
44 | * Constructs a new SparkJobServerClientException
instance
45 | * with the specified detail message and cause.
46 | *
47 | * @param message the detail message (which is saved for later retrieval
48 | * by the {@link #getMessage()} method).
49 | * @param cause the cause (which is saved for later retrieval by the
50 | * {@link #getCause()} method). (A null value is
51 | * permitted, and indicates that the cause is nonexistent or
52 | * unknown.)
53 | */
54 | public SparkJobServerClientException(String message, Throwable cause) {
55 | super(message, cause);
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServerClientFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
19 | import java.net.MalformedURLException;
20 | import java.net.URL;
21 | import java.util.Map;
22 | import java.util.concurrent.ConcurrentHashMap;
23 |
24 | import org.apache.log4j.Logger;
25 |
26 | /**
27 | * The factory is responsible for creating instance of ISparkJobServerClient
28 | * to communicate with the Spark Job Server with the arranged rest apis.
29 | *
30 | * @author bluebreezecf
31 | * @since 2014-09-07
32 | *
33 | */
34 | public final class SparkJobServerClientFactory {
35 | private static final SparkJobServerClientFactory INSTANCE = new SparkJobServerClientFactory();
36 |
37 | private static Logger logger = Logger.getLogger(SparkJobServerClientFactory.class);
38 |
39 | private static MapSparkJobServerClientFactory
.
44 | */
45 | private SparkJobServerClientFactory() {
46 | }
47 |
48 | /**
49 | * Gets the unique instance of SparkJobServerClientFactory
.
50 | * @return the instance of SparkJobServerClientFactory
51 | */
52 | public static SparkJobServerClientFactory getInstance() {
53 | return INSTANCE;
54 | }
55 |
56 | /**
57 | * Creates an instance of ISparkJobServerClient
with the given url.
58 | *
59 | * @param url the url of the target Spark Job Server
60 | * @return the corresponding ISparkJobServerClient
instance
61 | * @throws SparkJobServerClientException error occurs when trying to create the
62 | * target spark job server client
63 | */
64 | public ISparkJobServerClient createSparkJobServerClient(String url)
65 | throws SparkJobServerClientException {
66 | if (!isValidUrl(url)) {
67 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client.");
68 | }
69 | String sparkJobServerUrl = url.trim();
70 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(sparkJobServerUrl);
71 | if (null == sparkJobServerClient) {
72 | sparkJobServerClient = new SparkJobServerClientImpl(url);
73 | jobServerClientCache.put(url, sparkJobServerClient);
74 | }
75 | return sparkJobServerClient;
76 | }
77 |
78 | /**
79 | * Creates an instance of ISparkJobServerClient
with the given url, username and password.
80 | *
81 | * @param url the url of the target Spark Job Server
82 | * @param jobServerUsername the username for authentication of target Spark Job Server
83 | * @param jobServerPassword the password for authentication of the target Spark Job Server
84 | * @return the corresponding ISparkJobServerClient
instance
85 | * @throws SparkJobServerClientException error occurs when trying to create the
86 | * target spark job server client
87 | */
88 | public ISparkJobServerClient createSparkJobServerClient(String url, String jobServerUsername,
89 | String jobServerPassword) throws SparkJobServerClientException {
90 | if (!isValidUrl(url)) {
91 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client.");
92 | }
93 | if (jobServerUsername == null || jobServerUsername.isEmpty()) {
94 | throw new SparkJobServerClientException("Invalid username can't be null or empty.");
95 | }
96 | String sparkJobServerUrl = url.trim();
97 | jobServerUsername = jobServerUsername.trim();
98 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(sparkJobServerUrl + "_@_" + jobServerUsername + "_@_" + jobServerPassword);
99 | if (null == sparkJobServerClient) {
100 | sparkJobServerClient = new SparkJobServerClientImpl(url, jobServerUsername, jobServerPassword);
101 | jobServerClientCache.put(sparkJobServerUrl + "_@_" + jobServerUsername + "_@_" + jobServerPassword, sparkJobServerClient);
102 | }
103 | return sparkJobServerClient;
104 | }
105 |
106 | /**
107 | * Creates an instance of ISparkJobServerClient
with the given
108 | * url, username and password and Http Connection, Request, Socket Timeouts
109 | *
110 | * @param url the url of the target Spark Job Server
111 | * @param jobServerUsername the username for authentication of target Spark Job Server
112 | * @param jobServerPassword the password for authentication of the target Spark Job Server
113 | * @param connectionTimeOut
114 | * @param connectionReqTimeOut
115 | * @param socketTimeOut
116 | * @return the corresponding ISparkJobServerClient
instance
117 | * @throws SparkJobServerClientException error occurs when trying to create the
118 | * target spark job server client
119 | */
120 | public ISparkJobServerClient createSparkJobServerClient(String url, String jobServerUsername,
121 | String jobServerPassword, Integer connectionTimeOut,
122 | Integer connectionReqTimeOut,
123 | Integer socketTimeOut) throws SparkJobServerClientException {
124 | if (!isValidUrl(url)) {
125 | throw new SparkJobServerClientException("Invalid url can't be used to create a spark job server client.");
126 | }
127 | if (jobServerUsername == null || jobServerUsername.isEmpty()) {
128 | throw new SparkJobServerClientException("Invalid username can't be null or empty.");
129 | }
130 | String sparkJobServerUrl = url.trim();
131 | jobServerUsername = jobServerUsername.trim();
132 | String cacheKey = sparkJobServerUrl + "_@_" + jobServerUsername
133 | + "_@_" + jobServerPassword + "_@_" + connectionTimeOut + "_@_" + connectionReqTimeOut +
134 | "_@_" + socketTimeOut;
135 | ISparkJobServerClient sparkJobServerClient = jobServerClientCache.get(cacheKey);
136 | if (null == sparkJobServerClient) {
137 | sparkJobServerClient = new SparkJobServerClientImpl(url, jobServerUsername, jobServerPassword,
138 | connectionTimeOut, connectionReqTimeOut, socketTimeOut);
139 | jobServerClientCache.put(cacheKey, sparkJobServerClient);
140 | }
141 | return sparkJobServerClient;
142 | }
143 |
144 | /**
145 | * Checks the given url is valid or not.
146 | *
147 | * @param url the url to be checked
148 | * @return true if it is valid, false otherwise
149 | */
150 | private boolean isValidUrl(String url) {
151 | if (url == null || url.trim().length() <= 0) {
152 | logger.error("The given url is null or empty.");
153 | return false;
154 | }
155 | try {
156 | new URL(url);
157 | } catch (MalformedURLException me) {
158 | StringBuffer buff = new StringBuffer("The given url ");
159 | buff.append(url).append(" is invalid.");
160 | logger.error(buff.toString(), me);
161 | }
162 | return true;
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/src/main/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServerClientImpl.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
35 | import net.sf.json.JSONArray;
36 | import net.sf.json.JSONObject;
37 | import org.apache.commons.io.IOUtils;
38 | import org.apache.http.HttpEntity;
39 | import org.apache.http.HttpResponse;
40 | import org.apache.http.HttpStatus;
41 | import org.apache.http.client.config.RequestConfig;
42 | import org.apache.http.client.methods.HttpDelete;
43 | import org.apache.http.client.methods.HttpGet;
44 | import org.apache.http.client.methods.HttpPost;
45 | import org.apache.http.client.methods.HttpRequestBase;
46 | import org.apache.http.entity.ByteArrayEntity;
47 | import org.apache.http.entity.StringEntity;
48 | import org.apache.http.impl.client.CloseableHttpClient;
49 | import org.apache.http.impl.client.HttpClientBuilder;
50 | import org.apache.log4j.Logger;
51 |
52 |
53 | /**
 * The default client implementation of {@link ISparkJobServerClient}.
55 | * With the specific rest api, it can provide abilities to submit and manage
56 | * Apache Spark jobs, jars, and job contexts in the Spark Job Server.
57 | *
58 | * @author bluebreezecf
59 | * @since 2014-09-07
60 | *
61 | */
62 | class SparkJobServerClientImpl implements ISparkJobServerClient {
    private static Logger logger = Logger.getLogger(SparkJobServerClientImpl.class);
    // Buffer size used when reading http response bodies.
    private static final int BUFFER_SIZE = 512 * 1024;
    // Default HTTP timeouts (milliseconds), applied when no explicit values are given.
    private static final int DEFAULT_CONNECTION_TIMEOUT=60000;
    private static final int DEFAULT_SOCKET_TIMEOUT=60000;
    private static final int DEFAULT_REQUEST_TIMEOUT=60000;
    // Base url of the target spark job server; constructors normalize it to end with '/'.
    private String jobServerUrl;
    // Optional basic-auth credentials; null when the server requires no authentication.
    private String jobServerUsername;
    private String jobServerPassword;
    // Effective HTTP timeouts (milliseconds) for this client instance.
    private Integer connectionTimeOut;
    private Integer connectionReqTimeOut;
    private Integer socketTimeOut;
74 | private static ListSparkJobServerClientImpl
78 | * with the given spark job server url.
79 | *
80 | * @param jobServerUrl a url pointing to a existing spark job server
81 | */
82 | SparkJobServerClientImpl(String jobServerUrl) {
83 | if (!jobServerUrl.endsWith("/")) {
84 | jobServerUrl = jobServerUrl + "/";
85 | }
86 | this.jobServerUrl = jobServerUrl;
87 | this.socketTimeOut = DEFAULT_SOCKET_TIMEOUT;
88 | this.connectionReqTimeOut = DEFAULT_REQUEST_TIMEOUT;
89 | this.connectionTimeOut = DEFAULT_CONNECTION_TIMEOUT;
90 | }
91 |
92 | /**
93 | * Constructs an instance of SparkJobServerClientImpl
94 | * with the given spark job server url.
95 | *
96 | * @param jobServerUrl a url pointing to a existing spark job server
97 | * @param connectionTimeOut TimeOut in milliseconds for to establish Connection
98 | * @param connectionReqTimeOut TimeOut in milliseconds for a connection Request
99 | * @param socketTimeOut Timeout in milliseconds for Socket to transfer data
100 | */
101 | SparkJobServerClientImpl(String jobServerUrl, Integer connectionTimeOut, Integer connectionReqTimeOut, Integer socketTimeOut) {
102 | if (!jobServerUrl.endsWith("/")) {
103 | jobServerUrl = jobServerUrl + "/";
104 | }
105 | this.jobServerUrl = jobServerUrl;
106 | this.socketTimeOut = socketTimeOut == null ? DEFAULT_SOCKET_TIMEOUT : socketTimeOut;
107 | this.connectionReqTimeOut = connectionReqTimeOut == null ? DEFAULT_REQUEST_TIMEOUT : connectionReqTimeOut;
108 | this.connectionTimeOut = connectionTimeOut == null ? DEFAULT_CONNECTION_TIMEOUT : connectionTimeOut;
109 | }
110 |
111 | /**
112 | * Constructs an instance of SparkJobServerClientImpl
113 | * with the given spark job server url, username and password.
114 | *
115 | * @param jobServerUrl a url pointing to a existing spark job server
116 | * @param jobServerUsername a username to a existing spark job server
117 | * @param jobServerPassword a password to a existing spark job server
118 | */
119 | SparkJobServerClientImpl(String jobServerUrl, String jobServerUsername,
120 | String jobServerPassword) {
121 | this(jobServerUrl);
122 | this.jobServerUsername = jobServerUsername;
123 | this.jobServerPassword = jobServerPassword;
124 | }
125 |
126 | /**
127 | * Constructs an instance of SparkJobServerClientImpl
128 | * with the given spark job server url, username and password with HTTP TimeOuts.
129 | *
130 | * @param jobServerUrl a url pointing to a existing spark job server
131 | * @param jobServerUsername a username to a existing spark job server
132 | * @param jobServerPassword a password to a existing spark job server
133 | * @param connectionTimeOut TimeOut in milliseconds for to establish Connection
134 | * @param connectionReqTimeOut TimeOut in milliseconds for a connection Request
135 | * @param socketTimeOut Timeout in milliseconds for Socket to transfer data
136 | */
137 | SparkJobServerClientImpl(String jobServerUrl, String jobServerUsername,
138 | String jobServerPassword, Integer connectionTimeOut, Integer connectionReqTimeOut, Integer socketTimeOut) {
139 | this(jobServerUrl, connectionTimeOut, connectionReqTimeOut, socketTimeOut);
140 | this.jobServerUsername = jobServerUsername;
141 | this.jobServerPassword = jobServerPassword;
142 | }
143 |
144 | /**
145 | * Constructs an instance of RequestConfig
146 | * with configured timeouts for the connection.
147 | */
148 | private RequestConfig getRequestConfig() {
149 | RequestConfig.Builder requestConfig = RequestConfig.custom();
150 | requestConfig.setConnectTimeout(this.connectionTimeOut);
151 | requestConfig.setConnectionRequestTimeout(this.connectionReqTimeOut);
152 | requestConfig.setSocketTimeout(this.socketTimeOut);
153 | return requestConfig.build();
154 | }
155 |
156 | /**
157 | * Set Authorization Header in the given HttpRequestBase
.
158 | *
159 | * @param requestBase HttpRequestBase for the specified endPoint URL
160 | */
161 | private void setAuthorization(HttpRequestBase requestBase) {
162 | String authHeader = getBasicAuthHeader();
163 | if (authHeader != null) {
164 | requestBase.setHeader("Authorization", authHeader);
165 | }
166 | }
167 |
168 | /**
169 | * {@inheritDoc}
170 | */
171 | public ListSparkJobInfo
437 | * from the given spark job JSON.
438 | *
439 | * @param jsonObj spark job json returned by spark job server
440 | */
441 | private SparkJobInfo createSparkJobInfo(JSONObject jsonObj) {
442 | SparkJobInfo toReturn = new SparkJobInfo();
443 | toReturn.setDuration(jsonObj.getString(SparkJobInfo.INFO_KEY_DURATION));
444 | toReturn.setClassPath(jsonObj.getString(SparkJobInfo.INFO_KEY_CLASSPATH));
445 | toReturn.setStartTime(jsonObj.getString(SparkJobInfo.INFO_KEY_START_TIME));
446 | toReturn.setContext(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_CONTEXT));
447 | toReturn.setStatus(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS));
448 | toReturn.setJobId(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_JOB_ID));
449 | toReturn.setContextId(jsonObj.getString(SparkJobInfo.INFO_CONTEXT_ID));
450 | setErrorDetails(SparkJobBaseInfo.INFO_KEY_RESULT, jsonObj, toReturn);
451 | return toReturn;
452 | }
453 |
454 | /**
455 | * {@inheritDoc}
456 | */
457 | public SparkJobResult startJob(String data, MapHttpEntity
630 | * instance.
631 | *
632 | * @param entity the HttpEntity
instance holding the http response content
633 | * @return the corresponding response content
634 | */
635 | protected String getResponseContent(HttpEntity entity) {
636 | byte[] buff = new byte[BUFFER_SIZE];
637 | StringBuffer contents = new StringBuffer();
638 | InputStream in = null;
639 | try {
640 | in = entity.getContent();
641 | BufferedInputStream bis = new BufferedInputStream(in);
642 | int readBytes = 0;
643 | while ((readBytes = bis.read(buff)) != -1) {
644 | contents.append(new String(buff, 0, readBytes));
645 | }
646 | } catch (Exception e) {
647 | logger.error("Error occurs when trying to reading response", e);
648 | } finally {
649 | closeStream(in);
650 | }
651 | return contents.toString().trim();
652 | }
653 |
654 | /**
655 | * Closes the given stream.
656 | *
657 | * @param stream the input/output stream to be closed
658 | */
659 | protected void closeStream(Closeable stream) {
660 | if (stream != null) {
661 | try {
662 | stream.close();
663 | } catch (IOException ioe) {
664 | logger.error("Error occurs when trying to close the stream:", ioe);
665 | }
666 | } else {
667 | logger.error("The given stream is null");
668 | }
669 | }
670 |
    /**
     * Handles the given exception with a specific error message and rethrows it
     * as a SparkJobServerClientException.
     *
     * An incoming SparkJobServerClientException is rethrown unchanged (and not
     * re-logged); any other exception is logged and wrapped, preserving it as
     * the cause.
     *
     * @param errorMsg the corresponding error message
     * @param e the exception to be handled
     * @throws SparkJobServerClientException always — the original or the
     *         wrapping instance
     */
    protected void processException(String errorMsg, Exception e) throws SparkJobServerClientException {
        if (e instanceof SparkJobServerClientException) {
            throw (SparkJobServerClientException)e;
        }
        logger.error(errorMsg, e);
        throw new SparkJobServerClientException(errorMsg, e);
    }
687 |
688 | /**
689 | * Judges the given string value is not empty or not.
690 | *
691 | * @param value the string value to be checked
692 | * @return true indicates it is not empty, false otherwise
693 | */
694 | protected boolean isNotEmpty(String value) {
695 | return value != null && !value.isEmpty();
696 | }
697 |
698 | /**
699 | * Logs the response information when the status is not 200 OK,
700 | * and throws an instance of SparkJobServerClientException
.
701 | *
702 | * @param errorStatusCode error status code
703 | * @param msg the message to indicates the status, it can be null
704 | * @param throwable true indicates throws an instance of
SparkJobServerClientException
705 | * with corresponding error message, false means only log the error message.
706 | * @throws SparkJobServerClientException containing the corresponding error message
707 | */
708 | private void logError(int errorStatusCode, String msg, boolean throwable) throws SparkJobServerClientException {
709 | StringBuffer msgBuff = new StringBuffer("Spark Job Server ");
710 | msgBuff.append(jobServerUrl).append(" response ").append(errorStatusCode);
711 | if (null != msg) {
712 | msgBuff.append(" ").append(msg);
713 | }
714 | String errorMsg = msgBuff.toString();
715 | logger.error(errorMsg);
716 | if (throwable) {
717 | throw new SparkJobServerClientException(errorMsg);
718 | }
719 | }
720 |
721 | /**
722 | * Sets the information of the error details.
723 | *
724 | * @param key the key contains the error details
725 | * @param parnetJsonObj the parent JSONObject
instance
726 | */
727 | private void setErrorDetails(String key, JSONObject parnetJsonObj, SparkJobBaseInfo jobErrorInfo) {
728 | if (parnetJsonObj.containsKey(key)) {
729 | JSONObject resultJson = parnetJsonObj.getJSONObject(key);
730 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_MESSAGE)) {
731 | jobErrorInfo.setMessage(resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_MESSAGE));
732 | }
733 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_ERROR_CLASS)) {
734 | jobErrorInfo.setErrorClass(resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_ERROR_CLASS));
735 | }
736 | if (resultJson.containsKey(SparkJobInfo.INFO_KEY_RESULT_STACK)) {
737 | if (resultJson.get(SparkJobInfo.INFO_KEY_RESULT_STACK) instanceof String) {
738 | String[] stack = { resultJson.getString(SparkJobInfo.INFO_KEY_RESULT_STACK) };
739 | jobErrorInfo.setStack(stack);
740 |
741 | } else {
742 | JSONArray stackJsonArray = resultJson.getJSONArray(SparkJobInfo.INFO_KEY_RESULT_STACK);
743 | String[] stack = new String[stackJsonArray.size()];
744 | for (int i = 0; i < stackJsonArray.size(); i++) {
745 | stack[i] = stackJsonArray.optString(i);
746 | }
747 | jobErrorInfo.setStack(stack);
748 | }
749 | }
750 | }
751 | }
752 |
753 | /**
754 | * Generates an instance of SparkJobResult
according to the given contents.
755 | *
756 | * @param resContent the content of a http response
757 | * @return the corresponding SparkJobResult
instance
758 | * @throws Exception error occurs when parsing the http response content
759 | */
760 | private SparkJobResult parseResult(String resContent) throws Exception {
761 | JSONObject jsonObj = JSONObject.fromObject(resContent);
762 | SparkJobResult jobResult = new SparkJobResult(resContent);
763 | boolean completed = false;
764 | if(jsonObj.has(SparkJobBaseInfo.INFO_KEY_STATUS)) {
765 | jobResult.setStatus(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS));
766 | if (SparkJobBaseInfo.COMPLETED.contains(jobResult.getStatus())) {
767 | completed = true;
768 | }
769 | } else {
770 | completed = true;
771 | }
772 | if (completed) {
773 | //Job finished with results
774 | jobResult.setResult(jsonObj.get(SparkJobBaseInfo.INFO_KEY_RESULT).toString());
775 | } else if (containsAsynjobStatus(jsonObj)) {
776 | //asynchronously started job only with status information
777 | setAsynjobStatus(jobResult, jsonObj);
778 | } else if (containsErrorInfo(jsonObj)) {
779 | String errorKey = null;
780 | if (jsonObj.containsKey(SparkJobBaseInfo.INFO_STATUS_ERROR)) {
781 | errorKey = SparkJobBaseInfo.INFO_STATUS_ERROR;
782 | } else if (jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_RESULT)) {
783 | errorKey = SparkJobBaseInfo.INFO_KEY_RESULT;
784 | }
785 | //Job failed with error details
786 | setErrorDetails(errorKey, jsonObj, jobResult);
787 | } else {
788 | //Other unknown kind of value needs application to parse itself
789 | Iterator> keyIter = jsonObj.keys();
790 | while (keyIter.hasNext()) {
791 | String key = (String)keyIter.next();
792 | if (SparkJobInfo.INFO_KEY_STATUS.equals(key)) {
793 | continue;
794 | }
795 | jobResult.putExtendAttribute(key, jsonObj.get(key));
796 | }
797 | }
798 | return jobResult;
799 | }
800 |
801 | /**
802 | * Judges the given json object contains the error information of a
803 | * spark job or not.
804 | *
805 | * @param jsonObj the JSONObject
instance to be checked.
806 | * @return true if it contains the error information, false otherwise
807 | */
808 | private boolean containsErrorInfo(JSONObject jsonObj) {
809 | return SparkJobBaseInfo.INFO_STATUS_ERROR.equals(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS));
810 | }
811 |
812 |
813 | /**
814 | * Judges the given json object contains the status information of a asynchronous
815 | * started spark job or not.
816 | *
817 | * @param jsonObj the JSONObject
instance to be checked.
818 | * @return true if it contains the status information of a asynchronous
819 | * started spark job, false otherwise
820 | */
821 | private boolean containsAsynjobStatus(JSONObject jsonObj) {
822 | return jsonObj != null && jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_STATUS)
823 | && SparkJobBaseInfo.INFO_STATUS_STARTED.equals(jsonObj.getString(SparkJobBaseInfo.INFO_KEY_STATUS))
824 | && jsonObj.containsKey(SparkJobBaseInfo.INFO_KEY_RESULT);
825 | }
826 |
827 | /**
828 | * Sets the status information of a asynchronous started spark job to the given
829 | * job result instance.
830 | *
831 | * @param jobResult the SparkJobResult
instance to be set the status information
832 | * @param jsonObj the JSONObject
instance holds the status information
833 | */
834 | private void setAsynjobStatus(SparkJobResult jobResult, JSONObject jsonObj) {
835 | JSONObject resultJsonObj = jsonObj.getJSONObject(SparkJobBaseInfo.INFO_KEY_RESULT);
836 | jobResult.setContext(resultJsonObj.getString(SparkJobBaseInfo.INFO_KEY_CONTEXT));
837 | jobResult.setJobId(resultJsonObj.getString(SparkJobBaseInfo.INFO_KEY_JOB_ID));
838 | }
839 |
840 | private CloseableHttpClient buildClient() {
841 | return HttpClientBuilder.create().build();
842 | }
843 |
844 | private void close(final CloseableHttpClient client) {
845 | try {
846 | client.close();
847 | } catch (final IOException e) {
848 | logger.error("could not close client" , e);
849 | }
850 | }
851 |
852 | /**
853 | * Gets the Basic Auth Header Value for Spark Job Server
854 | * The Value is 'Basic <username>:<password> with base64 encoding
855 | */
856 | private String getBasicAuthHeader(){
857 | if (isNotEmpty(this.jobServerUsername) && this.jobServerPassword != null){
858 | return "Basic " + new String(Base64.getEncoder().encode((this.jobServerUsername + ":" + this.jobServerPassword).getBytes()));
859 | }
860 | return null;
861 | }
862 | }
863 |
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootLogger=ERROR, Console
2 |
3 | #Console
4 | log4j.appender.Console=org.apache.log4j.ConsoleAppender
5 | log4j.appender.Console.layout=org.apache.log4j.PatternLayout
6 | log4j.appender.Console.layout.ConversionPattern=%d [%t] %-5p [%c] - %m%n
7 |
8 | ###DEBUG,ERROR
9 | log4j.logger.java.sql.ResultSet=ERROR
10 | log4j.logger.org.apache=ERROR
11 | ###DEBUG,ERROR
12 | log4j.logger.java.sql.Connection=ERROR
13 |
14 | log4j.logger.java.sql.Statement=ERROR
15 | log4j.logger.java.sql.PreparedStatement=ERROR
--------------------------------------------------------------------------------
/src/test/java/com/bluebreezecf/tools/sparkjobserver/api/SparkJobServiceClientImplTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2014-2022 the original author or authors.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.bluebreezecf.tools.sparkjobserver.api;
18 |
19 | import org.junit.After;
20 | import org.junit.Before;
21 | import org.junit.Test;
22 |
23 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClient;
24 | import com.bluebreezecf.tools.sparkjobserver.api.ISparkJobServerClientConstants;
25 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobResult;
26 | import com.bluebreezecf.tools.sparkjobserver.api.SparkJobServerClientFactory;
27 |
28 | import java.io.File;
29 | import java.io.InputStream;
30 | import java.util.HashMap;
31 | import java.util.Map;
32 | import java.util.concurrent.TimeUnit;
33 |
34 | import static org.hamcrest.MatcherAssert.assertThat;
35 | import static org.hamcrest.Matchers.*;
36 | import static org.hamcrest.Matchers.is;
37 |
38 | /**
 * A test class for {@link SparkJobServerClientImpl}.
40 | * @author yatshash
41 | * @since 2017-03-08
42 | */
43 | public class SparkJobServiceClientImplTest {
    // Host and port of the spark job server these tests run against.
    // NOTE(review): this is a hard-coded external IP — the tests require that
    // server to be reachable; consider making it configurable.
    private static final String defaultSparkJobHost = "54.178.178.219";
    private static final String defaultSparkJobPort = "8090";
    private static String endpoint = String.format("http://%s:%s/", defaultSparkJobHost, defaultSparkJobPort);
    // Client under test, recreated before each test in setUp().
    private ISparkJobServerClient client;
    // Polling interval (seconds) when waiting for asynchronous job completion.
    private static final long POOLING_TIME_SEC = 1;
49 |
50 | @Before
51 | public void setUp() throws Exception {
52 | client = SparkJobServerClientFactory
53 | .getInstance()
54 | .createSparkJobServerClient(endpoint);
55 | }
56 |
    @After
    public void tearDown() throws Exception {
        // Nothing to release: the factory caches and reuses client instances.

    }
61 |
62 | /**
63 | * test runJob with File resource
64 | * Warning: This test require deleting jar after test.
65 | * @throws Exception
66 | */
67 | @Test
68 | public void testRunJobWithFile() throws Exception {
69 | InputStream jarFileStream = ClassLoader.getSystemResourceAsStream("./job-server-tests-2.11-0.8.0-SNAPSHOT.jar");
70 | File inputData = new File(ClassLoader.getSystemResource("input-SparkJobServiceClientImpTest.json").toURI());
71 |
72 | String appName = "runjob-with-file-test";
73 | boolean isUploaded = client.uploadSparkJobJar(jarFileStream, appName);
74 |
75 | assertThat(isUploaded, is(true));
76 |
77 | Map