├── docs
│   ├── imgs
│   │   └── 启动成功.png
│   └── 集群配置Livy.md
├── pom.xml
├── readme.md
└── src
    ├── main
    │   ├── java
    │   │   └── space
    │   │       └── jwqwy
    │   │           └── livy
    │   │               ├── common
    │   │               │   └── Constants.java
    │   │               ├── entiy
    │   │               │   └── SparkJob.java
    │   │               ├── eum
    │   │               │   └── SparkJobState.java
    │   │               ├── service
    │   │               │   ├── LivyService.java
    │   │               │   └── impl
    │   │               │       └── LivyServiceImpl.java
    │   │               └── util
    │   │                   ├── HttpUtils.java
    │   │                   └── PropertiesUtil.java
    │   └── resources
    │       └── properties
    │           ├── livy.properties
    │           └── log4j.properties
    └── test
        └── java
            └── LivyServiceTest.java
/docs/imgs/启动成功.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JiangWenqi/LivyRESTAPI/e6009b09829b764c21c08f4a11e28f9176e03b29/docs/imgs/启动成功.png
--------------------------------------------------------------------------------
/docs/集群配置Livy.md:
--------------------------------------------------------------------------------
1 | # Configuring Livy on the Cluster
2 |
3 | ## 1. Download
4 |
5 | Livy download page: `http://livy.incubator.apache.org/download/`
6 |
7 | Commands:
8 |
9 | ```shell
10 | # change to the download directory
11 | cd /home/admin/soft
12 | # download the package
13 | wget http://mirror.bit.edu.cn/apache/incubator/livy/0.5.0-incubating/livy-0.5.0-incubating-bin.zip
14 | ```
15 |
16 |
17 |
18 | ## 2. Unpack
19 |
20 | ```bash
21 | unzip livy-0.5.0-incubating-bin.zip
22 |
23 | # rename the directory
24 | mv livy-0.5.0-incubating-bin livy-0.5.0
25 |
26 | # copy it to /etc
27 | cp -r livy-0.5.0 /etc/livy
28 | ```
29 |
30 |
31 |
32 | ## 3. Edit the configuration
33 |
34 | ```shell
35 | cd /etc
36 | # open up permissions on the directory
37 | chmod 777 livy
38 | # create and edit the config file
39 | cd /etc/livy/conf
40 | cp livy-env.sh.template livy-env.sh
41 | vim livy-env.sh
42 | ```
43 |
44 | ```properties
45 | # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.
46 | HADOOP_CONF_DIR=/etc/hadoop/conf
47 | # - SPARK_HOME Spark which you would like to use in Livy.
48 | SPARK_HOME=/opt/cloudera/parcels/SPARK2-2.3.0.cloudera4-1.cdh5.13.3.p0.611179/lib/spark2
49 | ```
50 |
51 |
52 |
53 | ## 4. Start the service
54 |
55 | ```shell
56 | cd /etc/livy
57 | # create the log directory and grant permissions
58 | mkdir logs
59 | chmod 777 logs
60 | ./bin/livy-server
61 | ```
62 |
63 | Successful startup:
64 |
65 | 
66 |
67 | ```shell
68 | # background mode
69 | ./bin/livy-server start
70 | ```
71 |
72 |
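73 | After the server is up, you can sanity-check the REST endpoint (assuming Livy's default port 8998):
74 |
75 | ```shell
76 | # should return a JSON list of sessions, e.g. {"from":0,"total":0,"sessions":[]}
77 | curl http://localhost:8998/sessions
78 | ```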
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 |
7 |     <groupId>space.jwqwy</groupId>
8 |     <artifactId>livy</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 |
11 |     <dependencies>
12 |         <dependency>
13 |             <groupId>org.apache.livy</groupId>
14 |             <artifactId>livy-core_2.11</artifactId>
15 |             <version>0.5.0-incubating</version>
16 |         </dependency>
17 |         <dependency>
18 |             <groupId>org.apache.livy</groupId>
19 |             <artifactId>livy-rsc</artifactId>
20 |             <version>0.5.0-incubating</version>
21 |         </dependency>
22 |
23 |         <dependency>
24 |             <groupId>org.apache.spark</groupId>
25 |             <artifactId>spark-core_2.12</artifactId>
26 |             <version>2.4.0</version>
27 |         </dependency>
28 |
29 |         <dependency>
30 |             <groupId>log4j</groupId>
31 |             <artifactId>log4j</artifactId>
32 |             <version>1.2.17</version>
33 |         </dependency>
34 |
35 |         <dependency>
36 |             <groupId>net.sf.json-lib</groupId>
37 |             <artifactId>json-lib</artifactId>
38 |             <version>2.4</version>
39 |             <classifier>jdk15</classifier>
40 |         </dependency>
41 |         <dependency>
42 |             <groupId>org.apache.httpcomponents</groupId>
43 |             <artifactId>httpclient</artifactId>
44 |             <version>4.5.5</version>
45 |         </dependency>
46 |
47 |         <dependency>
48 |             <groupId>org.apache.ibatis</groupId>
49 |             <artifactId>ibatis-core</artifactId>
50 |             <version>3.0</version>
51 |         </dependency>
52 |
53 |         <dependency>
54 |             <groupId>org.junit.jupiter</groupId>
55 |             <artifactId>junit-jupiter-api</artifactId>
56 |             <version>5.3.1</version>
57 |             <scope>test</scope>
58 |         </dependency>
59 |     </dependencies>
60 |
61 |     <build>
62 |         <plugins>
63 |             <plugin>
64 |                 <groupId>org.apache.maven.plugins</groupId>
65 |                 <artifactId>maven-compiler-plugin</artifactId>
66 |                 <version>3.3</version>
67 |                 <configuration>
68 |                     <source>1.8</source>
69 |                     <target>1.8</target>
70 |                 </configuration>
71 |             </plugin>
72 |         </plugins>
73 |     </build>
74 | </project>
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # Livy REST API Wrapper (Java)
2 |
3 | 
4 |
5 | Reference articles:
6 |
7 | https://blog.csdn.net/camel84/article/details/81990383
8 |
9 | https://cloud.tencent.com/developer/article/1078857
10 |
11 |
12 |
13 | # Introduction
14 |
15 | > Livy is an open source REST interface for interacting with [Apache Spark](http://spark.apache.org/) from anywhere. It supports executing snippets of code or programs in a Spark context that runs locally or in [Apache Hadoop YARN](http://hadoop.apache.org/docs/current/hadoop-yarn/hadoop-yarn-site/YARN.html).
16 | >
17 | > - Interactive Scala, Python and R shells
18 | > - Batch submissions in Scala, Java, Python
19 | > - Multiple users can share the same server (impersonation support)
20 | > - Can be used for submitting jobs from anywhere with REST
21 | > - Does not require any code change to your programs
22 |
23 | The above is the official introduction to `Livy`; for concrete usage see this [article](https://blog.csdn.net/camel84/article/details/81990383).
24 |
25 | The general idea is to have `Java` build and send the HTTP requests to `Livy`'s REST API; the appendix at the end of this README sketches the raw REST calls the wrapper issues.
26 |
27 | ## Configuring Livy on the cluster
28 |
29 | See this [document](docs/集群配置Livy.md).
30 |
31 | ---
32 |
33 |
34 |
35 |
36 |
37 | # Get Started
38 |
39 | ## Step 1: Upload the jar
40 |
41 | Upload the test `jar` to `hdfs`.
42 |
43 | ```bash
44 | export HADOOP_USER_NAME=hdfs
45 | ${HADOOP_HOME}/bin/hdfs dfs -mkdir /testJars
46 | ${HADOOP_HOME}/bin/hdfs dfs -put /opt/cloudera/parcels/SPARK2-2.3.0.cloudera4-1.cdh5.13.3.p0.611179/lib/spark2/examples/jars/spark-examples_2.11-2.3.0.cloudera4.jar /testJars/
47 | ```
48 |
49 | ## Step 2: Create a Spark job
50 |
51 | ```java
52 | SparkJob job = new SparkJob();
53 |
54 | job.setFile("hdfs://192.168.1.170:8020/testJars/spark-examples_2.11-2.3.0.cloudera4.jar");
55 | job.setClassName("org.apache.spark.examples.SparkPi");
56 | job.setName("SparkPi");
57 | job.setExecutorCores(3);
58 | ```
59 |
60 |
61 |
62 | ## Step 3: Run the job, query its state, fetch logs, and more
63 |
64 | ```java
65 | LivyService livyService = new LivyServiceImpl();
66 |
67 | int sparkJobID = livyService.startSparkJob(job);
68 |
69 | if (sparkJobID > 0) {
70 | System.out.println("\nCreated job with ID:\n" + sparkJobID);
71 |
72 | Map activeSparkJobs = livyService.getActiveSparkJobs();
73 | System.out.println("\nAll active jobs:\n" + activeSparkJobs.toString());
74 |
75 | Map info = livyService.getSparkJobInfo(sparkJobID);
76 | System.out.println("\nDetails of job " + sparkJobID + ":\n" + info.toString());
77 |
78 | SparkJobState state = livyService.getSparkJobState(sparkJobID);
79 | System.out.println("\nState of job " + sparkJobID + ":\n" + state);
80 |
81 | String log = livyService.getSparkJobLog(sparkJobID);
82 | System.out.println("\nLog of job " + sparkJobID + ":\n" + log);
83 |
84 | // Map del = livyService.deleteSparkJob(sparkJobID);
85 | // System.out.println("Deleted job " + sparkJobID + "\n" + del.toString());
86 | }
87 | // Run the job and block until it finishes
88 | // System.out.println(livyService.runSparkJob(job));
89 | ```
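90 |
91 | ## Appendix: the underlying REST calls
92 |
93 | The wrapper is a thin layer over Livy's batch-session REST API. The calls below are a rough sketch of what `LivyServiceImpl` sends over HTTP, assuming Livy listens on `http://192.168.1.170:8998` as configured in `src/main/resources/properties/livy.properties`; response fields may differ slightly between Livy versions.
94 |
95 | ```bash
96 | # create a batch session (what startSparkJob does)
97 | curl -s -X POST -H "Content-Type: application/json" \
98 |      -d '{
99 |            "file": "hdfs://192.168.1.170:8020/testJars/spark-examples_2.11-2.3.0.cloudera4.jar",
100 |            "className": "org.apache.spark.examples.SparkPi",
101 |            "name": "SparkPi",
102 |            "executorCores": 3
103 |          }' \
104 |      http://192.168.1.170:8998/batches
105 |
106 | # list active batch sessions (getActiveSparkJobs)
107 | curl -s http://192.168.1.170:8998/batches
108 |
109 | # state and log of batch 0 (getSparkJobState / getSparkJobLog)
110 | curl -s http://192.168.1.170:8998/batches/0/state
111 | curl -s http://192.168.1.170:8998/batches/0/log
112 |
113 | # kill batch 0 (deleteSparkJob)
114 | curl -s -X DELETE http://192.168.1.170:8998/batches/0
115 | ```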
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/common/Constants.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.common;
2 |
3 | /**
4 | * Livy REST API wrapper
5 | *
6 | * @author Vinci
7 | * Create: 2019/02/19 15:14
8 | * Description: shared constants
9 | */
10 |
11 | public class Constants {
12 | /**
13 | * Spark Livy Batch Session State
14 | */
15 | public static final String LIVY_SESSION_STATE = "state";
16 |
17 | /**
18 | * Spark Livy Batch Session id
19 | */
20 | public static final String LIVY_SESSION_ID = "id";
21 | /**
22 | * Spark Livy Batch Session Log Type
23 | */
24 | public static final String LIVY_SESSION_LOG_STDOUT = "stdout";
25 |
26 | /**
27 | * Spark Livy Batch Session Log Type
28 | */
29 | public static final String LIVY_SESSION_LOG_STDERR = "stderr";
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/entiy/SparkJob.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.entiy;
2 |
3 | import java.util.List;
4 | import java.util.Map;
5 |
6 | /**
7 | * Livy REST API wrapper
8 | *
9 | * @author Vinci
10 | * Create: 2019/02/18 16:37
11 | * Description: request-body properties for a Livy batch job
12 | */
13 |
14 | public class SparkJob {
15 |
16 | /**
17 | * Required
18 | * File containing the application to execute (typically a jar on HDFS)
19 | */
20 | private String file;
21 | /**
22 | * User to impersonate when running the job
23 | */
24 | private String proxyUser;
25 | /**
26 | * Application Java/Spark main class
27 | * Main class of the job
28 | */
29 | private String className;
30 |
31 | /**
32 | * Command line arguments for the application
33 | * Arguments
34 | */
35 | private List<String> args;
36 |
37 | /**
38 | * jars to be used in this session
39 | * Other jars used by this job
40 | */
41 | private List<String> jars;
42 |
43 | /**
44 | * Python files to be used in this session
45 | */
46 | private List<String> pyFiles;
47 | /**
48 | * files to be used in this session
49 | */
50 | private List<String> files;
51 |
52 | /**
53 | * Amount of memory to use for the driver process
54 | */
55 | private String driverMemory;
56 |
57 | /**
58 | * Number of cores to use for the driver process
59 | */
60 | private int driverCores;
61 |
62 | /**
63 | * Amount of memory to use per executor process
64 | */
65 | private String executorMemory;
66 | /**
67 | * Number of cores to use for each executor
68 | */
69 | private int executorCores;
70 | /**
71 | * Number of executors to launch for this session
72 | */
73 | private int numExecutors;
74 | /**
75 | * Archives to be used in this session
76 | */
77 | private List<String> archives;
78 | /**
79 | * The name of the YARN queue to which submitted
80 | */
81 | private String queue;
82 | /**
83 | * The name of this session
84 | * Job name
85 | */
86 | private String name;
87 | /**
88 | * Spark configuration properties
89 | * Spark configuration key/value pairs
90 | */
91 | private Map<String, String> conf;
92 |
93 | public String getFile() {
94 | return file;
95 | }
96 |
97 | public void setFile(String file) {
98 | this.file = file;
99 | }
100 |
101 | public String getProxyUser() {
102 | return proxyUser;
103 | }
104 |
105 | public void setProxyUser(String proxyUser) {
106 | this.proxyUser = proxyUser;
107 | }
108 |
109 | public String getClassName() {
110 | return className;
111 | }
112 |
113 | public void setClassName(String className) {
114 | this.className = className;
115 | }
116 |
117 | public List<String> getArgs() {
118 | return args;
119 | }
120 |
121 | public void setArgs(List<String> args) {
122 | this.args = args;
123 | }
124 |
125 | public List<String> getJars() {
126 | return jars;
127 | }
128 |
129 | public void setJars(List<String> jars) {
130 | this.jars = jars;
131 | }
132 |
133 | public List<String> getPyFiles() {
134 | return pyFiles;
135 | }
136 |
137 | public void setPyFiles(List<String> pyFiles) {
138 | this.pyFiles = pyFiles;
139 | }
140 |
141 | public List<String> getFiles() {
142 | return files;
143 | }
144 |
145 | public void setFiles(List<String> files) {
146 | this.files = files;
147 | }
148 |
149 | public String getDriverMemory() {
150 | return driverMemory;
151 | }
152 |
153 | public void setDriverMemory(String driverMemory) {
154 | this.driverMemory = driverMemory;
155 | }
156 |
157 | public int getDriverCores() {
158 | return driverCores;
159 | }
160 |
161 | public void setDriverCores(int driverCores) {
162 | this.driverCores = driverCores;
163 | }
164 |
165 | public String getExecutorMemory() {
166 | return executorMemory;
167 | }
168 |
169 | public void setExecutorMemory(String executorMemory) {
170 | this.executorMemory = executorMemory;
171 | }
172 |
173 | public int getExecutorCores() {
174 | return executorCores;
175 | }
176 |
177 | public void setExecutorCores(int executorCores) {
178 | this.executorCores = executorCores;
179 | }
180 |
181 | public int getNumExecutors() {
182 | return numExecutors;
183 | }
184 |
185 | public void setNumExecutors(int numExecutors) {
186 | this.numExecutors = numExecutors;
187 | }
188 |
189 | public List<String> getArchives() {
190 | return archives;
191 | }
192 |
193 | public void setArchives(List<String> archives) {
194 | this.archives = archives;
195 | }
196 |
197 | public String getQueue() {
198 | return queue;
199 | }
200 |
201 | public void setQueue(String queue) {
202 | this.queue = queue;
203 | }
204 |
205 | public String getName() {
206 | return name;
207 | }
208 |
209 | public void setName(String name) {
210 | this.name = name;
211 | }
212 |
213 | public Map<String, String> getConf() {
214 | return conf;
215 | }
216 |
217 | public void setConf(Map<String, String> conf) {
218 | this.conf = conf;
219 | }
220 | }
221 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/eum/SparkJobState.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.eum;
2 |
3 | /**
4 | * Livy REST API wrapper
5 | *
6 | * @author Vinci
7 | * Create: 2019/02/19 15:09
8 | * Description: states of a SparkJob (Livy batch/statement states)
9 | */
10 |
11 | public enum SparkJobState {
12 | /**
13 | * Session has not been started
14 | */
15 | NOT_STARTED("not_started"),
16 | /**
17 | * Session is starting
18 | */
19 | STARTING("starting"),
20 | /**
21 | * Session is waiting for input
22 | */
23 | IDLE("idle"),
24 | /**
25 | * Session is executing a statement
26 | */
27 | BUSY("busy"),
28 | /**
29 | * Session is shutting down
30 | */
31 | SHUTTING_DOWN("shutting_down"),
32 | /**
33 | * Session errored out
34 | */
35 | ERROR("error"),
36 | /**
37 | * Session has exited
38 | */
39 | DEAD("dead"),
40 | /**
41 | * Session is successfully stopped
42 | */
43 | SUCCESS("success"),
44 |
45 | /**
46 | * Statement is enqueued but execution hasn't started
47 | */
48 | WAITING("waiting"),
49 |
50 | /**
51 | * Statement is currently running
52 | */
53 | RUNNING("running"),
54 |
55 | /**
56 | * Statement has a response ready
57 | */
58 | AVAILABLE("available"),
59 |
60 | /**
61 | * Statement is being cancelled
62 | */
63 | CANCELLING("cancelling"),
64 |
65 | /**
66 | * Statement is cancelled
67 | */
68 | CANCELLED("cancelled");
69 | /**
70 | * Description (the state string returned by Livy)
71 | */
72 | private String description;
73 |
74 | /**
75 | * Private constructor, so it cannot be called from outside the enum
76 | *
77 | * @param description state description
78 | */
79 | SparkJobState(String description) {
80 | this.description = description;
81 | }
82 |
83 | /**
84 | * Returns the description, exactly like a method on a regular class
85 | *
86 | * @return description
87 | */
88 | public String getDescription() {
89 | return description;
90 | }
91 |
92 | public static SparkJobState fromDescription(String state) {
93 | for (SparkJobState jobStateEnum : SparkJobState.values()) {
94 | if (jobStateEnum.description.equalsIgnoreCase(state)) {
95 | return jobStateEnum;
96 | }
97 | }
98 | return null;
99 | }
100 |
101 | @Override
102 | public String toString() {
103 | return this.description;
104 | }
105 | }
106 |
107 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/service/LivyService.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.service;
2 |
3 | import space.jwqwy.livy.entiy.SparkJob;
4 | import space.jwqwy.livy.eum.SparkJobState;
5 |
6 | import java.util.List;
7 | import java.util.Map;
8 |
9 | /**
10 | * Livy REST API wrapper
11 | *
12 | * @author Vinci
13 | * Create: 2019/02/19 15:01
14 | * Description: submit jobs to a CDH cluster through Livy's RESTful API
15 | */
16 | public interface LivyService {
17 |
18 | /**
19 | * Run a SparkJob and block until it finishes; the method only returns once the job is done.
20 | * The job log is printed while it runs.
21 | *
22 | * @param job SparkJob
23 | * @return whether the job finished successfully
24 | */
25 | boolean runSparkJob(SparkJob job);
26 |
27 | /**
28 | * Start a SparkJob in the background.
29 | *
30 | * @param job SparkJob
31 | * @return batch session ID of the SparkJob
32 | */
33 | int runSparkJobBackground(SparkJob job);
34 |
35 | /**
36 | * Start a session to run a SparkJob without waiting for it to succeed.
37 | *
38 | * @param job SparkJob
39 | * @return batch session ID of the SparkJob
40 | */
41 | int startSparkJob(SparkJob job);
42 |
43 | /**
44 | * Query all active Spark jobs.
45 | *
46 | * @return all active Spark jobs (batch sessions)
47 | */
48 | Map getActiveSparkJobs();
49 |
50 | /**
51 | * Query the details of a specific active Spark job.
52 | *
53 | * @param sparkJobID ID of the SparkJob (batch session ID)
54 | * @return Spark job information, i.e. the batch session details
55 | */
56 | Map getSparkJobInfo(int sparkJobID);
57 |
58 | /**
59 | * Query the state of a specific active Spark job.
60 | *
61 | * @param sparkJobID ID of the SparkJob (batch session ID)
62 | * @return Spark job state, i.e. the batch session state
63 | */
64 | SparkJobState getSparkJobState(int sparkJobID);
65 |
66 | /**
67 | * Query the log of a specific active Spark job.
68 | *
69 | * @param sparkJobID ID of the SparkJob (batch session ID)
70 | * @return Spark job log, i.e. the batch session log
71 | */
72 | String getSparkJobLog(int sparkJobID);
73 |
74 | /**
75 | * Return only the newly added log lines.
76 | * Useful when displaying the log in a front end in (near) real time:
77 | * call this every few seconds with the previously returned log.
78 | *
79 | * @param sparkJobID ID of the SparkJob (batch session ID)
80 | * @param oldLog log returned by the previous call
81 | * @return newly generated log lines
82 | */
83 | Map<String, List<String>> getSparkJobNewLog(int sparkJobID, Map<String, List<String>> oldLog);
84 |
85 | /**
86 | * Kills the Batch job.
87 | *
88 | * @param sparkJobID SparkJob 的 ID(batch session ID)
89 | * @return msg
90 | * {
91 | * "msg": "deleted"
92 | * }
93 | */
94 | Map deleteSparkJob(int sparkJobID);
95 |
96 | }
97 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/service/impl/LivyServiceImpl.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.service.impl;
2 |
3 | import net.sf.json.JSONArray;
4 | import net.sf.json.JSONObject;
5 | import net.sf.json.JsonConfig;
6 | import net.sf.json.util.PropertyFilter;
7 | import org.apache.log4j.Logger;
8 | import space.jwqwy.livy.common.Constants;
9 | import space.jwqwy.livy.entiy.SparkJob;
10 | import space.jwqwy.livy.eum.SparkJobState;
11 | import space.jwqwy.livy.service.LivyService;
12 | import space.jwqwy.livy.util.HttpUtils;
13 | import space.jwqwy.livy.util.PropertiesUtil;
14 |
15 | import java.io.IOException;
16 | import java.util.*;
17 |
18 | /**
19 | * Livy REST API wrapper
20 | *
21 | * @author Vinci
22 | * Create: 2019/02/19 15:12
23 | * Description: LivyService implementation
24 | */
25 |
26 | public class LivyServiceImpl implements LivyService {
27 |
28 | private static Logger logger = Logger.getLogger(LivyServiceImpl.class);
29 | private static String LIVY_URL = "";
30 |
31 | public LivyServiceImpl() {
32 | try {
33 | Properties properties = PropertiesUtil.getProperties("properties/livy.properties");
34 | LIVY_URL = String.valueOf(properties.get("LIVY_URL"));
35 | } catch (IOException e) {
36 | logger.error("请检查配置文件,找不到 Livy URL");
37 | e.printStackTrace();
38 | }
39 | }
40 |
41 | @Override
42 | public boolean runSparkJob(SparkJob sparkJob) {
43 | int sparkJobID = startSparkJob(sparkJob);
44 |
45 | Map<String, List<String>> oldLog = new HashMap<>(0);
46 | while (true) {
47 |
48 | Map<String, List<String>> nowLog = getBatchSessionLog(sparkJobID);
49 |
50 | Map<String, List<String>> newLog = getNewLog(oldLog, nowLog);
51 |
52 | printSessionLog("info", newLog);
53 |
54 | oldLog = nowLog;
55 |
56 | SparkJobState sparkJobState = getBatchSessionState(sparkJobID);
57 | switch (sparkJobState) {
58 | case SHUTTING_DOWN:
59 | logger.error("\n================================job关闭==================================\n");
60 | return false;
61 | case ERROR:
62 | logger.error("\n================================job错误==================================\n");
63 | return false;
64 | case DEAD:
65 | logger.error("\n================================job死亡==================================\n");
66 | return false;
67 | case SUCCESS:
68 | logger.info("\n================================job执行成功==================================\n");
69 | return true;
70 | default:
71 | }
72 |
73 | try {
74 | // sleep 3 s between status polls
75 | Thread.sleep(3000);
76 | } catch (Exception ex) {
77 | logger.error(ex.getMessage());
78 | }
79 | }
80 | }
81 |
82 | @Override
83 | public int runSparkJobBackground(SparkJob sparkJob) {
84 | return startSparkJob(sparkJob);
85 | }
86 |
87 | @Override
88 | public int startSparkJob(SparkJob sparkJob) {
89 | int sparkJobID = -1;
90 | JSONObject batchSession = createBatch(parse2Json(sparkJob));
91 | String state = batchSession.getString(Constants.LIVY_SESSION_STATE);
92 |
93 | // if the session state is neither dead nor error, return the session id
94 | SparkJobState sparkJobState = SparkJobState.fromDescription(state);
95 | if (sparkJobState != SparkJobState.DEAD && sparkJobState != SparkJobState.ERROR) {
96 | sparkJobID = (int) batchSession.get(Constants.LIVY_SESSION_ID);
97 | } else {
98 | logger.error("================ 创建Spark 任务失败=======================\n");
99 | logger.error("=====================失败原因:==========================\n" + batchSession.toString());
100 | }
101 |
102 | return sparkJobID;
103 | }
104 |
105 | @Override
106 | public Map getActiveSparkJobs() {
107 | return getBatchSessions();
108 | }
109 |
110 | @Override
111 | public Map getSparkJobInfo(int sparkJobID) {
112 | return getBatchSession(sparkJobID);
113 | }
114 |
115 | @Override
116 | public SparkJobState getSparkJobState(int sparkJobID) {
117 | return getBatchSessionState(sparkJobID);
118 | }
119 |
120 | @Override
121 | public String getSparkJobLog(int sparkJobID) {
122 | StringBuilder logs = new StringBuilder();
123 |
124 | Map<String, List<String>> batchSessionLog = getBatchSessionLog(sparkJobID);
125 | if (batchSessionLog.size() > 0) {
126 | List<String> stdout = batchSessionLog.get("stdout");
127 | List<String> stderr = batchSessionLog.get("stderr");
128 | for (String log : stdout) {
129 | logs.append(log).append("\n");
130 | }
131 | for (String log : stderr) {
132 | logs.append(log).append("\n");
133 | }
134 | }
135 |
136 | return logs.toString();
137 | }
138 |
139 | @Override
140 | public Map<String, List<String>> getSparkJobNewLog(int sparkJobID, Map<String, List<String>> oldLog) {
141 | Map<String, List<String>> nowLog = getBatchSessionLog(sparkJobID);
142 |
143 | return getNewLog(oldLog, nowLog);
144 | }
145 |
146 | @Override
147 | public Map deleteSparkJob(int sparkJobID) {
148 | return deleteBatchSession(sparkJobID);
149 | }
150 |
151 | /**
152 | * Create a batch session to run a SparkJob.
153 | *
154 | * @param sparkJobJson the SparkJob as a JSON object
155 | * @return batch session information for the job
156 | */
157 | private JSONObject createBatch(JSONObject sparkJobJson) {
158 | // serialise the JSON object to a string
159 | String sparkJobJsonStr = JSONObject.fromObject(sparkJobJson).toString();
160 | return createBatch(sparkJobJsonStr);
161 | }
162 |
163 | /**
164 | * Create a batch session to run a SparkJob.
165 | *
166 | * @param sparkJobJsonStr the SparkJob as a JSON string
167 | * @return batch session information for the job
168 | */
169 | private JSONObject createBatch(String sparkJobJsonStr) {
170 | JSONObject resultJson = null;
171 | Map<String, String> headers = new HashMap<>(4);
172 | headers.put("Accept", "application/json");
173 | headers.put("Content-Type", "application/json");
174 | headers.put("Accept-Charset", "utf-8");
175 |
176 | String result = HttpUtils.postAccess(LIVY_URL + "/batches", headers, sparkJobJsonStr);
177 | if (result != null) {
178 | resultJson = JSONObject.fromObject(result);
179 | } else {
180 | logger.error("\n==============Livy 提交批任务失败==================\n");
181 | }
182 | return resultJson;
183 | }
184 |
185 | private Map getBatchSessions() {
186 | JSONObject resultJson = null;
187 | String result = HttpUtils.getAccess(LIVY_URL + "/batches", null);
188 | if (result != null) {
189 | resultJson = JSONObject.fromObject(result);
190 | } else {
191 | logger.error("\n==============Livy 查询批任务失败==================\n");
192 | }
193 | return resultJson;
194 | }
195 |
196 | private Map getBatchSession(int batchID) {
197 | JSONObject resultJson = null;
198 | String result = HttpUtils.getAccess(LIVY_URL + "/batches/" + batchID, null);
199 | if (result != null) {
200 | resultJson = JSONObject.fromObject(result);
201 | } else {
202 | logger.error("\n==============Livy 查询具体任务失败,任务编号为:\n" + batchID + "\n");
203 | }
204 | return resultJson;
205 | }
206 |
207 | private SparkJobState getBatchSessionState(int batchID) {
208 | SparkJobState sparkJobState = null;
209 | String result = HttpUtils.getAccess(LIVY_URL + "/batches/" + batchID + "/state", null);
210 | if (result != null) {
211 | JSONObject resultJson = JSONObject.fromObject(result);
212 | String state = resultJson.getString("state");
213 | sparkJobState = SparkJobState.fromDescription(state);
214 | } else {
215 | logger.error("\n==============Livy 查询具体任务状态失败,任务编号为:\n" + batchID);
216 | }
217 | return sparkJobState;
218 | }
219 |
220 | /**
221 | * Fetch and parse the session log.
222 | *
223 | * @param batchID job id
224 | * @return log, split into stdout and stderr
225 | */
226 | private Map<String, List<String>> getBatchSessionLog(int batchID) {
227 | // The session log has two parts, stdout and stderr, and by default roughly 200 lines of each are returned
228 | Map<String, List<String>> logMap = new HashMap<>(402);
229 | // stdout: standard output lines
230 | List<String> stdoutLog = new ArrayList<>(200);
231 | // stderr: error output lines
232 | List<String> stderrLog = new ArrayList<>(200);
233 |
234 | String result = HttpUtils.getAccess(LIVY_URL + "/batches/" + batchID + "/log", null);
235 | if (result != null) {
236 | JSONObject responseData = JSONObject.fromObject(result);
237 | // extract the log lines (a JSON string array)
238 | JSONArray logJsonArray = responseData.getJSONArray("log");
239 | // the "stdout: " marker is sometimes missing, in which case stdoutIndex = -1; normally it is 0
240 | int stdoutIndex = logJsonArray.indexOf("stdout: ");
241 | int stderrIndex = logJsonArray.indexOf("\nstderr: ");
242 | Object[] logs = logJsonArray.toArray();
243 |
244 | for (int i = stdoutIndex + 1; i < stderrIndex; i++) {
245 | stdoutLog.add((String) logs[i]);
246 | }
247 | for (int i = stderrIndex + 1; i < logs.length; i++) {
248 | stderrLog.add((String) logs[i]);
249 | }
250 | logMap.put("stdout", stdoutLog);
251 | logMap.put("stderr", stderrLog);
252 |
253 | } else {
254 | logger.error("\n==============Livy 查询具体任务日志失败,任务编号为:\n" + batchID + "\n");
255 | }
256 |
257 | return logMap;
258 | }
259 |
260 | private Map deleteBatchSession(int batchID) {
261 | JSONObject resultJson = null;
262 | String result = HttpUtils.deleteAccess(LIVY_URL + "/batches/" + batchID, null);
263 | if (result != null) {
264 | resultJson = JSONObject.fromObject(result);
265 | } else {
266 | logger.error("\n==============Livy 删除具体任务失败,任务编号为:\n" + batchID + "\n");
267 | }
268 | return resultJson;
269 | }
270 |
271 | /**
272 | * Each request for a batch session's log returns the full log so far, so repeated requests contain duplicate lines.
273 | * This method strips the duplicates and returns only the newly added log lines.
274 | *
275 | * @param oldLog log from the previous request
276 | * @param nowLog log from the current request
277 | * @return newly added log lines
278 | */
279 | private Map<String, List<String>> getNewLog(Map<String, List<String>> oldLog, Map<String, List<String>> nowLog) {
280 | Map<String, List<String>> newLog = new HashMap<>(400);
281 | // stdout: newly added standard output lines
282 | List<String> newStdoutLog = new ArrayList<>(200);
283 | // stderr: newly added error output lines
284 | List<String> newStderrLog = new ArrayList<>(200);
285 |
286 | if ((oldLog.size() == 0)) {
287 | return nowLog;
288 | }
289 |
290 | List<String> oldStdoutLog = oldLog.get("stdout");
291 | List<String> oldStderrLog = oldLog.get("stderr");
292 |
293 | if ((oldStdoutLog.size() == 0) && (oldStderrLog.size() == 0)) {
294 | return nowLog;
295 | }
296 |
297 | List<String> nowStdoutLog = nowLog.get("stdout");
298 | List<String> nowStderrLog = nowLog.get("stderr");
299 |
300 | getNewLog(newStdoutLog, oldStdoutLog, nowStdoutLog);
301 | newLog.put("stdout", newStdoutLog);
302 | getNewLog(newStderrLog, oldStderrLog, nowStderrLog);
303 | newLog.put("stderr", newStderrLog);
304 | return newLog;
305 | }
306 |
307 | /**
308 | * Append the deduplicated log lines.
309 | *
310 | * @param newStdLog list that receives the new lines
311 | * @param oldStdLog previously fetched lines
312 | * @param nowStdLog currently fetched lines
313 | */
314 | private void getNewLog(List<String> newStdLog, List<String> oldStdLog, List<String> nowStdLog) {
315 | if (oldStdLog.size() > 0 && nowStdLog.size() > 0) {
316 | // locate the last previously seen line
317 | String oldStdLastLog = oldStdLog.get(oldStdLog.size() - 1);
318 | int newStdLogIndex = nowStdLog.lastIndexOf(oldStdLastLog);
319 | // copy everything after that line
320 | for (int i = newStdLogIndex + 1; i < nowStdLog.size(); i++) {
321 | newStdLog.add(nowStdLog.get(i));
322 | }
323 | }
324 | }
325 |
326 | /**
327 | * Print the session log.
328 | *
329 | * @param logType log level to print at
330 | * @param logs log lines
331 | */
332 | private void printSessionLog(String logType, Map<String, List<String>> logs) {
333 |
334 | // skip printing if the log is empty
335 | if ((logs.size() == 0)) {
336 | return;
337 | }
338 | List<String> stdoutLog = logs.get("stdout");
339 | List<String> stderrLog = logs.get("stderr");
340 |
341 | if ((stdoutLog.size() == 0) && (stderrLog.size() == 0)) {
342 | return;
343 | }
344 |
345 | StringBuilder stdout = new StringBuilder();
346 |
347 | for (String log : logs.get(Constants.LIVY_SESSION_LOG_STDOUT)) {
348 | stdout.append(log).append("\n");
349 | }
350 | StringBuilder stderr = new StringBuilder();
351 | for (String log : logs.get(Constants.LIVY_SESSION_LOG_STDERR)) {
352 | stderr.append(log).append("\n");
353 | }
354 |
355 | switch (logType) {
356 | case "info":
357 | logger.info("\nstdout:\n" + stdout + "\nstderr:\n" + stderr);
358 | break;
359 | case "error":
360 | logger.error("\nstdout:\n" + stdout + "\nstderr:\n" + stderr);
361 | break;
362 | case "debug":
363 | logger.debug("\nstdout:\n" + stdout + "\nstderr:\n" + stderr);
364 | break;
365 | default:
366 | logger.info("\nstdout:\n" + stdout + "\nstderr:\n" + stderr);
367 | break;
368 | }
369 |
370 | }
371 |
372 | /**
373 | * Convert a SparkJob to JSON, filtering out parameters that still have their default values.
374 | *
375 | * @param job Livy job
376 | * @return jobJson
377 | */
378 | private JSONObject parse2Json(SparkJob job) {
379 | // filter that drops parameters left at their default values
380 | PropertyFilter filter = (source, name, value) -> {
381 | // for numbers, treat 0 as the default and drop it; otherwise drop nulls
382 | if (value instanceof Number && (int) value == 0) {
383 | return true;
384 | } else {
385 | return null == value;
386 | }
387 | };
388 | JsonConfig jsonConfig = new JsonConfig();
389 | jsonConfig.setJsonPropertyFilter(filter);
390 | return JSONObject.fromObject(job, jsonConfig);
391 | }
392 | }
393 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/util/HttpUtils.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.util;
2 |
3 | import org.apache.http.HttpEntity;
4 | import org.apache.http.HttpResponse;
5 | import org.apache.http.client.methods.HttpDelete;
6 | import org.apache.http.client.methods.HttpGet;
7 | import org.apache.http.client.methods.HttpPost;
8 | import org.apache.http.entity.StringEntity;
9 | import org.apache.http.impl.client.CloseableHttpClient;
10 | import org.apache.http.impl.client.HttpClients;
11 | import org.apache.http.util.EntityUtils;
12 |
13 | import java.io.IOException;
14 | import java.util.Map;
15 |
16 | /**
17 | * Livy REST API wrapper
18 | *
19 | * @author Vinci
20 | * Create: 2019/02/19 15:35
21 | * Description: HTTP request helpers
22 | */
23 |
24 | public class HttpUtils {
25 | /**
26 | * HTTP GET request
27 | *
28 | * @param url URL
29 | * @param headers request headers
30 | * @return response body
31 | */
32 | public static String getAccess(String url, Map<String, String> headers) {
33 | String result = null;
34 | CloseableHttpClient httpClient = HttpClients.createDefault();
35 | HttpGet httpGet = new HttpGet(url);
36 | if (headers != null && headers.size() > 0) {
37 | headers.forEach(httpGet::addHeader);
38 | }
39 | try {
40 | HttpResponse response = httpClient.execute(httpGet);
41 | HttpEntity entity = response.getEntity();
42 | result = EntityUtils.toString(entity);
43 | } catch (IOException e) {
44 | e.printStackTrace();
45 | }
46 |
47 | return result;
48 | }
49 |
50 | /**
51 | * HTTP DELETE request
52 | *
53 | * @param url URL
54 | * @param headers request headers
55 | * @return response body
56 | */
57 | public static String deleteAccess(String url, Map<String, String> headers) {
58 | String result = null;
59 | CloseableHttpClient httpClient = HttpClients.createDefault();
60 | HttpDelete httpDelete = new HttpDelete(url);
61 | if (headers != null && headers.size() > 0) {
62 | headers.forEach(httpDelete::addHeader);
63 | }
64 | try {
65 | HttpResponse response = httpClient.execute(httpDelete);
66 | HttpEntity entity = response.getEntity();
67 | result = EntityUtils.toString(entity);
68 | } catch (IOException e) {
69 | e.printStackTrace();
70 | }
71 |
72 | return result;
73 | }
74 |
75 | /**
76 | * HTTP POST request
77 | *
78 | * @param url url
79 | * @param headers request headers
80 | * @param data request body
81 | * @return response body
82 | */
83 | public static String postAccess(String url, Map<String, String> headers, String data) {
84 | String result = null;
85 |
86 | CloseableHttpClient httpClient = HttpClients.createDefault();
87 |
88 | HttpPost post = new HttpPost(url);
89 |
90 | if (headers != null && headers.size() > 0) {
91 | headers.forEach(post::addHeader);
92 | }
93 |
94 | try {
95 | StringEntity entity = new StringEntity(data);
96 | entity.setContentEncoding("UTF-8");
97 | entity.setContentType("application/json");
98 | post.setEntity(entity);
99 |
100 | HttpResponse response = httpClient.execute(post);
101 | HttpEntity resultEntity = response.getEntity();
102 | result = EntityUtils.toString(resultEntity);
103 |
104 | return result;
105 | } catch (Exception e) {
106 | e.printStackTrace();
107 | }
108 | return result;
109 | }
110 | }
111 |
--------------------------------------------------------------------------------
/src/main/java/space/jwqwy/livy/util/PropertiesUtil.java:
--------------------------------------------------------------------------------
1 | package space.jwqwy.livy.util;
2 |
3 | /**
4 | * Livy REST API wrapper
5 | *
6 | * @author Vinci
7 | * Create: 2019/02/19 15:41
8 | * Description: loads properties files from the classpath
9 | */
10 |
11 | import org.apache.ibatis.io.Resources;
12 |
13 | import java.io.IOException;
14 | import java.io.InputStream;
15 | import java.util.Properties;
16 | public final class PropertiesUtil {
17 | private PropertiesUtil() {
18 | }
19 |
20 | public static Properties getProperties(String path) throws IOException {
21 | Properties properties = new Properties();
22 | // try-with-resources so the stream is closed after loading
23 | try (InputStream in = Resources.getResourceAsStream(path)) {
24 | properties.load(in);
25 | } catch (IOException e) {
26 | e.printStackTrace();
27 | }
28 | return properties;
29 | }
30 | }
--------------------------------------------------------------------------------
/src/main/resources/properties/livy.properties:
--------------------------------------------------------------------------------
1 | LIVY_URL = http://192.168.1.170:8998
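2 | # REST endpoint of the Livy server (livy-server listens on port 8998 by default)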
--------------------------------------------------------------------------------
/src/main/resources/properties/log4j.properties:
--------------------------------------------------------------------------------
1 | ### set log levels ###
2 | log4j.rootLogger = INFO, console, logfile, errorfile
3 |
4 |
5 | ### INFO ###
6 | log4j.appender.info = org.apache.log4j.ConsoleAppender
7 | #log4j.appender.info.Target = System.out
8 | log4j.appender.info.layout = org.apache.log4j.PatternLayout
9 | log4j.appender.info.layout.ConversionPattern = %-d{yyyy-MM-dd HH\:mm\:ss} [%p]-[%c] %m%n
10 |
11 | ### console ###
12 | log4j.appender.console = org.apache.log4j.ConsoleAppender
13 | log4j.appender.console.layout = org.apache.log4j.PatternLayout
14 | log4j.appender.console.layout.ConversionPattern = %-d{yyyy-MM-dd HH\:mm\:ss} [%p]-[%c] %m%n
15 |
16 | ### log file ###
17 | log4j.appender.logfile = org.apache.log4j.DailyRollingFileAppender
18 | log4j.appender.logfile.File = ./logs/spark-job.log
19 | log4j.appender.logfile.Append = true
20 | log4j.appender.logfile.Threshold = INFO
21 | log4j.appender.logfile.layout = org.apache.log4j.PatternLayout
22 | log4j.appender.logfile.layout.ConversionPattern = %-d{yyyy-MM-dd HH\:mm\:ss} [%p]-[%c] %m%n
23 | ### log file ###
24 | #log4j.appender.debug = org.apache.log4j.DailyRollingFileAppender
25 | #log4j.appender.debug.Append = true
26 | #log4j.appender.debug.Threshold = INFO,debug,console,error
27 | #log4j.appender.debug.layout = org.apache.log4j.PatternLayout
28 | #log4j.appender.debug.layout.ConversionPattern = %-d{yyyy-MM-dd HH\:mm\:ss} [%p]-[%c] %m%n
29 |
30 | ### exception ###
31 | log4j.appender.errorfile = org.apache.log4j.DailyRollingFileAppender
32 | log4j.appender.errorfile.File = ./logs/spark-job_error.log
33 | log4j.appender.errorfile.Append = true
34 | log4j.appender.errorfile.Threshold = ERROR
35 | log4j.appender.errorfile.layout = org.apache.log4j.PatternLayout
36 | log4j.appender.errorfile.layout.ConversionPattern = %-d{yyyy-MM-dd HH\:mm\:ss} [%p]-[%c] %m%n
37 |
38 |
39 | ### must be declared so that the druid sql loggers below can emit output; otherwise log4j throws "log4j.error.key not found"
40 | #log4j.appender.stdout=org.apache.log4j.ConsoleAppender
41 | #log4j.appender.stdout.Target=System.out
42 | #log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
43 | #log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %l %c%n%p: %m%n
44 |
45 | #3 druid
46 | log4j.logger.druid.sql=info
47 | log4j.logger.druid.sql.DataSource=info
48 | log4j.logger.druid.sql.Connection=info
49 | log4j.logger.druid.sql.Statement=info
50 | log4j.logger.druid.sql.ResultSet=info
51 |
52 | #4 mybatis: show SQL statements
53 | log4j.logger.java.sql.Connection=info
54 | log4j.logger.org.mybatis=info
55 | #log4j.logger.cn.tibet.cas.dao=DEBUG
56 | #log4j.logger.org.mybatis.common.jdbc.SimpleDataSource=DEBUG#
57 | #log4j.logger.org.mybatis.common.jdbc.ScriptRunner=DEBUG#
58 | #log4j.logger.org.mybatis.sqlmap.engine.impl.SqlMapClientDelegate=DEBUG#
59 | #log4j.logger.java.sql.Connection=DEBUG
60 | log4j.logger.java.sql=info
61 | log4j.logger.java.sql.Statement=info
62 | log4j.logger.java.sql.ResultSet=info
63 | log4j.logger.java.sql.PreparedStatement=info
--------------------------------------------------------------------------------
/src/test/java/LivyServiceTest.java:
--------------------------------------------------------------------------------
1 | import org.junit.jupiter.api.Test;
2 | import space.jwqwy.livy.entiy.SparkJob;
3 | import space.jwqwy.livy.eum.SparkJobState;
4 | import space.jwqwy.livy.service.LivyService;
5 | import space.jwqwy.livy.service.impl.LivyServiceImpl;
6 |
7 | import java.util.ArrayList;
8 | import java.util.List;
9 | import java.util.Map;
10 |
11 | /**
12 | * Livy REST API wrapper
13 | *
14 | * @author Vinci
15 | * Create: 2019/02/19 15:47
16 | * Description: tests for LivyService
17 | */
18 |
19 | class LivyServiceTest {
20 |
21 | @Test
22 | void submitJob() {
23 |
24 | LivyService livyService = new LivyServiceImpl();
25 |
26 | SparkJob job = new SparkJob();
27 |
28 | // job.setFile("hdfs://192.168.1.170:8020/jar/spark-examples_2.11-2.3.0.cloudera4.jar");
29 | // job.setClassName("org.apache.spark.examples.SparkPi");
30 | // job.setName("SparkPi");
31 |
32 | job.setName("FP_Growth");
33 | job.setFile("hdfs://192.168.1.170:8020/taskJars/FP_Growth_Analysis-23.017-SNAPSHOT.jar");
34 | // job.setFile("hdfs://192.168.1.170:8020/taskJars/sparkAnalysis-1.0-SNAPSHOT.jar");
35 | job.setClassName("com.webstudio.sparkAnalaysis.FP_Growth");
36 | job.setExecutorCores(3);
37 |
38 | List<String> jars = new ArrayList<>(1);
39 | jars.add("hdfs://192.168.1.170:8020/lib/hive-hbase-handler-1.1.0-cdh5.14.2.jar");
40 | job.setJars(jars);
41 |
42 | int sparkJobID = livyService.startSparkJob(job);
43 |
44 | if (sparkJobID > 0) {
45 | System.out.println("\n创建任务,任务ID为:\n" + sparkJobID);
46 |
47 | Map activeSparkJobs = livyService.getActiveSparkJobs();
48 | System.out.println("\n查询当前所有任务:\n" + activeSparkJobs.toString());
49 |
50 | SparkJobState state = livyService.getSparkJobState(sparkJobID);
51 | System.out.println("\n查询任务ID为" + sparkJobID + "的任务状态:\n" + state);
52 |
53 | Map info = livyService.getSparkJobInfo(sparkJobID);
54 | System.out.println("\n查询任务ID为" + sparkJobID + "的任务详情:\n" + info.toString());
55 |
56 | String log = livyService.getSparkJobLog(sparkJobID);
57 | System.out.println("\n查询任务ID为" + sparkJobID + "的任务日志:\n" + log);
58 |
59 | // Map del = livyService.deleteSparkJob(sparkJobID);
60 | // System.out.println("删除任务ID为" + sparkJobID + "\n" + del.toString());
61 | }
62 | // Run the job and block until it finishes
63 | // System.out.println(livyService.runSparkJob(job));
64 | }
65 |
66 | /**
67 | * Submits a Spark job that is expected to fail, polls its state, and deletes the batch once it reaches a terminal state
68 | */
69 | @Test
70 | void failJobTest() {
71 | LivyService livyService = new LivyServiceImpl();
72 | SparkJob job = new SparkJob();
73 | job.setExecutorCores(3);
74 | List<String> jars = new ArrayList<>(1);
75 | jars.add("hdfs://192.168.1.170:8020/lib/hive-hbase-handler-1.1.0-cdh5.14.2.jar");
76 | job.setJars(jars);
77 | job.setName("FP_Growth");
78 | job.setFile("hdfs://192.168.1.170:8020/taskJars/sparkAnalysis-1.0-SNAPSHOT.jar");
79 | job.setClassName("com.webstudio.sparkAnalaysis.FP_Growth");
80 |
81 | int sparkJobID = livyService.runSparkJobBackground(job);
82 |
83 | while (true) {
84 | try {
85 | // sleep 4 s between polls
86 | Thread.sleep(4000);
87 | } catch (Exception ex) {
88 | ex.getMessage();
89 | }
90 |
91 | SparkJobState sparkJobState = livyService.getSparkJobState(sparkJobID);
92 | String log = livyService.getSparkJobLog(sparkJobID);
93 |
94 | System.out.println(log);
95 |
96 | switch (sparkJobState) {
97 | case SHUTTING_DOWN:
98 | livyService.deleteSparkJob(sparkJobID);
99 | return;
100 | case ERROR:
101 | livyService.deleteSparkJob(sparkJobID);
102 | return;
103 | case DEAD:
104 | livyService.deleteSparkJob(sparkJobID);
105 | return;
106 | case SUCCESS:
107 | return;
108 | default:
109 | }
110 |
111 | }
112 | }
113 |
114 | @Test
115 | void sparkAnalysisTest() {
116 | LivyService livyService = new LivyServiceImpl();
117 | SparkJob job = new SparkJob();
118 | job.setName("FP_Growth");
119 | job.setFile("hdfs://192.168.1.170:8020/testJars/SparkAnalysis-23.018-SNAPSHOT.jar");
120 | job.setClassName("com.webstudio.sparkAnalaysis.FP_Growth");
121 | job.setExecutorCores(3);
122 |
123 | List<String> jars = new ArrayList<>(1);
124 | jars.add("hdfs://192.168.1.170:8020/lib/hive-hbase-handler-1.1.0-cdh5.14.2.jar");
125 | job.setJars(jars);
126 |
127 | livyService.runSparkJob(job);
128 | }
129 |
130 | }
131 |
--------------------------------------------------------------------------------