├── MachineLearningPlatformClient
│   ├── WebContent
│   │   ├── META-INF
│   │   │   └── MANIFEST.MF
│   │   ├── WEB-INF
│   │   │   └── web.xml
│   │   ├── js
│   │   │   ├── lib
│   │   │   │   ├── jquery-3.2.1.min.js
│   │   │   │   └── jquery.json.min.js
│   │   │   └── test.js
│   │   └── test.html
│   ├── pom.xml
│   ├── src
│   │   └── main
│   │       ├── java
│   │       │   └── com
│   │       │       └── hhu
│   │       │           └── machinelearningplatformclient
│   │       │               ├── algorithm
│   │       │               │   └── ParameterValueType.java
│   │       │               ├── common
│   │       │               │   ├── ByteObjectUtil.java
│   │       │               │   ├── HBaseUtil.java
│   │       │               │   ├── HDFSUtils.java
│   │       │               │   ├── RedisUtils.java
│   │       │               │   └── ResourcePath.java
│   │       │               ├── config
│   │       │               │   └── SystemConfig.java
│   │       │               ├── controller
│   │       │               │   └── TaskController.java
│   │       │               ├── data
│   │       │               │   ├── DataFile.java
│   │       │               │   ├── DataFileType.java
│   │       │               │   └── FieldInfo.java
│   │       │               ├── entity
│   │       │               │   ├── MLAlgorithm.java
│   │       │               │   ├── Parameter.java
│   │       │               │   ├── Response.java
│   │       │               │   ├── ResponseCode.java
│   │       │               │   └── TaskInit.java
│   │       │               ├── exception
│   │       │               │   └── ConfigInitException.java
│   │       │               ├── model
│   │       │               │   └── AlgorithmModel.java
│   │       │               └── task
│   │       │                   ├── SparkTaskAlgorithm.java
│   │       │                   ├── SparkTaskExecutor.java
│   │       │                   ├── TaskExecution.java
│   │       │                   ├── TaskInfo.java
│   │       │                   ├── TaskManager.java
│   │       │                   ├── TaskState.java
│   │       │                   └── TaskType.java
│   │       └── resources
│   │           ├── hdfs.properties
│   │           ├── kubernetes.properties
│   │           ├── log4js.properties
│   │           ├── redis.properties
│   │           └── springmvc.xml
│   └── target
│       └── classes
│           ├── META-INF
│           │   ├── MANIFEST.MF
│           │   └── maven
│           │       └── MachineLearningPlatform
│           │           └── MachineLearningPlatformClient
│           │               ├── pom.properties
│           │               └── pom.xml
│           ├── com
│           │   └── hhu
│           │       └── machinelearningplatformclient
│           │           ├── algorithm
│           │           │   └── ParameterValueType.class
│           │           ├── common
│           │           │   ├── ByteObjectUtil.class
│           │           │   ├── HBaseUtil.class
│           │           │   ├── HDFSUtils.class
│           │           │   ├── RedisUtils$1.class
│           │           │   ├── RedisUtils$2.class
│           │           │   ├── RedisUtils$3.class
│           │           │   ├── RedisUtils.class
│           │           │   └── ResourcePath.class
│           │           ├── config
│           │           │   └── SystemConfig.class
│           │           ├── controller
│           │           │   └── TaskController.class
│           │           ├── data
│           │           │   ├── DataFile.class
│           │           │   ├── DataFileType.class
│           │           │   └── FieldInfo.class
│           │           ├── entity
│           │           │   ├── MLAlgorithm.class
│           │           │   ├── Parameter.class
│           │           │   ├── Response.class
│           │           │   ├── ResponseCode.class
│           │           │   └── TaskInit.class
│           │           ├── exception
│           │           │   └── ConfigInitException.class
│           │           ├── model
│           │           │   └── AlgorithmModel.class
│           │           └── task
│           │               ├── SparkTaskAlgorithm.class
│           │               ├── SparkTaskExecutor.class
│           │               ├── TaskExecution.class
│           │               ├── TaskInfo.class
│           │               ├── TaskManager.class
│           │               ├── TaskState.class
│           │               └── TaskType.class
│           ├── hdfs.properties
│           ├── kubernetes.properties
│           ├── log4js.properties
│           ├── redis.properties
│           └── springmvc.xml
├── MachineLearningPlatformServer
│   ├── pom.xml
│   ├── src
│   │   ├── main
│   │   │   ├── MLAlgorithmLoaderTest.java
│   │   │   ├── java
│   │   │   │   └── com
│   │   │   │       └── hhu
│   │   │   │           └── machinelearningplatformserver
│   │   │   │               ├── algorithm
│   │   │   │               │   ├── ComponentType.java
│   │   │   │               │   ├── MLAlgorithmDesc.java
│   │   │   │               │   ├── MLAlgorithmLoader.java
│   │   │   │               │   ├── ParameterDesc.java
│   │   │   │               │   ├── ParameterValueType.java
│   │   │   │               │   └── UsageType.java
│   │   │   │               ├── common
│   │   │   │               │   ├── ByteObjectUtil.java
│   │   │   │               │   ├── ConfigUtils.java
│   │   │   │               │   ├── HBaseUtil.java
│   │   │   │               │   ├── HDFSUtils.java
│   │   │   │               │   ├── JRedisPoolConfig.java
│   │   │   │               │   ├── JedisUtils.java
│   │   │   │               │   ├── RandomUtil.java
│   │   │   │               │   └── ResourcePath.java
│   │   │   │               ├── data
│   │   │   │               │   ├── DataFile.java
│   │   │   │               │   ├── DataFileMapper.java
│   │   │   │               │   ├── DataFileType.java
│   │   │   │               │   ├── FieldInfo.java
│   │   │   │               │   ├── LineParse.java
│   │   │   │               │   ├── PersistDataset.java
│   │   │   │               │   └── SparkDataFileConverter.java
│   │   │   │               ├── exception
│   │   │   │               │   ├── CantConverException.java
│   │   │   │               │   └── ConfigInitException.java
│   │   │   │               ├── model
│   │   │   │               │   └── AlgorithmModel.java
│   │   │   │               ├── proxy
│   │   │   │               │   ├── EstimatorProxy.java
│   │   │   │               │   ├── MLAlgorithmProxy.java
│   │   │   │               │   ├── ModelProxy.java
│   │   │   │               │   └── TransformerProxy.java
│   │   │   │               ├── submit
│   │   │   │               │   ├── LoadTaskInfo.java
│   │   │   │               │   └── Submiter.java
│   │   │   │               └── task
│   │   │   │                   ├── SparkTaskAlgorithm.java
│   │   │   │                   ├── SparkTaskInfo.java
│   │   │   │                   ├── TaskInfo.java
│   │   │   │                   ├── TaskState.java
│   │   │   │                   └── TaskType.java
│   │   │   └── resources
│   │   │       ├── hdfs.properties
│   │   │       └── redis.properties
│   │   └── test
│   │       ├── java
│   │       │   ├── Test.java
│   │       │   └── Test1.java
│   │       └── resources
│   │           ├── datafile.csv
│   │           └── test.json
│   └── target
│       ├── classes
│       │   ├── META-INF
│       │   │   ├── MANIFEST.MF
│       │   │   └── maven
│       │   │       └── MachineLearningPlatform
│       │   │           └── MachineLearningPlatformServer
│       │   │               ├── pom.properties
│       │   │               └── pom.xml
│       │   ├── com
│       │   │   └── hhu
│       │   │       └── machinelearningplatformserver
│       │   │           ├── algorithm
│       │   │           │   ├── ComponentType.class
│       │   │           │   ├── MLAlgorithmDesc.class
│       │   │           │   ├── MLAlgorithmLoader.class
│       │   │           │   ├── ParameterDesc.class
│       │   │           │   ├── ParameterValueType.class
│       │   │           │   └── UsageType.class
│       │   │           ├── common
│       │   │           │   ├── ByteObjectUtil.class
│       │   │           │   ├── ConfigUtils.class
│       │   │           │   ├── HBaseUtil.class
│       │   │           │   ├── HDFSUtils.class
│       │   │           │   ├── JRedisPoolConfig.class
│       │   │           │   ├── JedisUtils.class
│       │   │           │   ├── RandomUtil.class
│       │   │           │   └── ResourcePath.class
│       │   │           ├── data
│       │   │           │   ├── DataFile.class
│       │   │           │   ├── DataFileMapper.class
│       │   │           │   ├── DataFileType.class
│       │   │           │   ├── FieldInfo.class
│       │   │           │   ├── LineParse.class
│       │   │           │   ├── PersistDataset.class
│       │   │           │   └── SparkDataFileConverter.class
│       │   │           ├── exception
│       │   │           │   ├── CantConverException.class
│       │   │           │   └── ConfigInitException.class
│       │   │           ├── model
│       │   │           │   └── AlgorithmModel.class
│       │   │           ├── proxy
│       │   │           │   ├── EstimatorProxy.class
│       │   │           │   ├── MLAlgorithmProxy.class
│       │   │           │   ├── ModelProxy.class
│       │   │           │   └── TransformerProxy.class
│       │   │           ├── submit
│       │   │           │   ├── LoadTaskInfo.class
│       │   │           │   └── Submiter.class
│       │   │           └── task
│       │   │               ├── SparkTaskAlgorithm.class
│       │   │               ├── SparkTaskInfo.class
│       │   │               ├── TaskInfo.class
│       │   │               ├── TaskState.class
│       │   │               └── TaskType.class
│       │   ├── hdfs.properties
│       │   └── redis.properties
│       └── test-classes
│           ├── Test.class
│           ├── Test1.class
│           ├── datafile.csv
│           └── test.json
└── README.md
/MachineLearningPlatformClient/WebContent/META-INF/MANIFEST.MF:
--------------------------------------------------------------------------------
1 | Manifest-Version: 1.0
2 | Class-Path:
3 |
4 |
--------------------------------------------------------------------------------
/MachineLearningPlatformClient/WebContent/WEB-INF/web.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns="http://xmlns.jcp.org/xml/ns/javaee"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/web-app_3_1.xsd"
    version="3.1">

  <display-name>MachineLearningPlatformClient</display-name>

  <filter>
    <filter-name>CharacterEncodingFilter</filter-name>
    <filter-class>org.springframework.web.filter.CharacterEncodingFilter</filter-class>
    <init-param>
      <param-name>encoding</param-name>
      <param-value>UTF-8</param-value>
    </init-param>
    <init-param>
      <param-name>forceEncoding</param-name>
      <param-value>false</param-value>
    </init-param>
  </filter>
  <filter-mapping>
    <filter-name>CharacterEncodingFilter</filter-name>
    <url-pattern>/*</url-pattern>
  </filter-mapping>

  <context-param>
    <param-name>log4jConfigLocation</param-name>
    <param-value>/WEB-INF/classes/log4js.properties</param-value>
  </context-param>
  <context-param>
    <param-name>log4jRefreshInterval</param-name>
    <param-value>60000</param-value>
  </context-param>
  <listener>
    <listener-class>org.springframework.web.util.Log4jConfigListener</listener-class>
  </listener>

  <servlet>
    <servlet-name>dispatcherServlet</servlet-name>
    <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
    <init-param>
      <param-name>contextConfigLocation</param-name>
      <param-value>classpath:springmvc.xml</param-value>
    </init-param>
    <load-on-startup>1</load-on-startup>
  </servlet>
  <servlet-mapping>
    <servlet-name>dispatcherServlet</servlet-name>
    <url-pattern>/</url-pattern>
  </servlet-mapping>

  <servlet-mapping>
    <servlet-name>default</servlet-name>
    <url-pattern>*.html</url-pattern>
  </servlet-mapping>
  <servlet-mapping>
    <servlet-name>default</servlet-name>
    <url-pattern>*.jpg</url-pattern>
  </servlet-mapping>
  <servlet-mapping>
    <servlet-name>default</servlet-name>
    <url-pattern>*.js</url-pattern>
  </servlet-mapping>
  <servlet-mapping>
    <servlet-name>default</servlet-name>
    <url-pattern>*.css</url-pattern>
  </servlet-mapping>

  <welcome-file-list>
    <welcome-file>login.html</welcome-file>
  </welcome-file-list>
</web-app>
--------------------------------------------------------------------------------
/MachineLearningPlatformClient/WebContent/js/lib/jquery.json.min.js:
--------------------------------------------------------------------------------
1 | /*! jQuery JSON plugin v2.6.0 | github.com/Krinkle/jquery-json */
2 | !function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a("object"==typeof exports?require("jquery"):jQuery)}(function($){"use strict";var escape=/["\\\x00-\x1f\x7f-\x9f]/g,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},hasOwn=Object.prototype.hasOwnProperty;$.toJSON="object"==typeof JSON&&JSON.stringify?JSON.stringify:function(a){if(null===a)return"null";var b,c,d,e,f=$.type(a);if("undefined"!==f){if("number"===f||"boolean"===f)return String(a);if("string"===f)return $.quoteString(a);if("function"==typeof a.toJSON)return $.toJSON(a.toJSON());if("date"===f){var g=a.getUTCMonth()+1,h=a.getUTCDate(),i=a.getUTCFullYear(),j=a.getUTCHours(),k=a.getUTCMinutes(),l=a.getUTCSeconds(),m=a.getUTCMilliseconds();return g<10&&(g="0"+g),h<10&&(h="0"+h),j<10&&(j="0"+j),k<10&&(k="0"+k),l<10&&(l="0"+l),m<100&&(m="0"+m),m<10&&(m="0"+m),'"'+i+"-"+g+"-"+h+"T"+j+":"+k+":"+l+"."+m+'Z"'}if(b=[],$.isArray(a)){for(c=0;c
--------------------------------------------------------------------------------
/MachineLearningPlatformClient/WebContent/test.html:
--------------------------------------------------------------------------------
2 |
3 |
4 |
5 | 五子棋游戏
6 |
7 |
8 |
9 |
10 |
11 |
17 |
18 |
--------------------------------------------------------------------------------
/MachineLearningPlatformClient/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>MachineLearningPlatform</groupId>
  <artifactId>MachineLearningPlatformClient</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>MachineLearningPlatformClient</name>
  <url>http://maven.apache.org</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>

  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>javax.servlet</groupId>
      <artifactId>javax.servlet-api</artifactId>
      <version>3.1.0</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.7.3</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>2.7.3</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.7.3</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>6.0.5</version>
    </dependency>
    <dependency>
      <groupId>com.mchange</groupId>
      <artifactId>c3p0</artifactId>
      <version>0.9.5.2</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-core</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-aop</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-beans</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context-support</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>com.thoughtworks.xstream</groupId>
      <artifactId>xstream</artifactId>
      <version>1.4.8</version>
    </dependency>
    <dependency>
      <groupId>commons-collections</groupId>
      <artifactId>commons-collections</artifactId>
      <version>3.2.1</version>
    </dependency>
    <dependency>
      <groupId>commons-lang</groupId>
      <artifactId>commons-lang</artifactId>
      <version>2.6</version>
    </dependency>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
      <version>1.1.3</version>
    </dependency>
    <dependency>
      <groupId>net.sf.ezmorph</groupId>
      <artifactId>ezmorph</artifactId>
      <version>1.0.6</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-webmvc</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-orm</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-tx</artifactId>
      <version>4.1.6.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>1.7.7</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.7.7</version>
    </dependency>
    <dependency>
      <groupId>org.codehaus.jackson</groupId>
      <artifactId>jackson-mapper-asl</artifactId>
      <version>1.9.2</version>
    </dependency>
    <dependency>
      <groupId>org.codehaus.jackson</groupId>
      <artifactId>jackson-core-asl</artifactId>
      <version>1.9.2</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-annotations</artifactId>
      <version>2.4.1</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-core</artifactId>
      <version>2.4.1</version>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
      <version>2.4.1</version>
    </dependency>
    <dependency>
      <groupId>org.freemarker</groupId>
      <artifactId>freemarker</artifactId>
      <version>2.3.20</version>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
      <version>4.5.3</version>
    </dependency>
    <dependency>
      <groupId>org.mybatis</groupId>
      <artifactId>mybatis</artifactId>
      <version>3.2.5</version>
    </dependency>
    <dependency>
      <groupId>org.mybatis</groupId>
      <artifactId>mybatis-spring</artifactId>
      <version>1.2.2</version>
    </dependency>
    <dependency>
      <groupId>dom4j</groupId>
      <artifactId>dom4j</artifactId>
      <version>1.6.1</version>
    </dependency>
    <dependency>
      <groupId>commons-beanutils</groupId>
      <artifactId>commons-beanutils</artifactId>
      <version>1.9.2</version>
    </dependency>
    <dependency>
      <groupId>commons-fileupload</groupId>
      <artifactId>commons-fileupload</artifactId>
      <version>1.3.2</version>
    </dependency>
    <dependency>
      <groupId>jdk.tools</groupId>
      <artifactId>jdk.tools</artifactId>
      <version>1.8</version>
      <scope>system</scope>
      <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
    <dependency>
      <groupId>org.springframework.data</groupId>
      <artifactId>spring-data-redis</artifactId>
      <version>1.6.2.RELEASE</version>
    </dependency>
    <dependency>
      <groupId>redis.clients</groupId>
      <artifactId>jedis</artifactId>
      <version>2.9.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
      <version>1.2.5</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-common</artifactId>
      <version>1.2.5</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-protocol</artifactId>
      <version>1.2.5</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>1.2.5</version>
    </dependency>
  </dependencies>
</project>
--------------------------------------------------------------------------------
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/algorithm/ParameterValueType.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformclient.algorithm;

/**
 * Type of a parameter value.
 *
 * @author hayes, @create 2017-12-11 19:43
 **/
public enum ParameterValueType {

    INT("int"),
    DOUBLE("double"),
    BOOLEAN("boolean"),
    STRING("string");

    private final String valueType;

    ParameterValueType(String valueType) {
        this.valueType = valueType;
    }

    public String getValueType() {
        return valueType;
    }
}
--------------------------------------------------------------------------------
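
Parameter types presumably arrive as the lowercase strings stored by this enum (for example when an algorithm descriptor is parsed), so a reverse lookup from string to constant is the natural companion. A minimal sketch, not part of the repository, assuming the getValueType() accessor shown above:

// Sketch (not part of the repository): resolve a ParameterValueType from its
// lowercase string form, e.g. while parsing an algorithm descriptor.
import com.hhu.machinelearningplatformclient.algorithm.ParameterValueType;

public class ParameterValueTypeDemo {

    // Hypothetical helper: linear scan over the enum constants.
    public static ParameterValueType fromString(String s) {
        for (ParameterValueType t : ParameterValueType.values()) {
            if (t.getValueType().equals(s)) {
                return t;
            }
        }
        throw new IllegalArgumentException("Unknown parameter value type: " + s);
    }

    public static void main(String[] args) {
        System.out.println(fromString("double")); // prints DOUBLE
    }
}
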
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/ByteObjectUtil.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformclient.common;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

public class ByteObjectUtil {

    // Deserialize an object from a byte array; returns null on failure.
    public static Object ByteToObject(byte[] bytes) {
        try (ObjectInputStream objectInputStream =
                new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return objectInputStream.readObject();
        } catch (ClassNotFoundException | IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    // Serialize an object to a byte array; returns null on failure.
    public static byte[] ObjectToByte(Object object) {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {
            objectOutputStream.writeObject(object);
            // Flush before reading the buffer, otherwise the byte array can be truncated.
            objectOutputStream.flush();
            return byteArrayOutputStream.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
}
--------------------------------------------------------------------------------
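
A quick round-trip check of these helpers (a sketch, not part of the repository; any Serializable value works, a String is used here):

// Sketch (not part of the repository): serialize a value with ObjectToByte,
// read it back with ByteToObject, and confirm the two are inverses.
import com.hhu.machinelearningplatformclient.common.ByteObjectUtil;

public class ByteObjectUtilDemo {
    public static void main(String[] args) {
        byte[] bytes = ByteObjectUtil.ObjectToByte("hello");           // serialize
        String restored = (String) ByteObjectUtil.ByteToObject(bytes); // deserialize
        System.out.println("hello".equals(restored));                  // prints true
    }
}
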
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/HBaseUtil.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformclient.common;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseUtil {

    private static HBaseUtil hBaseUtil = new HBaseUtil();

    private Connection connection;
    private static final String ZOOKEEPER_QUORUM = "10.196.83.90,10.196.83.91,10.196.83.92";
    private static final String ZOOKEEPER_CLIENTPORT = "2181";
    private static final String HBASE_ROOTDIR = "hdfs://10.196.83.90:9000/hbase";
    private static final String RETRIES_NUMBER = "3";
    private static final String TABLE_NAME = "task";
    private static final String FAMILY_NAME = "info";

    public static HBaseUtil getInstance() {
        return hBaseUtil;
    }

    // Connect to HBase
    public void connection() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", ZOOKEEPER_QUORUM);
        conf.set("hbase.zookeeper.property.clientPort", ZOOKEEPER_CLIENTPORT); // ZooKeeper client port
        conf.set("hbase.rootdir", HBASE_ROOTDIR);
        conf.set("hbase.client.retries.number", RETRIES_NUMBER);
        this.connection = ConnectionFactory.createConnection(conf);
    }

    // Create the task table if it does not exist yet
    public void createTable() throws IOException {
        HBaseAdmin admin = (HBaseAdmin) connection.getAdmin();
        try {
            if (admin.tableExists(TABLE_NAME)) {
                System.out.println("Table already exists!");
                return;
            }
            HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
            descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes(FAMILY_NAME)));
            admin.createTable(descriptor);
        } finally {
            admin.close();
        }
    }

    /**
     * Insert a value into one column of a row.
     */
    public void putData(String tableName, String rowKey, String familyName, String qualifierName, String value)
            throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName), Bytes.toBytes(value));
            table.put(put);
        } finally {
            table.close();
        }
    }

    /**
     * Look up a whole row by rowkey.
     */
    public Result getResult(String tableName, String rowKey) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            return table.get(new Get(Bytes.toBytes(rowKey)));
        } finally {
            table.close();
        }
    }

    /**
     * Read the latest value of one column, or null if the column is absent.
     */
    public String getValue(String tableName, String rowKey, String familyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Result result = table.get(new Get(Bytes.toBytes(rowKey)));
            Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName));
            return cell == null ? null : Bytes.toString(CellUtil.cloneValue(cell));
        } finally {
            table.close();
        }
    }

    /**
     * Collect the values of one column for every row whose key starts with rowPrefix.
     */
    public List<String> getValueByRowPrefix(String tableName, String rowPrefix, String familyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        List<String> values = new ArrayList<>();
        try {
            Scan scan = new Scan();
            scan.setFilter(new PrefixFilter(Bytes.toBytes(rowPrefix)));
            table.getScanner(scan).forEach((result) -> {
                Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName));
                if (cell != null) {
                    values.add(Bytes.toString(CellUtil.cloneValue(cell)));
                }
            });
        } finally {
            table.close();
        }
        return values;
    }

    /**
     * Delete one column of a row.
     */
    public void deleteColumn(String tableName, String rowKey, String familyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            delete.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName));
            table.delete(delete);
        } finally {
            table.close();
        }
    }

    /**
     * Delete an entire row by rowkey.
     */
    public void deleteColumn(String tableName, String rowKey) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            table.delete(new Delete(Bytes.toBytes(rowKey)));
        } finally {
            table.close();
        }
    }

    /**
     * Drop a table.
     */
    public void dropTable(String tableName) throws Exception {
        HBaseAdmin admin = (HBaseAdmin) connection.getAdmin();
        try {
            admin.disableTable(TableName.valueOf(tableName));
            admin.deleteTable(TableName.valueOf(tableName));
        } finally {
            admin.close();
        }
    }

    // Close the HBase connection
    public void close() throws IOException {
        connection.close();
    }
}
--------------------------------------------------------------------------------
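
A minimal usage sketch for this singleton (not part of the repository; it assumes the hard-coded ZooKeeper quorum above is reachable and uses the same "task"/"info" names the class is built around):

// Sketch (not part of the repository): store one field of a task row and
// read it back through HBaseUtil.
import com.hhu.machinelearningplatformclient.common.HBaseUtil;

public class HBaseUtilDemo {
    public static void main(String[] args) throws Exception {
        HBaseUtil hbase = HBaseUtil.getInstance();
        hbase.connection();          // connect using the hard-coded quorum
        hbase.createTable();         // no-op if the "task" table already exists
        hbase.putData("task", "task-001", "info", "state", "RUNNING");
        System.out.println(hbase.getValue("task", "task-001", "info", "state")); // RUNNING
        hbase.close();
    }
}
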
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/HDFSUtils.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformclient.common;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

import javax.annotation.PostConstruct;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class HDFSUtils {

    private FileSystem fileSystem;
    private String HDFS_URI;

    private String hdfsIP;
    private String hdfsPort;
    private String hdfsUser;
    private String hdfsUri;

    public String getHdfsIP() {
        return hdfsIP;
    }

    public void setHdfsIP(String hdfsIP) {
        this.hdfsIP = hdfsIP;
    }

    public String getHdfsPort() {
        return hdfsPort;
    }

    public void setHdfsPort(String hdfsPort) {
        this.hdfsPort = hdfsPort;
    }

    public String getHdfsUser() {
        return hdfsUser;
    }

    public void setHdfsUser(String hdfsUser) {
        this.hdfsUser = hdfsUser;
    }

    public String getHdfsUri() {
        return hdfsUri;
    }

    public void setHdfsUri(String hdfsUri) {
        this.hdfsUri = hdfsUri;
    }

    @PostConstruct
    public void init() throws Exception {
        // Build the HDFS address from the injected properties
        Configuration conf = new Configuration();
        HDFS_URI = "hdfs://" + hdfsIP + ":" + hdfsPort + hdfsUri;
        URI uri = new URI("hdfs://" + hdfsIP + ":" + hdfsPort);
        fileSystem = FileSystem.get(uri, conf, hdfsUser);
    }

    public FileSystem getFileSystem() throws Exception {
        if (fileSystem == null) {
            init();
        }
        return fileSystem;
    }

    // Create a directory
    public void createDirectory(String path) throws Exception {
        Path hdfsPath = new Path(HDFS_URI + path);
        if (fileSystem.exists(hdfsPath)) {
            System.out.println("Directory already exists!");
        } else {
            fileSystem.mkdirs(hdfsPath);
        }
    }

    // Delete a file or directory (recursively)
    public void delete(String path) throws Exception {
        Path hdfsPath = new Path(HDFS_URI + path);
        if (!fileSystem.exists(hdfsPath)) {
            System.out.println("Directory does not exist!");
        } else {
            fileSystem.delete(hdfsPath, true);
        }
    }

    // List all files under a directory
    public FileStatus[] list(String path) throws FileNotFoundException, IOException {
        Path hdfsPath = new Path(HDFS_URI + path);
        FileStatus[] lists = null;
        if (!fileSystem.exists(hdfsPath)) {
            System.out.println("Directory does not exist!");
        } else if (fileSystem.isFile(hdfsPath)) {
            System.out.println("Not a directory!");
        } else {
            lists = fileSystem.listStatus(hdfsPath);
        }
        return lists;
    }

    // Upload a local file
    public void copyFileToHDFS(boolean delSrc, boolean overwrite, String srcFile, String destPath) throws IOException {
        Path srcPath = new Path(srcFile);
        Path hdfsPath = new Path(HDFS_URI + destPath);
        if (!fileSystem.exists(hdfsPath)) {
            System.out.println("Directory does not exist!");
            return;
        }
        fileSystem.copyFromLocalFile(delSrc, overwrite, srcPath, hdfsPath);
    }

    // Upload a file from an input stream
    public void uploadFileStream(boolean overwrite, InputStream inputStream, String destPath) throws IllegalArgumentException, IOException {
        FSDataOutputStream outputStream = fileSystem.create(new Path(HDFS_URI + destPath), overwrite);
        try {
            IOUtils.copy(inputStream, outputStream);
        } finally {
            // Close to flush buffered bytes; otherwise the upload can be incomplete.
            outputStream.close();
        }
    }

    // Download a file
    public void getFile(String srcFile, String destPath) throws IOException {
        Path srcPath = new Path(HDFS_URI + srcFile);
        Path destFile = new Path(destPath);
        if (!fileSystem.exists(srcPath)) {
            System.out.println("Source file does not exist!");
            return;
        }
        fileSystem.copyToLocalFile(srcPath, destFile);
    }

    // Check whether a file or directory exists
    public boolean existDir(String filePath) throws IOException {
        if (StringUtils.isEmpty(filePath)) {
            return false;
        }
        Path path = new Path(filePath);
        if (!fileSystem.exists(path)) {
            System.out.println("File or directory does not exist!");
            return false;
        }
        return true;
    }

    // Rename a file or directory
    public void rename(String srcPath, String dstPath) throws IOException {
        Path src = new Path(HDFS_URI + srcPath);
        Path dst = new Path(HDFS_URI + dstPath);
        if (!fileSystem.exists(src)) {
            System.out.println("File or directory does not exist!");
            return;
        }
        fileSystem.rename(src, dst);
    }

    // Get information about the HDFS datanodes
    public DatanodeInfo[] getHDFSNodes() throws IOException {
        DistributedFileSystem hdfs = (DistributedFileSystem) fileSystem;
        return hdfs.getDataNodeStats();
    }

    public void close() throws IOException {
        fileSystem.close();
    }
}
--------------------------------------------------------------------------------
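
In the web application this bean is presumably populated from hdfs.properties through the Spring context; the sketch below wires it by hand instead (not part of the repository; host, port, user, and base path are hypothetical placeholders):

// Sketch (not part of the repository): configure HDFSUtils manually and
// create a working directory. All setter values are placeholders.
import com.hhu.machinelearningplatformclient.common.HDFSUtils;

public class HDFSUtilsDemo {
    public static void main(String[] args) throws Exception {
        HDFSUtils hdfs = new HDFSUtils();
        hdfs.setHdfsIP("10.196.83.90"); // namenode host (placeholder)
        hdfs.setHdfsPort("9000");       // namenode port (placeholder)
        hdfs.setHdfsUser("hadoop");     // HDFS user (placeholder)
        hdfs.setHdfsUri("/mlplatform"); // base path prefixed to every operation (placeholder)
        hdfs.init();                    // normally invoked by Spring via @PostConstruct
        hdfs.createDirectory("/tasks"); // creates <base>/tasks under hdfs://host:port
        hdfs.close();
    }
}
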
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/RedisUtils.java:
--------------------------------------------------------------------------------
1 | package com.hhu.machinelearningplatformclient.common;
2 |
3 | import javax.annotation.Resource;
4 |
5 | import org.springframework.dao.DataAccessException;
6 | import org.springframework.data.redis.connection.RedisConnection;
7 | import org.springframework.data.redis.core.RedisCallback;
8 | import org.springframework.data.redis.core.RedisTemplate;
9 | import org.springframework.stereotype.Component;
10 |
11 | @Component
12 | public class RedisUtils {
13 |
14 | @Resource
15 | private RedisTemplate redisTemplate;
16 |
17 | public Object get(Object key) {
18 | String keyRedis=key.toString();
19 | Object object=null;
20 | object=redisTemplate.execute(new RedisCallback