├── MachineLearningPlatformClient ├── WebContent │ ├── META-INF │ │ └── MANIFEST.MF │ ├── WEB-INF │ │ └── web.xml │ ├── js │ │ ├── lib │ │ │ ├── jquery-3.2.1.min.js │ │ │ └── jquery.json.min.js │ │ └── test.js │ └── test.html ├── pom.xml ├── src │ └── main │ │ ├── java │ │ └── com │ │ │ └── hhu │ │ │ └── machinelearningplatformclient │ │ │ ├── algorithm │ │ │ └── ParameterValueType.java │ │ │ ├── common │ │ │ ├── ByteObjectUtil.java │ │ │ ├── HBaseUtil.java │ │ │ ├── HDFSUtils.java │ │ │ ├── RedisUtils.java │ │ │ └── ResourcePath.java │ │ │ ├── config │ │ │ └── SystemConfig.java │ │ │ ├── controller │ │ │ └── TaskController.java │ │ │ ├── data │ │ │ ├── DataFile.java │ │ │ ├── DataFileType.java │ │ │ └── FieldInfo.java │ │ │ ├── entity │ │ │ ├── MLAlgorithm.java │ │ │ ├── Parameter.java │ │ │ ├── Response.java │ │ │ ├── ResponseCode.java │ │ │ └── TaskInit.java │ │ │ ├── exception │ │ │ └── ConfigInitException.java │ │ │ ├── model │ │ │ └── AlgorithmModel.java │ │ │ └── task │ │ │ ├── SparkTaskAlgorithm.java │ │ │ ├── SparkTaskExecutor.java │ │ │ ├── TaskExecution.java │ │ │ ├── TaskInfo.java │ │ │ ├── TaskManager.java │ │ │ ├── TaskState.java │ │ │ └── TaskType.java │ │ └── resources │ │ ├── hdfs.properties │ │ ├── kubernetes.properties │ │ ├── log4js.properties │ │ ├── redis.properties │ │ └── springmvc.xml └── target │ └── classes │ ├── META-INF │ ├── MANIFEST.MF │ └── maven │ │ └── MachineLearningPlatform │ │ └── MachineLearningPlatformClient │ │ ├── pom.properties │ │ └── pom.xml │ ├── com │ └── hhu │ │ └── machinelearningplatformclient │ │ ├── algorithm │ │ └── ParameterValueType.class │ │ ├── common │ │ ├── ByteObjectUtil.class │ │ ├── HBaseUtil.class │ │ ├── HDFSUtils.class │ │ ├── RedisUtils$1.class │ │ ├── RedisUtils$2.class │ │ ├── RedisUtils$3.class │ │ ├── RedisUtils.class │ │ └── ResourcePath.class │ │ ├── config │ │ └── SystemConfig.class │ │ ├── controller │ │ └── TaskController.class │ │ ├── data │ │ ├── DataFile.class │ │ ├── DataFileType.class │ 
│ └── FieldInfo.class │ │ ├── entity │ │ ├── MLAlgorithm.class │ │ ├── Parameter.class │ │ ├── Response.class │ │ ├── ResponseCode.class │ │ └── TaskInit.class │ │ ├── exception │ │ └── ConfigInitException.class │ │ ├── model │ │ └── AlgorithmModel.class │ │ └── task │ │ ├── SparkTaskAlgorithm.class │ │ ├── SparkTaskExecutor.class │ │ ├── TaskExecution.class │ │ ├── TaskInfo.class │ │ ├── TaskManager.class │ │ ├── TaskState.class │ │ └── TaskType.class │ ├── hdfs.properties │ ├── kubernetes.properties │ ├── log4js.properties │ ├── redis.properties │ └── springmvc.xml ├── MachineLearningPlatformServer ├── pom.xml ├── src │ ├── main │ │ ├── MLAlgorithmLoaderTest.java │ │ ├── java │ │ │ └── com │ │ │ │ └── hhu │ │ │ │ └── machinelearningplatformserver │ │ │ │ ├── algorithm │ │ │ │ ├── ComponentType.java │ │ │ │ ├── MLAlgorithmDesc.java │ │ │ │ ├── MLAlgorithmLoader.java │ │ │ │ ├── ParameterDesc.java │ │ │ │ ├── ParameterValueType.java │ │ │ │ └── UsageType.java │ │ │ │ ├── common │ │ │ │ ├── ByteObjectUtil.java │ │ │ │ ├── ConfigUtils.java │ │ │ │ ├── HBaseUtil.java │ │ │ │ ├── HDFSUtils.java │ │ │ │ ├── JRedisPoolConfig.java │ │ │ │ ├── JedisUtils.java │ │ │ │ ├── RandomUtil.java │ │ │ │ └── ResourcePath.java │ │ │ │ ├── data │ │ │ │ ├── DataFile.java │ │ │ │ ├── DataFileMapper.java │ │ │ │ ├── DataFileType.java │ │ │ │ ├── FieldInfo.java │ │ │ │ ├── LineParse.java │ │ │ │ ├── PersistDataset.java │ │ │ │ └── SparkDataFileConverter.java │ │ │ │ ├── exception │ │ │ │ ├── CantConverException.java │ │ │ │ └── ConfigInitException.java │ │ │ │ ├── model │ │ │ │ └── AlgorithmModel.java │ │ │ │ ├── proxy │ │ │ │ ├── EstimatorProxy.java │ │ │ │ ├── MLAlgorithmProxy.java │ │ │ │ ├── ModelProxy.java │ │ │ │ └── TransformerProxy.java │ │ │ │ ├── submit │ │ │ │ ├── LoadTaskInfo.java │ │ │ │ └── Submiter.java │ │ │ │ └── task │ │ │ │ ├── SparkTaskAlgorithm.java │ │ │ │ ├── SparkTaskInfo.java │ │ │ │ ├── TaskInfo.java │ │ │ │ ├── TaskState.java │ │ │ │ └── TaskType.java │ │ └── 
resources │ │ │ ├── hdfs.properties │ │ │ └── redis.properties │ └── test │ │ ├── java │ │ ├── Test.java │ │ └── Test1.java │ │ └── resources │ │ ├── datafile.csv │ │ └── test.json └── target │ ├── classes │ ├── META-INF │ │ ├── MANIFEST.MF │ │ └── maven │ │ │ └── MachineLearningPlatform │ │ │ └── MachineLearningPlatformServer │ │ │ ├── pom.properties │ │ │ └── pom.xml │ ├── com │ │ └── hhu │ │ │ └── machinelearningplatformserver │ │ │ ├── algorithm │ │ │ ├── ComponentType.class │ │ │ ├── MLAlgorithmDesc.class │ │ │ ├── MLAlgorithmLoader.class │ │ │ ├── ParameterDesc.class │ │ │ ├── ParameterValueType.class │ │ │ └── UsageType.class │ │ │ ├── common │ │ │ ├── ByteObjectUtil.class │ │ │ ├── ConfigUtils.class │ │ │ ├── HBaseUtil.class │ │ │ ├── HDFSUtils.class │ │ │ ├── JRedisPoolConfig.class │ │ │ ├── JedisUtils.class │ │ │ ├── RandomUtil.class │ │ │ └── ResourcePath.class │ │ │ ├── data │ │ │ ├── DataFile.class │ │ │ ├── DataFileMapper.class │ │ │ ├── DataFileType.class │ │ │ ├── FieldInfo.class │ │ │ ├── LineParse.class │ │ │ ├── PersistDataset.class │ │ │ └── SparkDataFileConverter.class │ │ │ ├── exception │ │ │ ├── CantConverException.class │ │ │ └── ConfigInitException.class │ │ │ ├── model │ │ │ └── AlgorithmModel.class │ │ │ ├── proxy │ │ │ ├── EstimatorProxy.class │ │ │ ├── MLAlgorithmProxy.class │ │ │ ├── ModelProxy.class │ │ │ └── TransformerProxy.class │ │ │ ├── submit │ │ │ ├── LoadTaskInfo.class │ │ │ └── Submiter.class │ │ │ └── task │ │ │ ├── SparkTaskAlgorithm.class │ │ │ ├── SparkTaskInfo.class │ │ │ ├── TaskInfo.class │ │ │ ├── TaskState.class │ │ │ └── TaskType.class │ ├── hdfs.properties │ └── redis.properties │ └── test-classes │ ├── Test.class │ ├── Test1.class │ ├── datafile.csv │ └── test.json └── README.md /MachineLearningPlatformClient/WebContent/META-INF/MANIFEST.MF: -------------------------------------------------------------------------------- 1 | Manifest-Version: 1.0 2 | Class-Path: 3 | 4 | 
-------------------------------------------------------------------------------- /MachineLearningPlatformClient/WebContent/WEB-INF/web.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | MachineLearningPlatformClient 8 | 9 | 10 | CharacterEncodingFilter 11 | org.springframework.web.filter.CharacterEncodingFilter 12 | 13 | encoding 14 | UTF-8 15 | 16 | 17 | forceEncoding 18 | false 19 | 20 | 21 | 22 | CharacterEncodingFilter 23 | /* 24 | 25 | 26 | log4jConfigLocation 27 | /WEB-INF/classes/log4js.properties 28 | 29 | 30 | log4jRefreshInterval 31 | 60000 32 | 33 | 34 | org.springframework.web.util.Log4jConfigListener 35 | 36 | 37 | 38 | dispatcherServlet 39 | org.springframework.web.servlet.DispatcherServlet 40 | 41 | contextConfigLocation 42 | classpath:springmvc.xml 43 | 44 | 1 45 | 46 | 47 | dispatcherServlet 48 | / 49 | 50 | 51 | default 52 | *.html 53 | 54 | 55 | default 56 | *.jpg 57 | 58 | 59 | default 60 | *.js 61 | 62 | 63 | default 64 | *.css 65 | 66 | 67 | login.html 68 | 69 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/WebContent/js/lib/jquery.json.min.js: -------------------------------------------------------------------------------- 1 | /*! 
jQuery JSON plugin v2.6.0 | github.com/Krinkle/jquery-json */ 2 | !function(a){"function"==typeof define&&define.amd?define(["jquery"],a):a("object"==typeof exports?require("jquery"):jQuery)}(function($){"use strict";var escape=/["\\\x00-\x1f\x7f-\x9f]/g,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},hasOwn=Object.prototype.hasOwnProperty;$.toJSON="object"==typeof JSON&&JSON.stringify?JSON.stringify:function(a){if(null===a)return"null";var b,c,d,e,f=$.type(a);if("undefined"!==f){if("number"===f||"boolean"===f)return String(a);if("string"===f)return $.quoteString(a);if("function"==typeof a.toJSON)return $.toJSON(a.toJSON());if("date"===f){var g=a.getUTCMonth()+1,h=a.getUTCDate(),i=a.getUTCFullYear(),j=a.getUTCHours(),k=a.getUTCMinutes(),l=a.getUTCSeconds(),m=a.getUTCMilliseconds();return g<10&&(g="0"+g),h<10&&(h="0"+h),j<10&&(j="0"+j),k<10&&(k="0"+k),l<10&&(l="0"+l),m<100&&(m="0"+m),m<10&&(m="0"+m),'"'+i+"-"+g+"-"+h+"T"+j+":"+k+":"+l+"."+m+'Z"'}if(b=[],$.isArray(a)){for(c=0;c 2 | 3 | 4 | 5 | 五子棋游戏 6 | 7 | 8 | 9 | 10 | 11 |
12 | 13 | 14 | 15 | 16 |
17 | 18 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | MachineLearningPlatform 6 | MachineLearningPlatformClient 7 | 0.0.1-SNAPSHOT 8 | jar 9 | 10 | MachineLearningPlatformClient 11 | http://maven.apache.org 12 | 13 | 14 | UTF-8 15 | 16 | 17 | 18 | 19 | junit 20 | junit 21 | 3.8.1 22 | test 23 | 24 | 25 | javax.servlet 26 | javax.servlet-api 27 | 3.1.0 28 | provided 29 | 30 | 31 | org.apache.hadoop 32 | hadoop-common 33 | 2.7.3 34 | 35 | 36 | org.apache.hadoop 37 | hadoop-hdfs 38 | 2.7.3 39 | 40 | 41 | org.apache.hadoop 42 | hadoop-client 43 | 2.7.3 44 | 45 | 46 | javax.servlet 47 | javax.servlet-api 48 | 3.1.0 49 | provided 50 | 51 | 52 | mysql 53 | mysql-connector-java 54 | 6.0.5 55 | 56 | 57 | com.mchange 58 | c3p0 59 | 0.9.5.2 60 | 61 | 62 | org.springframework 63 | spring-core 64 | 4.1.6.RELEASE 65 | 66 | 67 | org.springframework 68 | spring-aop 69 | 4.1.6.RELEASE 70 | 71 | 72 | org.springframework 73 | spring-beans 74 | 4.1.6.RELEASE 75 | 76 | 77 | org.springframework 78 | spring-context 79 | 4.1.6.RELEASE 80 | 81 | 82 | org.springframework 83 | spring-context-support 84 | 4.1.6.RELEASE 85 | 86 | 87 | com.thoughtworks.xstream 88 | xstream 89 | 1.4.8 90 | 91 | 92 | commons-collections 93 | commons-collections 94 | 3.2.1 95 | 96 | 97 | commons-lang 98 | commons-lang 99 | 2.6 100 | 101 | 102 | commons-logging 103 | commons-logging 104 | 1.1.3 105 | 106 | 107 | net.sf.ezmorph 108 | ezmorph 109 | 1.0.6 110 | 111 | 112 | org.springframework 113 | spring-webmvc 114 | 4.1.6.RELEASE 115 | 116 | 117 | org.springframework 118 | spring-orm 119 | 4.1.6.RELEASE 120 | 121 | 122 | org.springframework 123 | spring-tx 124 | 4.1.6.RELEASE 125 | 126 | 127 | org.slf4j 128 | slf4j-api 129 | 1.7.7 130 | 131 | 132 | org.slf4j 133 | slf4j-log4j12 134 | 1.7.7 135 | 136 | 137 | org.codehaus.jackson 138 | 
jackson-mapper-asl 139 | 1.9.2 140 | 141 | 142 | org.codehaus.jackson 143 | jackson-core-asl 144 | 1.9.2 145 | 146 | 147 | com.fasterxml.jackson.core 148 | jackson-annotations 149 | 2.4.1 150 | 151 | 152 | com.fasterxml.jackson.core 153 | jackson-core 154 | 2.4.1 155 | 156 | 157 | com.fasterxml.jackson.core 158 | jackson-databind 159 | 2.4.1 160 | 161 | 162 | org.freemarker 163 | freemarker 164 | 2.3.20 165 | 166 | 167 | org.apache.httpcomponents 168 | httpclient 169 | 4.5.3 170 | 171 | 172 | org.mybatis 173 | mybatis 174 | 3.2.5 175 | 176 | 177 | org.mybatis 178 | mybatis-spring 179 | 1.2.2 180 | 181 | 182 | dom4j 183 | dom4j 184 | 1.6.1 185 | 186 | 187 | commons-beanutils 188 | commons-beanutils 189 | 1.9.2 190 | 191 | 192 | commons-fileupload 193 | commons-fileupload 194 | 1.3.2 195 | 196 | 197 | jdk.tools 198 | jdk.tools 199 | 1.8 200 | system 201 | ${JAVA_HOME}/lib/tools.jar 202 | 203 | 204 | org.springframework.data 205 | spring-data-redis 206 | 1.6.2.RELEASE 207 | 208 | 209 | redis.clients 210 | jedis 211 | 2.9.0 212 | 213 | 214 | org.apache.hbase 215 | hbase-client 216 | 1.2.5 217 | 218 | 219 | org.apache.hbase 220 | hbase-common 221 | 1.2.5 222 | 223 | 224 | org.apache.hbase 225 | hbase-protocol 226 | 1.2.5 227 | 228 | 229 | org.apache.hbase 230 | hbase-server 231 | 1.2.5 232 | 233 | 234 | 235 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/algorithm/ParameterValueType.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.algorithm; 2 | 3 | /** 4 | * 参数值的类型 5 | * 6 | * @author hayes, @create 2017-12-11 19:43 7 | **/ 8 | public enum ParameterValueType { 9 | 10 | INT("int"), 11 | DOUBLE("double"), 12 | BOOLEAN("boolean"), 13 | STRING("string"); 14 | 15 | private String valueType; 16 | 17 | ParameterValueType(String valueType) { 18 | this.valueType = valueType; 
19 | } 20 | 21 | 22 | } 23 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/ByteObjectUtil.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.common; 2 | 3 | import java.io.ByteArrayInputStream; 4 | import java.io.ByteArrayOutputStream; 5 | import java.io.IOException; 6 | import java.io.ObjectInputStream; 7 | import java.io.ObjectOutputStream; 8 | 9 | public class ByteObjectUtil { 10 | 11 | public static Object ByteToObject(byte[] bytes) { 12 | Object object=null; 13 | try { 14 | ByteArrayInputStream byteArrayInputStream=new ByteArrayInputStream(bytes); 15 | ObjectInputStream objectInputStream=new ObjectInputStream(byteArrayInputStream); 16 | object=objectInputStream.readObject(); 17 | objectInputStream.close(); 18 | byteArrayInputStream.close(); 19 | } catch (ClassNotFoundException e) { 20 | // TODO Auto-generated catch block 21 | e.printStackTrace(); 22 | } catch (IOException e) { 23 | // TODO Auto-generated catch block 24 | e.printStackTrace(); 25 | } 26 | return object; 27 | } 28 | 29 | public static byte[] ObjectToByte(Object object) { 30 | byte[] bytes=null; 31 | ByteArrayOutputStream byteArrayOutputStream=new ByteArrayOutputStream(); 32 | try { 33 | ObjectOutputStream objectOutputStream=new ObjectOutputStream(byteArrayOutputStream); 34 | objectOutputStream.writeObject(object); 35 | //objectOutputStream.flush(); 36 | bytes=byteArrayOutputStream.toByteArray(); 37 | objectOutputStream.close(); 38 | byteArrayOutputStream.close(); 39 | } catch (IOException e) { 40 | // TODO Auto-generated catch block 41 | e.printStackTrace(); 42 | } 43 | return bytes; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/HBaseUtil.java: 
-------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.common; 2 | 3 | import java.io.IOException; 4 | import java.util.ArrayList; 5 | import java.util.List; 6 | 7 | import org.apache.hadoop.conf.Configuration; 8 | import org.apache.hadoop.hbase.Cell; 9 | import org.apache.hadoop.hbase.CellUtil; 10 | import org.apache.hadoop.hbase.HBaseConfiguration; 11 | import org.apache.hadoop.hbase.HColumnDescriptor; 12 | import org.apache.hadoop.hbase.HTableDescriptor; 13 | import org.apache.hadoop.hbase.TableName; 14 | import org.apache.hadoop.hbase.client.Connection; 15 | import org.apache.hadoop.hbase.client.ConnectionFactory; 16 | import org.apache.hadoop.hbase.client.Delete; 17 | import org.apache.hadoop.hbase.client.Get; 18 | import org.apache.hadoop.hbase.client.HBaseAdmin; 19 | import org.apache.hadoop.hbase.client.HTable; 20 | import org.apache.hadoop.hbase.client.Put; 21 | import org.apache.hadoop.hbase.client.Result; 22 | import org.apache.hadoop.hbase.client.Scan; 23 | import org.apache.hadoop.hbase.filter.PrefixFilter; 24 | import org.apache.hadoop.hbase.util.Bytes; 25 | 26 | public class HBaseUtil { 27 | 28 | private static HBaseUtil hBaseUtil=new HBaseUtil(); 29 | 30 | private Connection connection; 31 | private static final String ZOOKEEPER_QUORUM="10.196.83.90,10.196.83.91,10.196.83.92"; 32 | private static final String ZOOKEEPER_CLIENTPORT="2181"; 33 | private static final String HBASE_ROOTDIR="hdfs://10.196.83.90:9000/hbase"; 34 | private static final String RETRIES_NUMBER="3"; 35 | private static final String TABLE_NAME="task"; 36 | private static final String FAMILY_NAME="info"; 37 | 38 | public static HBaseUtil getInstance() { 39 | return hBaseUtil; 40 | } 41 | 42 | //连接HBase 43 | public void connection() throws IOException { 44 | Configuration conf=HBaseConfiguration.create(); 45 | conf.set("hbase.zookeeper.quorum", ZOOKEEPER_QUORUM); 46 | 
conf.set("hbase.zookeeper.property.clientPort", ZOOKEEPER_CLIENTPORT);//端口号 47 | conf.set("hbase.rootdir", HBASE_ROOTDIR); 48 | conf.set("hbase.client.retries.number", RETRIES_NUMBER); 49 | Connection connection=ConnectionFactory.createConnection(conf); 50 | this.connection=connection; 51 | //this.table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 52 | } 53 | 54 | //建表 55 | public void createTable() throws IOException { 56 | HBaseAdmin admin=null; 57 | try { 58 | admin=(HBaseAdmin) connection.getAdmin(); 59 | if(admin.tableExists(TABLE_NAME)) { 60 | System.out.println("表已存在!"); 61 | return; 62 | } 63 | HTableDescriptor descriptor=new HTableDescriptor(TableName.valueOf(TABLE_NAME)); 64 | HColumnDescriptor columnDescriptor=new HColumnDescriptor(Bytes.toBytes(FAMILY_NAME)); 65 | descriptor.addFamily(columnDescriptor); 66 | admin.createTable(descriptor); 67 | } catch (IOException e) { 68 | // TODO Auto-generated catch block 69 | e.printStackTrace(); 70 | } finally { 71 | admin.close(); 72 | } 73 | } 74 | 75 | /** 76 | * 插入数据 77 | * 78 | * @param tableName 79 | * @param rowKey 80 | * @param familyName 81 | * @param qualifierName 82 | * @param value 83 | * @throws Exception 84 | */ 85 | public void putData(String tableName, String rowKey, String familyName, String qualifierName, String value) 86 | throws Exception { 87 | HTable table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 88 | Put put = new Put(Bytes.toBytes(rowKey)); 89 | put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName), Bytes.toBytes(value)); 90 | table.put(put); 91 | table.close(); 92 | } 93 | 94 | /** 95 | * 根据rowkey 查询 96 | * 97 | * @param tableName 98 | * @param rowKey 99 | * @return 100 | * @throws Exception 101 | */ 102 | public Result getResult(String tableName, String rowKey) throws Exception { 103 | HTable table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 104 | Get get = new Get(Bytes.toBytes(rowKey)); 105 | Result result = 
table.get(get); 106 | table.close(); 107 | return result; 108 | } 109 | 110 | /** 111 | * 查询指定的某列 112 | * 113 | * @param tableName 114 | * @param rowKey 115 | * @param familyName 116 | * @param qualifierName 117 | * @return 118 | * @throws Exception 119 | */ 120 | public String getValue(String tableName, String rowKey, String familyName, String qualifierName) throws Exception { 121 | HTable table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 122 | Get get = new Get(Bytes.toBytes(rowKey)); 123 | Result result = table.get(get); 124 | Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName)); 125 | if (cell == null) { 126 | return null; 127 | } 128 | table.close(); 129 | return Bytes.toString(CellUtil.cloneValue(cell)); 130 | } 131 | 132 | /** 133 | * 根据Row的前缀获得value 134 | * 135 | * @param tableName 136 | * @param rowPrefix 137 | * @param familyName 138 | * @param qualifierName 139 | * @return 140 | * @throws Exception 141 | */ 142 | public List getValueByRowPrefix(String tableName, String rowPrefix, String familyName, String qualifierName) throws Exception { 143 | HTable table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 144 | List values = new ArrayList<>(); 145 | 146 | Scan scan = new Scan(); 147 | scan.setFilter(new PrefixFilter(Bytes.toBytes(rowPrefix))); 148 | table.getScanner(scan).forEach((result) -> { 149 | Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName)); 150 | if (cell != null) { 151 | values.add(Bytes.toString(CellUtil.cloneValue(cell))); 152 | } 153 | }); 154 | table.close(); 155 | return values; 156 | } 157 | 158 | /** 159 | * 删除指定某列 160 | * 161 | * @param tableName 162 | * @param rowKey 163 | * @param falilyName 164 | * @param qualifierName 165 | * @throws Exception 166 | */ 167 | public void deleteColumn(String tableName, String rowKey, String falilyName, String qualifierName) throws Exception { 168 | HTable table=(HTable) 
connection.getTable(TableName.valueOf(TABLE_NAME)); 169 | Delete delete = new Delete(Bytes.toBytes(rowKey)); 170 | delete.addColumn(Bytes.toBytes(falilyName), Bytes.toBytes(qualifierName)); 171 | table.delete(delete); 172 | table.close(); 173 | } 174 | 175 | /** 176 | * 删除指定的某个rowkey 177 | * 178 | * @param tableName 179 | * @param rowKey 180 | * @throws Exception 181 | */ 182 | public void deleteColumn(String tableName, String rowKey) throws Exception { 183 | HTable table=(HTable) connection.getTable(TableName.valueOf(TABLE_NAME)); 184 | Delete delete = new Delete(Bytes.toBytes(rowKey)); 185 | table.delete(delete); 186 | table.close(); 187 | } 188 | 189 | /** 190 | * 删除表 191 | * 192 | * @param tableName 193 | * @throws Exception 194 | */ 195 | public void dropTable(String tableName) throws Exception { 196 | HBaseAdmin admin=(HBaseAdmin) connection.getAdmin(); 197 | admin.disableTable(TableName.valueOf(tableName)); 198 | admin.deleteTable(TableName.valueOf(tableName)); 199 | admin.close(); 200 | } 201 | 202 | //关闭HBase连接 203 | public void close() throws IOException { 204 | //table.close(); 205 | connection.close(); 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/HDFSUtils.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.common; 2 | 3 | import java.io.FileNotFoundException; 4 | import java.io.IOException; 5 | import java.io.InputStream; 6 | import java.net.URI; 7 | 8 | import javax.annotation.PostConstruct; 9 | 10 | import org.apache.commons.io.IOUtils; 11 | import org.apache.commons.lang.StringUtils; 12 | import org.apache.hadoop.conf.Configuration; 13 | import org.apache.hadoop.fs.FSDataOutputStream; 14 | import org.apache.hadoop.fs.FileStatus; 15 | import org.apache.hadoop.fs.FileSystem; 16 | import org.apache.hadoop.fs.Path; 17 | 
import org.apache.hadoop.hdfs.DistributedFileSystem; 18 | import org.apache.hadoop.hdfs.protocol.DatanodeInfo; 19 | 20 | public class HDFSUtils { 21 | 22 | private FileSystem fileSystem; 23 | private String HDFS_URI; 24 | 25 | private String hdfsIP; 26 | private String hdfsPort; 27 | private String hdfsUser; 28 | private String hdfsUri; 29 | 30 | public String getHdfsIP() { 31 | return hdfsIP; 32 | } 33 | 34 | public void setHdfsIP(String hdfsIP) { 35 | this.hdfsIP = hdfsIP; 36 | } 37 | 38 | public String getHdfsPort() { 39 | return hdfsPort; 40 | } 41 | 42 | public void setHdfsPort(String hdfsPort) { 43 | this.hdfsPort = hdfsPort; 44 | } 45 | 46 | public String getHdfsUser() { 47 | return hdfsUser; 48 | } 49 | 50 | public void setHdfsUser(String hdfsUser) { 51 | this.hdfsUser = hdfsUser; 52 | } 53 | 54 | public String getHdfsUri() { 55 | return hdfsUri; 56 | } 57 | 58 | public void setHdfsUri(String hdfsUri) { 59 | this.hdfsUri = hdfsUri; 60 | } 61 | 62 | @PostConstruct 63 | public void init() throws Exception { 64 | //读取HDFS地址 65 | Configuration conf=new Configuration();; 66 | HDFS_URI="hdfs://"+hdfsIP+":"+hdfsPort+hdfsUri; 67 | URI uri; 68 | try { 69 | uri=new URI("hdfs://"+hdfsIP+":"+hdfsPort); 70 | fileSystem=FileSystem.get(uri, conf, hdfsUser); 71 | } catch (Exception e) { 72 | // TODO Auto-generated catch block 73 | throw e; 74 | } 75 | } 76 | 77 | public FileSystem getFileSystem() throws Exception { 78 | if(fileSystem!=null) { 79 | return fileSystem; 80 | } 81 | else { 82 | init(); 83 | return fileSystem; 84 | } 85 | } 86 | 87 | //创建目录 88 | public void createDirectory(String path) throws Exception { 89 | // TODO Auto-generated method stub 90 | String realPath=HDFS_URI+path; 91 | Path hdfsPath=new Path(realPath); 92 | try { 93 | if(fileSystem.exists(hdfsPath)) { 94 | System.out.println("目录已存在!"); 95 | } 96 | else { 97 | fileSystem.mkdirs(hdfsPath); 98 | } 99 | } catch (Exception e) { 100 | // TODO Auto-generated catch block 101 | throw e; 102 | } 103 | } 104 
| 105 | //删除文件目录 106 | public void delete(String path) throws Exception { 107 | String realPath=HDFS_URI+path; 108 | Path hdfsPath=new Path(realPath); 109 | try { 110 | if(!fileSystem.exists(hdfsPath)) { 111 | System.out.println("目录不存在!"); 112 | } 113 | else { 114 | fileSystem.delete(hdfsPath,true); 115 | } 116 | } catch (Exception e) { 117 | // TODO Auto-generated catch block 118 | throw e; 119 | } 120 | } 121 | 122 | //获取目录下的所有文件 123 | public FileStatus[] list(String path) throws FileNotFoundException, IOException { 124 | String realPath=HDFS_URI+path; 125 | Path hdfsPath=new Path(realPath); 126 | FileStatus[] lists=null; 127 | if(!fileSystem.exists(hdfsPath)) { 128 | System.out.println("目录不存在!"); 129 | } 130 | else if(fileSystem.isFile(hdfsPath)) { 131 | System.out.println("不是目录!"); 132 | } 133 | else { 134 | lists=fileSystem.listStatus(hdfsPath); 135 | } 136 | return lists; 137 | } 138 | 139 | //上传文件 140 | public void copyFileToHDFS(boolean delSrc, boolean overwrite,String srcFile,String destPath) throws IOException { 141 | Path srcPath=new Path(srcFile); 142 | Path hdfsPath=new Path(HDFS_URI+destPath); 143 | if(!fileSystem.exists(hdfsPath)) { 144 | System.out.println("目录不存在!"); 145 | return; 146 | } 147 | fileSystem.copyFromLocalFile(delSrc, overwrite, srcPath, hdfsPath); 148 | } 149 | 150 | //上传文件(使用输入流的方式) 151 | public void uploadFileStream(boolean overwrite, InputStream inputStream, String destPath) throws IllegalArgumentException, IOException { 152 | FSDataOutputStream outputStream=fileSystem.create(new Path(HDFS_URI+destPath), overwrite); 153 | IOUtils.copy(inputStream, outputStream); 154 | } 155 | 156 | //下载文件 157 | public void getFile(String srcFile, String destPath) throws IOException { 158 | Path srcPath=new Path(HDFS_URI+srcFile); 159 | Path destFile=new Path(destPath); 160 | if(!fileSystem.exists(srcPath)) { 161 | System.out.println("源文件不存在!"); 162 | return; 163 | } 164 | fileSystem.copyToLocalFile(srcPath, destFile); 165 | } 166 | 167 | 
//判断目录或文件是否存在 168 | public boolean existDir(String filePath) throws IOException { 169 | if(StringUtils.isEmpty(filePath)) { 170 | return false; 171 | } 172 | Path path=new Path(filePath); 173 | if(!fileSystem.exists(path)) { 174 | System.out.println("文件或目录不存在!"); 175 | return false; 176 | } 177 | else { 178 | return true; 179 | } 180 | } 181 | 182 | //重命名 183 | public void rename(String srcPath, String dstPath) throws IOException { 184 | srcPath=HDFS_URI+srcPath; 185 | dstPath=HDFS_URI+dstPath; 186 | Path src=new Path(srcPath); 187 | Path dst=new Path(dstPath); 188 | if(!fileSystem.exists(src)) { 189 | System.out.println("文件或目录不存在!"); 190 | return; 191 | } 192 | fileSystem.rename(src, dst); 193 | } 194 | 195 | //获得HDFS节点信息 196 | public DatanodeInfo[] getHDFSNodes() throws IOException { 197 | //获取所有节点 198 | DatanodeInfo[] dataNodeStats=new DatanodeInfo[0]; 199 | //获取分布式文件系统 200 | DistributedFileSystem hdfs=(DistributedFileSystem) fileSystem; 201 | dataNodeStats=hdfs.getDataNodeStats(); 202 | return dataNodeStats; 203 | } 204 | 205 | public void close() throws IOException { 206 | fileSystem.close(); 207 | } 208 | 209 | } 210 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/RedisUtils.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.common; 2 | 3 | import javax.annotation.Resource; 4 | 5 | import org.springframework.dao.DataAccessException; 6 | import org.springframework.data.redis.connection.RedisConnection; 7 | import org.springframework.data.redis.core.RedisCallback; 8 | import org.springframework.data.redis.core.RedisTemplate; 9 | import org.springframework.stereotype.Component; 10 | 11 | @Component 12 | public class RedisUtils { 13 | 14 | @Resource 15 | private RedisTemplate redisTemplate; 16 | 17 | public Object get(Object key) { 18 | String 
keyRedis=key.toString(); 19 | Object object=null; 20 | object=redisTemplate.execute(new RedisCallback() { 21 | 22 | @Override 23 | public Object doInRedis(RedisConnection connection) throws DataAccessException { 24 | // TODO Auto-generated method stub 25 | byte[] key=keyRedis.getBytes(); 26 | byte[] value=connection.get(key); 27 | if(value==null) { 28 | return null; 29 | } 30 | return ByteObjectUtil.ByteToObject(value); 31 | } 32 | 33 | }); 34 | if(object==null) { 35 | return null; 36 | } 37 | else { 38 | return object; 39 | } 40 | } 41 | 42 | public void put(Object key, Object value) { 43 | // TODO Auto-generated method stub 44 | String keyRedis=key.toString(); 45 | long liveTime = 86400; 46 | redisTemplate.execute(new RedisCallback() { 47 | @Override 48 | public Long doInRedis(RedisConnection connection) throws DataAccessException { 49 | // TODO Auto-generated method stub 50 | byte[] key=keyRedis.getBytes(); 51 | byte[] value1=ByteObjectUtil.ObjectToByte(value); 52 | connection.set(key, value1); 53 | if(liveTime>0) { 54 | connection.expire(key, liveTime); 55 | } 56 | return 1L; 57 | } 58 | }); 59 | } 60 | 61 | public void clear() { 62 | // TODO Auto-generated method stub 63 | redisTemplate.execute(new RedisCallback() { 64 | @Override 65 | public String doInRedis(RedisConnection connection) throws DataAccessException { 66 | // TODO Auto-generated method stub 67 | connection.flushDb(); 68 | return "ok"; 69 | } 70 | }); 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/common/ResourcePath.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.common; 2 | 3 | //资源文件存放目录 4 | public class ResourcePath { 5 | 6 | public static final String RESOURCE_DIR="src/main/resources/"; 7 | } 8 | 
package com.hhu.machinelearningplatformclient.config;

import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Kubernetes/Spark configuration loaded from kubernetes.properties on the
 * classpath at startup.
 *
 * @author hayes, @create 2017-12-19 13:00
 **/
@Component
public class SystemConfig {

    private static final String CONF_FILE_PATH = "/kubernetes.properties";

    private String k8sMaster;

    private String k8sSparkNamespace;

    private String k8sSparkServiceAccountName;

    private String k8sSparkDriverDockerImage;

    private String k8sSparkExecutorDockerImage;

    private String hdfsMaster;

    private String taskJarPath;

    private String sparkK8sDir;

    /**
     * Loads the properties file once after construction.
     *
     * BUGFIX: the original never closed the InputStream and would NPE with no
     * useful message if the resource was missing; now closed via
     * try-with-resources and reported with an explicit IOException.
     *
     * @throws IOException if the file is missing or unreadable
     */
    @PostConstruct
    public void load() throws IOException {
        Properties pro = new Properties();
        try (InputStream in = this.getClass().getResourceAsStream(CONF_FILE_PATH)) {
            if (in == null) {
                throw new IOException("Config file not found on classpath: " + CONF_FILE_PATH);
            }
            pro.load(in);
        }
        k8sMaster = pro.getProperty("k8s.master", "10.196.83.65:6443");
        k8sSparkNamespace = pro.getProperty("k8s.spark.namespace", "spark-cluster");
        k8sSparkServiceAccountName = pro.getProperty("k8s.spark.serviceAccountName", "spark-admin");
        k8sSparkDriverDockerImage = pro.getProperty("k8s.spark.driver.docker.image");
        k8sSparkExecutorDockerImage = pro.getProperty("k8s.spark.executor.docker.image");
        hdfsMaster = pro.getProperty("hdfs.master");
        taskJarPath = pro.getProperty("task.jar.path");
        sparkK8sDir = pro.getProperty("spark-k8s.dir");
    }

    public String getK8sMaster() {
        return k8sMaster;
    }

    public String getK8sSparkNamespace() {
        return k8sSparkNamespace;
    }

    public String getK8sSparkServiceAccountName() {
        return k8sSparkServiceAccountName;
    }

    public String getK8sSparkDriverDockerImage() {
        return k8sSparkDriverDockerImage;
    }

    public String getK8sSparkExecutorDockerImage() {
        return k8sSparkExecutorDockerImage;
    }

    public String getHdfsMaster() {
        return hdfsMaster;
    }

    public String getTaskJarPath() {
        return taskJarPath;
    }

    public String getSparkK8sDir() {
        return sparkK8sDir;
    }

    public void setSparkK8sDir(String sparkK8sDir) {
        this.sparkK8sDir = sparkK8sDir;
    }
}
com.hhu.machinelearningplatformclient.task.TaskType; 21 | 22 | @Controller 23 | @RequestMapping("/task") 24 | public class TaskController { 25 | 26 | @Resource 27 | private TaskExecution taskExecution; 28 | 29 | //提交任务 30 | @SuppressWarnings("unchecked") 31 | @RequestMapping(value="/submitTask", method=RequestMethod.POST) 32 | @ResponseBody 33 | public Response submitTask(HttpServletRequest request) { 34 | Response response=new Response(); 35 | MultipartHttpServletRequest multipartRequest=(MultipartHttpServletRequest) request; 36 | MultipartFile multipartFile=multipartRequest.getFile("file"); 37 | int taskType=Integer.valueOf(multipartRequest.getParameter("taskType")); 38 | TaskInit taskInit=new TaskInit(); 39 | if(taskType==TaskType.ESTIMATOR_TYPE.getValue()) { 40 | int algorithmId=Integer.valueOf(multipartRequest.getParameter("algorithmId")); 41 | taskInit.setAlgorithmId(algorithmId); 42 | } 43 | if(taskType==TaskType.TRANSFORMER_TYPE.getValue()) { 44 | int modelId=Integer.valueOf(multipartRequest.getParameter("modelId")); 45 | taskInit.setModelId(modelId); 46 | } 47 | String parameterMap=multipartRequest.getParameter("parameterMap"); 48 | ObjectMapper objectMapper=new ObjectMapper(); 49 | Map parameterValue=null; 50 | List fieldInfo=null; 51 | String dataFileCol=multipartRequest.getParameter("dataFileCol"); 52 | try { 53 | parameterValue=objectMapper.readValue(parameterMap, Map.class); 54 | fieldInfo=objectMapper.readValue(dataFileCol, List.class); 55 | } catch (Exception e) { 56 | // TODO Auto-generated catch block 57 | response.setResponseCode(ResponseCode.SUCCESS); 58 | response.setMessage("任务提交失败!"); 59 | return response; 60 | } 61 | int sparkExecutorNum=Integer.valueOf(multipartRequest.getParameter("sparkExecutorNum")); 62 | String dataFileType=multipartRequest.getParameter("dataFileType"); 63 | String delim=multipartRequest.getParameter("dataFileDelim"); 64 | taskInit.setDataFileType(dataFileType); 65 | taskInit.setDelim(delim); 66 | 
taskInit.setFieldInfo(fieldInfo); 67 | taskInit.setMultipartFile(multipartFile); 68 | taskInit.setParameterValue(parameterValue); 69 | taskInit.setSparkExecutorNum(sparkExecutorNum); 70 | taskExecution.taskInit(taskInit); 71 | response.setResponseCode(ResponseCode.SUCCESS); 72 | response.setMessage("任务提交成功!"); 73 | return response; 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/data/DataFile.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.data; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | //算法数据集文件 7 | public class DataFile implements Serializable { 8 | 9 | private static final long serialVersionUID = 1L; 10 | 11 | //数据集文件名称 12 | private String name; 13 | //数据集文件路径 14 | private String path; 15 | //数据集文件上传用户 16 | private long userId; 17 | //数据集文件分隔符 18 | private String delim=","; 19 | //数据集类型 20 | private DataFileType dataFileType=DataFileType.CSV; 21 | //列描述 22 | private List fieldInfos; 23 | 24 | public String getName() { 25 | return name; 26 | } 27 | public void setName(String name) { 28 | this.name = name; 29 | } 30 | public String getPath() { 31 | return path; 32 | } 33 | public void setPath(String path) { 34 | this.path = path; 35 | } 36 | public long getUserId() { 37 | return userId; 38 | } 39 | public void setUserId(long userId) { 40 | this.userId = userId; 41 | } 42 | public String getDelim() { 43 | return delim; 44 | } 45 | public void setDelim(String delim) { 46 | this.delim = delim; 47 | } 48 | public DataFileType getDataFileType() { 49 | return dataFileType; 50 | } 51 | public void setDataFileType(DataFileType dataFileType) { 52 | this.dataFileType = dataFileType; 53 | } 54 | public List getFieldInfos() { 55 | return fieldInfos; 56 | } 57 | public void setFieldInfos(List fieldInfos) { 58 | 
// Supported dataset file formats.
public enum DataFileType {
    CSV("csv"),
    LIBSVM("libsvm");

    // Lowercase wire value used by clients (e.g. the "dataFileType" form field).
    private String type;

    DataFileType(String type) {
        this.type = type;
    }

    /** Returns the wire value ("csv" / "libsvm"). */
    public String getType() {
        return type;
    }

    /**
     * Resolves a wire value to its enum constant.
     *
     * BUGFIX: the original compared name() ("CSV") against the incoming value,
     * so the lowercase values clients actually send ("csv", "libsvm") never
     * matched and the unused 'type' field was dead. Now matches the declared
     * type string case-insensitively (so legacy "CSV" still resolves too).
     *
     * @param value wire value, may be null
     * @return matching constant, or null when value is null or unknown
     */
    public static DataFileType getDataFileTypeByValue(String value) {
        if (value == null) {
            return null;
        }
        for (DataFileType dataFileType : DataFileType.values()) {
            if (dataFileType.type.equalsIgnoreCase(value)) {
                return dataFileType;
            }
        }
        return null;
    }

}
/**
 * Column descriptor for a dataset field: name, data type, nullability, and
 * position — either one column (index) or a span [startIndex, endIndex]
 * when index is -1.
 *
 * @author hayes, @create 2017-12-11 19:02
 **/
public class FieldInfo implements Serializable {

    private static final long serialVersionUID = -7123058551214352633L;

    // Recognized data-type names.
    public static final String DOUBLE_DATATYPE = "double";
    public static final String BOOLEAN_DATATYPE = "boolean";
    public static final String INTEGER_DATATYPE = "int";
    public static final String STRING_DATATYPE = "string";
    public static final String TIMESTAMP_DATATYPE = "timestamp";
    public static final String LONG_DATATYPE = "long";
    public static final String NULL_DATATYPE = "null";

    // Data-type name; one of the *_DATATYPE constants.
    private String dataType;

    // Field name.
    private String name;

    // Whether the column may hold nulls.
    private boolean nullable;

    // Column index; -1 marks a multi-column field.
    private int index = -1;

    // First column of a multi-column field.
    private int startIndex;

    // Last column of a multi-column field.
    private int endIndex;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDataType() {
        return dataType;
    }

    public void setDataType(String dataType) {
        this.dataType = dataType;
    }

    public boolean isNullable() {
        return nullable;
    }

    public void setNullable(boolean nullable) {
        this.nullable = nullable;
    }

    public int getIndex() {
        return index;
    }

    public void setIndex(int index) {
        this.index = index;
    }

    public int getStartIndex() {
        return startIndex;
    }

    public void setStartIndex(int startIndex) {
        this.startIndex = startIndex;
    }

    public int getEndIndex() {
        return endIndex;
    }

    public void setEndIndex(int endIndex) {
        this.endIndex = endIndex;
    }
}
name) { 31 | this.name = name; 32 | } 33 | public String getClassName() { 34 | return className; 35 | } 36 | public void setClassName(String className) { 37 | this.className = className; 38 | } 39 | public int getComponentsType() { 40 | return componentsType; 41 | } 42 | public void setComponentsType(int componentsType) { 43 | this.componentsType = componentsType; 44 | } 45 | public int getUserType() { 46 | return userType; 47 | } 48 | public void setUserType(int userType) { 49 | this.userType = userType; 50 | } 51 | public Map getMap() { 52 | return map; 53 | } 54 | public void setMap(Map map) { 55 | this.map = map; 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/entity/Parameter.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.entity; 2 | 3 | import com.hhu.machinelearningplatformclient.algorithm.ParameterValueType; 4 | 5 | //算法参数 6 | public class Parameter { 7 | 8 | //参数名称 9 | public String name; 10 | 11 | //参数展示名称 12 | public String showName; 13 | 14 | //参数类型 15 | public ParameterValueType valueType; 16 | 17 | 18 | public Class javaTypeClass() { 19 | switch (valueType) { 20 | case BOOLEAN: { 21 | return boolean.class; 22 | } 23 | case STRING: { 24 | return String.class; 25 | } 26 | case INT: { 27 | return int.class; 28 | } 29 | case DOUBLE: { 30 | return double.class; 31 | } 32 | } 33 | return String.class; 34 | } 35 | 36 | public Object valueOf(String value) { 37 | switch (valueType) { 38 | case BOOLEAN: { 39 | return Boolean.valueOf(value); 40 | } 41 | case STRING: { 42 | return value; 43 | } 44 | case INT: { 45 | return Integer.valueOf(value); 46 | } 47 | case DOUBLE: { 48 | return Double.valueOf(value); 49 | } 50 | } 51 | return value; 52 | } 53 | 54 | public String getName() { 55 | return name; 56 | } 57 | 58 | public void 
setName(String name) { 59 | this.name = name; 60 | } 61 | 62 | public String getShowName() { 63 | return showName; 64 | } 65 | 66 | public void setShowName(String showName) { 67 | this.showName = showName; 68 | } 69 | 70 | public ParameterValueType getValueType() { 71 | return valueType; 72 | } 73 | 74 | public void setValueType(ParameterValueType valueType) { 75 | this.valueType = valueType; 76 | } 77 | 78 | } 79 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/entity/Response.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.entity; 2 | 3 | public class Response { 4 | 5 | private ResponseCode responseCode; 6 | private String message; 7 | 8 | public ResponseCode getResponseCode() { 9 | return responseCode; 10 | } 11 | public void setResponseCode(ResponseCode responseCode) { 12 | this.responseCode = responseCode; 13 | } 14 | public String getMessage() { 15 | return message; 16 | } 17 | public void setMessage(String message) { 18 | this.message = message; 19 | } 20 | 21 | } 22 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/entity/ResponseCode.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.entity; 2 | 3 | public enum ResponseCode { 4 | 5 | SUCCESS("成功", 200), ERROR("错误", 201); 6 | 7 | private String name; 8 | private int value; 9 | 10 | private ResponseCode(String name, int value) { 11 | this.name = name; 12 | this.value = value; 13 | } 14 | 15 | public String getName() { 16 | return name; 17 | } 18 | 19 | public void setName(String name) { 20 | this.name = name; 21 | } 22 | 23 | public int getValue() { 24 | return value; 25 | } 26 | 27 | public void 
setValue(int value) { 28 | this.value = value; 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/entity/TaskInit.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.entity; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.springframework.web.multipart.MultipartFile; 7 | 8 | import com.hhu.machinelearningplatformclient.data.FieldInfo; 9 | 10 | public class TaskInit { 11 | 12 | private int taskType; 13 | private int algorithmId; 14 | private int modelId; 15 | private Map parameterValue; 16 | private List fieldInfo; 17 | private int sparkExecutorNum; 18 | private String dataFileType; 19 | private String delim; 20 | private MultipartFile multipartFile; 21 | 22 | public int getTaskType() { 23 | return taskType; 24 | } 25 | public void setTaskType(int taskType) { 26 | this.taskType = taskType; 27 | } 28 | public int getAlgorithmId() { 29 | return algorithmId; 30 | } 31 | public void setAlgorithmId(int algorithmId) { 32 | this.algorithmId = algorithmId; 33 | } 34 | public int getModelId() { 35 | return modelId; 36 | } 37 | public void setModelId(int modelId) { 38 | this.modelId = modelId; 39 | } 40 | public Map getParameterValue() { 41 | return parameterValue; 42 | } 43 | public void setParameterValue(Map parameterValue) { 44 | this.parameterValue = parameterValue; 45 | } 46 | public List getFieldInfo() { 47 | return fieldInfo; 48 | } 49 | public void setFieldInfo(List fieldInfo) { 50 | this.fieldInfo = fieldInfo; 51 | } 52 | public int getSparkExecutorNum() { 53 | return sparkExecutorNum; 54 | } 55 | public void setSparkExecutorNum(int sparkExecutorNum) { 56 | this.sparkExecutorNum = sparkExecutorNum; 57 | } 58 | public String getDataFileType() { 59 | return dataFileType; 60 | } 61 | public void setDataFileType(String 
dataFileType) { 62 | this.dataFileType = dataFileType; 63 | } 64 | public String getDelim() { 65 | return delim; 66 | } 67 | public void setDelim(String delim) { 68 | this.delim = delim; 69 | } 70 | public MultipartFile getMultipartFile() { 71 | return multipartFile; 72 | } 73 | public void setMultipartFile(MultipartFile multipartFile) { 74 | this.multipartFile = multipartFile; 75 | } 76 | 77 | } 78 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/exception/ConfigInitException.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.exception; 2 | 3 | public class ConfigInitException extends Exception { 4 | 5 | private static final long serialVersionUID = 1L; 6 | 7 | public ConfigInitException(String name) { 8 | super(name); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/model/AlgorithmModel.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.model; 2 | 3 | //算法模型类 4 | public class AlgorithmModel { 5 | 6 | private int id; 7 | private String name; 8 | private int userId; 9 | private long createTime; 10 | private String path; 11 | private int algorithmId; 12 | 13 | public int getId() { 14 | return id; 15 | } 16 | public void setId(int id) { 17 | this.id = id; 18 | } 19 | public String getName() { 20 | return name; 21 | } 22 | public void setName(String name) { 23 | this.name = name; 24 | } 25 | public int getUserId() { 26 | return userId; 27 | } 28 | public void setUserId(int userId) { 29 | this.userId = userId; 30 | } 31 | public long getCreateTime() { 32 | return createTime; 33 | } 34 | public void setCreateTime(long createTime) { 35 | this.createTime = 
createTime; 36 | } 37 | public String getPath() { 38 | return path; 39 | } 40 | public void setPath(String path) { 41 | this.path = path; 42 | } 43 | public int getAlgorithmId() { 44 | return algorithmId; 45 | } 46 | public void setAlgorithmId(int algorithmId) { 47 | this.algorithmId = algorithmId; 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/SparkTaskAlgorithm.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | import java.io.Serializable; 4 | import java.util.Map; 5 | 6 | //任务算法信息 7 | public class SparkTaskAlgorithm implements Serializable { 8 | 9 | private static final long serialVersionUID = 1L; 10 | 11 | //算法ID 12 | private int id; 13 | //算法参数名称及值 14 | private Map parameters; 15 | 16 | public int getId() { 17 | return id; 18 | } 19 | public void setId(int id) { 20 | this.id = id; 21 | } 22 | public Map getParameters() { 23 | return parameters; 24 | } 25 | public void setParameters(Map parameters) { 26 | this.parameters = parameters; 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/SparkTaskExecutor.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.io.InputStreamReader; 6 | import java.io.LineNumberReader; 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | import javax.annotation.Resource; 11 | 12 | import org.springframework.beans.factory.annotation.Autowired; 13 | import org.springframework.stereotype.Component; 14 | 15 | import com.hhu.machinelearningplatformclient.common.RedisUtils; 16 | 
import com.hhu.machinelearningplatformclient.config.SystemConfig; 17 | 18 | /** 19 | * spark task executor 20 | * 21 | * @author hayes, @create 2017-12-19 14:40 22 | **/ 23 | @Component 24 | public class SparkTaskExecutor { 25 | 26 | @Resource 27 | private RedisUtils redisUtils; 28 | @Autowired 29 | private SystemConfig systemConfig; 30 | 31 | 32 | /** 33 | * 执行Spark任务 TODD: Process的管理 34 | * 35 | * @param taskInfo 36 | * @return 37 | * @throws IOException 38 | */ 39 | public void execute(TaskInfo taskInfo) throws IOException { 40 | ProcessBuilder pb = new ProcessBuilder(); 41 | pb.directory(new File(systemConfig.getSparkK8sDir())); 42 | pb.command(buildCommand(taskInfo)); 43 | Process p = pb.start(); 44 | LineNumberReader reader = new LineNumberReader(new InputStreamReader(p.getInputStream())); 45 | String line = null; 46 | if ((line = reader.readLine()) != null) { 47 | System.out.println(line); 48 | } 49 | redisUtils.put(taskInfo.getTaskId(), TaskState.RUNNING.getValue()); 50 | } 51 | 52 | 53 | public List buildCommand(TaskInfo taskInfo) { 54 | List command = new ArrayList<>(); 55 | command.add("./spark-submit"); 56 | command.add("--deploy-mode"); 57 | command.add("cluster"); 58 | command.add("--master"); 59 | command.add(systemConfig.getK8sMaster()); 60 | command.add("--class"); 61 | command.add("edu.hhu.stonk.spark.Submiter"); 62 | command.add("--kubernetes-namespace"); 63 | command.add(systemConfig.getK8sSparkNamespace()); 64 | command.add("--conf"); 65 | command.add("spark.executor.instances=" + taskInfo.getSparkExecutorNum()); 66 | command.add("--conf"); 67 | command.add("spark.app.name=" + taskInfo.getName()); 68 | command.add("--conf"); 69 | command.add("spark.kubernetes.driver.docker.image=" + systemConfig.getK8sSparkDriverDockerImage()); 70 | command.add("--conf"); 71 | command.add("spark.kubernetes.executor.docker.image=" + systemConfig.getK8sSparkExecutorDockerImage()); 72 | command.add("--conf"); 73 | 
command.add("spark.kubernetes.authenticate.driver.serviceAccountName=" + systemConfig.getK8sSparkServiceAccountName()); 74 | command.add(systemConfig.getTaskJarPath()); 75 | command.add(systemConfig.getHdfsMaster()); 76 | command.add(String.valueOf(taskInfo.getTaskId())); 77 | command.add(String.valueOf(taskInfo.getUserId())); 78 | 79 | return command; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/TaskExecution.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.List; 6 | import java.util.Map; 7 | import java.util.Random; 8 | 9 | import javax.annotation.Resource; 10 | 11 | import org.codehaus.jackson.map.ObjectMapper; 12 | import org.springframework.stereotype.Component; 13 | import org.springframework.web.multipart.MultipartFile; 14 | 15 | import com.hhu.machinelearningplatformclient.common.HBaseUtil; 16 | import com.hhu.machinelearningplatformclient.common.HDFSUtils; 17 | import com.hhu.machinelearningplatformclient.common.RedisUtils; 18 | import com.hhu.machinelearningplatformclient.data.DataFile; 19 | import com.hhu.machinelearningplatformclient.data.DataFileType; 20 | import com.hhu.machinelearningplatformclient.data.FieldInfo; 21 | import com.hhu.machinelearningplatformclient.entity.TaskInit; 22 | 23 | @Component 24 | public class TaskExecution { 25 | 26 | @Resource 27 | private RedisUtils redisUtils; 28 | @Resource 29 | private HDFSUtils hdfsUtils; 30 | @Resource 31 | private SparkTaskExecutor sparkTaskExecutor; 32 | 33 | //任务初始化 34 | public void taskInit(TaskInit taskInit) { 35 | TaskInfo taskInfo=new TaskInfo(); 36 | long taskId=new Random().nextLong(); 37 | redisUtils.put(taskId, TaskState.INITING.getValue()); 38 | int 
taskType=taskInit.getTaskType(); 39 | //上传数据集文件到HDFS 40 | Map parameterValue=taskInit.getParameterValue(); 41 | List fieldInfo=taskInit.getFieldInfo(); 42 | int sparkExecutorNum=taskInit.getSparkExecutorNum(); 43 | String dataFileType=taskInit.getDataFileType(); 44 | String delim=taskInit.getDelim(); 45 | MultipartFile multipartFile=taskInit.getMultipartFile(); 46 | String fileName=multipartFile.getName(); 47 | //创建任务工作目录 48 | String taskDir="task-"+taskId+"/"; 49 | String taskInputDir=taskDir+"input/"; 50 | String taskOutputDir=taskDir+"output/"; 51 | String taskModelDir=taskDir+"model/"; 52 | try { 53 | hdfsUtils.createDirectory(taskDir); 54 | hdfsUtils.createDirectory(taskInputDir); 55 | hdfsUtils.createDirectory(taskOutputDir); 56 | hdfsUtils.createDirectory(taskModelDir); 57 | InputStream inputStream=multipartFile.getInputStream(); 58 | hdfsUtils.uploadFileStream(true, inputStream, taskInputDir); 59 | } catch (Exception e) { 60 | // TODO Auto-generated catch block 61 | redisUtils.put(taskId, TaskState.FAIL.getValue()); 62 | } 63 | DataFile dataFile=new DataFile(); 64 | dataFile.setName(fileName); 65 | dataFile.setUserId(1); 66 | dataFile.setPath(taskInputDir+fileName); 67 | dataFile.setDataFileType(DataFileType.getDataFileTypeByValue(dataFileType)); 68 | dataFile.setDelim(delim); 69 | dataFile.setFieldInfos(fieldInfo); 70 | if(taskType==TaskType.ESTIMATOR_TYPE.getValue()) { 71 | taskInfo.setTaskType(TaskType.ESTIMATOR_TYPE); 72 | int algorithmId=taskInit.getAlgorithmId(); 73 | SparkTaskAlgorithm sparkTaskAlgorithm=new SparkTaskAlgorithm(); 74 | sparkTaskAlgorithm.setId(algorithmId); 75 | sparkTaskAlgorithm.setParameters(parameterValue); 76 | taskInfo.setSparkTaskAlgorithm(sparkTaskAlgorithm); 77 | } 78 | if(taskType==TaskType.TRANSFORMER_TYPE.getValue()) { 79 | taskInfo.setTaskType(TaskType.TRANSFORMER_TYPE); 80 | int modelId=taskInit.getModelId(); 81 | taskInfo.setModelId(modelId); 82 | } 83 | //生成任务类 84 | taskInfo.setTaskId(taskId); 85 | 
taskInfo.setName("任务"+taskId); 86 | taskInfo.setUserId(1); 87 | taskInfo.setDataFile(dataFile); 88 | taskInfo.setWorkDir(taskDir); 89 | taskInfo.setSparkExecutorNum(sparkExecutorNum); 90 | taskInfo.setTimeStamp(System.currentTimeMillis()); 91 | try { 92 | //写入任务信息到HBase 93 | HBaseUtil hBaseUtil=HBaseUtil.getInstance(); 94 | hBaseUtil.connection(); 95 | ObjectMapper objectMapper=new ObjectMapper(); 96 | hBaseUtil.putData("task", String.valueOf(taskId), "info", "taskInfo", objectMapper.writeValueAsString(taskInfo)); 97 | hBaseUtil.close(); 98 | } catch (Exception e) { 99 | // TODO Auto-generated catch block 100 | redisUtils.put(taskId, TaskState.FAIL.getValue()); 101 | } 102 | taskSubmit(taskInfo); 103 | } 104 | 105 | //任务提交 106 | private void taskSubmit(TaskInfo taskInfo) { 107 | redisUtils.put(taskInfo.getTaskId(), TaskState.SUBMITTING.getValue()); 108 | try { 109 | sparkTaskExecutor.execute(taskInfo); 110 | } catch (IOException e) { 111 | // TODO Auto-generated catch block 112 | redisUtils.put(taskInfo.getTaskId(), TaskState.FAIL.getValue()); 113 | } 114 | } 115 | 116 | } 117 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/TaskInfo.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | import java.io.Serializable; 4 | 5 | import com.hhu.machinelearningplatformclient.data.DataFile; 6 | 7 | 8 | 9 | /** 10 | * Stonk task info 11 | * 12 | * @author hayes, @create 2017-12-19 13:40 13 | **/ 14 | public class TaskInfo implements Serializable { 15 | 16 | private static final long serialVersionUID = -7755459521939958459L; 17 | 18 | private long taskId; 19 | 20 | private TaskType taskType; 21 | 22 | private String name; 23 | 24 | private long userId; 25 | 26 | private DataFile dataFile; 27 | 28 | private String workDir; 29 | 30 | private int sparkExecutorNum 
= 1; 31 | 32 | private long timeStamp; 33 | 34 | private SparkTaskAlgorithm sparkTaskAlgorithm; 35 | 36 | private int modelId; 37 | 38 | public long getTaskId() { 39 | return taskId; 40 | } 41 | 42 | public void setTaskId(long taskId) { 43 | this.taskId = taskId; 44 | } 45 | 46 | public TaskType getTaskType() { 47 | return taskType; 48 | } 49 | 50 | public void setTaskType(TaskType taskType) { 51 | this.taskType = taskType; 52 | } 53 | 54 | public long getUserId() { 55 | return userId; 56 | } 57 | 58 | public void setUserId(long userId) { 59 | this.userId = userId; 60 | } 61 | 62 | public DataFile getDataFile() { 63 | return dataFile; 64 | } 65 | 66 | public void setDataFile(DataFile dataFile) { 67 | this.dataFile = dataFile; 68 | } 69 | 70 | public String getWorkDir() { 71 | return workDir; 72 | } 73 | 74 | public void setWorkDir(String workDir) { 75 | this.workDir = workDir; 76 | } 77 | 78 | public int getSparkExecutorNum() { 79 | return sparkExecutorNum; 80 | } 81 | 82 | public void setSparkExecutorNum(int sparkExecutorNum) { 83 | this.sparkExecutorNum = sparkExecutorNum; 84 | } 85 | 86 | public SparkTaskAlgorithm getSparkTaskAlgorithm() { 87 | return sparkTaskAlgorithm; 88 | } 89 | 90 | public void setSparkTaskAlgorithm(SparkTaskAlgorithm sparkTaskAlgorithm) { 91 | this.sparkTaskAlgorithm = sparkTaskAlgorithm; 92 | } 93 | 94 | public String getName() { 95 | return name; 96 | } 97 | 98 | public void setName(String name) { 99 | this.name = name; 100 | } 101 | 102 | public long getTimeStamp() { 103 | return timeStamp; 104 | } 105 | 106 | public void setTimeStamp(long timeStamp) { 107 | this.timeStamp = timeStamp; 108 | } 109 | 110 | public int getModelId() { 111 | return modelId; 112 | } 113 | 114 | public void setModelId(int modelId) { 115 | this.modelId = modelId; 116 | } 117 | 118 | } 119 | -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/TaskManager.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.stereotype.Component; 5 | 6 | import java.io.IOException; 7 | 8 | /** 9 | * task manager 10 | * 11 | * @author hayes, @create 2017-12-19 13:25 12 | **/ 13 | @Component 14 | public class TaskManager { 15 | 16 | @Autowired 17 | SparkTaskExecutor sparkTaskExecutor; 18 | 19 | public void execute(TaskInfo taskInfo) throws IOException { 20 | sparkTaskExecutor.execute(taskInfo); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/TaskState.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | //Task state 4 | public enum TaskState { 5 | 6 | INITING(1), 7 | SUBMITTING(2), 8 | RUNNING(3), 9 | SUCCESS(4), 10 | FAIL(5); 11 | 12 | private int value; 13 | 14 | public int getValue() { 15 | return value; 16 | } 17 | 18 | public void setValue(int value) { 19 | this.value = value; 20 | } 21 | 22 | private TaskState(int value) { 23 | this.value = value; //store the state code; without this every constant reported 0 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/java/com/hhu/machinelearningplatformclient/task/TaskType.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformclient.task; 2 | 3 | public enum TaskType { 4 | 5 | ESTIMATOR_TYPE(1), 6 | TRANSFORMER_TYPE(2); 7 | 8 | private int value; 9 | 10 | public int getValue() { 11 | return value; 12 | } 13 | 14 | public void setValue(int value) { 15 | this.value = value; 16 | } 17 |
18 | private TaskType(int value) { 19 | this.value = value; //store the type code; without this every constant reported 0 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/resources/hdfs.properties: -------------------------------------------------------------------------------- 1 | hdfs.hostname=10.196.83.90 2 | hdfs.port=9000 3 | hdfs.user=hhu_vps 4 | hdfs.uri=/machinelearningplatform/ 5 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/resources/kubernetes.properties: -------------------------------------------------------------------------------- 1 | spark-k8s.dir=/root/spark-k8s/spark-k8s/bin 2 | hdfs.master=hdfs://10.196.83.90:9000 3 | k8s.master=k8s://https://10.196.83.65:6443 4 | k8s.spark.namespace=spark-cluster 5 | k8s.spark.serviceAccountName=spark-admin 6 | k8s.spark.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.5.0 7 | k8s.spark.executor.docker.image=kubespark/spark-executor:v2.2.0-kubernetes-0.5.0 8 | task.jar.path=local:///opt/spark/stonk-spark.jar 9 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/resources/log4js.properties: -------------------------------------------------------------------------------- 1 | ### set log levels ### 2 | ##log4j.rootLogger = debug , stdout 3 | ### \u8F93\u51FA\u5230\u63A7\u5236\u53F0 ### 4 | ##log4j.appender.stdout = org.apache.log4j.ConsoleAppender 5 | ##log4j.appender.stdout.Target = System.out 6 | ##log4j.appender.stdout.layout = org.apache.log4j.PatternLayout 7 | ##log4j.appender.stdout.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 8 | ### \u8F93\u51FA\u5230\u65E5\u5FD7\u6587\u4EF6 ### 9 | ##log4j.appender.D = org.apache.log4j.DailyRollingFileAppender 10 | ##log4j.appender.D.File = logs/log.log 11 | ##log4j.appender.D.Append = true 12 | ##log4j.appender.D.Threshold = DEBUG 13 | ##
\u8F93\u51FADEBUG\u7EA7\u522B\u4EE5\u4E0A\u7684\u65E5\u5FD7 14 | ##log4j.appender.D.layout = org.apache.log4j.PatternLayout 15 | ##log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 16 | ### \u4FDD\u5B58\u5F02\u5E38\u4FE1\u606F\u5230\u5355\u72EC\u6587\u4EF6 ### 17 | ##log4j.appender.D = org.apache.log4j.DailyRollingFileAppender 18 | ##log4j.appender.D.File = logs/error.log 19 | ## \u5F02\u5E38\u65E5\u5FD7\u6587\u4EF6\u540D 20 | ##log4j.appender.D.Append = true 21 | ##log4j.appender.D.Threshold = ERROR 22 | ## \u53EA\u8F93\u51FAERROR\u7EA7\u522B\u4EE5\u4E0A\u7684\u65E5\u5FD7!!! 23 | ##log4j.appender.D.layout = org.apache.log4j.PatternLayout 24 | ##log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 25 | 26 | 27 | 28 | 29 | #log4j.rootLogger = [ level ] , appenderName, appenderName, ... 30 | log4j.rootLogger = INFO, console, R 31 | #level=INFO,all can be output 32 | 33 | #console is set to be a ConsoleAppender 34 | log4j.appender.console = org.apache.log4j.ConsoleAppender 35 | #console have four patterns 36 | #org.apache.log4j.HTMLLayout 37 | #org.apache.log4j.PatternLayout 38 | #org.apache.log4j.SimpleLayout 39 | #org.apache.log4j.TTCCLayout 40 | log4j.appender.console.layout = org.apache.log4j.PatternLayout 41 | #define the output type 42 | log4j.appender.console.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [%c]-[%p] %m%n 43 | 44 | #file is set to output to a extra file 45 | log4j.appender.R = org.apache.log4j.RollingFileAppender 46 | #the absolute route of the log4j file 47 | ##log4j.appender.R.File = ${webapp.root}/WEB-INF/logs/ACCloud.log 48 | log4j.appender.R.File = ${webApp.root}/src/main/webapp/WEB-INF/logs/ModelCalculationServer.log 49 | #the size 50 | log4j.appender.R.MaxFileSize = 500KB 51 | #back up a file 52 | log4j.appender.R.MaxBackupIndex = 1 53 | log4j.appender.R.layout = org.apache.log4j.PatternLayout 54 | 
log4j.appender.R.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [%c]-[%p] - %m%n -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/resources/redis.properties: -------------------------------------------------------------------------------- 1 | redis.hostName=10.196.83.92 2 | redis.port=6379 3 | redis.password=941012 4 | redis.maxIdle=300 5 | redis.maxTotal=600 6 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/src/main/resources/springmvc.xml: -------------------------------------------------------------------------------- 1 | 2 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | classpath:hdfs.properties 23 | classpath:redis.properties 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 85 | 86 | 87 | 88 | 0 89 | UTF-8 90 | 0.########## 91 | yyyy-MM-dd HH:mm:ss 92 | true 93 | ignore 94 | 95 | 96 | 97 | 98 | 99 | 101 | 102 | 103 | 104 | org.springframework.web.servlet.view.freemarker.FreeMarkerView 105 | 106 | 107 | true 108 | 109 | 110 | .html 111 | 112 | 113 | text/html; charset=UTF-8 114 | 115 | 116 | 117 | 118 | 123 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/META-INF/MANIFEST.MF: -------------------------------------------------------------------------------- 1 | Manifest-Version: 1.0 2 | Built-By: wyc 3 | Build-Jdk: 1.8.0_131 4 | Created-By: Maven Integration for Eclipse 5 | 6 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/META-INF/maven/MachineLearningPlatform/MachineLearningPlatformClient/pom.properties: 
-------------------------------------------------------------------------------- 1 | #Generated by Maven Integration for Eclipse 2 | #Tue Mar 13 15:44:51 CST 2018 3 | version=0.0.1-SNAPSHOT 4 | groupId=MachineLearningPlatform 5 | m2e.projectName=MachineLearningPlatformClient 6 | m2e.projectLocation=E\:\\\u5DE5\u4F5C\u7A7A\u95F4\\MachineLearningPlatformClient 7 | artifactId=MachineLearningPlatformClient 8 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/META-INF/maven/MachineLearningPlatform/MachineLearningPlatformClient/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | MachineLearningPlatform 6 | MachineLearningPlatformClient 7 | 0.0.1-SNAPSHOT 8 | jar 9 | 10 | MachineLearningPlatformClient 11 | http://maven.apache.org 12 | 13 | 14 | UTF-8 15 | 16 | 17 | 18 | 19 | junit 20 | junit 21 | 3.8.1 22 | test 23 | 24 | 25 | javax.servlet 26 | javax.servlet-api 27 | 3.1.0 28 | provided 29 | 30 | 31 | org.apache.hadoop 32 | hadoop-common 33 | 2.7.3 34 | 35 | 36 | org.apache.hadoop 37 | hadoop-hdfs 38 | 2.7.3 39 | 40 | 41 | org.apache.hadoop 42 | hadoop-client 43 | 2.7.3 44 | 45 | 46 | javax.servlet 47 | javax.servlet-api 48 | 3.1.0 49 | provided 50 | 51 | 52 | mysql 53 | mysql-connector-java 54 | 6.0.5 55 | 56 | 57 | com.mchange 58 | c3p0 59 | 0.9.5.2 60 | 61 | 62 | org.springframework 63 | spring-core 64 | 4.1.6.RELEASE 65 | 66 | 67 | org.springframework 68 | spring-aop 69 | 4.1.6.RELEASE 70 | 71 | 72 | org.springframework 73 | spring-beans 74 | 4.1.6.RELEASE 75 | 76 | 77 | org.springframework 78 | spring-context 79 | 4.1.6.RELEASE 80 | 81 | 82 | org.springframework 83 | spring-context-support 84 | 4.1.6.RELEASE 85 | 86 | 87 | com.thoughtworks.xstream 88 | xstream 89 | 1.4.8 90 | 91 | 92 | commons-collections 93 | commons-collections 94 | 3.2.1 95 | 96 | 97 | commons-lang 98 | commons-lang 99 | 2.6 100 | 101 | 102 | commons-logging 
103 | commons-logging 104 | 1.1.3 105 | 106 | 107 | net.sf.ezmorph 108 | ezmorph 109 | 1.0.6 110 | 111 | 112 | org.springframework 113 | spring-webmvc 114 | 4.1.6.RELEASE 115 | 116 | 117 | org.springframework 118 | spring-orm 119 | 4.1.6.RELEASE 120 | 121 | 122 | org.springframework 123 | spring-tx 124 | 4.1.6.RELEASE 125 | 126 | 127 | org.slf4j 128 | slf4j-api 129 | 1.7.7 130 | 131 | 132 | org.slf4j 133 | slf4j-log4j12 134 | 1.7.7 135 | 136 | 137 | org.codehaus.jackson 138 | jackson-mapper-asl 139 | 1.9.2 140 | 141 | 142 | org.codehaus.jackson 143 | jackson-core-asl 144 | 1.9.2 145 | 146 | 147 | com.fasterxml.jackson.core 148 | jackson-annotations 149 | 2.4.1 150 | 151 | 152 | com.fasterxml.jackson.core 153 | jackson-core 154 | 2.4.1 155 | 156 | 157 | com.fasterxml.jackson.core 158 | jackson-databind 159 | 2.4.1 160 | 161 | 162 | org.freemarker 163 | freemarker 164 | 2.3.20 165 | 166 | 167 | org.apache.httpcomponents 168 | httpclient 169 | 4.5.3 170 | 171 | 172 | org.mybatis 173 | mybatis 174 | 3.2.5 175 | 176 | 177 | org.mybatis 178 | mybatis-spring 179 | 1.2.2 180 | 181 | 182 | dom4j 183 | dom4j 184 | 1.6.1 185 | 186 | 187 | commons-beanutils 188 | commons-beanutils 189 | 1.9.2 190 | 191 | 192 | commons-fileupload 193 | commons-fileupload 194 | 1.3.2 195 | 196 | 197 | jdk.tools 198 | jdk.tools 199 | 1.8 200 | system 201 | ${JAVA_HOME}/lib/tools.jar 202 | 203 | 204 | org.springframework.data 205 | spring-data-redis 206 | 1.6.2.RELEASE 207 | 208 | 209 | redis.clients 210 | jedis 211 | 2.9.0 212 | 213 | 214 | org.apache.hbase 215 | hbase-client 216 | 1.2.5 217 | 218 | 219 | org.apache.hbase 220 | hbase-common 221 | 1.2.5 222 | 223 | 224 | org.apache.hbase 225 | hbase-protocol 226 | 1.2.5 227 | 228 | 229 | org.apache.hbase 230 | hbase-server 231 | 1.2.5 232 | 233 | 234 | 235 | -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/algorithm/ParameterValueType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/algorithm/ParameterValueType.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/ByteObjectUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/ByteObjectUtil.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/HBaseUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/HBaseUtil.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/HDFSUtils.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/HDFSUtils.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$1.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$2.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$2.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$3.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils$3.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/RedisUtils.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/ResourcePath.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/common/ResourcePath.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/config/SystemConfig.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/config/SystemConfig.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/controller/TaskController.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/controller/TaskController.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/DataFile.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/DataFile.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/DataFileType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/DataFileType.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/FieldInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/data/FieldInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/MLAlgorithm.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/MLAlgorithm.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/Parameter.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/Parameter.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/Response.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/Response.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/ResponseCode.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/ResponseCode.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/TaskInit.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/entity/TaskInit.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/exception/ConfigInitException.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/exception/ConfigInitException.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/model/AlgorithmModel.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/model/AlgorithmModel.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/SparkTaskAlgorithm.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/SparkTaskAlgorithm.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/SparkTaskExecutor.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/SparkTaskExecutor.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskExecution.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskExecution.class -------------------------------------------------------------------------------- 
/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskManager.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskManager.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskState.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskState.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformClient/target/classes/com/hhu/machinelearningplatformclient/task/TaskType.class -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/hdfs.properties: 
-------------------------------------------------------------------------------- 1 | hdfs.hostname=10.196.83.90 2 | hdfs.port=9000 3 | hdfs.user=hhu_vps 4 | hdfs.uri=/machinelearningplatform/ 5 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/kubernetes.properties: -------------------------------------------------------------------------------- 1 | spark-k8s.dir=/root/spark-k8s/spark-k8s/bin 2 | hdfs.master=hdfs://10.196.83.90:9000 3 | k8s.master=k8s://https://10.196.83.65:6443 4 | k8s.spark.namespace=spark-cluster 5 | k8s.spark.serviceAccountName=spark-admin 6 | k8s.spark.driver.docker.image=kubespark/spark-driver:v2.2.0-kubernetes-0.5.0 7 | k8s.spark.executor.docker.image=kubespark/spark-executor-kubernetes-0.5.0 8 | task.jar.path=local:///opt/spark/stonk-spark.jar 9 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/log4js.properties: -------------------------------------------------------------------------------- 1 | ### set log levels ### 2 | ##log4j.rootLogger = debug , stdout 3 | ### \u8F93\u51FA\u5230\u63A7\u5236\u53F0 ### 4 | ##log4j.appender.stdout = org.apache.log4j.ConsoleAppender 5 | ##log4j.appender.stdout.Target = System.out 6 | ##log4j.appender.stdout.layout = org.apache.log4j.PatternLayout 7 | ##log4j.appender.stdout.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 8 | ### \u8F93\u51FA\u5230\u65E5\u5FD7\u6587\u4EF6 ### 9 | ##log4j.appender.D = org.apache.log4j.DailyRollingFileAppender 10 | ##log4j.appender.D.File = logs/log.log 11 | ##log4j.appender.D.Append = true 12 | ##log4j.appender.D.Threshold = DEBUG 13 | ## \u8F93\u51FADEBUG\u7EA7\u522B\u4EE5\u4E0A\u7684\u65E5\u5FD7 14 | ##log4j.appender.D.layout = org.apache.log4j.PatternLayout 15 | ##log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 16 | ### 
\u4FDD\u5B58\u5F02\u5E38\u4FE1\u606F\u5230\u5355\u72EC\u6587\u4EF6 ### 17 | ##log4j.appender.D = org.apache.log4j.DailyRollingFileAppender 18 | ##log4j.appender.D.File = logs/error.log 19 | ## \u5F02\u5E38\u65E5\u5FD7\u6587\u4EF6\u540D 20 | ##log4j.appender.D.Append = true 21 | ##log4j.appender.D.Threshold = ERROR 22 | ## \u53EA\u8F93\u51FAERROR\u7EA7\u522B\u4EE5\u4E0A\u7684\u65E5\u5FD7!!! 23 | ##log4j.appender.D.layout = org.apache.log4j.PatternLayout 24 | ##log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n 25 | 26 | 27 | 28 | 29 | #log4j.rootLogger = [ level ] , appenderName, appenderName, ... 30 | log4j.rootLogger = INFO, console, R 31 | #level=INFO,all can be output 32 | 33 | #console is set to be a ConsoleAppender 34 | log4j.appender.console = org.apache.log4j.ConsoleAppender 35 | #console have four patterns 36 | #org.apache.log4j.HTMLLayout 37 | #org.apache.log4j.PatternLayout 38 | #org.apache.log4j.SimpleLayout 39 | #org.apache.log4j.TTCCLayout 40 | log4j.appender.console.layout = org.apache.log4j.PatternLayout 41 | #define the output type 42 | log4j.appender.console.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [%c]-[%p] %m%n 43 | 44 | #file is set to output to a extra file 45 | log4j.appender.R = org.apache.log4j.RollingFileAppender 46 | #the absolute route of the log4j file 47 | ##log4j.appender.R.File = ${webapp.root}/WEB-INF/logs/ACCloud.log 48 | log4j.appender.R.File = ${webApp.root}/src/main/webapp/WEB-INF/logs/ModelCalculationServer.log 49 | #the size 50 | log4j.appender.R.MaxFileSize = 500KB 51 | #back up a file 52 | log4j.appender.R.MaxBackupIndex = 1 53 | log4j.appender.R.layout = org.apache.log4j.PatternLayout 54 | log4j.appender.R.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [%c]-[%p] - %m%n -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/redis.properties: 
-------------------------------------------------------------------------------- 1 | redis.hostName=10.196.83.92 2 | redis.port=6379 3 | redis.password=941012 4 | redis.maxIdle=300 5 | redis.maxTotal=600 6 | -------------------------------------------------------------------------------- /MachineLearningPlatformClient/target/classes/springmvc.xml: -------------------------------------------------------------------------------- 1 | 2 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | classpath:hdfs.properties 23 | classpath:redis.properties 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 85 | 86 | 87 | 88 | 0 89 | UTF-8 90 | 0.########## 91 | yyyy-MM-dd HH:mm:ss 92 | true 93 | ignore 94 | 95 | 96 | 97 | 98 | 99 | 101 | 102 | 103 | 104 | org.springframework.web.servlet.view.freemarker.FreeMarkerView 105 | 106 | 107 | true 108 | 109 | 110 | .html 111 | 112 | 113 | text/html; charset=UTF-8 114 | 115 | 116 | 117 | 118 | 123 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | MachineLearningPlatform 6 | MachineLearningPlatformServer 7 | 0.0.1-SNAPSHOT 8 | jar 9 | 10 | MachineLearningPlatformServer 11 | http://maven.apache.org 12 | 13 | 14 | UTF-8 15 | 16 | 17 | 18 | 19 | org.apache.spark 20 | spark-core_2.11 21 | 2.2.0 22 | 23 | 24 | org.apache.spark 25 | spark-mllib_2.11 26 | 2.2.0 27 | 28 | 29 | org.apache.spark 30 | spark-streaming_2.11 31 | 2.2.0 32 | 33 | 34 | org.apache.spark 35 | spark-sql_2.11 36 | 2.2.0 37 | 38 | 39 | org.scala-lang 40 | scala-library 41 | 2.11.8 42 | 43 | 44 | org.scala-lang.modules 45 | scala-xml_2.11 46 | 1.0.6 47 | 48 | 49 | 
com.fasterxml.jackson.core 50 | jackson-databind 51 | 2.6.4 52 | 53 | 54 | org.apache.hadoop 55 | hadoop-hdfs 56 | 2.7.3 57 | 58 | 59 | org.apache.hadoop 60 | hadoop-client 61 | 2.7.3 62 | 63 | 64 | org.apache.hbase 65 | hbase-client 66 | 1.2.5 67 | 68 | 69 | org.apache.hbase 70 | hbase-common 71 | 1.2.5 72 | 73 | 74 | org.apache.hbase 75 | hbase-protocol 76 | 1.2.5 77 | 78 | 79 | org.apache.hbase 80 | hbase-server 81 | 1.2.5 82 | 83 | 84 | org.springframework.data 85 | spring-data-redis 86 | 1.6.2.RELEASE 87 | 88 | 89 | redis.clients 90 | jedis 91 | 2.9.0 92 | 93 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/MLAlgorithmLoaderTest.java: -------------------------------------------------------------------------------- 1 | package edu.hhu.stonk.spark.test; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import edu.hhu.stonk.spark.mllib.*; 5 | 6 | import org.apache.spark.SparkConf; 7 | import org.apache.spark.api.java.JavaSparkContext; 8 | import org.apache.spark.ml.feature.LabeledPoint; 9 | import org.apache.spark.ml.linalg.Vector; 10 | import org.apache.spark.ml.linalg.Vectors; 11 | import org.junit.Test; 12 | 13 | import java.io.IOException; 14 | import java.util.HashMap; 15 | import java.util.Map; 16 | 17 | /** 18 | * MLAlgorithmLoader测试类 19 | * 20 | * @author hayes, @create 2017-12-14 19:35 21 | **/ 22 | public class MLAlgorithmLoaderTest { 23 | 24 | public static void main(String[] args) { 25 | SparkConf conf=new SparkConf().setAppName(appName).setMaster(master); 26 | JavaSparkContext sc=new JavaSparkContext(conf); 27 | LabeledPoint point=new LabeledPoint(1.0, Vectors.dense(2.0, 3.0, 3.0)); 28 | Vector v=Vectors.dense({1,1,2}); 29 | 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/algorithm/ComponentType.java: 
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.algorithm;

/**
 * Kind of Spark ML pipeline component an algorithm maps to
 * (stored as a string in HBase column info:componentsType).
 */
public enum ComponentType {

    TRANSFORMER("transformer"),
    ESTIMATOR("estimator");

    // String form persisted in HBase.
    private String value;

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    ComponentType(String value) {
        this.value = value;
    }

    /**
     * Resolves the constant whose string form equals {@code valueStr}.
     *
     * @param valueStr persisted string form, e.g. "estimator"
     * @return the matching constant, or null when nothing matches
     */
    public static ComponentType getComponentTypeByValue(String valueStr) {
        for (ComponentType componentType : ComponentType.values()) {
            if (componentType.getValue().equals(valueStr)) {
                return componentType;
            }
        }
        return null;
    }

}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmDesc.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.algorithm;

import java.util.Map;

/**
 * Descriptor of a Spark MLlib algorithm as loaded from the HBase
 * "algorithm" table by {@link MLAlgorithmLoader}.
 *
 * @author hayes, @create 2017-12-11 16:57
 **/
public class MLAlgorithmDesc {

    // Row key of the algorithm in HBase.
    private int id;

    /** Internal algorithm name. */
    private String name;

    /** Display name shown to users. */
    private String showName;

    /** Fully-qualified class name of the algorithm implementation. */
    private String className;

    /** Pipeline component kind (transformer / estimator). */
    private ComponentType componentsType;

    /** Usage category (clustering / regression / ...). */
    private UsageType usageType;

    /**
     * Algorithm parameters keyed by parameter name.
     * NOTE(review): generics were stripped in the extracted dump; reconstructed
     * as Map<String, ParameterDesc> from the surrounding usage — confirm.
     */
    private Map<String, ParameterDesc> parameterDescs;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getShowName() {
        return showName;
    }

    public void setShowName(String showName) {
        this.showName = showName;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public ComponentType getComponentsType() {
        return componentsType;
    }

    public void setComponentsType(ComponentType componentsType) {
        this.componentsType = componentsType;
    }

    public UsageType getUsageType() {
        return usageType;
    }

    public void setUsageType(UsageType usageType) {
        this.usageType = usageType;
    }

    public Map<String, ParameterDesc> getParameterDescs() {
        return parameterDescs;
    }

    public void setParameterDescs(Map<String, ParameterDesc> parameterDescs) {
        this.parameterDescs = parameterDescs;
    }
}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmLoader.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.algorithm;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.hhu.machinelearningplatformserver.common.HBaseUtil;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Loads algorithm descriptors from the HBase "algorithm" table into an
 * in-memory cache.
 *
 * @author hayes, @create 2017-12-12 14:43
 **/
public class MLAlgorithmLoader {

    // Cache of descriptors keyed by HBase row key (algorithm id).
    private static Map<Integer, MLAlgorithmDesc> mlAlgos = new HashMap<>();

    // Load failure, rethrown by the accessors below.
    private static IOException err;

    @SuppressWarnings("unchecked")
    public static void init() {
        HBaseUtil hBaseUtil = HBaseUtil.getInstance();
        try {
            hBaseUtil.connection();
            List<Result> results = hBaseUtil.getAllData("algorithm");
            // Hoisted out of the loop: ObjectMapper is expensive to build.
            ObjectMapper objectMapper = new ObjectMapper();
            for (Result result : results) {
                int id = Bytes.toInt(result.getRow());
                String name = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")));
                String showName = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("showName")));
                String className = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("className")));
                String componentsType = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("componentsType")));
                String usageType = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("usageType")));
                String parameterDescsMap = Bytes.toString(result.getValue(Bytes.toBytes("info"), Bytes.toBytes("parameterDescs")));
                MLAlgorithmDesc mlAlgorithmDesc = new MLAlgorithmDesc();
                mlAlgorithmDesc.setId(id);
                mlAlgorithmDesc.setName(name);
                mlAlgorithmDesc.setShowName(showName);
                mlAlgorithmDesc.setClassName(className);
                mlAlgorithmDesc.setComponentsType(ComponentType.getComponentTypeByValue(componentsType));
                mlAlgorithmDesc.setUsageType(UsageType.getUsageTypeByValue(usageType));
                // NOTE(review): readValue(.., Map.class) yields Map<String,Object>
                // (nested LinkedHashMaps), not ParameterDesc instances; consider
                // a TypeReference<Map<String, ParameterDesc>> — confirm callers.
                Map<String, ParameterDesc> parameterDescs = objectMapper.readValue(parameterDescsMap, Map.class);
                mlAlgorithmDesc.setParameterDescs(parameterDescs);
                mlAlgos.put(id, mlAlgorithmDesc);
            }
            hBaseUtil.close();
        } catch (IOException e) {
            // Fixed: 'err' was never assigned, so getAll()/getMLAlgorithmDesc()
            // silently served a partially-filled cache after a load failure.
            err = e;
        }
    }

    /**
     * @return all cached descriptors keyed by id
     * @throws IOException the failure recorded during init(), if any
     */
    public static Map<Integer, MLAlgorithmDesc> getAll() throws IOException {
        if (err == null) {
            return mlAlgos;
        }
        throw err;
    }

    /**
     * @return the descriptor for the given id, or null when unknown
     * @throws IOException the failure recorded during init(), if any
     */
    public static MLAlgorithmDesc getMLAlgorithmDesc(int id) throws IOException {
        if (err == null) {
            return mlAlgos.get(id);
        }
        throw err;
    }

}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/algorithm/ParameterDesc.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.algorithm; 2 | 3 | /** 4 | * 算法参数描述 5 | * 6 | * @author hayes, @create 2017-12-11 18:34 7 | **/ 8 | public class ParameterDesc { 9 | 10 | /** 11 | * 在算法中的属性名 12 | */ 13 | public String name; 14 | 15 | /** 16 | * 展示的名字(可中文) 17 | */ 18 | public String showName; 19 | 20 | /** 21 | * 参数类型(int、String、...) 22 | */ 23 | public ParameterValueType valueType; 24 | 25 | /** 26 | * 参数值(int、String、...) 27 | */ 28 | public String value; 29 | 30 | public Class javaTypeClass() { 31 | switch (valueType) { 32 | case BOOLEAN: { 33 | return boolean.class; 34 | } 35 | case STRING: { 36 | return String.class; 37 | } 38 | case INT: { 39 | return int.class; 40 | } 41 | case DOUBLE: { 42 | return double.class; 43 | } 44 | } 45 | return String.class; 46 | } 47 | 48 | public Object valueOf(String value) { 49 | switch (valueType) { 50 | case BOOLEAN: { 51 | return Boolean.valueOf(value); 52 | } 53 | case STRING: { 54 | return value; 55 | } 56 | case INT: { 57 | return Integer.valueOf(value); 58 | } 59 | case DOUBLE: { 60 | return Double.valueOf(value); 61 | } 62 | } 63 | return value; 64 | } 65 | 66 | public String getName() { 67 | return name; 68 | } 69 | 70 | public void setName(String name) { 71 | this.name = name; 72 | } 73 | 74 | public String getShowName() { 75 | return showName; 76 | } 77 | 78 | public void setShowName(String showName) { 79 | this.showName = showName; 80 | } 81 | 82 | public ParameterValueType getValueType() { 83 | return valueType; 84 | } 85 | 86 | public void setValueType(ParameterValueType valueType) { 87 | this.valueType = valueType; 88 | } 89 | 90 | public String getValue() { 91 | return value; 92 | } 93 | 94 | public void setValue(String value) { 95 | this.value = value; 96 | } 97 | 98 | } 99 | 
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/algorithm/ParameterValueType.java + UsageType.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.algorithm;

/**
 * Supported parameter value types.
 *
 * @author hayes, @create 2017-12-11 19:43
 **/
public enum ParameterValueType {

    INT("int"),
    DOUBLE("double"),
    BOOLEAN("boolean"),
    STRING("string");

    // Lower-case string form used in persisted descriptors.
    private String valueType;

    ParameterValueType(String valueType) {
        this.valueType = valueType;
    }

}

/**
 * Usage category of an algorithm (persisted as a string in HBase
 * column info:usageType).
 */
public enum UsageType {

    CLUSTERING("clustering"),
    REGRESSION("regression"),
    CLASSIFICATION("classification"),
    RECOMMENDATION("recommendation");

    private String value;

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    UsageType(String value) {
        this.value = value;
    }

    /**
     * Resolves the constant whose string form equals {@code valueStr}.
     *
     * @param valueStr persisted string form, e.g. "regression"
     * @return the matching constant, or null when nothing matches
     */
    public static UsageType getUsageTypeByValue(String valueStr) {
        for (UsageType usageType : UsageType.values()) {
            // Fixed: the original compared usageType.getValue() with itself,
            // which is always true, so every lookup returned CLUSTERING.
            if (usageType.getValue().equals(valueStr)) {
                return usageType;
            }
        }
        return null;
    }

}
// ByteObjectUtil.java begins on the next line of this dump:
package
com.hhu.machinelearningplatformserver.common;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

/**
 * Java-serialization helpers converting between objects and byte arrays.
 * Both directions return null on failure (behavior preserved from original).
 */
public class ByteObjectUtil {

    /** Deserializes a byte array back into an object; null on failure. */
    public static Object ByteToObject(byte[] bytes) {
        Object object = null;
        // try-with-resources: streams are now closed even when readObject fails.
        try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
             ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream)) {
            object = objectInputStream.readObject();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return object;
    }

    /** Serializes an object into a byte array; null on failure. */
    public static byte[] ObjectToByte(Object object) {
        byte[] bytes = null;
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {
            objectOutputStream.writeObject(object);
            objectOutputStream.flush();
            bytes = byteArrayOutputStream.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bytes;
    }
}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/ConfigUtils.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.common;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import com.hhu.machinelearningplatformserver.exception.ConfigInitException;

/** Reads and caches configuration entries from hdfs.properties. */
public class ConfigUtils {

    // Thread-safe cache of configuration key/value pairs.
    private static Map<String, String> map = new ConcurrentHashMap<String, String>();

    /** Loads every property from hdfs.properties into the cache. */
    public static void readConfig() throws Exception {
        Properties properties = new Properties();
        // try-with-resources: the original never closed this stream.
        try (InputStream inputStream = new BufferedInputStream(
                new FileInputStream(ResourcePath.RESOURCE_DIR + "hdfs.properties"))) {
            properties.load(inputStream);
            Iterator<String> iterator = properties.stringPropertyNames().iterator();
            if (!iterator.hasNext()) {
                System.out.println("未读取到配置信息!");
            }
            while (iterator.hasNext()) {
                String key = iterator.next();
                String value = properties.getProperty(key);
                map.put(key, value);
            }
        } catch (Exception e) {
            System.out.println("读取配置信息失败!");
            throw e;
        }
    }

    /** Returns the cached value for a key, or null when absent. */
    public static String getValueByName(String name) {
        return map.get(name);
    }

    /**
     * Validates the mandatory HDFS settings and initializes HDFSUtils.
     *
     * @throws ConfigInitException when a mandatory key is missing or HDFS init fails
     */
    public static void initConfig() throws ConfigInitException, IOException {
        String HDFS_IP = getValueByName("hdfs.hostname");
        if (HDFS_IP == null) {
            throw new ConfigInitException("HDFS集群IP地址不能为空!");
        }
        String HDFS_PORT = getValueByName("hdfs.port");
        if (HDFS_PORT == null) {
            throw new ConfigInitException("HDFS集群端口号不能为空!");
        }
        String HDFS_USER = getValueByName("hdfs.user");
        if (HDFS_USER == null) {
            throw new ConfigInitException("HDFS集群用户名不能为空!");
        }
        String HDFS_URI = getValueByName("hdfs.uri");
        if (HDFS_URI == null) {
            throw new ConfigInitException("HDFS集群根目录不能为空!");
        }
        // Fixed: HDFSUtils.init() looks up "HDFS_IP"/"HDFS_PORT"/"HDFS_USER"/"HDFS_URI",
        // but only the "hdfs.*" keys are loaded from the properties file, so those
        // lookups always returned null. Mirror the validated values under the
        // names HDFSUtils actually reads.
        addConfig("HDFS_IP", HDFS_IP);
        addConfig("HDFS_PORT", HDFS_PORT);
        addConfig("HDFS_USER", HDFS_USER);
        addConfig("HDFS_URI", HDFS_URI);
        try {
            HDFSUtils.init();
        } catch (Exception e) {
            throw new ConfigInitException("HDFS初始化失败!");
        }
    }

    public static void addConfig(String key, String value) {
        map.put(key, value);
    }

    public static void deleteConfig(String key) {
        map.remove(key);
    }

}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/HBaseUtil.java:
--------------------------------------------------------------------------------
package com.hhu.machinelearningplatformserver.common;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Thin singleton wrapper around the HBase client API.
 * Call connection() before any table operation, close() when done.
 */
public class HBaseUtil {

    private static HBaseUtil hBaseUtil = new HBaseUtil();

    private Connection connection;
    private static final String ZOOKEEPER_QUORUM = "10.196.83.90,10.196.83.91,10.196.83.92";
    private static final String ZOOKEEPER_CLIENTPORT = "2181";
    private static final String HBASE_ROOTDIR = "hdfs://10.196.83.90:9000/hbase";
    private static final String RETRIES_NUMBER = "3";

    public static HBaseUtil getInstance() {
        return hBaseUtil;
    }

    /** Opens the shared HBase connection. */
    public void connection() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", ZOOKEEPER_QUORUM);
        conf.set("hbase.zookeeper.property.clientPort", ZOOKEEPER_CLIENTPORT);
        conf.set("hbase.rootdir", HBASE_ROOTDIR);
        conf.set("hbase.client.retries.number", RETRIES_NUMBER);
        this.connection = ConnectionFactory.createConnection(conf);
    }

    /** Creates a table with a single column family; no-op when it exists. */
    public void createTable(String tableName, String familyName) throws IOException {
        HBaseAdmin admin = null;
        try {
            admin = (HBaseAdmin) connection.getAdmin();
            if (admin.tableExists(tableName)) {
                System.out.println("表已存在!");
                return;
            }
            HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(tableName));
            HColumnDescriptor columnDescriptor = new HColumnDescriptor(Bytes.toBytes(familyName));
            descriptor.addFamily(columnDescriptor);
            admin.createTable(descriptor);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Fixed: admin.close() threw NPE when getAdmin() itself failed.
            if (admin != null) {
                admin.close();
            }
        }
    }

    /**
     * Writes one cell.
     *
     * @param tableName     target table
     * @param rowKey        row key
     * @param familyName    column family
     * @param qualifierName column qualifier
     * @param value         cell value
     * @throws Exception on HBase I/O failure
     */
    public void putData(String tableName, String rowKey, String familyName, String qualifierName, String value)
            throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName), Bytes.toBytes(value));
            table.put(put);
        } finally {
            // Close even when the put fails.
            table.close();
        }
    }

    /**
     * Fetches a whole row by key.
     *
     * @throws Exception on HBase I/O failure
     */
    public Result getResult(String tableName, String rowKey) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Get get = new Get(Bytes.toBytes(rowKey));
            return table.get(get);
        } finally {
            table.close();
        }
    }

    /**
     * Reads one cell value as a string.
     *
     * @return the latest cell value, or null when the cell does not exist
     * @throws Exception on HBase I/O failure
     */
    public String getValue(String tableName, String rowKey, String familyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Get get = new Get(Bytes.toBytes(rowKey));
            Result result = table.get(get);
            Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName));
            // Fixed: the original returned before table.close() when the cell
            // was missing, leaking the table handle.
            if (cell == null) {
                return null;
            }
            return Bytes.toString(CellUtil.cloneValue(cell));
        } finally {
            table.close();
        }
    }

    /** Scans the whole table and returns every row. */
    public List<Result> getAllData(String tableName) throws IOException {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Scan scan = new Scan();
            ResultScanner resultScanner = table.getScanner(scan);
            List<Result> lists = new ArrayList<Result>();
            for (Result result : resultScanner) {
                lists.add(result);
            }
            return lists;
        } finally {
            table.close();
        }
    }

    /**
     * Collects the values of one column for all rows whose key starts with
     * the given prefix.
     *
     * @throws Exception on HBase I/O failure
     */
    public List<String> getValueByRowPrefix(String tableName, String rowPrefix, String familyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            List<String> values = new ArrayList<>();
            Scan scan = new Scan();
            scan.setFilter(new PrefixFilter(Bytes.toBytes(rowPrefix)));
            table.getScanner(scan).forEach((result) -> {
                Cell cell = result.getColumnLatestCell(Bytes.toBytes(familyName), Bytes.toBytes(qualifierName));
                if (cell != null) {
                    values.add(Bytes.toString(CellUtil.cloneValue(cell)));
                }
            });
            return values;
        } finally {
            table.close();
        }
    }

    /**
     * Deletes one column of a row.
     *
     * @throws Exception on HBase I/O failure
     */
    public void deleteColumn(String tableName, String rowKey, String falilyName, String qualifierName) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            delete.addColumn(Bytes.toBytes(falilyName), Bytes.toBytes(qualifierName));
            table.delete(delete);
        } finally {
            table.close();
        }
    }

    /**
     * Deletes a whole row by key.
     *
     * @throws Exception on HBase I/O failure
     */
    public void deleteColumn(String tableName, String rowKey) throws Exception {
        HTable table = (HTable) connection.getTable(TableName.valueOf(tableName));
        try {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            table.delete(delete);
        } finally {
            table.close();
        }
    }

    /**
     * Disables and drops a table.
     *
     * @throws Exception on HBase I/O failure
     */
    public void dropTable(String tableName) throws Exception {
        HBaseAdmin admin = (HBaseAdmin) connection.getAdmin();
        try {
            admin.disableTable(TableName.valueOf(tableName));
            admin.deleteTable(TableName.valueOf(tableName));
        } finally {
            admin.close();
        }
    }

    /** Closes the shared HBase connection. */
    public void close() throws IOException {
        connection.close();
    }
}
--------------------------------------------------------------------------------
/MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/HDFSUtils.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.common; 2 | 3 | import java.io.FileNotFoundException; 4 | import java.io.IOException; 5 | import java.net.URI; 6 | 7 | import org.apache.commons.lang.StringUtils; 8 | import org.apache.hadoop.conf.Configuration; 9 | import org.apache.hadoop.fs.FileStatus; 10 | import org.apache.hadoop.fs.FileSystem; 11 | import org.apache.hadoop.fs.Path; 12 | import org.apache.hadoop.hdfs.DistributedFileSystem; 13 | import org.apache.hadoop.hdfs.protocol.DatanodeInfo; 14 | import org.slf4j.Logger; 15 | import org.slf4j.LoggerFactory; 16 | 17 | public class HDFSUtils { 18 | 19 | private static FileSystem fileSystem; 20 | private static String HDFS_URI; 21 | private static final Logger logger=LoggerFactory.getLogger(HDFSUtils.class); 22 | 23 | public static void init() throws Exception { 24 | //读取HDFS地址 25 | Configuration conf=new Configuration(); 26 | String hdfsIP=ConfigUtils.getValueByName("HDFS_IP"); 27 | String hdfsPort=ConfigUtils.getValueByName("HDFS_PORT"); 28 | String hdfsUser=ConfigUtils.getValueByName("HDFS_USER"); 29 | HDFS_URI="hdfs://"+hdfsIP+":"+hdfsPort+ConfigUtils.getValueByName("HDFS_URI"); 30 | URI uri; 31 | try { 32 | uri=new URI("hdfs://"+hdfsIP+":"+hdfsPort); 33 | fileSystem=FileSystem.get(uri, conf, hdfsUser); 34 | } catch (Exception e) { 35 | // TODO Auto-generated catch block 36 | throw e; 37 | } 38 | } 39 | 40 | public static FileSystem getFileSystem() throws Exception { 41 | if(fileSystem!=null) { 42 | return fileSystem; 43 | } 44 | else { 45 | init(); 46 | return fileSystem; 47 | } 48 | } 49 | 50 | //创建目录 51 | public static void createDirectory(String path) throws Exception { 52 | // TODO Auto-generated method stub 53 | String realPath=HDFS_URI+path; 54 | Path hdfsPath=new Path(realPath); 55 | try { 56 | 
if(fileSystem.exists(hdfsPath)) { 57 | logger.debug("目录已存在!"); 58 | } 59 | else { 60 | fileSystem.mkdirs(hdfsPath); 61 | } 62 | } catch (Exception e) { 63 | // TODO Auto-generated catch block 64 | throw e; 65 | } 66 | } 67 | 68 | //删除文件目录 69 | public static void delete(String path) throws Exception { 70 | String realPath=HDFS_URI+path; 71 | Path hdfsPath=new Path(realPath); 72 | try { 73 | if(!fileSystem.exists(hdfsPath)) { 74 | logger.debug("目录不存在!"); 75 | } 76 | else { 77 | fileSystem.delete(hdfsPath,true); 78 | } 79 | } catch (Exception e) { 80 | // TODO Auto-generated catch block 81 | throw e; 82 | } 83 | } 84 | 85 | //获取目录下的所有文件 86 | public static FileStatus[] list(String path) throws FileNotFoundException, IOException { 87 | String realPath=HDFS_URI+path; 88 | Path hdfsPath=new Path(realPath); 89 | FileStatus[] lists=null; 90 | if(!fileSystem.exists(hdfsPath)) { 91 | logger.info("目录不存在!"); 92 | } 93 | else if(fileSystem.isFile(hdfsPath)) { 94 | logger.info("不是目录!"); 95 | } 96 | else { 97 | lists=fileSystem.listStatus(hdfsPath); 98 | } 99 | return lists; 100 | } 101 | 102 | //上传文件 103 | public static void copyFileToHDFS(boolean delSrc, boolean overwrite,String srcFile,String destPath) throws IOException { 104 | Path srcPath=new Path(srcFile); 105 | Path hdfsPath=new Path(HDFS_URI+destPath); 106 | if(!fileSystem.exists(hdfsPath)) { 107 | logger.debug("目录不存在!"); 108 | return; 109 | } 110 | fileSystem.copyFromLocalFile(delSrc, overwrite, srcPath, hdfsPath); 111 | } 112 | 113 | //下载文件 114 | public static void getFile(String srcFile, String destPath) throws IOException { 115 | Path srcPath=new Path(HDFS_URI+srcFile); 116 | Path destFile=new Path(destPath); 117 | if(!fileSystem.exists(srcPath)) { 118 | logger.debug("源文件不存在!"); 119 | return; 120 | } 121 | fileSystem.copyToLocalFile(srcPath, destFile); 122 | } 123 | 124 | //判断目录或文件是否存在 125 | public static boolean existDir(String filePath) throws IOException { 126 | if(StringUtils.isEmpty(filePath)) { 127 | return 
false; 128 | } 129 | Path path=new Path(filePath); 130 | if(!fileSystem.exists(path)) { 131 | logger.debug("文件或目录不存在!"); 132 | return false; 133 | } 134 | else { 135 | return true; 136 | } 137 | } 138 | 139 | //重命名 140 | public static void rename(String srcPath, String dstPath) throws IOException { 141 | srcPath=HDFS_URI+srcPath; 142 | dstPath=HDFS_URI+dstPath; 143 | Path src=new Path(srcPath); 144 | Path dst=new Path(dstPath); 145 | if(!fileSystem.exists(src)) { 146 | logger.debug("文件或目录不存在!"); 147 | return; 148 | } 149 | fileSystem.rename(src, dst); 150 | } 151 | 152 | //获得HDFS节点信息 153 | public static DatanodeInfo[] getHDFSNodes() throws IOException { 154 | //获取所有节点 155 | DatanodeInfo[] dataNodeStats=new DatanodeInfo[0]; 156 | //获取分布式文件系统 157 | DistributedFileSystem hdfs=(DistributedFileSystem) fileSystem; 158 | dataNodeStats=hdfs.getDataNodeStats(); 159 | return dataNodeStats; 160 | } 161 | 162 | public static void close() throws IOException { 163 | fileSystem.close(); 164 | } 165 | 166 | } 167 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/JRedisPoolConfig.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.common; 2 | 3 | import java.io.IOException; 4 | import java.util.PropertyResourceBundle; 5 | 6 | public class JRedisPoolConfig { 7 | private static PropertyResourceBundle propertyResourceBundle; 8 | static { 9 | try { 10 | propertyResourceBundle=new PropertyResourceBundle(JRedisPoolConfig.class.getResourceAsStream("/redis.properties")); 11 | } catch (IOException e) { 12 | // TODO Auto-generated catch block 13 | e.printStackTrace(); 14 | } 15 | } 16 | static int EXPIRE=Integer.parseInt(propertyResourceBundle.getString("EXPIRE_TIME")); 17 | static int MAX_ACTIVE=Integer.parseInt(propertyResourceBundle.getString("maxActive")); 18 | static int 
MAX_IDLE=Integer.parseInt(propertyResourceBundle.getString("maxIdle")); 19 | static int MAX_WAIT=Integer.parseInt(propertyResourceBundle.getString("maxWait")); 20 | static int TIME_OUT=Integer.parseInt(propertyResourceBundle.getString("timeout")); 21 | static String REDIS_PASSWORD=propertyResourceBundle.getString("redisPWD"); 22 | static String REDIS_IP=propertyResourceBundle.getString("redisIp"); 23 | static int REDIS_PORT=Integer.parseInt(propertyResourceBundle.getString("redisPort")); 24 | } 25 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/JedisUtils.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.common; 2 | 3 | import redis.clients.jedis.Jedis; 4 | import redis.clients.jedis.JedisPool; 5 | import redis.clients.jedis.JedisPoolConfig; 6 | 7 | public class JedisUtils { 8 | 9 | private static JedisPool pool=null; 10 | private static JedisUtils jedisUtils; 11 | private final int expire =JRedisPoolConfig.EXPIRE; 12 | 13 | static { 14 | JedisPoolConfig config=new JedisPoolConfig(); 15 | config.setMaxTotal(JRedisPoolConfig.MAX_ACTIVE); 16 | config.setMaxIdle(JRedisPoolConfig.MAX_IDLE); 17 | config.setMaxWaitMillis(JRedisPoolConfig.MAX_WAIT); 18 | //在获取连接的时候检查有效性 19 | config.setTestOnBorrow(false); 20 | //在return给pool时,是否提前进行validate操作 21 | config.setTestOnReturn(false); 22 | pool=new JedisPool(config,JRedisPoolConfig.REDIS_IP,JRedisPoolConfig.REDIS_PORT,JRedisPoolConfig.TIME_OUT); 23 | } 24 | 25 | public static JedisPool getPool() { 26 | return pool; 27 | } 28 | 29 | //从jedis连接池中获取jedis对象 30 | public static Jedis getJedis() { 31 | return pool.getResource(); 32 | } 33 | 34 | public static JedisUtils getInstance() { 35 | return jedisUtils; 36 | } 37 | 38 | //回收jedis 39 | public static void returnJedis(Jedis jedis) { 40 | pool.returnResource(jedis); 41 | } 42 
| 43 | //设置过期时间 44 | public void expire(String key,int seconds) { 45 | if(seconds<0) { 46 | return; 47 | } 48 | Jedis jedis=getJedis(); 49 | jedis.expire(key, seconds); 50 | returnJedis(jedis); 51 | } 52 | 53 | //设置默认过期时间 54 | public void expire(String key) { 55 | expire(key, expire); 56 | } 57 | 58 | 59 | 60 | } 61 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/RandomUtil.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.common; 2 | 3 | import java.util.Random; 4 | 5 | /** 6 | * 生成随机字符串工具 7 | * 8 | * @author hayes, @create 2017-12-14 14:35 9 | **/ 10 | public class RandomUtil { 11 | 12 | /** 13 | * 任意长度字符串 14 | * 15 | * @param length 16 | * @return 17 | */ 18 | public static String getRandomString(int length) { 19 | String base = "abcdefghijklmnopqrstuvwxyz0123456789"; 20 | Random random = new Random(); 21 | StringBuffer sb = new StringBuffer(); 22 | for (int i = 0; i < length; i++) { 23 | int number = random.nextInt(base.length()); 24 | sb.append(base.charAt(number)); 25 | } 26 | return sb.toString(); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/common/ResourcePath.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.common; 2 | 3 | //资源文件存放目录 4 | public class ResourcePath { 5 | 6 | public static final String RESOURCE_DIR="src/main/resources/"; 7 | } 8 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/data/DataFile.java: -------------------------------------------------------------------------------- 1 | package 
package com.hhu.machinelearningplatformserver.data;

import java.io.Serializable;
import java.util.List;

/**
 * Describes one dataset file used by an algorithm task: its name, storage
 * path, owning user, line delimiter, file format and per-column layout.
 */
public class DataFile implements Serializable {

    private static final long serialVersionUID = 1L;

    // Dataset file name.
    private String name;
    // Dataset file path.
    private String path;
    // Id of the user who uploaded the file.
    private long userId;
    // Column delimiter; defaults to a comma.
    private String delim=",";
    // File format; defaults to CSV.
    private DataFileType dataFileType=DataFileType.CSV;
    // Per-column descriptions consumed by LineParse / SparkDataFileConverter.
    // NOTE(review): type parameters look stripped from this copy (likely
    // List<FieldInfo>) — confirm against the original source.
    private List fieldInfos;

    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getPath() {
        return path;
    }
    public void setPath(String path) {
        this.path = path;
    }
    public long getUserId() {
        return userId;
    }
    public void setUserId(long userId) {
        this.userId = userId;
    }
    public String getDelim() {
        return delim;
    }
    public void setDelim(String delim) {
        this.delim = delim;
    }
    public DataFileType getDataFileType() {
        return dataFileType;
    }
    public void setDataFileType(DataFileType dataFileType) {
        this.dataFileType = dataFileType;
    }
    public List getFieldInfos() {
        return fieldInfos;
    }
    public void setFieldInfos(List fieldInfos) {
        this.fieldInfos = fieldInfos;
    }

}
13 | * @author hayes, @create 2017-12-20 16:08 14 | **/ 15 | public class DataFileMapper { 16 | 17 | private static final String TABLE_NAME = "task"; 18 | 19 | private static final String FAMILY_NAME = "info"; 20 | 21 | private static final String JSON_QUALIFIER_NAME = "json"; 22 | 23 | private HBaseUtil hBaseUtil; 24 | 25 | private ObjectMapper JSON; 26 | 27 | public DataFileMapper() throws IOException { 28 | hBaseUtil=HBaseUtil.getInstance(); 29 | JSON = new ObjectMapper(); 30 | } 31 | 32 | /** 33 | * 添加数据 34 | * 35 | * @param dataFile 36 | * @throws Exception 37 | */ 38 | public void put(DataFile dataFile) throws Exception { 39 | String rowKey = buildRowKey(dataFile.getUserId(), dataFile.getName()); 40 | 41 | hBaseUtil.putData(TABLE_NAME, rowKey, FAMILY_NAME, JSON_QUALIFIER_NAME, JSON.writeValueAsString(dataFile)); 42 | } 43 | 44 | /** 45 | * 查询 46 | * 47 | * @param uname 48 | * @param dataFileName 49 | * @return 50 | * @throws Exception 51 | */ 52 | public DataFile get(long userId, String dataFileName) throws Exception { 53 | String rowKey = buildRowKey(userId, dataFileName); 54 | 55 | String dataFileJson = hBaseUtil.getValue(TABLE_NAME, rowKey, FAMILY_NAME, JSON_QUALIFIER_NAME); 56 | return JSON.readValue(dataFileJson, DataFile.class); 57 | } 58 | 59 | /** 60 | * 查询用户的所有数据文件 61 | * 62 | * @param uname 63 | * @return 64 | * @throws Exception 65 | */ 66 | public List get(String uname) throws Exception { 67 | List dataFileJsons = hBaseUtil.getValueByRowPrefix(TABLE_NAME, uname, FAMILY_NAME, JSON_QUALIFIER_NAME); 68 | List dataFiles = new ArrayList(); 69 | for (String json : dataFileJsons) { 70 | dataFiles.add(JSON.readValue(json, DataFile.class)); 71 | } 72 | return dataFiles; 73 | } 74 | 75 | 76 | /** 77 | * 生成rowkey {uname}-{dataFile.name} 78 | * 79 | * @param uname 80 | * @param dataFileName 81 | * @return 82 | */ 83 | private String buildRowKey(long userId, String dataFileName) { 84 | return userId + "_" + dataFileName; 85 | } 86 | 87 | } 88 | 
/**
 * Describes one column (or contiguous column span) of a dataset file.
 *
 * @author hayes, @create 2017-12-11 19:02
 **/
public class FieldInfo implements Serializable {

    private static final long serialVersionUID = -7123058551214352633L;

    public static final String DOUBLE_DATATYPE = "double";
    public static final String BOOLEAN_DATATYPE = "boolean";
    public static final String INTEGER_DATATYPE = "int";
    public static final String STRING_DATATYPE = "string";
    public static final String TIMESTAMP_DATATYPE = "timestamp";
    public static final String LONG_DATATYPE = "long";
    public static final String NULL_DATATYPE = "null";

    // One of the *_DATATYPE constants above.
    private String dataType;

    // Field name.
    private String name;

    // Whether the column value may be empty.
    private boolean nullable;

    // Column position; -1 marks a multi-column field.
    private int index = -1;

    // First column of a multi-column span.
    private int startIndex;

    // Last column of a multi-column span.
    private int endIndex;

    public String getDataType() {
        return dataType;
    }

    public void setDataType(String dataType) {
        this.dataType = dataType;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isNullable() {
        return nullable;
    }

    public void setNullable(boolean nullable) {
        this.nullable = nullable;
    }

    public int getIndex() {
        return index;
    }

    public void setIndex(int index) {
        this.index = index;
    }

    public int getStartIndex() {
        return startIndex;
    }

    public void setStartIndex(int startIndex) {
        this.startIndex = startIndex;
    }

    public int getEndIndex() {
        return endIndex;
    }

    public void setEndIndex(int endIndex) {
        this.endIndex = endIndex;
    }
}
    /**
     * Converts a multi-column slice of the input line into a Spark ML dense
     * vector. Used for fields whose index == -1, i.e. a column span (see the
     * else-branch of call()).
     *
     * @param info  field description; its dataType selects the conversion
     * @param value raw column strings covered by this field
     * @return a Vectors.dense(...) built from the parsed values
     * @throws CantConverException if an element cannot be parsed as a double
     *                             or the dataType is not vector-compatible
     */
    public Object fieldCall(FieldInfo info, String[] value) throws Exception {
        switch (info.getDataType()) {
            // NOTE(review): STRING_DATATYPE falls through to the numeric
            // branch, so "string" spans are parsed as doubles — confirm
            // that is intended.
            case FieldInfo.STRING_DATATYPE:
            case FieldInfo.DOUBLE_DATATYPE:
            case FieldInfo.INTEGER_DATATYPE:
            case FieldInfo.LONG_DATATYPE: {
                double[] vect = new double[value.length];
                try {
                    for (int i = 0; i < value.length; i++) {
                        vect[i] = Double.valueOf(value[i]);
                    }
                } catch (Exception e) {
                    // Surface the parse failure as a conversion error.
                    throw new CantConverException(e.getMessage());
                }
                return Vectors.dense(vect);
            }
            default:
                throw new CantConverException("不合法类型");
        }
    }
/**
 * Persists a dataset to storage.
 *
 * @author hayes, @create 2017-12-22 18:08
 **/
public class PersistDataset {
    /**
     * Writes the dataset to the given path as plain text via the underlying
     * RDD (one element's string form per line).
     *
     * NOTE(review): this is not a round-trippable format — confirm that
     * downstream readers only need a human-readable dump.
     *
     * @param dataset dataset to write
     * @param path    output directory path
     */
    public static void persist(Dataset dataset, String path) {
        dataset.javaRDD().saveAsTextFile(path);
        // StructType schema = dataset.schema();
        // StructField[] fields = schema.fields();
        // dataset.foreach((row) -> {
        //
        // });
    }
}
org.apache.spark.sql.Dataset; 8 | import org.apache.spark.sql.Row; 9 | import org.apache.spark.sql.SQLContext; 10 | import org.apache.spark.sql.SparkSession; 11 | import org.apache.spark.sql.types.*; 12 | 13 | import com.hhu.machinelearningplatformserver.exception.CantConverException; 14 | import com.hhu.machinelearningplatformserver.task.TaskInfo; 15 | 16 | public class SparkDataFileConverter { 17 | 18 | public static Dataset extractDataFrame(TaskInfo taskInfo, JavaSparkContext context) throws Exception { 19 | DataFile dataFile = taskInfo.getDataFile(); 20 | return convertToDataFrame(dataFile, context); 21 | } 22 | 23 | /** 24 | * 将数据集文件转换为DataFrame TODO:增加json等格式 25 | * 26 | * @param context 27 | * @return 28 | * @throws CantConverException 29 | */ 30 | public static Dataset convertToDataFrame(DataFile dataFile, JavaSparkContext context) throws CantConverException { 31 | SparkSession sparkSession = SparkSession.builder() 32 | .sparkContext(context.sc()) 33 | .getOrCreate(); 34 | 35 | SQLContext sqlContext = new SQLContext(sparkSession); 36 | 37 | switch (dataFile.getDataFileType()) { 38 | case CSV: 39 | return csvToDataFrame(dataFile, context, sqlContext); 40 | case LIBSVM: 41 | return libsvmToDataFrame(dataFile, sqlContext); 42 | default: 43 | throw new CantConverException("不支持的数据集格式"); 44 | } 45 | } 46 | 47 | private static Dataset libsvmToDataFrame(DataFile dataFile, SQLContext sqlContext) { 48 | return sqlContext.read() 49 | .format("libsvm") 50 | .load(dataFile.getPath()); 51 | } 52 | 53 | private static Dataset csvToDataFrame(DataFile dataFile, JavaSparkContext context, SQLContext sqlContext) throws CantConverException { 54 | StructType schema = getStructType(dataFile); 55 | 56 | List dataList = context.textFile(dataFile.getPath()) 57 | .map(new LineParse(dataFile)).collect(); 58 | return sqlContext.createDataFrame(dataList, schema); 59 | // return sqlContext.read() 60 | // .format("csv") 61 | // .option("header", header ? 
"true" : "false") 62 | // .option("delimiter", delim) 63 | // .option("inferSchema", "false") 64 | // .schema(getStructType()) 65 | // .load(path); 66 | } 67 | 68 | /** 69 | * Spark StructType 70 | * 71 | * @return 72 | * @throws CantConverException 73 | */ 74 | public static StructType getStructType(DataFile dataFile) throws CantConverException { 75 | List fieldInfos = dataFile.getFieldInfos(); 76 | //按照 Index 排序 77 | fieldInfos.sort((FieldInfo f1, FieldInfo f2) -> f1.getIndex() > f2.getIndex() ? -1 : 1); 78 | 79 | StructField[] fields = new StructField[fieldInfos.size()]; 80 | for (int i = 0; i < fieldInfos.size(); i++) { 81 | fields[i] = convertToStructField(fieldInfos.get(i)); 82 | } 83 | return new StructType(fields); 84 | } 85 | 86 | 87 | /** 88 | * StructField, 89 | * 90 | * @return 91 | * @throws CantConverException 92 | */ 93 | public static StructField convertToStructField(FieldInfo info) throws CantConverException { 94 | if (info.getIndex() != -1) { 95 | return DataTypes.createStructField(info.getName(), sparkDataType(info.getDataType()), info.isNullable()); 96 | } else { 97 | switch (info.getDataType()) { 98 | case FieldInfo.STRING_DATATYPE: 99 | case FieldInfo.DOUBLE_DATATYPE: 100 | case FieldInfo.INTEGER_DATATYPE: 101 | case FieldInfo.LONG_DATATYPE: { 102 | return new StructField(info.getName(), new VectorUDT(), info.isNullable(), Metadata.empty()); 103 | } 104 | default: 105 | throw new CantConverException("不合法类型"); 106 | } 107 | } 108 | } 109 | 110 | /** 111 | * Spark SQL DataType 112 | * 113 | * @return 114 | */ 115 | public static DataType sparkDataType(String dataType) throws CantConverException { 116 | switch (dataType) { 117 | case FieldInfo.DOUBLE_DATATYPE: { 118 | return DataTypes.DoubleType; 119 | } 120 | case FieldInfo.BOOLEAN_DATATYPE: { 121 | return DataTypes.BooleanType; 122 | } 123 | case FieldInfo.INTEGER_DATATYPE: { 124 | return DataTypes.IntegerType; 125 | } 126 | case FieldInfo.STRING_DATATYPE: { 127 | return DataTypes.StringType; 
/**
 * Thrown when a dataset value or schema cannot be converted to the
 * requested type.
 *
 * @author hayes, @create 2017-12-11 21:57
 **/
public class CantConverException extends Exception {

    // Added for consistency with the sibling ConfigInitException: Exception
    // is Serializable, so the version id should be pinned explicitly.
    private static final long serialVersionUID = 1L;

    public CantConverException(String msg) {
        super(msg);
    }

    public CantConverException() {
    }
}
package com.hhu.machinelearningplatformserver.proxy;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import com.hhu.machinelearningplatformserver.task.SparkTaskAlgorithm;

import java.lang.reflect.Method;

/**
 * Reflection proxy over a Spark ML Estimator: trains it and wraps the
 * produced model in a {@link ModelProxy}.
 *
 * @author hayes, @create 2017-12-12 16:18
 **/
public class EstimatorProxy extends MLAlgorithmProxy {

    public EstimatorProxy(SparkTaskAlgorithm mlAlgo) throws Exception {
        super(mlAlgo);
    }

    /**
     * Fits the wrapped estimator on the dataset and proxies the result.
     * The model class is resolved by the convention {algorithmClass}Model.
     *
     * @param dataset training data
     * @return proxy over the fitted model
     * @throws Exception if reflection lookup/invocation fails
     */
    public ModelProxy fit(Dataset dataset) throws Exception {
        // BUG FIX: getMethod("fit") looked up a zero-argument method, but
        // Estimator.fit takes the dataset as a parameter — the lookup threw
        // NoSuchMethodException before any training happened.
        Method method = algoClazz.getMethod("fit", Dataset.class);
        Class modelClass = Class.forName(this.desc.getClassName() + "Model");
        return new ModelProxy(method.invoke(algo, dataset), modelClass);
    }
}
package com.hhu.machinelearningplatformserver.proxy;

import java.lang.reflect.Method;
import java.util.Map;

import com.hhu.machinelearningplatformserver.algorithm.MLAlgorithmDesc;
import com.hhu.machinelearningplatformserver.algorithm.MLAlgorithmLoader;
import com.hhu.machinelearningplatformserver.algorithm.ParameterDesc;
import com.hhu.machinelearningplatformserver.task.SparkTaskAlgorithm;

/**
 * Reflection-based proxy over an ML algorithm instance. Instantiates the
 * class named in the algorithm descriptor and applies the task's parameters
 * through conventional setXxx(...) setters.
 *
 * @author hayes, @create 2017-12-12 16:16
 **/
public class MLAlgorithmProxy {

    // The algorithm instance created via reflection.
    protected Object algo;

    // Class object of the algorithm, used by subclasses for method lookups.
    protected Class algoClazz;

    // Descriptor: fully-qualified class name plus parameter descriptions.
    protected MLAlgorithmDesc desc;

    MLAlgorithmProxy(SparkTaskAlgorithm mlAlgo) throws Exception {
        desc = MLAlgorithmLoader.getMLAlgorithmDesc(mlAlgo.getId());
        algoClazz = Class.forName(desc.getClassName());
        algo = algoClazz.newInstance();

        // Apply each configured parameter via its setter: key "maxIter"
        // becomes setMaxIter(...). NOTE(review): Map.Entry type parameters
        // look stripped from this copy — the getKey().substring(...) calls
        // imply Map.Entry<String, ...>; confirm against the original source.
        for (Map.Entry param : mlAlgo.getParameters().entrySet()) {
            // The parameter description supplies the setter's argument type
            // and parses the string value into it.
            ParameterDesc paramDesc = desc.getParameterDescs().get(param.getKey());
            String setterMethodName = param.getKey().substring(0, 1).toUpperCase()
                    + param.getKey().substring(1);
            Method method = algoClazz.getMethod("set" + setterMethodName,
                    paramDesc.javaTypeClass());
            method.invoke(algo, paramDesc.valueOf(param.getValue()));
        }
    }
}
import org.apache.spark.sql.Dataset; 4 | import org.apache.spark.sql.Row; 5 | 6 | import java.lang.reflect.InvocationTargetException; 7 | import java.lang.reflect.Method; 8 | 9 | /** 10 | * 训练后的模型代理 11 | * 12 | * @author hayes, @create 2017-12-12 15:21 13 | **/ 14 | public class ModelProxy { 15 | 16 | /** 17 | * 模型实例 18 | */ 19 | protected Object model; 20 | 21 | protected Class modelClazz; 22 | 23 | public ModelProxy(Object model, Class modelClazz) { 24 | this.model = model; 25 | this.modelClazz = modelClazz; 26 | } 27 | 28 | private ModelProxy() { 29 | } 30 | 31 | /** 32 | * TODO: 加载 33 | * 34 | * @param path 35 | * @param modelClazz 36 | * @return 37 | */ 38 | public static ModelProxy load(String path, Class modelClazz) { 39 | ModelProxy modelProxy = new ModelProxy(); 40 | modelProxy.modelClazz = modelClazz; 41 | 42 | return modelProxy; 43 | } 44 | 45 | @SuppressWarnings("unchecked") 46 | public Dataset tranform(Dataset dataset) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { 47 | Method method = modelClazz.getMethod("save", Dataset.class); 48 | return (Dataset) method.invoke(model, dataset); 49 | } 50 | 51 | public void save(String path) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { 52 | Method method = modelClazz.getMethod("save", String.class); 53 | method.invoke(model,path); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/proxy/TransformerProxy.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.proxy; 2 | 3 | import org.apache.spark.ml.Model; 4 | import org.apache.spark.sql.Dataset; 5 | import org.apache.spark.sql.Row; 6 | import com.hhu.machinelearningplatformserver.algorithm.MLAlgorithmDesc; 7 | import com.hhu.machinelearningplatformserver.algorithm.MLAlgorithmLoader; 8 
import com.hhu.machinelearningplatformserver.common.ConfigUtils;
import com.hhu.machinelearningplatformserver.model.AlgorithmModel;
import java.lang.reflect.Method;

/**
 * Proxy for applying a previously-trained model (a Transformer). The model
 * is loaded from HDFS through the static load(String) method of the
 * algorithm class recorded in the model's metadata.
 *
 * @author hayes, @create 2017-12-12 15:20
 **/
public class TransformerProxy {

    // Instance of the algorithm class; only used as the (ignored) receiver
    // for the reflective load(...) call below.
    private Object modelObject;

    // The loaded Spark ML model.
    @SuppressWarnings("rawtypes")
    private Model modelProxy;

    @SuppressWarnings("rawtypes")
    public TransformerProxy(AlgorithmModel model) throws Exception {
        int algorithmId=model.getAlgorithmId();
        MLAlgorithmDesc algorithmDesc=MLAlgorithmLoader.getMLAlgorithmDesc(algorithmId);
        Class clazz=Class.forName(algorithmDesc.getClassName());
        Method method=clazz.getMethod("load", String.class);
        // NOTE(review): load(String) on Spark ML models is static, so this
        // instance is ignored by invoke(); passing null would suffice.
        modelObject=clazz.newInstance();
        String hostname=ConfigUtils.getValueByName("hdfs.hostname");
        String port=ConfigUtils.getValueByName("hdfs.port");
        String uri=ConfigUtils.getValueByName("hdfs.uri");
        // Model path layout: hdfs://{host}:{port}{uri}model/{modelId}/
        modelProxy=(Model) method.invoke(modelObject, "hdfs://"+hostname+":"+port+uri+"model/"+model.getId()+"/");
    }

    /**
     * Applies the loaded model to the dataset via its transform(Dataset)
     * method.
     *
     * @param dataset input data
     * @return transformed dataset
     */
    @SuppressWarnings("unchecked")
    public Dataset transform(Dataset dataset) throws Exception {
        Method method = modelProxy.getClass().getMethod("transform", Dataset.class);
        return (Dataset) method.invoke(modelProxy, dataset);
    }
}
com.hhu.machinelearningplatformserver.task.TaskInfo; 10 | 11 | /** 12 | * 加载任务参数信息 13 | * 14 | **/ 15 | public class LoadTaskInfo { 16 | 17 | private static final String TABLE_NAME = "task"; 18 | 19 | private static final String FAMILY_NAME = "info"; 20 | 21 | private static final String JSON_QUALIFIER_NAME = "json"; 22 | 23 | private HBaseUtil hBaseUtil; 24 | 25 | private ObjectMapper JSON; 26 | 27 | public LoadTaskInfo() throws IOException { 28 | hBaseUtil=HBaseUtil.getInstance(); 29 | JSON = new ObjectMapper(); 30 | } 31 | 32 | /** 33 | * 添加数据 34 | * 35 | * @param dataFile 36 | * @throws Exception 37 | */ 38 | public void put(TaskInfo taskInfo) throws Exception { 39 | String rowKey = buildRowKey(taskInfo.getUserId(), taskInfo.getTaskId()); 40 | 41 | hBaseUtil.putData(TABLE_NAME, rowKey, FAMILY_NAME, JSON_QUALIFIER_NAME, JSON.writeValueAsString(taskInfo)); 42 | } 43 | 44 | /** 45 | * 查询 46 | * 47 | * @param uname 48 | * @param dataFileName 49 | * @return 50 | * @throws Exception 51 | */ 52 | public TaskInfo get(long userId, long taskId) throws Exception { 53 | String rowKey = buildRowKey(userId, taskId); 54 | 55 | String taskJson = hBaseUtil.getValue(TABLE_NAME, rowKey, FAMILY_NAME, JSON_QUALIFIER_NAME); 56 | return JSON.readValue(taskJson, TaskInfo.class); 57 | } 58 | 59 | /** 60 | * 查询用户的所有数据文件 61 | * 62 | * @param uname 63 | * @return 64 | * @throws Exception 65 | */ 66 | public List get(String uname) throws Exception { 67 | List taskJsons = hBaseUtil.getValueByRowPrefix(TABLE_NAME, uname, FAMILY_NAME, JSON_QUALIFIER_NAME); 68 | List tasks = new ArrayList(); 69 | for (String json : taskJsons) { 70 | tasks.add(JSON.readValue(json, TaskInfo.class)); 71 | } 72 | return tasks; 73 | } 74 | 75 | 76 | /** 77 | * 生成rowkey {uname}-{dataFile.name} 78 | * 79 | * @param uname 80 | * @param dataFileName 81 | * @return 82 | */ 83 | private String buildRowKey(long userId, long taskId) { 84 | return userId + "_" + taskId; 85 | } 86 | 87 | } 88 | 
package com.hhu.machinelearningplatformserver.submit;

import java.util.Random;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.codehaus.jackson.map.ObjectMapper;
import com.hhu.machinelearningplatformserver.common.HBaseUtil;
import com.hhu.machinelearningplatformserver.common.JedisUtils;
import com.hhu.machinelearningplatformserver.common.RandomUtil;
import com.hhu.machinelearningplatformserver.data.PersistDataset;
import com.hhu.machinelearningplatformserver.data.SparkDataFileConverter;
import com.hhu.machinelearningplatformserver.model.AlgorithmModel;
import com.hhu.machinelearningplatformserver.proxy.EstimatorProxy;
import com.hhu.machinelearningplatformserver.proxy.ModelProxy;
import com.hhu.machinelearningplatformserver.proxy.TransformerProxy;
import com.hhu.machinelearningplatformserver.task.TaskInfo;
import com.hhu.machinelearningplatformserver.task.TaskState;
import com.hhu.machinelearningplatformserver.task.TaskType;

import redis.clients.jedis.Jedis;

/**
 * Spark driver entry point: loads the task definition from HBase, builds the
 * input DataFrame, runs either model training (estimator) or model
 * application (transformer), and reports task state through Redis.
 *
 * Invocation: main(args) with args[0] = taskId, args[1] = userId.
 */
public class Submiter {

    // Debug switch: run Spark with local[3] instead of the cluster master.
    private static boolean localMode = false;

    private static TaskInfo taskInfo;

    private static Jedis jedis = JedisUtils.getJedis();

    private static int userId;

    /**
     * HDFS prefix for this task's output files:
     * /machinelearningplatform/task/{taskId}/
     */
    private static String hdfsFilePrefix;

    /** Creates the JavaSparkContext, honoring the localMode switch. */
    private static JavaSparkContext buildJavaSparkContext() {
        JavaSparkContext context = null;
        if (localMode) {
            SparkConf conf = new SparkConf().setAppName(taskInfo.getName()).setMaster("local[3]");
            context = new JavaSparkContext(conf);
        } else {
            // Reuse (or create) the session-managed SparkContext.
            SparkSession sparkSession = SparkSession
                    .builder()
                    .appName(taskInfo.getName())
                    .getOrCreate();
            context = new JavaSparkContext(sparkSession.sparkContext());
        }
        return context;
    }

    /**
     * Parses args, loads the TaskInfo JSON from HBase and marks the task
     * RUNNING in Redis.
     *
     * @param args [0] task id, [1] user id
     * @throws Exception on malformed args or HBase/JSON failure
     */
    private static void loadArgs(String[] args) throws Exception {
        long taskId = Long.valueOf(args[0]);
        userId = Integer.valueOf(args[1]);
        HBaseUtil hBaseUtil = HBaseUtil.getInstance();
        hBaseUtil.connection();
        String jsonStr = hBaseUtil.getValue("task", String.valueOf(taskId), "info", "taskInfo");
        ObjectMapper objectMapper = new ObjectMapper();
        taskInfo = objectMapper.readValue(jsonStr, TaskInfo.class);
        jedis.set(Bytes.toBytes(taskId), Bytes.toBytes(TaskState.RUNNING.getValue()));
        hdfsFilePrefix = new StringBuilder()
                .append("/machinelearningplatform/task/")
                .append(taskId).append("/")
                .toString();
    }

    /** Trains a model, records its metadata in HBase and saves it to HDFS. */
    private static void excuteEstimator(TaskInfo taskInfo,
            Dataset dataset) throws Exception {
        EstimatorProxy estimatorProxy = new EstimatorProxy(taskInfo.getSparkTaskAlgorithm());
        ModelProxy modelProxy = estimatorProxy.fit(dataset);
        // NOTE(review): Random.nextInt() can be negative and can collide —
        // consider a monotonic id source.
        int modelId = new Random().nextInt();
        AlgorithmModel model = new AlgorithmModel();
        model.setId(modelId);
        model.setName("模型-" + modelId);
        model.setUserId(userId);
        model.setCreateTime(System.currentTimeMillis());
        model.setAlgorithmId(taskInfo.getSparkTaskAlgorithm().getId());
        model.setPath("model/" + modelId + "/");
        // Persist the model metadata to HBase.
        HBaseUtil hBaseUtil = new HBaseUtil();
        hBaseUtil.connection();
        ObjectMapper objectMapper = new ObjectMapper();
        String jsonStr = objectMapper.writeValueAsString(model);
        hBaseUtil.putData("model", String.valueOf(modelId), "info", "modelInfo", jsonStr);
        hBaseUtil.close();
        // BUG FIX: the model was saved to the shared /model/ directory while
        // its recorded path — and TransformerProxy's loader — expect the
        // per-model directory model/{modelId}/. Save under the id so it can
        // be found again.
        modelProxy.save("/machinelearningplatform/model/" + modelId + "/");
    }

    /** Applies a stored model to the dataset and persists the output. */
    private static void excuteTransformer(TaskInfo taskInfo,
            Dataset dataset) throws Exception {
        int modelId = taskInfo.getModelId();
        HBaseUtil hBaseUtil = new HBaseUtil();
        hBaseUtil.connection();
        String jsonStr = hBaseUtil.getValue("model", String.valueOf(modelId), "info", "modelInfo");
        ObjectMapper objectMapper = new ObjectMapper();
        AlgorithmModel model = objectMapper.readValue(jsonStr, AlgorithmModel.class);
        hBaseUtil.close();
        TransformerProxy transformerProxy = new TransformerProxy(model);
        Dataset transformedDataset = transformerProxy.transform(dataset);
        PersistDataset.persist(transformedDataset, hdfsFilePrefix + "out-" + RandomUtil.getRandomString(5));
    }

    public static void main(String[] args) {
        try {
            loadArgs(args);
            JavaSparkContext context = buildJavaSparkContext();
            Dataset dataset = SparkDataFileConverter.extractDataFrame(taskInfo, context);
            TaskType taskType = taskInfo.getTaskType();
            if (taskType == TaskType.ESTIMATOR_TYPE) {
                excuteEstimator(taskInfo, dataset);
            }
            if (taskType == TaskType.TRANSFORMER_TYPE) {
                excuteTransformer(taskInfo, dataset);
            }
            jedis.set(Bytes.toBytes(taskInfo.getTaskId()), Bytes.toBytes(TaskState.SUCCESS.getValue()));
        } catch (Exception e) {
            // BUG FIX: the failure was swallowed silently, and when loadArgs()
            // failed before taskInfo was assigned the handler itself threw an
            // NPE on taskInfo.getTaskId().
            e.printStackTrace();
            if (taskInfo != null) {
                jedis.set(Bytes.toBytes(taskInfo.getTaskId()), Bytes.toBytes(TaskState.FAIL.getValue()));
            }
        }
    }

}
import java.util.Map;

/**
 * Algorithm selection and configuration for one task.
 */
public class SparkTaskAlgorithm implements Serializable {

    private static final long serialVersionUID = 1L;

    // Algorithm id, resolved to a concrete class via MLAlgorithmLoader
    // (see MLAlgorithmProxy).
    private int id;
    // Parameter name -> value, applied through setters in MLAlgorithmProxy.
    // NOTE(review): type parameters look stripped from this copy (likely
    // Map<String, String>) — confirm against the original source.
    private Map parameters;

    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public Map getParameters() {
        return parameters;
    }
    public void setParameters(Map parameters) {
        this.parameters = parameters;
    }

}
	public void setType(int type) {
		this.type = type;
	}
	// Algorithm (id + parameter values) this task should execute.
	public SparkTaskAlgorithm getAlgorithm() {
		return algorithm;
	}
	public void setAlgorithm(SparkTaskAlgorithm algorithm) {
		this.algorithm = algorithm;
	}
	// Description of the task's input dataset.
	public DataFile getDataFile() {
		return dataFile;
	}
	public void setDataFile(DataFile dataFile) {
		this.dataFile = dataFile;
	}

}
dataFile) { 65 | this.dataFile = dataFile; 66 | } 67 | 68 | public String getWorkDir() { 69 | return workDir; 70 | } 71 | 72 | public void setWorkDir(String workDir) { 73 | this.workDir = workDir; 74 | } 75 | 76 | public int getSparkExecutorNum() { 77 | return sparkExecutorNum; 78 | } 79 | 80 | public void setSparkExecutorNum(int sparkExecutorNum) { 81 | this.sparkExecutorNum = sparkExecutorNum; 82 | } 83 | 84 | public SparkTaskAlgorithm getSparkTaskAlgorithm() { 85 | return sparkTaskAlgorithm; 86 | } 87 | 88 | public void setSparkTaskAlgorithm(SparkTaskAlgorithm sparkTaskAlgorithm) { 89 | this.sparkTaskAlgorithm = sparkTaskAlgorithm; 90 | } 91 | 92 | public String getName() { 93 | return name; 94 | } 95 | 96 | public void setName(String name) { 97 | this.name = name; 98 | } 99 | 100 | public long getTimeStamp() { 101 | return timeStamp; 102 | } 103 | 104 | public void setTimeStamp(long timeStamp) { 105 | this.timeStamp = timeStamp; 106 | } 107 | 108 | public int getModelId() { 109 | return modelId; 110 | } 111 | 112 | public void setModelId(int modelId) { 113 | this.modelId = modelId; 114 | } 115 | 116 | } 117 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/task/TaskState.java: -------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.task; 2 | 3 | //任务状态 4 | public enum TaskState { 5 | 6 | INITING(1), 7 | SUBMITTING(2), 8 | RUNNING(3), 9 | SUCCESS(4), 10 | FAIL(5); 11 | 12 | private int value; 13 | 14 | public int getValue() { 15 | return value; 16 | } 17 | 18 | public void setValue(int value) { 19 | this.value = value; 20 | } 21 | 22 | private TaskState(int value) { 23 | 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/java/com/hhu/machinelearningplatformserver/task/TaskType.java: 
-------------------------------------------------------------------------------- 1 | package com.hhu.machinelearningplatformserver.task; 2 | 3 | public enum TaskType { 4 | 5 | ESTIMATOR_TYPE(1), 6 | TRANSFORMER_TYPE(2); 7 | 8 | private int value; 9 | 10 | public int getValue() { 11 | return value; 12 | } 13 | 14 | public void setValue(int value) { 15 | this.value = value; 16 | } 17 | 18 | private TaskType(int value) { 19 | 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/resources/hdfs.properties: -------------------------------------------------------------------------------- 1 | hdfs.hostname=10.196.83.90 2 | hdfs.port=9000 3 | hdfs.user=hhu_vps 4 | hdfs.uri=/machinelearningplatform/ 5 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/main/resources/redis.properties: -------------------------------------------------------------------------------- 1 | #报文定时刷新间隔 2 | crmDataMonitorTime=6000000 3 | #白名单和接口名刷新间隔 4 | whiteListMonitorTime=6000000 5 | #Redi的主机地址 6 | redisIp=10.196.83.92 7 | #Redis的主机接口 8 | redisPort=6379 9 | #Redis的主机密码 10 | redisPWD=941012 11 | #连接超时时间 12 | timeout=60000 13 | #最大访问用户数 14 | maxActive=100 15 | #最大空闲时间长 16 | maxIdle=20 17 | #最大等待时间 18 | maxWait = 1000 19 | #最大存在时间 20 | EXPIRE_TIME =86400 21 | #最大存在时间(默认) 22 | EXPIRE=10000 -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/test/java/Test.java: -------------------------------------------------------------------------------- 1 | import java.lang.reflect.InvocationTargetException; 2 | import java.lang.reflect.Method; 3 | import java.util.ArrayList; 4 | import java.util.Arrays; 5 | import java.util.List; 6 | 7 | import org.apache.spark.SparkConf; 8 | import org.apache.spark.api.java.JavaRDD; 9 | import org.apache.spark.api.java.JavaSparkContext; 10 | import 
org.apache.spark.ml.Estimator; 11 | import org.apache.spark.ml.Model; 12 | import org.apache.spark.ml.classification.LogisticRegression; 13 | import org.apache.spark.ml.linalg.VectorUDT; 14 | import org.apache.spark.ml.linalg.Vectors; 15 | import org.apache.spark.ml.param.DoubleParam; 16 | import org.apache.spark.ml.param.IntParam; 17 | import org.apache.spark.sql.Dataset; 18 | import org.apache.spark.sql.Row; 19 | import org.apache.spark.sql.RowFactory; 20 | import org.apache.spark.sql.SparkSession; 21 | import org.apache.spark.sql.types.DataTypes; 22 | import org.apache.spark.sql.types.Metadata; 23 | import org.apache.spark.sql.types.StructField; 24 | import org.apache.spark.sql.types.StructType; 25 | 26 | public class Test { 27 | 28 | public static void main(String[] args) throws InstantiationException, IllegalAccessException, ClassNotFoundException, NoSuchMethodException, SecurityException, IllegalArgumentException, InvocationTargetException { 29 | System.setProperty("hadoop.home.dir", "D:/Hadoop/"); 30 | SparkSession sparkSession=SparkSession.builder().appName("sparktest").master("local").getOrCreate(); 31 | List dataTraining = Arrays.asList( 32 | RowFactory.create(1.0, Vectors.dense(0.0, 1.1, 0.1)), 33 | RowFactory.create(0.0, Vectors.dense(2.0, 1.0, -1.0)), 34 | RowFactory.create(0.0, Vectors.dense(2.0, 1.3, 1.0)), 35 | RowFactory.create(1.0, Vectors.dense(0.0, 1.2, -0.5)) 36 | ); 37 | StructType schema = new StructType(new StructField[]{ 38 | new StructField("label", DataTypes.DoubleType, false, Metadata.empty()), 39 | new StructField("features", new VectorUDT(), false, Metadata.empty()) 40 | }); 41 | Dataset training = sparkSession.createDataFrame(dataTraining, schema); 42 | int num=100; 43 | Class clazz=Class.forName("org.apache.spark.ml.classification.LogisticRegression"); 44 | Method[] methods=clazz.getMethods(); 45 | for(Method method : methods) { 46 | //System.out.println(method.getName()); 47 | } 48 | Method method=clazz.getMethod("fit", 
Dataset.class); 49 | Method setMethod1=clazz.getMethod("setMaxIter", int.class); 50 | Method setMethod3=clazz.getMethod("setRegParam", double.class); 51 | Object suanfa= clazz.newInstance(); 52 | setMethod1.invoke(suanfa, 100); 53 | setMethod3.invoke(suanfa, 0.01); 54 | Model model=(Model) method.invoke(suanfa, training); 55 | List dataTest = Arrays.asList( 56 | RowFactory.create(1.0, Vectors.dense(-1.0, 1.5, 1.3)), 57 | RowFactory.create(0.0, Vectors.dense(3.0, 2.0, -0.1)), 58 | RowFactory.create(1.0, Vectors.dense(0.0, 2.2, -1.5)) 59 | ); 60 | Dataset test = sparkSession.createDataFrame(dataTest, schema); 61 | Dataset results = model.transform(test); 62 | for (Row r: results.collectAsList()) { 63 | System.out.println("(" + r.get(0) + ", " + r.get(1) + ") -> prob=" + r.get(2) 64 | + ", prediction=" + r.get(3)); 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/test/java/Test1.java: -------------------------------------------------------------------------------- 1 | import java.io.BufferedReader; 2 | import java.io.FileReader; 3 | import java.io.IOException; 4 | import java.lang.reflect.InvocationTargetException; 5 | import java.lang.reflect.Method; 6 | import java.util.ArrayList; 7 | import java.util.Arrays; 8 | import java.util.List; 9 | import java.util.Map; 10 | 11 | import org.apache.parquet.column.ValuesType; 12 | import org.apache.spark.api.java.JavaRDD; 13 | import org.apache.spark.api.java.JavaSparkContext; 14 | import org.apache.spark.ml.Model; 15 | import org.apache.spark.ml.linalg.VectorUDT; 16 | import org.apache.spark.ml.linalg.Vectors; 17 | import org.apache.spark.rdd.RDD; 18 | import org.apache.spark.sql.Dataset; 19 | import org.apache.spark.sql.Row; 20 | import org.apache.spark.sql.RowFactory; 21 | import org.apache.spark.sql.SparkSession; 22 | import org.apache.spark.sql.types.DataTypes; 23 | import org.apache.spark.sql.types.Metadata; 24 | import 
org.apache.spark.sql.types.StructField; 25 | import org.apache.spark.sql.types.StructType; 26 | import org.codehaus.jackson.JsonParseException; 27 | import org.codehaus.jackson.map.JsonMappingException; 28 | import org.codehaus.jackson.map.ObjectMapper; 29 | 30 | import com.hhu.machinelearningplatformserver.algorithm.MLAlgorithmDesc; 31 | import com.hhu.machinelearningplatformserver.algorithm.ParameterDesc; 32 | import com.hhu.machinelearningplatformserver.algorithm.ParameterValueType; 33 | import com.hhu.machinelearningplatformserver.data.DataFile; 34 | import com.hhu.machinelearningplatformserver.data.FieldInfo; 35 | import com.hhu.machinelearningplatformserver.data.SparkDataFileConverter; 36 | import com.hhu.machinelearningplatformserver.exception.CantConverException; 37 | 38 | public class Test1 { 39 | 40 | public static void main(String[] args) throws JsonParseException, JsonMappingException, IOException, ClassNotFoundException, NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, InstantiationException, CantConverException { 41 | BufferedReader bufferedReader=new BufferedReader(new FileReader("src/test/resources/test.json")); 42 | String json=""; 43 | String str; 44 | while((str=bufferedReader.readLine())!=null) { 45 | json+=str; 46 | } 47 | ObjectMapper objectMapper=new ObjectMapper(); 48 | MLAlgorithmDesc algorithmDesc=objectMapper.readValue(json, MLAlgorithmDesc.class); 49 | System.setProperty("hadoop.home.dir", "D:/Hadoop/"); 50 | SparkSession sparkSession=SparkSession.builder().appName("sparktest").master("local").getOrCreate(); 51 | /*List dataTraining = Arrays.asList( 52 | RowFactory.create(1.0, Vectors.dense(0.0, 1.1, 0.1)), 53 | RowFactory.create(0.0, Vectors.dense(2.0, 1.0, -1.0)), 54 | RowFactory.create(0.0, Vectors.dense(2.0, 1.3, 1.0)), 55 | RowFactory.create(1.0, Vectors.dense(0.0, 1.2, -0.5)) 56 | );*/ 57 | /*List dataTraining= 
JavaSparkContext.fromSparkContext(sparkSession.sparkContext()).textFile("src/test/resources/datafile.csv") 58 | .map(x->{ 59 | String[] lineArr=x.split(" "); 60 | double[] list=new double[lineArr.length-1]; 61 | for(int i=1;i training = sparkSession.createDataFrame(dataTraining, schema);*/ 71 | StructType schema = new StructType(new StructField[]{ 72 | new StructField("label", DataTypes.DoubleType, false, Metadata.empty()), 73 | new StructField("features", new VectorUDT(), false, Metadata.empty()) 74 | }); 75 | DataFile dataFile=new DataFile(); 76 | dataFile.setName("data"); 77 | dataFile.setPath("src/test/resources/datafile.csv"); 78 | dataFile.setDelim(" "); 79 | List fieldInfos=new ArrayList(); 80 | FieldInfo label=new FieldInfo(); 81 | label.setName("label"); 82 | label.setDataType("double"); 83 | label.setIndex(0); 84 | fieldInfos.add(label); 85 | FieldInfo col1=new FieldInfo(); 86 | col1.setName("features"); 87 | col1.setDataType("double"); 88 | col1.setIndex(-1); 89 | col1.setStartIndex(1); 90 | col1.setEndIndex(3); 91 | fieldInfos.add(col1); 92 | dataFile.setFieldInfos(fieldInfos); 93 | Dataset training=SparkDataFileConverter.convertToDataFrame(dataFile, JavaSparkContext.fromSparkContext(sparkSession.sparkContext())); 94 | int num=100; 95 | Class clazz=Class.forName(algorithmDesc.getClassName()); 96 | Method method=clazz.getMethod("fit", Dataset.class); 97 | Map parameterDescs=algorithmDesc.getParameterDescs(); 98 | Object object=clazz.newInstance(); 99 | for(Map.Entry entry : parameterDescs.entrySet()) { 100 | Method method1=null; 101 | Object param=null; 102 | if(entry.getValue().getValueType()==ParameterValueType.INT) { 103 | method1=clazz.getMethod("set"+entry.getValue().getName().substring(0, 1).toUpperCase()+entry.getValue().getName().substring(1), int.class); 104 | param=Integer.valueOf(entry.getValue().getValue()); 105 | } 106 | if(entry.getValue().getValueType()==ParameterValueType.DOUBLE) { 107 | 
method1=clazz.getMethod("set"+entry.getValue().getName().substring(0, 1).toUpperCase()+entry.getValue().getName().substring(1), double.class); 108 | param=Double.valueOf(entry.getValue().getValue()); 109 | } 110 | method1.invoke(clazz.newInstance(), param); 111 | 112 | } 113 | Object model= method.invoke(object, training); 114 | List dataTest = Arrays.asList( 115 | RowFactory.create(1.0, Vectors.dense(-1.0, 1.5, 1.3)), 116 | RowFactory.create(0.0, Vectors.dense(3.0, 2.0, -0.1)), 117 | RowFactory.create(1.0, Vectors.dense(0.0, 2.2, -1.5)) 118 | ); 119 | Dataset test = sparkSession.createDataFrame(dataTest, schema); 120 | Method t=model.getClass().getMethod("transform", Dataset.class); 121 | Dataset results = (Dataset) t.invoke(model, test); 122 | for (Row r: results.collectAsList()) { 123 | System.out.println("(" + r.get(0) + ", " + r.get(1) + ") -> prob=" + r.get(2) 124 | + ", prediction=" + r.get(3)); 125 | } 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/test/resources/datafile.csv: -------------------------------------------------------------------------------- 1 | 1.0 0.0 1.1 0.1 2 | 0.0 2.0 1.0 -1.0 3 | 0.0 2.0 1.3 1.0 4 | 1.0 0.0 1.2 -0.5 -------------------------------------------------------------------------------- /MachineLearningPlatformServer/src/test/resources/test.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/src/test/resources/test.json -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/META-INF/MANIFEST.MF: -------------------------------------------------------------------------------- 1 | Manifest-Version: 1.0 2 | Built-By: wyc 3 | Build-Jdk: 1.8.0_131 4 | Created-By: Maven Integration for Eclipse 5 | 6 | 
-------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/META-INF/maven/MachineLearningPlatform/MachineLearningPlatformServer/pom.properties: -------------------------------------------------------------------------------- 1 | #Generated by Maven Integration for Eclipse 2 | #Mon Mar 12 16:01:21 CST 2018 3 | version=0.0.1-SNAPSHOT 4 | groupId=MachineLearningPlatform 5 | m2e.projectName=MachineLearningPlatformServer 6 | m2e.projectLocation=E\:\\\u5DE5\u4F5C\u7A7A\u95F4\\MachineLearningPlatformServer 7 | artifactId=MachineLearningPlatformServer 8 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/META-INF/maven/MachineLearningPlatform/MachineLearningPlatformServer/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | MachineLearningPlatform 6 | MachineLearningPlatformServer 7 | 0.0.1-SNAPSHOT 8 | jar 9 | 10 | MachineLearningPlatformServer 11 | http://maven.apache.org 12 | 13 | 14 | UTF-8 15 | 16 | 17 | 18 | 19 | org.apache.spark 20 | spark-core_2.11 21 | 2.2.0 22 | 23 | 24 | org.apache.spark 25 | spark-mllib_2.11 26 | 2.2.0 27 | 28 | 29 | org.apache.spark 30 | spark-streaming_2.11 31 | 2.2.0 32 | 33 | 34 | org.apache.spark 35 | spark-sql_2.11 36 | 2.2.0 37 | 38 | 39 | org.scala-lang 40 | scala-library 41 | 2.11.8 42 | 43 | 44 | org.scala-lang.modules 45 | scala-xml_2.11 46 | 1.0.6 47 | 48 | 49 | com.fasterxml.jackson.core 50 | jackson-databind 51 | 2.6.4 52 | 53 | 54 | org.apache.hadoop 55 | hadoop-hdfs 56 | 2.7.3 57 | 58 | 59 | org.apache.hadoop 60 | hadoop-client 61 | 2.7.3 62 | 63 | 64 | org.apache.hbase 65 | hbase-client 66 | 1.2.5 67 | 68 | 69 | org.apache.hbase 70 | hbase-common 71 | 1.2.5 72 | 73 | 74 | org.apache.hbase 75 | hbase-protocol 76 | 1.2.5 77 | 78 | 79 | org.apache.hbase 80 | hbase-server 81 | 1.2.5 82 | 83 | 84 | org.springframework.data 85 | 
spring-data-redis 86 | 1.6.2.RELEASE 87 | 88 | 89 | redis.clients 90 | jedis 91 | 2.9.0 92 | 93 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ComponentType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ComponentType.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmDesc.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmDesc.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmLoader.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/MLAlgorithmLoader.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ParameterDesc.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ParameterDesc.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ParameterValueType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/ParameterValueType.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/UsageType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/algorithm/UsageType.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ByteObjectUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ByteObjectUtil.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ConfigUtils.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ConfigUtils.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/HBaseUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/HBaseUtil.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/HDFSUtils.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/HDFSUtils.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/JRedisPoolConfig.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/JRedisPoolConfig.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/JedisUtils.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/JedisUtils.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/RandomUtil.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/RandomUtil.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ResourcePath.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/common/ResourcePath.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFile.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFile.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFileMapper.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFileMapper.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFileType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/DataFileType.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/FieldInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/FieldInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/LineParse.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/LineParse.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/PersistDataset.class: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/PersistDataset.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/SparkDataFileConverter.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/data/SparkDataFileConverter.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/exception/CantConverException.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/exception/CantConverException.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/exception/ConfigInitException.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/exception/ConfigInitException.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/model/AlgorithmModel.class: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/model/AlgorithmModel.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/EstimatorProxy.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/EstimatorProxy.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/MLAlgorithmProxy.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/MLAlgorithmProxy.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/ModelProxy.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/ModelProxy.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/TransformerProxy.class: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/proxy/TransformerProxy.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/submit/LoadTaskInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/submit/LoadTaskInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/submit/Submiter.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/submit/Submiter.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/SparkTaskAlgorithm.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/SparkTaskAlgorithm.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/SparkTaskInfo.class: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/SparkTaskInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskInfo.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskInfo.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskState.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskState.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskType.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/classes/com/hhu/machinelearningplatformserver/task/TaskType.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/hdfs.properties: -------------------------------------------------------------------------------- 1 | hdfs.hostname=10.196.83.90 2 | 
hdfs.port=9000 3 | hdfs.user=hhu_vps 4 | hdfs.uri=/machinelearningplatform/ 5 | -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/classes/redis.properties: -------------------------------------------------------------------------------- 1 | #报文定时刷新间隔 2 | crmDataMonitorTime=6000000 3 | #白名单和接口名刷新间隔 4 | whiteListMonitorTime=6000000 5 | #Redi的主机地址 6 | redisIp=10.196.83.92 7 | #Redis的主机接口 8 | redisPort=6379 9 | #Redis的主机密码 10 | redisPWD=941012 11 | #连接超时时间 12 | timeout=60000 13 | #最大访问用户数 14 | maxActive=100 15 | #最大空闲时间长 16 | maxIdle=20 17 | #最大等待时间 18 | maxWait = 1000 19 | #最大存在时间 20 | EXPIRE_TIME =86400 21 | #最大存在时间(默认) 22 | EXPIRE=10000 -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/test-classes/Test.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/test-classes/Test.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/test-classes/Test1.class: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/test-classes/Test1.class -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/test-classes/datafile.csv: -------------------------------------------------------------------------------- 1 | 1.0 0.0 1.1 0.1 2 | 0.0 2.0 1.0 -1.0 3 | 0.0 2.0 1.3 1.0 4 | 1.0 0.0 1.2 -0.5 -------------------------------------------------------------------------------- /MachineLearningPlatformServer/target/test-classes/test.json: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/wyc941012/MachineLearningPlatform/ba93adec825b2a3052b105056232aed4169253ee/MachineLearningPlatformServer/target/test-classes/test.json -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MachineLearningPlatform 2 | 基于Spark的机器学习平台。 3 | 底层使用Docker容器作为运行环境,使用Kubernetes对Docker集群进行管理。上层通过使用Spark的机器学习算法库对外提供服务,分为离线计算和实时计算两种。离线计算主要包括分类、回归、聚类和推荐等几类,实时计算使用Spark Streaming进行处理。 4 | --------------------------------------------------------------------------------