23 |
--------------------------------------------------------------------------------
/src/main/java/com/structured/pool/tool/ConnectionFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization).
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.structured.pool.tool;
17 |
18 | import org.apache.commons.pool2.PooledObjectFactory;
19 |
20 | import java.io.Serializable;
21 |
22 | public interface ConnectionFactoryTitle: createConnection
26 | *Description: 创建连接
27 | * 28 | * @return 连接 29 | * @throws Exception 30 | */ 31 | public abstract T createConnection() throws Exception; 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/com/structured/APP/structuredJava.java: -------------------------------------------------------------------------------- 1 | package com.structured.APP; 2 | 3 | import org.apache.spark.sql.Dataset; 4 | import org.apache.spark.sql.Row; 5 | import org.apache.spark.sql.SparkSession; 6 | import org.apache.spark.sql.streaming.StreamingQuery; 7 | import org.apache.spark.sql.streaming.StreamingQueryException; 8 | 9 | import com.structured.conf.ConfigurationManager; 10 | import com.structured.constant.Constants; 11 | 12 | public class structuredJava { 13 | 14 | public static void main(String[] args) { 15 | 16 | SparkSession spark = SparkSession.builder() 17 | .appName("structured-streaming-Kafka2HBase") 18 | .master("local[4]") 19 | .getOrCreate(); 20 | 21 | DatasetTitle: getConnection
24 | *Description: 获取连接
25 | * 26 | * @return 连接 27 | */ 28 | public abstract T getConnection(); 29 | 30 | /** 31 | *Title: returnConnection
32 | *Description: 返回连接
33 | * 34 | * @param conn 连接 35 | */ 36 | public void returnConnection(T conn); 37 | 38 | /** 39 | *Title: invalidateConnection
40 | *Description: 废弃连接
41 | * 42 | * @param conn 连接 43 | */ 44 | public void invalidateConnection(T conn); 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/com/structured/conf/ConfigurationManager.java: -------------------------------------------------------------------------------- 1 | package com.structured.conf; 2 | 3 | import java.io.InputStream; 4 | import java.util.Properties; 5 | 6 | /** 7 | * 配置管理组件 8 | * 9 | * @author rttxdu 10 | * 11 | */ 12 | final public class ConfigurationManager { 13 | 14 | private static Properties prop = new Properties(); 15 | 16 | static { 17 | try { 18 | InputStream in = ConfigurationManager.class 19 | .getClassLoader().getResourceAsStream("my.properties"); 20 | prop.load(in); 21 | } catch (Exception e) { 22 | e.printStackTrace(); 23 | } 24 | } 25 | 26 | /** 27 | * 获取指定key对应的value 28 | * @param key 29 | * @return value 30 | */ 31 | public static String getProperty(String key) { 32 | return prop.getProperty(key); 33 | } 34 | 35 | /** 36 | * 获取整数类型的配置项 37 | * @param key 38 | * @return value 39 | */ 40 | public static Integer getInteger(String key) { 41 | String value = getProperty(key); 42 | try { 43 | return Integer.valueOf(value); 44 | } catch (Exception e) { 45 | e.printStackTrace(); 46 | } 47 | return 0; 48 | } 49 | 50 | /** 51 | * 获取布尔类型的配置项 52 | * @param key 53 | * @return value 54 | */ 55 | public static Boolean getBoolean(String key) { 56 | String value = getProperty(key); 57 | try { 58 | return Boolean.valueOf(value); 59 | } catch (Exception e) { 60 | e.printStackTrace(); 61 | } 62 | return false; 63 | } 64 | 65 | /** 66 | * 获取Long类型的配置项 67 | * @param key 68 | * @return 69 | */ 70 | public static Long getLong(String key) { 71 | String value = getProperty(key); 72 | try { 73 | return Long.valueOf(value); 74 | } catch (Exception e) { 75 | e.printStackTrace(); 76 | } 77 | return 0L; 78 | } 79 | 80 | } 81 | -------------------------------------------------------------------------------- 
/src/main/java/com/structured/pool/hbase/HbaseConfig.java:
--------------------------------------------------------------------------------
/*
 * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.structured.pool.hbase;

/**
 * HBase connection-pool configuration constants: default connection values and
 * the hbase-site property names used to assemble a client Configuration.
 */
public interface HbaseConfig {

    /**
     * Default ZooKeeper quorum host used when none is supplied.
     */
    public static final String DEFAULT_HOST = "localhost";
    /**
     * Default ZooKeeper client port (kept as a String to match the property format).
     */
    public static final String DEFAULT_PORT = "2181";
    /**
     * Default HBase master address; null means "not set" -- presumably the
     * consumer skips the property in that case (confirm against the factory code).
     */
    public static final String DEFAULT_MASTER = null;
    /**
     * Default HBase root directory; null means "not set" (see note above).
     */
    public static final String DEFAULT_ROOTDIR = null;

    /**
     * Property name for the ZooKeeper quorum.
     */
    public static final String ZOOKEEPER_QUORUM_PROPERTY = "hbase.zookeeper.quorum";
    /**
     * Property name for the ZooKeeper client port.
     */
    public static final String ZOOKEEPER_CLIENTPORT_PROPERTY = "hbase.zookeeper.property.clientPort";
    /**
     * Property name for the HBase master address.
     */
    public static final String MASTER_PROPERTY = "hbase.master";
    /**
     * Property name for the HBase root directory.
     */
    public static final String ROOTDIR_PROPERTY = "hbase.rootdir";

}
--------------------------------------------------------------------------------
/src/main/resources/log4j.properties:
-------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | # Set everything to be logged to the console 19 | log4j.rootCategory=WARN, console 20 | log4j.appender.console=org.apache.log4j.ConsoleAppender 21 | log4j.appender.console.target=System.err 22 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 23 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 24 | 25 | # Settings to quiet third party logs that are too verbose 26 | log4j.logger.org.spark-project.jetty=WARN 27 | log4j.logger.org.spark-project.jetty.util.component.AbstractLifeCycle=ERROR 28 | log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO 29 | log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO 30 | log4j.logger.org.apache.parquet=ERROR 31 | log4j.logger.parquet=ERROR 32 | 33 | # SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support 34 | log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL 35 | log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR 36 | 
--------------------------------------------------------------------------------
/src/main/java/com/structured/pool/tool/ConnectionPoolConfig.java:
--------------------------------------------------------------------------------
/*
 * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.structured.pool.tool;

import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

import java.io.Serializable;

/**
 * Connection-pool configuration: a GenericObjectPoolConfig preconfigured with
 * eviction-friendly defaults (idle-connection testing on, 60s minimum evictable
 * idle time, 30s between evictor runs, all idle objects examined per run).
 */
public class ConnectionPoolConfig extends GenericObjectPoolConfig implements Serializable {

    /**
     * Default for testWhileIdle: validate idle connections so dead ones get evicted.
     */
    public static final boolean DEFAULT_TEST_WHILE_IDLE = true;
    /**
     * Default minimum time (ms) a connection may sit idle before it becomes evictable.
     */
    public static final long DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS = 60000;
    /**
     * Default interval (ms) between runs of the idle-object evictor thread.
     */
    public static final long DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS = 30000;
    /**
     * Default number of objects examined per eviction run (-1 = examine all idle objects).
     */
    public static final int DEFAULT_NUM_TESTS_PER_EVICTION_RUN = -1;
    /**
     * serialVersionUID
     */
    private static final long serialVersionUID = -2414567557372345057L;

    /**
     * <p>Title: ConnectionPoolConfig</p>
     * <p>Description: default constructor -- applies the defaults above.</p>
     */
    public ConnectionPoolConfig() {

        // defaults to make your life with connection pool easier :)
        setTestWhileIdle(DEFAULT_TEST_WHILE_IDLE);
        setMinEvictableIdleTimeMillis(DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS);
        setTimeBetweenEvictionRunsMillis(DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS);
        setNumTestsPerEvictionRun(DEFAULT_NUM_TESTS_PER_EVICTION_RUN);
    }
}
--------------------------------------------------------------------------------
/src/main/java/com/structured/constant/Constants.java:
--------------------------------------------------------------------------------
package com.structured.constant;

import java.text.SimpleDateFormat;

/**
 * Constants interface: shared configuration keys and literal values for the
 * Spark/Kafka/HBase pipeline.
 *
 * @author rttxdu
 */
public interface Constants {
    /**
     * Spark job related constant keys.
     */
    // NOTE(review): "LODA"/"ADTA" look like typos for "LOAD"/"DATA", and
    // "spark.string.batch.time" presumably means "spark.streaming.batch.time";
    // renaming the fields or values would break existing callers and property
    // files, so they are flagged here instead of changed.
    String HDFS_LODA_ADTA_PATH= "hdfs.LodaData";
    String STREAMING_CHECKPOINT_PATH = "streaming.checkpoint.path";
    String SPARK_RUNMODE_MASTER = "spark.runmode.master";
    String SPARK_PROJECT_NAME = "spark.project.name";
    String SPARK_STREAMING_BATCH_TIME = "spark.string.batch.time";

    /**
     * JDBC configuration keys.
     */
    String JDBC_URL = "jdbc.url";
    String JDBC_USER = "jdbc.user";
    String JDBC_PASSWORD = "jdbc.password";
    String JDBC_USER_NAME = "user";
    String JDBC_PASSWORD_NAME = "password";

    /**
     * Kafka and ZooKeeper configuration keys.
     */
    // NOTE(review): property-key capitalization is inconsistent ("kafka." vs
    // "Kafka."); these values are a runtime contract with my.properties, so they
    // are left byte-for-byte untouched.
    String KAFKA_METADATA_BROKER_LIST = "kafka.metadata.broker.list";
    String ZK_METADATA_BROKER_LIST = "zk.metadata.broker.list";
    String KAFKA_TOPICS_ID = "kafka.topics.id";
    String KAFKA_TOPICS_MYSQL_TABLENAME = "Kafka.topics.mysql.tablename";
    String KAFKA_TOPICS_MYSQL_TOPICNAME = "Kafka.topics.mysql.topic.name";
    String KAFKA_TOPICS_MYSQL_TOPICID = "Kafka.topics.mysql.topic.id";
    String KAFKA_TOPICS_DATA_CHANNEL_DB2HBASE = "Kafka.topics.data.Channel.DB2HBASE";
    String KAFKA_TOPICS_DATA_CHANNEL_FLUME2HBASE= "Kafka.topics.data.Channel.FLUME2HBASE";
String KAFKA_TOPICS_DATA_CHANNEL_TPTDP2HBASE= "Kafka.topics.data.Channel.TPTDP2HBASE";
    String KAFKA_TOPICS_CHANNEL_DB2HDFS = "Kafka.topics.data.Channel.DB2HDFS";

    /**
     * HBase related constants: default column family, row-key suffix, and
     * connection-pool tuning keys.
     */
    String CF_DEFAULT = "info";
    String DEFAULT_ROW_KEY = "_pk";
    String HBASE_POOL_MAX_TOTAL = "hbase.pool.max-total";
    String HBASE_POOL_MAX_IDLE = "hbase.pool.max-idle";
    String HBASE_POOL_MAX_WAITMILLIS = "hbase.pool.max-waitmillis";
    String HBASE_POOL_TESTONBORROW = "hbase.pool.testonborrow";

    /**
     * JSON field names of the data-channel event payload.
     */
    // NOTE(review): "EVEENT" is a consistent misspelling of "EVENT" in the field
    // names; left as-is to avoid breaking callers (the JSON values are correct).
    String CHANNEL_JSON_EVEENTCHANNEL = "eventChannel";
    String CHANNEL_JSON_EVEENTID = "eventId";
    String CHANNEL_JSON_EVEENTTIME = "eventTime";
    String CHANNEL_JSON_EVEENTTYPE = "eventType";
    String CHANNEL_JSON_EVEENTDATA = "eventData";
    String CHANNEL_JSON_EVEENTTARGET = "eventTarget";

    /**
     * Field separators (ASCII 0x01 / 0x02) and charset names.
     */
    String SEPARATOR_001 = "\001";
    String SEPARATOR_002 = "\002";
    String ENCODE_UTF8 = "UTF-8";
    String DECODE_UTF8 = "UTF-8";

    /**
     * Literals used for ID-card (身份证) parsing and un-parsing.
     */
    String IDCard_BACK_x = "x";
    String IDCard_BACK_X = "X";
    String IDCard_BACK_11 = "11";
    String STR_NUMBER_0 = "0";

    /**
     * Shared date/time formatters.
     */
    // WARNING(review): java.text.SimpleDateFormat is documented as NOT thread-safe;
    // sharing these single instances across Spark tasks/threads can silently produce
    // corrupted dates. Callers should synchronize externally or migrate to the
    // thread-safe java.time.DateTimeFormatter (cannot be changed here without
    // breaking the interface's field types).
    SimpleDateFormat TIME_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat DATEKEY_FORMAT = new SimpleDateFormat("yyyyMMdd");

    /**
     * Key for the log path produced by the jar.
     */
    String JAR_LOG_PATH = "jar.log.path";
}
--------------------------------------------------------------------------------
/src/main/java/com/structured/pool/hbase/HbaseSharedConnPool.java:
--------------------------------------------------------------------------------
package com.structured.pool.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Connection;
import
org.apache.hadoop.hbase.client.ConnectionFactory; 6 | 7 | import com.structured.pool.tool.ConnectionException; 8 | import com.structured.pool.tool.ConnectionPool; 9 | 10 | import java.io.IOException; 11 | import java.util.Map; 12 | import java.util.Properties; 13 | import java.util.concurrent.atomic.AtomicReference; 14 | 15 | public class HbaseSharedConnPool implements ConnectionPool