├── README.md ├── pom.xml └── src ├── main └── java │ └── com │ └── du │ ├── flink2hbase.java │ └── pool │ ├── hbase │ ├── HbaseConfig.java │ ├── HbaseConnectionFactory.java │ ├── HbaseConnectionPool.java │ └── HbaseSharedConnPool.java │ └── tool │ ├── ConnectionException.java │ ├── ConnectionFactory.java │ ├── ConnectionPool.java │ ├── ConnectionPoolBase.java │ └── ConnectionPoolConfig.java └── test └── java └── com └── du └── AppTest.java /README.md: -------------------------------------------------------------------------------- 1 | ## kafka-flink-hbase Project Introduction 2 | Flink processes data arriving from Kafka in real time and writes it to HBase through a connection pool
3 | This project can be run after changing only the basic configuration
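Below is a minimal usage sketch (not part of the original sources) showing how the pieces in this repo fit together: `ConnectionPoolConfig` for the commons-pool2 settings, `HbaseConnectionPool` for borrowing and returning HBase `Connection`s, and the plain HBase client API for the write itself. The ZooKeeper quorum, table name, row key and column family below are placeholders to adapt to your cluster.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import com.du.pool.hbase.HbaseConnectionPool;
import com.du.pool.tool.ConnectionPoolConfig;

public class PoolUsageExample {

    public static void main(String[] args) throws Exception {

        // Pool settings (ConnectionPoolConfig extends commons-pool2 GenericObjectPoolConfig).
        ConnectionPoolConfig poolConfig = new ConnectionPoolConfig();
        poolConfig.setMaxTotal(20);
        poolConfig.setMaxIdle(5);
        poolConfig.setMaxWaitMillis(1000);
        poolConfig.setTestOnBorrow(true);

        // HBase client settings; point the quorum at your own ZooKeeper ensemble.
        Configuration hbaseConfig = HBaseConfiguration.create();
        hbaseConfig.set("hbase.zookeeper.quorum", "node71:2181,node72:2181,node73:2181");

        // Build the pool once per process and reuse it; a connection is borrowed per write.
        HbaseConnectionPool pool = new HbaseConnectionPool(poolConfig, hbaseConfig);
        Connection connection = pool.getConnection();
        try (Table table = connection.getTable(TableName.valueOf("Flink2HBase"))) {
            Put put = new Put(Bytes.toBytes("row-1"));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("test"), Bytes.toBytes("hello hbase"));
            table.put(put);
        } finally {
            // Return the connection to the pool instead of closing it.
            pool.returnConnection(connection);
        }
        pool.close();
    }
}
```

The same pattern is what `flink2hbase.java` drives from its Kafka-fed map function; the pool is meant to be created once and shared, with `getConnection()`/`returnConnection()` wrapped around each write.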
4 | 5 | # 赞助 6 | 7 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | com.du 6 | kafka-flink-hbase 7 | 0.0.1-SNAPSHOT 8 | jar 9 | 10 | kafka-flink-hbase 11 | http://maven.apache.org 12 | 13 | 14 | UTF-8 15 | 2.11.11 16 | 2.11 17 | 2.2.0 18 | 0.9.1.1 19 | 5.1.26 20 | 1.1.41 21 | 1.2.0 22 | 1.4.2 23 | 24 | 25 | 26 | 27 | 28 | aliyun 29 | http://maven.aliyun.com/nexus/content/groups/public/ 30 | 31 | 32 | cloudera 33 | https://repository.cloudera.com/artifactory/cloudera-repos/ 34 | 35 | 36 | jboss 37 | http://repository.jboss.org/nexus/content/groups/public 38 | 39 | 40 | 41 | 42 | 43 | org.apache.flink 44 | flink-streaming-java_${scala.compat.version} 45 | ${flink.version} 46 | 47 | 48 | 49 | org.apache.flink 50 | flink-connector-twitter_${scala.compat.version} 51 | ${flink.version} 52 | 53 | 54 | 55 | org.apache.flink 56 | flink-connector-kafka-0.10_${scala.compat.version} 57 | ${flink.version} 58 | 59 | 60 | 61 | org.apache.flink 62 | flink-statebackend-rocksdb_${scala.compat.version} 63 | ${flink.version} 64 | 65 | 66 | 67 | org.apache.flink 68 | flink-table_${scala.compat.version} 69 | ${flink.version} 70 | 71 | 72 | org.apache.flink 73 | flink-streaming-scala_${scala.compat.version} 74 | ${flink.version} 75 | 76 | 77 | org.apache.flink 78 | flink-hbase_${scala.compat.version} 79 | ${flink.version} 80 | 81 | 82 | org.apache.hadoop 83 | hadoop-client 84 | 2.7.0 85 | 86 | 87 | org.apache.hbase 88 | hbase-server 89 | ${hbase.version} 90 | 91 | 92 | org.apache.hbase 93 | hbase-common 94 | ${hbase.version} 95 | 96 | 97 | org.apache.commons 98 | commons-pool2 99 | 2.4.2 100 | 101 | 102 | 103 | 104 | src/main/java 105 | src/test/java 106 | 107 | 108 | maven-assembly-plugin 109 | 110 | 111 | jar-with-dependencies 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | make-assembly 122 | package 123 | 124 | single 125 | 126 | 127 | 128 | 129 | 130 | 131 | org.codehaus.mojo 132 | exec-maven-plugin 133 | 1.2.1 134 | 135 | 136 | 137 | exec 138 | 139 | 140 | 141 | 142 | java 143 | true 144 | false 145 | compile 146 | com.stars 147 | 148 | 149 | 150 | 151 | org.apache.maven.plugins 152 | maven-compiler-plugin 153 | 154 | 1.8 155 | 1.8 156 | 157 | 158 | 159 | 160 | 161 | 162 | -------------------------------------------------------------------------------- /src/main/java/com/du/flink2hbase.java: -------------------------------------------------------------------------------- 1 | package com.du; 2 | 3 | import java.io.IOException; 4 | import java.text.SimpleDateFormat; 5 | import java.util.Date; 6 | import java.util.Properties; 7 | 8 | import org.apache.flink.streaming.api.TimeCharacteristic; 9 | import org.apache.flink.streaming.api.datastream.DataStream; 10 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 11 | import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010; 12 | import org.apache.flink.api.common.functions.MapFunction; 13 | import org.apache.flink.api.common.serialization.SimpleStringSchema; 14 | import org.apache.hadoop.conf.Configuration; 15 | import org.apache.hadoop.hbase.HBaseConfiguration; 16 | import org.apache.hadoop.hbase.HColumnDescriptor; 17 | import org.apache.hadoop.hbase.HTableDescriptor; 18 | import org.apache.hadoop.hbase.TableName; 19 | import org.apache.hadoop.hbase.client.Admin; 20 | import org.apache.hadoop.hbase.client.Connection; 21 | import 
org.apache.hadoop.hbase.client.Put; 22 | import org.apache.hadoop.hbase.client.Table; 23 | 24 | import com.du.pool.hbase.HbaseConnectionPool; 25 | import com.du.pool.tool.ConnectionPoolConfig; 26 | 27 | public class flink2hbase { 28 | 29 | private static TableName tableName = TableName.valueOf("Flink2HBase"); 30 | private static final String columnFamily = "info"; 31 | 32 | public static void main(String[] args) throws Exception { 33 | 34 | final String ZOOKEEPER_HOST = "node71:2181,node72:2181,node73:2181"; 35 | final String KAFKA_HOST = "node71:9092,node72:9092,node73:9092,cm70:9092"; 36 | final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 37 | env.enableCheckpointing(1000); // 非常关键,一定要设置启动检查点!! 38 | env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); 39 | 40 | Properties props = new Properties(); 41 | props.setProperty("zookeeper.connect", ZOOKEEPER_HOST); 42 | props.setProperty("bootstrap.servers", KAFKA_HOST); 43 | props.setProperty("group.id", "test-consumer-group"); 44 | 45 | DataStream transction = env.addSource(new FlinkKafkaConsumer010("test2", new SimpleStringSchema(), props)); 46 | 47 | transction.rebalance().map(new MapFunction() { 48 | private static final long serialVersionUID = 1L; 49 | public String map(String value)throws IOException{ 50 | System.out.println(value); 51 | writeIntoHBase(value); 52 | return null; 53 | } 54 | 55 | }); 56 | 57 | 58 | env.execute(); 59 | } 60 | 61 | public static void writeIntoHBase(String m)throws IOException{ 62 | ConnectionPoolConfig config = new ConnectionPoolConfig(); 63 | config.setMaxTotal(20); 64 | config.setMaxIdle(5); 65 | config.setMaxWaitMillis(1000); 66 | config.setTestOnBorrow(true); 67 | 68 | 69 | Configuration hbaseConfig = HBaseConfiguration.create(); 70 | 71 | hbaseConfig = HBaseConfiguration.create(); 72 | hbaseConfig.set("hbase.zookeeper.quorum", "node71:2181,node72:2181,node73:2181"); 73 | hbaseConfig.set("hbase.defaults.for.version.skip", "true"); 74 | 75 | HbaseConnectionPool pool = null; 76 | 77 | try { 78 | pool = new HbaseConnectionPool(config, hbaseConfig); 79 | 80 | Connection con = pool.getConnection(); 81 | 82 | Admin admin = con.getAdmin(); 83 | 84 | if(!admin.tableExists(tableName)){ 85 | admin.createTable(new HTableDescriptor(tableName).addFamily(new HColumnDescriptor(columnFamily))); 86 | } 87 | Table table = con.getTable(tableName); 88 | 89 | SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); 90 | 91 | Put put = new Put(org.apache.hadoop.hbase.util.Bytes.toBytes(df.format(new Date()))); 92 | 93 | put.addColumn(org.apache.hadoop.hbase.util.Bytes.toBytes(columnFamily), org.apache.hadoop.hbase.util.Bytes.toBytes("test"), 94 | org.apache.hadoop.hbase.util.Bytes.toBytes(m)); 95 | 96 | table.put(put); 97 | table.close(); 98 | pool.returnConnection(con); 99 | 100 | } catch (Exception e) { 101 | pool.close(); 102 | } 103 | } 104 | } -------------------------------------------------------------------------------- /src/main/java/com/du/pool/hbase/HbaseConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.hbase; 17 | 18 | public interface HbaseConfig { 19 | 20 | /** 21 | * DEFAULT_HOST 22 | */ 23 | public static final String DEFAULT_HOST = "localhost"; 24 | /** 25 | * DEFAULT_PORT 26 | */ 27 | public static final String DEFAULT_PORT = "2181"; 28 | /** 29 | * DEFAULT_MASTER 30 | */ 31 | public static final String DEFAULT_MASTER = null; 32 | /** 33 | * DEFAULT_ROOTDIR 34 | */ 35 | public static final String DEFAULT_ROOTDIR = null; 36 | 37 | /** 38 | * ZOOKEEPER_QUORUM_PROPERTY 39 | */ 40 | public static final String ZOOKEEPER_QUORUM_PROPERTY = "hbase.zookeeper.quorum"; 41 | /** 42 | * ZOOKEEPER_CLIENTPORT_PROPERTY 43 | */ 44 | public static final String ZOOKEEPER_CLIENTPORT_PROPERTY = "hbase.zookeeper.property.clientPort"; 45 | /** 46 | * MASTER_PROPERTY 47 | */ 48 | public static final String MASTER_PROPERTY = "hbase.master"; 49 | /** 50 | * ROOTDIR_PROPERTY 51 | */ 52 | public static final String ROOTDIR_PROPERTY = "hbase.rootdir"; 53 | 54 | } 55 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/hbase/HbaseConnectionFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.hbase; 17 | 18 | import com.du.pool.tool.ConnectionException; 19 | import com.du.pool.tool.ConnectionFactory; 20 | 21 | import org.apache.commons.pool2.PooledObject; 22 | import org.apache.commons.pool2.impl.DefaultPooledObject; 23 | import org.apache.hadoop.conf.Configuration; 24 | import org.apache.hadoop.hbase.client.Connection; 25 | 26 | import java.util.Map.Entry; 27 | import java.util.Properties; 28 | 29 | class HbaseConnectionFactory implements ConnectionFactory { 30 | 31 | /** 32 | * serialVersionUID 33 | */ 34 | private static final long serialVersionUID = 4024923894283696465L; 35 | 36 | /** 37 | * hadoopConfiguration 38 | */ 39 | private final Configuration hadoopConfiguration; 40 | 41 | /** 42 | *

Title: HbaseConnectionFactory 43 | * Description: Constructor
44 | * 45 | * @param hadoopConfiguration hbase配置 46 | */ 47 | public HbaseConnectionFactory(final Configuration hadoopConfiguration) { 48 | 49 | this.hadoopConfiguration = hadoopConfiguration; 50 | } 51 | 52 | /** 53 | *

Title: HbaseConnectionFactory 54 | * Description: Constructor
55 | * 56 | * @param host zookeeper地址 57 | * @param port zookeeper端口 58 | * @param master hbase主机 59 | * @param rootdir hdfs数据目录 60 | */ 61 | public HbaseConnectionFactory(final String host, final String port, final String master, final String rootdir) { 62 | 63 | this.hadoopConfiguration = new Configuration(); 64 | 65 | if (host == null) 66 | throw new ConnectionException("[" + HbaseConfig.ZOOKEEPER_QUORUM_PROPERTY + "] is required !"); 67 | this.hadoopConfiguration.set(HbaseConfig.ZOOKEEPER_QUORUM_PROPERTY, host); 68 | 69 | if (port == null) 70 | throw new ConnectionException("[" + HbaseConfig.ZOOKEEPER_CLIENTPORT_PROPERTY + "] is required !"); 71 | this.hadoopConfiguration.set(HbaseConfig.ZOOKEEPER_CLIENTPORT_PROPERTY, port); 72 | 73 | if (master != null) 74 | this.hadoopConfiguration.set(HbaseConfig.MASTER_PROPERTY, master); 75 | 76 | if (rootdir != null) 77 | this.hadoopConfiguration.set(HbaseConfig.ROOTDIR_PROPERTY, rootdir); 78 | } 79 | 80 | /** 81 | * @param properties 参数配置 82 | * @since 1.2.1 83 | */ 84 | public HbaseConnectionFactory(final Properties properties) { 85 | 86 | this.hadoopConfiguration = new Configuration(); 87 | 88 | for (Entry entry : properties.entrySet()) { 89 | 90 | this.hadoopConfiguration.set((String) entry.getKey(), (String) entry.getValue()); 91 | } 92 | } 93 | 94 | @Override 95 | public PooledObject makeObject() throws Exception { 96 | 97 | Connection connection = this.createConnection(); 98 | 99 | return new DefaultPooledObject(connection); 100 | } 101 | 102 | @Override 103 | public void destroyObject(PooledObject p) throws Exception { 104 | 105 | Connection connection = p.getObject(); 106 | 107 | if (connection != null) 108 | 109 | connection.close(); 110 | } 111 | 112 | @Override 113 | public boolean validateObject(PooledObject p) { 114 | 115 | Connection connection = p.getObject(); 116 | 117 | if (connection != null) 118 | 119 | return ((!connection.isAborted()) && (!connection.isClosed())); 120 | 121 | return false; 122 | } 123 | 124 | @Override 125 | public void activateObject(PooledObject p) throws Exception { 126 | // TODO Auto-generated method stub 127 | 128 | } 129 | 130 | @Override 131 | public void passivateObject(PooledObject p) throws Exception { 132 | // TODO Auto-generated method stub 133 | 134 | } 135 | 136 | @Override 137 | public Connection createConnection() throws Exception { 138 | 139 | Connection connection = org.apache.hadoop.hbase.client.ConnectionFactory 140 | .createConnection(hadoopConfiguration); 141 | 142 | return connection; 143 | } 144 | 145 | } 146 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/hbase/HbaseConnectionPool.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.du.pool.hbase; 17 | 18 | import com.du.pool.tool.ConnectionPool; 19 | import com.du.pool.tool.ConnectionPoolBase; 20 | import com.du.pool.tool.ConnectionPoolConfig; 21 | 22 | import org.apache.hadoop.conf.Configuration; 23 | import org.apache.hadoop.hbase.client.Connection; 24 | 25 | import java.util.Properties; 26 | 27 | public class HbaseConnectionPool extends ConnectionPoolBase implements ConnectionPool { 28 | 29 | /** 30 | * serialVersionUID 31 | */ 32 | private static final long serialVersionUID = -9126420905798370243L; 33 | 34 | /** 35 | *

Title: HbaseConnectionPool 36 | * Description: Default constructor
37 | */ 38 | public HbaseConnectionPool() { 39 | 40 | this(HbaseConfig.DEFAULT_HOST, HbaseConfig.DEFAULT_PORT); 41 | } 42 | 43 | /** 44 | *

Title: HbaseConnectionPool 45 | * Description: Constructor
46 | * 47 | * @param host 地址 48 | * @param port 端口 49 | */ 50 | public HbaseConnectionPool(final String host, final String port) { 51 | 52 | this(new ConnectionPoolConfig(), host, port, HbaseConfig.DEFAULT_MASTER, HbaseConfig.DEFAULT_ROOTDIR); 53 | } 54 | 55 | /** 56 | *

Title: HbaseConnectionPool 57 | * Description: Constructor
58 | * 59 | * @param host 地址 60 | * @param port 端口 61 | * @param master hbase主机 62 | * @param rootdir hdfs目录 63 | */ 64 | public HbaseConnectionPool(final String host, final String port, final String master, final String rootdir) { 65 | 66 | this(new ConnectionPoolConfig(), host, port, master, rootdir); 67 | } 68 | 69 | /** 70 | *

Title: HbaseConnectionPool 71 | * Description: Constructor
72 | * 73 | * @param hadoopConfiguration hbase配置 74 | */ 75 | public HbaseConnectionPool(final Configuration hadoopConfiguration) { 76 | 77 | this(new ConnectionPoolConfig(), hadoopConfiguration); 78 | } 79 | 80 | /** 81 | *

Title: HbaseConnectionPool 82 | * Description: Constructor
83 | * 84 | * @param poolConfig 池配置 85 | * @param host 地址 86 | * @param port 端口 87 | */ 88 | public HbaseConnectionPool(final ConnectionPoolConfig poolConfig, final String host, final String port) { 89 | 90 | this(poolConfig, host, port, HbaseConfig.DEFAULT_MASTER, HbaseConfig.DEFAULT_ROOTDIR); 91 | } 92 | 93 | /** 94 | *

Title: HbaseConnectionPool 95 | * Description: Constructor
96 | * 97 | * @param poolConfig 池配置 98 | * @param hadoopConfiguration hbase配置 99 | */ 100 | public HbaseConnectionPool(final ConnectionPoolConfig poolConfig, final Configuration hadoopConfiguration) { 101 | 102 | super(poolConfig, new HbaseConnectionFactory(hadoopConfiguration)); 103 | } 104 | 105 | /** 106 | *

Title: HbaseConnectionPool 107 | * Description: Constructor
108 | * 109 | * @param poolConfig 池配置 110 | * @param host 地址 111 | * @param port 端口 112 | * @param master hbase主机 113 | * @param rootdir hdfs目录 114 | */ 115 | public HbaseConnectionPool(final ConnectionPoolConfig poolConfig, final String host, final String port, final String master, final String rootdir) { 116 | 117 | super(poolConfig, new HbaseConnectionFactory(host, port, master, rootdir)); 118 | } 119 | 120 | /** 121 | * @param poolConfig 池配置 122 | * @param properties 参数配置 123 | * @since 1.2.1 124 | */ 125 | public HbaseConnectionPool(final ConnectionPoolConfig poolConfig, final Properties properties) { 126 | 127 | super(poolConfig, new HbaseConnectionFactory(properties)); 128 | } 129 | 130 | @Override 131 | public Connection getConnection() { 132 | 133 | return super.getResource(); 134 | } 135 | 136 | @Override 137 | public void returnConnection(Connection conn) { 138 | 139 | super.returnResource(conn); 140 | } 141 | 142 | @Override 143 | public void invalidateConnection(Connection conn) { 144 | 145 | super.invalidateResource(conn); 146 | } 147 | 148 | } 149 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/hbase/HbaseSharedConnPool.java: -------------------------------------------------------------------------------- 1 | package com.du.pool.hbase; 2 | 3 | import com.du.pool.tool.ConnectionException; 4 | import com.du.pool.tool.ConnectionPool; 5 | 6 | import org.apache.hadoop.conf.Configuration; 7 | import org.apache.hadoop.hbase.client.Connection; 8 | import org.apache.hadoop.hbase.client.ConnectionFactory; 9 | 10 | import java.io.IOException; 11 | import java.util.Map; 12 | import java.util.Properties; 13 | import java.util.concurrent.atomic.AtomicReference; 14 | 15 | public class HbaseSharedConnPool implements ConnectionPool { 16 | 17 | private static final long serialVersionUID = 1L; 18 | 19 | private static final AtomicReference pool = new AtomicReference(); 20 | 21 | private final Connection connection; 22 | 23 | private HbaseSharedConnPool(Configuration configuration) throws IOException { 24 | this.connection = ConnectionFactory.createConnection(configuration); 25 | } 26 | 27 | /** 28 | * Gets instance. 29 | * 30 | * @param host the host 31 | * @param port the port 32 | * @param master the master 33 | * @param rootdir the rootdir 34 | * @return the instance 35 | */ 36 | public synchronized static HbaseSharedConnPool getInstance(final String host, final String port, final String master, final String rootdir) { 37 | 38 | Properties properties = new Properties(); 39 | 40 | if (host == null) 41 | throw new ConnectionException("[" + HbaseConfig.ZOOKEEPER_QUORUM_PROPERTY + "] is required !"); 42 | properties.setProperty(HbaseConfig.ZOOKEEPER_QUORUM_PROPERTY, host); 43 | 44 | if (port == null) 45 | throw new ConnectionException("[" + HbaseConfig.ZOOKEEPER_CLIENTPORT_PROPERTY + "] is required !"); 46 | properties.setProperty(HbaseConfig.ZOOKEEPER_CLIENTPORT_PROPERTY, port); 47 | 48 | if (master != null) 49 | properties.setProperty(HbaseConfig.MASTER_PROPERTY, master); 50 | 51 | if (rootdir != null) 52 | properties.setProperty(HbaseConfig.ROOTDIR_PROPERTY, rootdir); 53 | 54 | return getInstance(properties); 55 | } 56 | 57 | /** 58 | * Gets instance. 
59 | * 60 | * @param properties the properties 61 | * @return the instance 62 | */ 63 | public synchronized static HbaseSharedConnPool getInstance(final Properties properties) { 64 | 65 | Configuration configuration = new Configuration(); 66 | 67 | for (Map.Entry entry : properties.entrySet()) { 68 | 69 | configuration.set((String) entry.getKey(), (String) entry.getValue()); 70 | } 71 | 72 | return getInstance(configuration); 73 | } 74 | 75 | /** 76 | * Gets instance. 77 | * 78 | * @param configuration the configuration 79 | * @return the instance 80 | */ 81 | public synchronized static HbaseSharedConnPool getInstance(final Configuration configuration) { 82 | 83 | if (pool.get() == null) 84 | 85 | try { 86 | pool.set(new HbaseSharedConnPool(configuration)); 87 | 88 | } catch (IOException e) { 89 | 90 | e.printStackTrace(); 91 | } 92 | 93 | return pool.get(); 94 | } 95 | 96 | @Override 97 | public Connection getConnection() { 98 | 99 | return connection; 100 | } 101 | 102 | @Override 103 | public void returnConnection(Connection conn) { 104 | 105 | // TODO: 2016/8/25 106 | } 107 | 108 | @Override 109 | public void invalidateConnection(Connection conn) { 110 | 111 | try { 112 | if (conn != null) 113 | 114 | conn.close(); 115 | 116 | } catch (IOException e) { 117 | 118 | e.printStackTrace(); 119 | } 120 | } 121 | 122 | /** 123 | * Close. 124 | */ 125 | public void close() { 126 | 127 | try { 128 | connection.close(); 129 | 130 | pool.set(null); 131 | 132 | } catch (IOException e) { 133 | 134 | e.printStackTrace(); 135 | } 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/tool/ConnectionException.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.tool; 17 | 18 | public class ConnectionException extends RuntimeException { 19 | 20 | private static final long serialVersionUID = -6503525110247209484L; 21 | 22 | public ConnectionException() { 23 | super(); 24 | } 25 | 26 | public ConnectionException(String message) { 27 | super(message); 28 | } 29 | 30 | public ConnectionException(Throwable e) { 31 | super(e); 32 | } 33 | 34 | public ConnectionException(String message, Throwable cause) { 35 | super(message, cause); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/tool/ConnectionFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.tool; 17 | 18 | import org.apache.commons.pool2.PooledObjectFactory; 19 | 20 | import java.io.Serializable; 21 | 22 | public interface ConnectionFactory extends PooledObjectFactory, Serializable { 23 | 24 | /** 25 | *

Title: createConnection 26 | * Description: Create a connection
27 | * 28 | * @return 连接 29 | * @throws Exception 30 | */ 31 | public abstract T createConnection() throws Exception; 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/tool/ConnectionPool.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.tool; 17 | 18 | import java.io.Serializable; 19 | 20 | public interface ConnectionPool extends Serializable { 21 | 22 | /** 23 | *

Title: getConnection 24 | * Description: Get a connection
25 | * 26 | * @return 连接 27 | */ 28 | public abstract T getConnection(); 29 | 30 | /** 31 | *

Title: returnConnection 32 | * Description: Return a connection
33 | * 34 | * @param conn 连接 35 | */ 36 | public void returnConnection(T conn); 37 | 38 | /** 39 | *

Title: invalidateConnection 40 | * Description: Invalidate a connection
41 | * 42 | * @param conn 连接 43 | */ 44 | public void invalidateConnection(T conn); 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/tool/ConnectionPoolBase.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.tool; 17 | 18 | import org.apache.commons.pool2.PooledObjectFactory; 19 | import org.apache.commons.pool2.impl.GenericObjectPool; 20 | import org.apache.commons.pool2.impl.GenericObjectPoolConfig; 21 | 22 | import java.io.Closeable; 23 | import java.io.Serializable; 24 | 25 | public abstract class ConnectionPoolBase implements Closeable, Serializable { 26 | 27 | /** 28 | * serialVersionUID 29 | */ 30 | private static final long serialVersionUID = 536428799879058482L; 31 | 32 | /** 33 | * internalPool 34 | */ 35 | protected GenericObjectPool internalPool; 36 | 37 | /** 38 | *

Title: ConnectionPoolBase 39 | * Description: Constructor
40 | */ 41 | public ConnectionPoolBase() { 42 | } 43 | 44 | /** 45 | *

Title: ConnectionPoolBase 46 | * Description: Constructor
47 | * 48 | * @param poolConfig 池配置 49 | * @param factory 池对象工厂 50 | */ 51 | public ConnectionPoolBase(final GenericObjectPoolConfig poolConfig, 52 | PooledObjectFactory factory) { 53 | this.initPool(poolConfig, factory); 54 | } 55 | 56 | /** 57 | *

Title: initPool 58 | * Description: Initialize the object pool
59 | * 60 | * @param poolConfig 池配置 61 | * @param factory 池对象工厂 62 | */ 63 | protected void initPool(final GenericObjectPoolConfig poolConfig, 64 | PooledObjectFactory factory) { 65 | if (this.internalPool != null) 66 | this.destroy(); 67 | 68 | this.internalPool = new GenericObjectPool(factory, poolConfig); 69 | } 70 | 71 | /** 72 | *

Title: destroy 73 | * Description: Destroy the object pool
74 | */ 75 | protected void destroy() { 76 | this.close(); 77 | } 78 | 79 | /** 80 | *

Title: getResource 81 | * Description: Get a pooled object
82 | * 83 | * @return 池对象 84 | */ 85 | protected T getResource() { 86 | try { 87 | return internalPool.borrowObject(); 88 | } catch (Exception e) { 89 | throw new ConnectionException( 90 | "Could not get a resource from the pool", e); 91 | } 92 | } 93 | 94 | /** 95 | *

Title: returnResource 96 | * Description: Return a pooled object
97 | * 98 | * @param resource 池对象 99 | */ 100 | protected void returnResource(final T resource) { 101 | if (null != resource) 102 | try { 103 | internalPool.returnObject(resource); 104 | } catch (Exception e) { 105 | throw new ConnectionException( 106 | "Could not return the resource to the pool", e); 107 | } 108 | } 109 | 110 | /** 111 | *

Title: invalidateResource 112 | * Description: Invalidate a pooled object
113 | * 114 | * @param resource 池对象 115 | */ 116 | protected void invalidateResource(final T resource) { 117 | if (null != resource) 118 | try { 119 | internalPool.invalidateObject(resource); 120 | } catch (Exception e) { 121 | throw new ConnectionException( 122 | "Could not invalidate the resource to the pool", e); 123 | } 124 | } 125 | 126 | /** 127 | *

Title: getNumActive 128 | * Description: Get the number of active objects in the pool
129 | * 130 | * @return 激活数 131 | */ 132 | public int getNumActive() { 133 | if (isInactived()) { 134 | return -1; 135 | } 136 | 137 | return this.internalPool.getNumActive(); 138 | } 139 | 140 | /** 141 | *

Title: getNumIdle 142 | * Description: Get the number of idle objects in the pool
143 | * 144 | * @return 空闲数 145 | */ 146 | public int getNumIdle() { 147 | if (isInactived()) { 148 | return -1; 149 | } 150 | 151 | return this.internalPool.getNumIdle(); 152 | } 153 | 154 | /** 155 | *

Title: getNumWaiters 156 | * Description: Get the number of waiters on the pool
157 | * 158 | * @return 等待数 159 | */ 160 | public int getNumWaiters() { 161 | if (isInactived()) { 162 | return -1; 163 | } 164 | 165 | return this.internalPool.getNumWaiters(); 166 | } 167 | 168 | /** 169 | *

Title: getMeanBorrowWaitTimeMillis 170 | * Description: Get the mean borrow wait time
171 | * 172 | * @return 平均等待时间 173 | */ 174 | public long getMeanBorrowWaitTimeMillis() { 175 | if (isInactived()) { 176 | return -1; 177 | } 178 | 179 | return this.internalPool.getMeanBorrowWaitTimeMillis(); 180 | } 181 | 182 | /** 183 | *

Title: getMaxBorrowWaitTimeMillis 184 | * Description: Get the maximum borrow wait time
185 | * 186 | * @return 最大等待时间 187 | */ 188 | public long getMaxBorrowWaitTimeMillis() { 189 | if (isInactived()) { 190 | return -1; 191 | } 192 | 193 | return this.internalPool.getMaxBorrowWaitTimeMillis(); 194 | } 195 | 196 | /** 197 | *

Title: isClosed 198 | * Description: Whether the pool is closed
199 | * 200 | * @return 是否关闭 201 | */ 202 | public boolean isClosed() { 203 | try { 204 | return this.internalPool.isClosed(); 205 | } catch (Exception e) { 206 | throw new ConnectionException( 207 | "Could not check closed from the pool", e); 208 | } 209 | } 210 | 211 | /** 212 | *

Title: isInactived 213 | * Description: Whether the pool is inactive
214 | * 215 | * @return 是否失效 216 | */ 217 | private boolean isInactived() { 218 | try { 219 | return this.internalPool == null || this.internalPool.isClosed(); 220 | } catch (Exception e) { 221 | throw new ConnectionException( 222 | "Could not check inactived from the pool", e); 223 | } 224 | } 225 | 226 | /** 227 | *

Title: addObjects 228 | * Description: Add idle objects to the pool
229 | * 230 | * @param count 池对象数量 231 | */ 232 | protected void addObjects(final int count) { 233 | try { 234 | for (int i = 0; i < count; i++) { 235 | this.internalPool.addObject(); 236 | } 237 | } catch (Exception e) { 238 | throw new ConnectionException("Error trying to add idle objects", e); 239 | } 240 | } 241 | 242 | /** 243 | *

Title: clear 244 | * Description: Clear the pool
245 | */ 246 | public void clear() { 247 | try { 248 | this.internalPool.clear(); 249 | } catch (Exception e) { 250 | throw new ConnectionException("Could not clear the pool", e); 251 | } 252 | } 253 | 254 | /** 255 | *

Title: close 256 | * Description: Close the pool
257 | */ 258 | public void close() { 259 | try { 260 | this.internalPool.close(); 261 | } catch (Exception e) { 262 | throw new ConnectionException("Could not destroy the pool", e); 263 | } 264 | } 265 | } 266 | -------------------------------------------------------------------------------- /src/main/java/com/du/pool/tool/ConnectionPoolConfig.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2015-2016 Dark Phoenixs (Open-Source Organization). 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.du.pool.tool; 17 | 18 | import org.apache.commons.pool2.impl.GenericObjectPoolConfig; 19 | 20 | import java.io.Serializable; 21 | 22 | public class ConnectionPoolConfig extends GenericObjectPoolConfig implements Serializable { 23 | 24 | /** 25 | * DEFAULT_TEST_WHILE_IDLE 26 | */ 27 | public static final boolean DEFAULT_TEST_WHILE_IDLE = true; 28 | /** 29 | * DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS 30 | */ 31 | public static final long DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS = 60000; 32 | /** 33 | * DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS 34 | */ 35 | public static final long DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS = 30000; 36 | /** 37 | * DEFAULT_NUM_TESTS_PER_EVICTION_RUN 38 | */ 39 | public static final int DEFAULT_NUM_TESTS_PER_EVICTION_RUN = -1; 40 | /** 41 | * serialVersionUID 42 | */ 43 | private static final long serialVersionUID = -2414567557372345057L; 44 | 45 | /** 46 | *

Title: ConnectionPoolConfig 47 | * Description: Default constructor
48 | */ 49 | public ConnectionPoolConfig() { 50 | 51 | // defaults to make your life with connection pool easier :) 52 | setTestWhileIdle(DEFAULT_TEST_WHILE_IDLE); 53 | setMinEvictableIdleTimeMillis(DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS); 54 | setTimeBetweenEvictionRunsMillis(DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS); 55 | setNumTestsPerEvictionRun(DEFAULT_NUM_TESTS_PER_EVICTION_RUN); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/test/java/com/du/AppTest.java: -------------------------------------------------------------------------------- 1 | package com.du; 2 | 3 | import junit.framework.Test; 4 | import junit.framework.TestCase; 5 | import junit.framework.TestSuite; 6 | 7 | /** 8 | * Unit test for simple App. 9 | */ 10 | public class AppTest 11 | extends TestCase 12 | { 13 | /** 14 | * Create the test case 15 | * 16 | * @param testName name of the test case 17 | */ 18 | public AppTest( String testName ) 19 | { 20 | super( testName ); 21 | } 22 | 23 | /** 24 | * @return the suite of tests being tested 25 | */ 26 | public static Test suite() 27 | { 28 | return new TestSuite( AppTest.class ); 29 | } 30 | 31 | /** 32 | * Rigourous Test :-) 33 | */ 34 | public void testApp() 35 | { 36 | assertTrue( true ); 37 | } 38 | } 39 | --------------------------------------------------------------------------------