├── LICENSE ├── README.md ├── _img ├── dst1.jpg └── dst2.jpg ├── assembly.xml ├── pom.xml └── src └── main ├── java └── com │ └── share │ ├── common │ ├── ConsoleLogQueue.java │ ├── DataSourceUtil.java │ ├── ElasticsearchSingleton.java │ └── SystemConfig.java │ ├── pojo │ └── MonitorInfo.java │ ├── service │ ├── CanalConfig.java │ ├── CanalFactory.java │ ├── DataSourceUtil.java │ ├── MutiCanalFactory.java │ ├── cassandra │ │ ├── CassandraFactory.java │ │ └── CassandraService.java │ ├── elasticsearch │ │ ├── AbstractElasticsearchFactory.java │ │ ├── ElasticsearchFactory.java │ │ ├── entity │ │ │ └── Result.java │ │ ├── http │ │ │ ├── ElasticsearchExtendHttpFactory.java │ │ │ └── ElasticsearchHttpFactory.java │ │ ├── rest │ │ │ ├── ElasticsearchExtendHighRestFactory.java │ │ │ ├── ElasticsearchExtendRestFactory.java │ │ │ ├── ElasticsearchHighRestFactory.java │ │ │ └── ElasticsearchRestFactory.java │ │ ├── spring │ │ │ └── ElasticsearchSpringFactory.java │ │ └── transport │ │ │ ├── ElasticsearchExtendTransportFactory.java │ │ │ └── ElasticsearchTransportFactory.java │ ├── greenplum │ │ └── GreenplumFactory.java │ ├── jdbc │ │ └── JDBCFactory.java │ ├── kafka │ │ └── KafkaFactory.java │ └── mongodb │ │ └── MongoDBFactory.java │ ├── swing │ ├── CommonFrame.java │ ├── ConfigDialog.java │ ├── DataInfo.java │ ├── Main.java │ ├── ManageTable.java │ └── ResourceHolder.java │ └── util │ ├── ClazzUtil.java │ ├── DateUtil.java │ ├── HttpUtil.java │ ├── NumberUtil.java │ └── StringUtil.java └── resources ├── bin ├── start.sh └── stop.sh ├── canal.properties ├── config.properties ├── log4j2.properties ├── messages_en.properties └── messages_zh_CN.properties /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # database-transform-tool 2 | 为数据监控以及数据同步提供C/S方式的J2SE的swing的图形化界面服务,并为服务提供高性能的同步以及监控服务。
3 | Supported data sources / databases for data synchronization: 4 | - SQL data sources【Oracle|SQL Server|MySQL】; 5 | - NoSQL data sources【MongoDB|Cassandra|Redis】; 6 | - Data engine【Elasticsearch】; 7 | - Data warehouse【Greenplum|PostgreSQL】; 8 | - Message queue【Kafka】
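
Each of these backends is wired up from `src/main/resources/config.properties` by the factory helpers in `com.share.common.DataSourceUtil`. A minimal sketch of obtaining a source and a target factory is shown below; the `DataSourceUtil.jdbc()` and `DataSourceUtil.mongodb()` methods and the factory types come from this repository, while the `SyncSketch` class name is illustrative only, and the actual read/transform/write calls are omitted because they depend on the individual factory APIs.

```java
import com.share.common.DataSourceUtil;
import com.share.service.jdbc.JDBCFactory;
import com.share.service.mongodb.MongoDBFactory;

public class SyncSketch {
    public static void main(String[] args) {
        // Factories are built from the jdbc.* / mongodb.* keys in config.properties.
        JDBCFactory source = DataSourceUtil.jdbc();        // MySQL | SQL Server | Oracle
        MongoDBFactory target = DataSourceUtil.mongodb();  // MongoDB
        // Read rows from `source`, transform them, and write them to `target`
        // using the methods exposed by the respective factory classes.
    }
}
```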
9 | 提供Canal可支持【MySQL|MariaDB】数据库监控以及数据备份。 10 | ### 1. Canal监控【MySQL|MariaDB】 11 | ### 2. 数据资源类型 12 | * 1)Elasticsearch服务(Transport/Rest/HighLevelRest) 13 | * 2)NoSQL服务[Cassandra|MongoDB|Redis|Memecached] 14 | * 3)SQL服务[MySQL|SQL Server|Oracle] 15 | * 4)数据仓库(GreenPlum|PostgreSQL) 16 | * 5)消息队列(Kafka) 17 | ### 3. 同步数据转换类型: 18 | * 1)Cassandra-->Cassandra 19 | * 2)Cassandra-->MongoDB 20 | * 3)Cassandra-->MySQL|SQL Server|Oracle 21 | * 4)Cassandra-->Greenplum 22 | * 5)MongoDB-->Cassandra 23 | * 6)MongoDB-->MySQL|SQL Server|Oracle 24 | * 7)MongoDB-->Greenplum 25 | * 8)MongoDB-->MongoDB 26 | * 9)[MySQL|SQL Server|Oracle]-->Cassandra 27 | * 10)[MySQL|SQL Server|Oracle]-->MongoDB 28 | * 11)[MySQL|SQL Server|Oracle]-->[MySQL|SQL Server|Oracle] 29 | * 12)[MySQL|SQL Server|Oracle]-->Greenplum 30 | * 13)Greenplum-->Cassandra 31 | * 14)Greenplum-->Greenplum 32 | * 15)Greenplum-->MongoDB 33 | * 16)[MySQL|SQL Server|Oracle]-->Greenplum 34 | ### 4. 图形界面 35 | ![服务支持](_img/dst1.jpg) 36 | ![服务配置](_img/dst2.jpg) 37 | -------------------------------------------------------------------------------- /_img/dst1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dev-share/database-transform-tool/39378bac8371e3871bd37bf1545a6f437ba12660/_img/dst1.jpg -------------------------------------------------------------------------------- /_img/dst2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dev-share/database-transform-tool/39378bac8371e3871bd37bf1545a6f437ba12660/_img/dst2.jpg -------------------------------------------------------------------------------- /assembly.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | release 4 | 5 | tar.gz 6 | 7 | true 8 | ${project.basedir}/${project.artifactId} 9 | 10 | 11 | ${project.basedir}/target/jsw/${project.name} 12 | ./ 13 | 0777 14 | 15 | 16 | ${project.build.directory}/lib 17 | ./lib 18 | 19 | 20 | ${project.basedir} 21 | ./ 22 | 23 | README.txt 24 | 25 | 26 | 27 | ${project.basedir}/target/classes 28 | ./config 29 | 30 | *.properties 31 | 32 | 33 | 34 | ${project.basedir}/target/classes/bin 35 | ./bin 36 | 0777 37 | 38 | *.sh 39 | *.bat 40 | *.cmd 41 | 42 | 43 | 44 | ${project.basedir}/target 45 | ./ 46 | 47 | *.jar 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /src/main/java/com/share/common/ConsoleLogQueue.java: -------------------------------------------------------------------------------- 1 | package com.share.common; 2 | 3 | import java.util.concurrent.LinkedBlockingQueue; 4 | 5 | public class ConsoleLogQueue { 6 | private static class InitSingleton{ 7 | private final static ConsoleLogQueue INSTANCE = new ConsoleLogQueue(); 8 | } 9 | private LinkedBlockingQueue queue = new LinkedBlockingQueue(); 10 | private ConsoleLogQueue() {} 11 | public final static ConsoleLogQueue getIntance(){ 12 | return InitSingleton.INSTANCE; 13 | } 14 | public void log(String info) { 15 | synchronized (queue) { 16 | queue.offer(info); 17 | } 18 | } 19 | public String println() { 20 | String info = null; 21 | if(!queue.isEmpty()) { 22 | info = queue.poll(); 23 | } 24 | return info; 25 | } 26 | public String printAll() { 27 | String info = ""; 28 | while(!queue.isEmpty()) { 29 | info +="\\n"+ queue.poll(); 30 | } 31 | return info; 32 | } 33 | } 34 | -------------------------------------------------------------------------------- 
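A minimal usage sketch for the ConsoleLogQueue singleton defined above, which wraps a LinkedBlockingQueue of console log messages. The ConsoleLogQueueUsage class is illustrative and not part of this repository; note that the accessor really is spelled getIntance() in the source.

package com.share.common;

public class ConsoleLogQueueUsage {
    public static void main(String[] args) {
        ConsoleLogQueue logs = ConsoleLogQueue.getIntance();
        logs.log("sync job started");
        logs.log("100 rows copied");
        String next = logs.println();  // polls a single message, or null if the queue is empty
        String rest = logs.printAll(); // drains whatever is left into one string
        System.out.println(next);
        System.out.println(rest);
    }
}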
/src/main/java/com/share/common/DataSourceUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.common; 2 | 3 | import com.share.service.cassandra.CassandraFactory; 4 | import com.share.service.elasticsearch.http.ElasticsearchHttpFactory; 5 | import com.share.service.elasticsearch.transport.ElasticsearchTransportFactory; 6 | import com.share.service.greenplum.GreenplumFactory; 7 | import com.share.service.jdbc.JDBCFactory; 8 | import com.share.service.kafka.KafkaFactory; 9 | import com.share.service.mongodb.MongoDBFactory; 10 | /** 11 | * @decription 配置文件获取数据源工厂 12 | * @author yi.zhang 13 | * @time 2017年7月31日 下午12:03:10 14 | * @since 1.0 15 | * @jdk 1.8 16 | */ 17 | public class DataSourceUtil { 18 | /** 19 | * @decription Cassandra配置 20 | * @author yi.zhang 21 | * @time 2017年7月31日 下午12:03:45 22 | * @return 23 | */ 24 | public static CassandraFactory cassandra(){ 25 | try { 26 | String servers = SystemConfig.getProperty("cassandra.servers"); 27 | String keyspace = SystemConfig.getProperty("cassandra.keyspace"); 28 | String username = SystemConfig.getProperty("cassandra.username"); 29 | String password = SystemConfig.getProperty("cassandra.password"); 30 | CassandraFactory factory = new CassandraFactory(); 31 | factory.init(servers, keyspace, username, password); 32 | return factory; 33 | } catch (Exception e) { 34 | // TODO Auto-generated catch block 35 | e.printStackTrace(); 36 | } 37 | return null; 38 | } 39 | /** 40 | * @decription MongoDB配置 41 | * @author yi.zhang 42 | * @time 2017年7月31日 下午12:03:45 43 | * @return 44 | */ 45 | public static MongoDBFactory mongodb(){ 46 | try { 47 | String servers = SystemConfig.getProperty("mongodb.servers"); 48 | String database = SystemConfig.getProperty("mongodb.database"); 49 | String schema = SystemConfig.getProperty("mongodb.schema"); 50 | String username = SystemConfig.getProperty("mongodb.username"); 51 | String password = SystemConfig.getProperty("mongodb.password"); 52 | MongoDBFactory factory = new MongoDBFactory(); 53 | factory.init(servers, database, schema, username, password); 54 | return factory; 55 | } catch (Exception e) { 56 | // TODO Auto-generated catch block 57 | e.printStackTrace(); 58 | } 59 | return null; 60 | } 61 | /** 62 | * @decription Elasticsearch配置[Http接口] 63 | * @author yi.zhang 64 | * @time 2017年7月31日 下午12:03:45 65 | * @return 66 | */ 67 | public static ElasticsearchHttpFactory httpElasticsearch(){ 68 | try { 69 | String clusterName = SystemConfig.getProperty("elasticsearch.cluster.name"); 70 | String servers = SystemConfig.getProperty("elasticsearch.cluster.servers"); 71 | String username = SystemConfig.getProperty("elasticsearch.cluster.username"); 72 | String password = SystemConfig.getProperty("elasticsearch.cluster.password"); 73 | ElasticsearchHttpFactory factory = new ElasticsearchHttpFactory(clusterName,servers, username, password); 74 | factory.init(); 75 | return factory; 76 | } catch (Exception e) { 77 | // TODO Auto-generated catch block 78 | e.printStackTrace(); 79 | } 80 | return null; 81 | } 82 | /** 83 | * @decription Elasticsearch配置[java接口] 84 | * @author yi.zhang 85 | * @time 2017年7月31日 下午12:03:45 86 | * @return 87 | */ 88 | public static ElasticsearchTransportFactory elasticsearch(){ 89 | try { 90 | String clusterName = SystemConfig.getProperty("elasticsearch.cluster.name"); 91 | String servers = SystemConfig.getProperty("elasticsearch.cluster.servers"); 92 | String username = SystemConfig.getProperty("elasticsearch.cluster.username"); 93 | 
String password = SystemConfig.getProperty("elasticsearch.cluster.password"); 94 | ElasticsearchTransportFactory factory = new ElasticsearchTransportFactory(clusterName, servers, username, password); 95 | factory.init(); 96 | return factory; 97 | } catch (Exception e) { 98 | // TODO Auto-generated catch block 99 | e.printStackTrace(); 100 | } 101 | return null; 102 | } 103 | /** 104 | * @decription Greenplum配置 105 | * @author yi.zhang 106 | * @time 2017年7月31日 下午12:03:45 107 | * @return 108 | */ 109 | public static GreenplumFactory greenplum(){ 110 | try { 111 | String address = SystemConfig.getProperty("greenplum.address"); 112 | String database = SystemConfig.getProperty("greenplum.database"); 113 | String schema = SystemConfig.getProperty("greenplum.schema"); 114 | String username = SystemConfig.getProperty("greenplum.username"); 115 | String password = SystemConfig.getProperty("greenplum.password"); 116 | boolean isDruid = Boolean.valueOf(SystemConfig.getProperty("jdbc.druid.enabled")); 117 | Integer max_pool_size = Integer.valueOf(SystemConfig.getProperty("jdbc.druid.max_pool_size")); 118 | Integer init_pool_size = Integer.valueOf(SystemConfig.getProperty("jdbc.druid.init_pool_size")); 119 | GreenplumFactory factory = new GreenplumFactory(); 120 | factory.init(address, database, schema, username, password, isDruid, max_pool_size, init_pool_size); 121 | return factory; 122 | } catch (Exception e) { 123 | // TODO Auto-generated catch block 124 | e.printStackTrace(); 125 | } 126 | return null; 127 | } 128 | /** 129 | * @decription JDBC(MySQL|SQL Server|Oracle等)配置 130 | * @author yi.zhang 131 | * @time 2017年7月31日 下午12:03:45 132 | * @return 133 | */ 134 | public static JDBCFactory jdbc(){ 135 | try { 136 | String driverName = SystemConfig.getProperty("jdbc.driver"); 137 | String url = SystemConfig.getProperty("jdbc.url"); 138 | String username = SystemConfig.getProperty("jdbc.username"); 139 | String password = SystemConfig.getProperty("jdbc.password"); 140 | boolean isDruid = Boolean.valueOf(SystemConfig.getProperty("jdbc.druid.enabled")); 141 | Integer max_pool_size = Integer.valueOf(SystemConfig.getProperty("jdbc.druid.max_pool_size")); 142 | Integer init_pool_size = Integer.valueOf(SystemConfig.getProperty("jdbc.druid.init_pool_size")); 143 | JDBCFactory factory = new JDBCFactory(); 144 | factory.init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 145 | return factory; 146 | } catch (Exception e) { 147 | // TODO Auto-generated catch block 148 | e.printStackTrace(); 149 | } 150 | return null; 151 | } 152 | /** 153 | * @decription Kafka配置 154 | * @author yi.zhang 155 | * @time 2017年7月31日 下午12:06:39 156 | * @return 157 | */ 158 | public static KafkaFactory kafka(){ 159 | try { 160 | String servers = SystemConfig.getProperty("kafka.servers");// 127.0.0.1:9092 161 | boolean isZookeeper = Boolean.valueOf(SystemConfig.getProperty("kafka.zookeeper.enabled")); 162 | String zookeeper_servers = SystemConfig.getProperty("kafka.zookeeper.servers");// 127.0.0.1:9092 163 | String acks = SystemConfig.getProperty("kafka.productor.acks"); 164 | KafkaFactory factory = new KafkaFactory(); 165 | factory.init(servers, isZookeeper, zookeeper_servers, acks); 166 | return factory; 167 | } catch (Exception e) { 168 | // TODO Auto-generated catch block 169 | e.printStackTrace(); 170 | } 171 | return null; 172 | } 173 | 174 | } 175 | -------------------------------------------------------------------------------- /src/main/java/com/share/common/ElasticsearchSingleton.java: 
-------------------------------------------------------------------------------- 1 | package com.share.common; 2 | 3 | import org.apache.logging.log4j.LogManager; 4 | import org.apache.logging.log4j.Logger; 5 | 6 | import com.share.service.elasticsearch.http.ElasticsearchExtendHttpFactory; 7 | import com.share.service.elasticsearch.rest.ElasticsearchExtendHighRestFactory; 8 | import com.share.service.elasticsearch.rest.ElasticsearchExtendRestFactory; 9 | import com.share.service.elasticsearch.transport.ElasticsearchExtendTransportFactory; 10 | import com.share.util.StringUtil; 11 | /** 12 | * 描述: Elasticsearch初始化实例 13 | * 时间: 2018年1月9日 上午11:18:20 14 | * @author yi.zhang 15 | * @since 1.0 16 | * JDK版本:1.8 17 | */ 18 | public class ElasticsearchSingleton { 19 | private static Logger logger = LogManager.getLogger(); 20 | private static class InitSingleton{ 21 | private final static ElasticsearchSingleton INSTANCE = new ElasticsearchSingleton(); 22 | } 23 | private ElasticsearchExtendHttpFactory http; 24 | private ElasticsearchExtendRestFactory rest; 25 | private ElasticsearchExtendHighRestFactory high; 26 | private ElasticsearchExtendTransportFactory transport; 27 | private ElasticsearchSingleton(){ 28 | try { 29 | String clusterName = SystemConfig.getProperty(SystemConfig.Config.SERVICE,"elasticsearch.cluster.name"); 30 | String servers = SystemConfig.getProperty("elasticsearch.cluster.servers"); 31 | String username = SystemConfig.getProperty("elasticsearch.cluster.username"); 32 | String password = SystemConfig.getProperty("elasticsearch.cluster.password"); 33 | String http_port = SystemConfig.getProperty("elasticsearch.http.port"); 34 | String transport_port = SystemConfig.getProperty("elasticsearch.transport.port"); 35 | http=http(clusterName, servers, username, password, http_port); 36 | rest=rest(clusterName, servers, username, password, http_port); 37 | high=high(clusterName, servers, username, password, http_port); 38 | transport=transport(clusterName, servers, username, password, transport_port); 39 | } catch (Exception e) { 40 | logger.error("--Elasticsearch init Error!",e); 41 | } 42 | } 43 | public final static ElasticsearchSingleton getIntance(){ 44 | return InitSingleton.INSTANCE; 45 | } 46 | public ElasticsearchExtendHttpFactory httpES(){ 47 | return http; 48 | } 49 | public ElasticsearchExtendRestFactory restES(){ 50 | return rest; 51 | } 52 | public ElasticsearchExtendHighRestFactory highES(){ 53 | return high; 54 | } 55 | public ElasticsearchExtendTransportFactory transportES(){ 56 | return transport; 57 | } 58 | /** 59 | * 描述: Elasticsearch配置[Http接口] 60 | * 时间: 2018年1月9日 上午11:02:08 61 | * @author yi.zhang 62 | * @param clusterName 集群名 63 | * @param servers 服务地址(多地址以','分割) 64 | * @param username 认证用户名 65 | * @param password 认证密码 66 | * @param port 服务端口 67 | * @return 68 | */ 69 | private ElasticsearchExtendHttpFactory http(String clusterName,String servers,String username,String password,String port){ 70 | try { 71 | ElasticsearchExtendHttpFactory factory = new ElasticsearchExtendHttpFactory(clusterName, servers, username, password); 72 | if(!StringUtil.isEmpty(port))factory = new ElasticsearchExtendHttpFactory(clusterName, servers, username, password,Integer.valueOf(port)); 73 | factory.init(); 74 | return factory; 75 | } catch (Exception e) { 76 | logger.error("--Http Elasticsearch init Error!",e); 77 | } 78 | return null; 79 | } 80 | /** 81 | * 描述: Elasticsearch配置[Rest接口] 82 | * 时间: 2018年1月9日 上午11:02:08 83 | * @author yi.zhang 84 | * @param clusterName 集群名 85 | * @param 
servers 服务地址(多地址以','分割) 86 | * @param username 认证用户名 87 | * @param password 认证密码 88 | * @param port 服务端口 89 | * @return 90 | */ 91 | private ElasticsearchExtendRestFactory rest(String clusterName,String servers,String username,String password,String port){ 92 | try { 93 | ElasticsearchExtendRestFactory factory = new ElasticsearchExtendRestFactory(clusterName, servers, username, password); 94 | if(!StringUtil.isEmpty(port))factory = new ElasticsearchExtendRestFactory(clusterName, servers, username, password,Integer.valueOf(port)); 95 | factory.init(); 96 | return factory; 97 | } catch (Exception e) { 98 | logger.error("--Rest Elasticsearch init Error!",e); 99 | } 100 | return null; 101 | } 102 | /** 103 | * 描述: Elasticsearch配置[HighRest接口] 104 | * 时间: 2018年1月9日 上午11:02:08 105 | * @author yi.zhang 106 | * @param clusterName 集群名 107 | * @param servers 服务地址(多地址以','分割) 108 | * @param username 认证用户名 109 | * @param password 认证密码 110 | * @param port 服务端口 111 | * @return 112 | */ 113 | private ElasticsearchExtendHighRestFactory high(String clusterName,String servers,String username,String password,String port){ 114 | try { 115 | ElasticsearchExtendHighRestFactory factory = new ElasticsearchExtendHighRestFactory(clusterName, servers, username, password); 116 | if(!StringUtil.isEmpty(port))factory = new ElasticsearchExtendHighRestFactory(clusterName, servers, username, password,Integer.valueOf(port)); 117 | factory.init(); 118 | return factory; 119 | } catch (Exception e) { 120 | logger.error("--High Rest Elasticsearch init Error!",e); 121 | } 122 | return null; 123 | } 124 | /** 125 | * 描述: Elasticsearch配置[Transport接口] 126 | * 时间: 2018年1月9日 上午11:02:08 127 | * @author yi.zhang 128 | * @param clusterName 集群名 129 | * @param servers 服务地址(多地址以','分割) 130 | * @param username 认证用户名 131 | * @param password 认证密码 132 | * @param port 服务端口 133 | * @return 134 | */ 135 | private ElasticsearchExtendTransportFactory transport(String clusterName,String servers,String username,String password,String port){ 136 | try { 137 | ElasticsearchExtendTransportFactory factory=new ElasticsearchExtendTransportFactory(clusterName, servers, username, password); 138 | if(!StringUtil.isEmpty(port))factory = new ElasticsearchExtendTransportFactory(clusterName, servers, username, password,Integer.valueOf(port)); 139 | factory.init(); 140 | return factory; 141 | } catch (Exception e) { 142 | logger.error("--Transport Elasticsearch init Error!",e); 143 | } 144 | return null; 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /src/main/java/com/share/common/SystemConfig.java: -------------------------------------------------------------------------------- 1 | package com.share.common; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | import java.util.Properties; 8 | 9 | import org.apache.logging.log4j.LogManager; 10 | import org.apache.logging.log4j.Logger; 11 | 12 | public class SystemConfig { 13 | private static Logger log = LogManager.getLogger(SystemConfig.class); 14 | private static class InitConfig{ 15 | private static Properties ALL_INSTANCE = new Properties(); 16 | private static Map LAZY_INSTANCE = new HashMap(); 17 | } 18 | public static enum Config{ 19 | CANAL("canal.properties"), 20 | SERVICE("config.properties"); 21 | private String file; 22 | private Config(String file) { 23 | this.file = file; 24 | } 25 | public String getFile() { 26 | return file; 27 | } 28 | } 29 | public static Properties 
getInstance(){ 30 | if(!InitConfig.ALL_INSTANCE.isEmpty()){ 31 | return InitConfig.ALL_INSTANCE; 32 | } 33 | InputStream in = null; 34 | try { 35 | for(Config conf:Config.values()){ 36 | in = ClassLoader.getSystemResourceAsStream(conf.getFile()); 37 | Properties properties = new Properties(); 38 | properties.load(in); 39 | InitConfig.ALL_INSTANCE.putAll(properties); 40 | } 41 | } catch (IOException e) { 42 | log.error("--All Properties read error!",e); 43 | }finally{ 44 | if(in!=null){ 45 | try { 46 | in.close(); 47 | } catch (Exception e) { 48 | log.error("--All InputStream read error!",e); 49 | } 50 | } 51 | } 52 | return InitConfig.ALL_INSTANCE; 53 | } 54 | public static Properties getInstance(Config conf){ 55 | if(InitConfig.LAZY_INSTANCE.containsKey(conf)){ 56 | return InitConfig.LAZY_INSTANCE.get(conf); 57 | } 58 | InputStream in = null; 59 | try { 60 | in = ClassLoader.getSystemResourceAsStream(conf.getFile()); 61 | Properties properties = new Properties(); 62 | properties.load(in); 63 | InitConfig.LAZY_INSTANCE.put(conf, properties); 64 | } catch (IOException e) { 65 | log.error("--Lazy Properties read error!",e); 66 | }finally{ 67 | if(in!=null){ 68 | try { 69 | in.close(); 70 | } catch (Exception e) { 71 | log.error("--Lazy InputStream read error!",e); 72 | } 73 | } 74 | } 75 | return InitConfig.LAZY_INSTANCE.get(conf); 76 | } 77 | 78 | public static String getProperty(String key) throws Exception{ 79 | return getInstance().getProperty(key); 80 | } 81 | public static String getProperty(Config conf,String key) throws Exception{ 82 | return getInstance(conf).getProperty(key); 83 | } 84 | } -------------------------------------------------------------------------------- /src/main/java/com/share/pojo/MonitorInfo.java: -------------------------------------------------------------------------------- 1 | package com.share.pojo; 2 | 3 | import java.io.Serializable; 4 | import java.util.ArrayList; 5 | import java.util.List; 6 | 7 | import com.alibaba.fastjson.JSONObject; 8 | /** 9 | * @decription 监控数据 10 | * @author yi.zhang 11 | * @time 2017年6月1日 上午10:13:38 12 | * @since 1.0 13 | * @jdk 1.8 14 | */ 15 | @SuppressWarnings("serial") 16 | public class MonitorInfo implements Serializable{ 17 | /** 18 | * 数据库名 19 | */ 20 | private String schema; 21 | /** 22 | * 表明 23 | */ 24 | private String table; 25 | /** 26 | * 操作类型(Insert|Update|Delete|Select|Create等) 27 | */ 28 | private String type; 29 | /** 30 | * 数据库SQL(Create|Drop|Grant|Alter等) 31 | */ 32 | private String sql; 33 | /** 34 | * 发生改变的行 35 | */ 36 | private List rows = new ArrayList(); 37 | 38 | public String getSchema() { 39 | return schema; 40 | } 41 | public void setSchema(String schema) { 42 | this.schema = schema; 43 | } 44 | public String getTable() { 45 | return table; 46 | } 47 | public void setTable(String table) { 48 | this.table = table; 49 | } 50 | public String getType() { 51 | return type; 52 | } 53 | public void setType(String type) { 54 | this.type = type; 55 | } 56 | public String getSql() { 57 | return sql; 58 | } 59 | public void setSql(String sql) { 60 | this.sql = sql; 61 | } 62 | public List getRows() { 63 | return rows; 64 | } 65 | public void setRows(List rows) { 66 | this.rows = rows; 67 | } 68 | public class RowInfo implements Serializable{ 69 | /** 70 | * 主键字段 71 | */ 72 | private String kid; 73 | /** 74 | * 改变之前字段数据(update|delete) 75 | */ 76 | private JSONObject before = new JSONObject(); 77 | /** 78 | * 改变之后字段数据(update|insert) 79 | */ 80 | private JSONObject after = new JSONObject(); 81 | /** 82 | * 
发生改变字段数据(update) 83 | */ 84 | private JSONObject change = new JSONObject(); 85 | 86 | public String getKid() { 87 | return kid; 88 | } 89 | public void setKid(String kid) { 90 | this.kid = kid; 91 | } 92 | public JSONObject getBefore() { 93 | return before; 94 | } 95 | public void setBefore(JSONObject before) { 96 | this.before = before; 97 | } 98 | public JSONObject getAfter() { 99 | return after; 100 | } 101 | public void setAfter(JSONObject after) { 102 | this.after = after; 103 | } 104 | public JSONObject getChange() { 105 | return change; 106 | } 107 | public void setChange(JSONObject change) { 108 | this.change = change; 109 | } 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/CanalConfig.java: -------------------------------------------------------------------------------- 1 | package com.share.service; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | 7 | import org.apache.logging.log4j.LogManager; 8 | import org.apache.logging.log4j.Logger; 9 | 10 | public class CanalConfig { 11 | private static Logger log = LogManager.getLogger(CanalConfig.class); 12 | private static Properties config = null; 13 | 14 | public static Properties getInstance(String properties){ 15 | InputStream in = null; 16 | try { 17 | in = ClassLoader.getSystemResourceAsStream(properties); 18 | config = new Properties(); 19 | config.load(in); 20 | } catch (IOException e) { 21 | log.error("--Canal Properties read error!",e); 22 | }finally{ 23 | if(in!=null){ 24 | try { 25 | in.close(); 26 | } catch (Exception e) { 27 | log.error("--Canal InputStream read error!",e); 28 | } 29 | } 30 | } 31 | return config; 32 | } 33 | 34 | public static String getProperty(String key) throws Exception{ 35 | if(config==null){ 36 | config = getInstance("canal.properties"); 37 | } 38 | return config.getProperty(key); 39 | } 40 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/CanalFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service; 2 | 3 | import java.net.InetSocketAddress; 4 | import java.net.SocketAddress; 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | 8 | import org.apache.logging.log4j.LogManager; 9 | import org.apache.logging.log4j.Logger; 10 | 11 | import com.alibaba.fastjson.JSONObject; 12 | import com.alibaba.otter.canal.client.CanalConnector; 13 | import com.alibaba.otter.canal.client.CanalConnectors; 14 | import com.alibaba.otter.canal.protocol.CanalEntry.Column; 15 | import com.alibaba.otter.canal.protocol.CanalEntry.Entry; 16 | import com.alibaba.otter.canal.protocol.CanalEntry.EntryType; 17 | import com.alibaba.otter.canal.protocol.CanalEntry.RowChange; 18 | import com.alibaba.otter.canal.protocol.CanalEntry.RowData; 19 | import com.alibaba.otter.canal.protocol.Message; 20 | import com.share.pojo.MonitorInfo; 21 | import com.share.util.DateUtil; 22 | /** 23 | * @decription Canal服务(MySQL数据库监控) 24 | * @author yi.zhang 25 | * @time 2017年6月1日 上午10:09:03 26 | * @since 1.0 27 | * @jdk 1.8 28 | */ 29 | public class CanalFactory { 30 | private static Logger logger = LogManager.getLogger(CanalFactory.class); 31 | private static CanalConnector connector; 32 | private static int BATCH_SIZE = 1000; 33 | /** 34 | * 监控过滤规则(默认所有操作:.*\\..*) 35 | * EX: 36 | * 1.库db1下所有表:db1\\..* 37 | * 2.库db1/库db2下所有表:db1\\..*,db2\\..* 38 | * 
3.库db1下table1表以及库db2下table2表:db1.table1,db2.table2 39 | * 4.以name1开头以及包含name2的所有库表:.*\\.name1.*,.*\\.*.name2.* 40 | */ 41 | private static String CANAL_FILTER_REGEX = ".*\\..*"; 42 | 43 | private String destination; 44 | private String servers; 45 | private String username; 46 | private String password; 47 | private String filter_regex; 48 | private boolean isZookeeper; 49 | private int batch_size; 50 | 51 | 52 | public String getDestination() { 53 | return destination; 54 | } 55 | public void setDestination(String destination) { 56 | this.destination = destination; 57 | } 58 | public String getServers() { 59 | return servers; 60 | } 61 | public void setServers(String servers) { 62 | this.servers = servers; 63 | } 64 | public String getUsername() { 65 | return username; 66 | } 67 | public void setUsername(String username) { 68 | this.username = username; 69 | } 70 | public String getPassword() { 71 | return password; 72 | } 73 | public void setPassword(String password) { 74 | this.password = password; 75 | } 76 | public String getFilter_regex() { 77 | return filter_regex; 78 | } 79 | public void setFilter_regex(String filter_regex) { 80 | this.filter_regex = filter_regex; 81 | } 82 | public boolean isZookeeper() { 83 | return isZookeeper; 84 | } 85 | public void setZookeeper(boolean isZookeeper) { 86 | this.isZookeeper = isZookeeper; 87 | } 88 | public int getBatch_size() { 89 | return batch_size; 90 | } 91 | public void setBatch_size(int batch_size) { 92 | this.batch_size = batch_size; 93 | } 94 | /** 95 | * @description Canal服务配置 96 | * @author yi.zhang 97 | * @time 2017年4月19日 上午10:38:42 98 | * @throws Exception 99 | */ 100 | public void init(String destination,String servers,String username,String password,String filter_regex,boolean isZookeeper,Integer batch_size){ 101 | try { 102 | if(filter_regex!=null){ 103 | CANAL_FILTER_REGEX = filter_regex; 104 | } 105 | if(batch_size!=null){ 106 | BATCH_SIZE = batch_size; 107 | } 108 | if(isZookeeper){ 109 | connector = CanalConnectors.newClusterConnector(servers, destination, username, password); 110 | }else{ 111 | List addresses = new ArrayList(); 112 | for(String address : servers.split(",")){ 113 | String[] ips = address.split(":"); 114 | String ip = ips[0]; 115 | int port=11111; 116 | if(ips.length>1){ 117 | port = Integer.valueOf(ips[1]); 118 | } 119 | addresses.add(new InetSocketAddress(ip, port)); 120 | } 121 | connector = CanalConnectors.newClusterConnector(addresses, destination, username, password); 122 | } 123 | connector.connect(); 124 | connector.subscribe(CANAL_FILTER_REGEX); 125 | connector.rollback(); 126 | } catch (Exception e) { 127 | logger.error("-----Canal Config init Error-----", e); 128 | } 129 | } 130 | /** 131 | * 关闭服务 132 | */ 133 | public static void close(){ 134 | if(connector!=null){ 135 | connector.disconnect(); 136 | } 137 | } 138 | /** 139 | * 提交数据 140 | * @param batchId 141 | */ 142 | public static void ack(long batchId){ 143 | connector.ack(batchId); 144 | } 145 | /** 146 | * 回滚数据 147 | * @param batchId 148 | */ 149 | public static void rollback(long batchId){ 150 | connector.rollback(batchId); 151 | } 152 | /** 153 | * @decription 监控数据 154 | * @author yi.zhang 155 | * @time 2017年6月1日 上午10:10:52 156 | * @return 157 | */ 158 | public List execute(){ 159 | try { 160 | if(connector==null){ 161 | init(destination, servers, username, password, filter_regex, isZookeeper, batch_size); 162 | } 163 | List monitors = new ArrayList(); 164 | Message message = connector.getWithoutAck(BATCH_SIZE); // 获取指定数量的数据 165 | long 
batchId = message.getId(); 166 | List list = message.getEntries(); 167 | if(list!=null&&list.size()>0){ 168 | for (Entry entry : list) { 169 | if (entry.getEntryType() == EntryType.TRANSACTIONBEGIN || entry.getEntryType() == EntryType.TRANSACTIONEND) { 170 | continue; 171 | } 172 | RowChange event = null; 173 | try { 174 | event = RowChange.parseFrom(entry.getStoreValue()); 175 | } catch (Exception e) { 176 | throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:" + entry.toString(), e); 177 | } 178 | String schema = entry.getHeader().getSchemaName(); 179 | String table = entry.getHeader().getTableName(); 180 | String type = event.hasEventType()?event.getEventType().name():null; 181 | String sql = event.getSql(); 182 | System.out.println("-----{schema:"+schema+",table:"+table+",type:"+type+",sql:"+sql+"}"); 183 | MonitorInfo monitor = new MonitorInfo(); 184 | monitor.setSchema(schema); 185 | monitor.setTable(table); 186 | monitor.setType(type); 187 | monitor.setSql(sql); 188 | List rows = monitor.getRows(); 189 | for (RowData rowData : event.getRowDatasList()) { 190 | MonitorInfo.RowInfo row = monitor.new RowInfo(); 191 | JSONObject before = row.getBefore(); 192 | JSONObject after = row.getAfter(); 193 | JSONObject change = row.getChange(); 194 | List cbefores = rowData.getBeforeColumnsList(); 195 | List cafters = rowData.getAfterColumnsList(); 196 | for (Column column : cbefores) { 197 | String key = column.getName(); 198 | Object value = column.getValue(); 199 | String ctype = column.getMysqlType().toLowerCase(); 200 | if(ctype.contains("int")){ 201 | if(ctype.contains("bigint")){ 202 | value = Long.valueOf(column.getValue()); 203 | }else{ 204 | value = Integer.valueOf(column.getValue()); 205 | } 206 | } 207 | if(ctype.contains("decimal")||ctype.contains("numeric")||ctype.contains("double")||ctype.contains("float")){ 208 | value = Double.valueOf(column.getValue()); 209 | } 210 | if(ctype.contains("timestamp")||ctype.contains("date")){ 211 | if(ctype.contains("timestamp")){ 212 | value = DateUtil.formatDateTime(column.getValue()); 213 | }else{ 214 | value = DateUtil.formatDate(column.getValue()); 215 | } 216 | } 217 | boolean update = column.getUpdated(); 218 | before.put(key, value); 219 | if(update){ 220 | change.put(key, value); 221 | } 222 | System.out.println("--"+type+"--before----{"+key+ ": " + value + ",update: " + update+"}"); 223 | } 224 | for (Column column : cafters) { 225 | String key = column.getName(); 226 | Object value = column.getValue(); 227 | String ctype = column.getMysqlType().toLowerCase(); 228 | if(ctype.contains("int")){ 229 | if(ctype.contains("bigint")){ 230 | value = Long.valueOf(column.getValue()); 231 | }else{ 232 | value = Integer.valueOf(column.getValue()); 233 | } 234 | } 235 | if(ctype.contains("decimal")||ctype.contains("numeric")||ctype.contains("double")||ctype.contains("float")){ 236 | value = Double.valueOf(column.getValue()); 237 | } 238 | if(ctype.contains("timestamp")||ctype.contains("date")){ 239 | if(ctype.contains("timestamp")){ 240 | value = DateUtil.formatDateTime(column.getValue()); 241 | }else{ 242 | value = DateUtil.formatDate(column.getValue()); 243 | } 244 | } 245 | boolean update = column.getUpdated(); 246 | after.put(key, value); 247 | if(update){ 248 | change.put(key, value); 249 | } 250 | System.out.println("--"+type+"--after----{"+key+ ": " + value + ",update: " + update+"}"); 251 | } 252 | rows.add(row); 253 | } 254 | monitors.add(monitor); 255 | } 256 | } 257 | ack(batchId); 258 | return monitors; 259 | 
}catch (Exception e) { 260 | logger.error("--Canal监控失败!",e); 261 | } 262 | return null; 263 | } 264 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/DataSourceUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.service; 2 | 3 | import com.share.service.cassandra.CassandraFactory; 4 | import com.share.service.elasticsearch.http.ElasticsearchHttpFactory; 5 | import com.share.service.elasticsearch.transport.ElasticsearchTransportFactory; 6 | import com.share.service.greenplum.GreenplumFactory; 7 | import com.share.service.jdbc.JDBCFactory; 8 | import com.share.service.kafka.KafkaFactory; 9 | import com.share.service.mongodb.MongoDBFactory; 10 | /** 11 | * @decription 配置文件获取数据源工厂 12 | * @author yi.zhang 13 | * @time 2017年7月31日 下午12:03:10 14 | * @since 1.0 15 | * @jdk 1.8 16 | */ 17 | public class DataSourceUtil { 18 | /** 19 | * @decription Cassandra配置 20 | * @author yi.zhang 21 | * @time 2017年7月31日 下午12:03:45 22 | * @return 23 | */ 24 | public static CassandraFactory cassandra(){ 25 | try { 26 | String servers = CanalConfig.getProperty("cassandra.servers"); 27 | String keyspace = CanalConfig.getProperty("cassandra.keyspace"); 28 | String username = CanalConfig.getProperty("cassandra.username"); 29 | String password = CanalConfig.getProperty("cassandra.password"); 30 | CassandraFactory factory = new CassandraFactory(); 31 | factory.init(servers, keyspace, username, password); 32 | return factory; 33 | } catch (Exception e) { 34 | // TODO Auto-generated catch block 35 | e.printStackTrace(); 36 | } 37 | return null; 38 | } 39 | /** 40 | * @decription MongoDB配置 41 | * @author yi.zhang 42 | * @time 2017年7月31日 下午12:03:45 43 | * @return 44 | */ 45 | public static MongoDBFactory mongodb(){ 46 | try { 47 | String servers = CanalConfig.getProperty("mongodb.servers"); 48 | String database = CanalConfig.getProperty("mongodb.database"); 49 | String schema = CanalConfig.getProperty("mongodb.schema"); 50 | String username = CanalConfig.getProperty("mongodb.username"); 51 | String password = CanalConfig.getProperty("mongodb.password"); 52 | MongoDBFactory factory = new MongoDBFactory(); 53 | factory.init(servers, database, schema, username, password); 54 | return factory; 55 | } catch (Exception e) { 56 | // TODO Auto-generated catch block 57 | e.printStackTrace(); 58 | } 59 | return null; 60 | } 61 | /** 62 | * @decription Elasticsearch配置[Http接口] 63 | * @author yi.zhang 64 | * @time 2017年7月31日 下午12:03:45 65 | * @return 66 | */ 67 | public static ElasticsearchHttpFactory httpElasticsearch(){ 68 | try { 69 | String clusterName = CanalConfig.getProperty("elasticsearch.cluster.name"); 70 | String servers = CanalConfig.getProperty("elasticsearch.cluster.servers"); 71 | String username = CanalConfig.getProperty("elasticsearch.cluster.username"); 72 | String password = CanalConfig.getProperty("elasticsearch.cluster.password"); 73 | ElasticsearchHttpFactory factory = new ElasticsearchHttpFactory(clusterName,servers, username, password); 74 | factory.init(); 75 | return factory; 76 | } catch (Exception e) { 77 | // TODO Auto-generated catch block 78 | e.printStackTrace(); 79 | } 80 | return null; 81 | } 82 | /** 83 | * @decription Elasticsearch配置[java接口] 84 | * @author yi.zhang 85 | * @time 2017年7月31日 下午12:03:45 86 | * @return 87 | */ 88 | public static ElasticsearchTransportFactory elasticsearch(){ 89 | try { 90 | String clusterName = 
CanalConfig.getProperty("elasticsearch.cluster.name"); 91 | String servers = CanalConfig.getProperty("elasticsearch.cluster.servers"); 92 | String username = CanalConfig.getProperty("elasticsearch.cluster.username"); 93 | String password = CanalConfig.getProperty("elasticsearch.cluster.password"); 94 | ElasticsearchTransportFactory factory = new ElasticsearchTransportFactory(clusterName, servers, username, password); 95 | factory.init(); 96 | return factory; 97 | } catch (Exception e) { 98 | // TODO Auto-generated catch block 99 | e.printStackTrace(); 100 | } 101 | return null; 102 | } 103 | /** 104 | * @decription Greenplum配置 105 | * @author yi.zhang 106 | * @time 2017年7月31日 下午12:03:45 107 | * @return 108 | */ 109 | public static GreenplumFactory greenplum(){ 110 | try { 111 | String address = CanalConfig.getProperty("greenplum.address"); 112 | String database = CanalConfig.getProperty("greenplum.database"); 113 | String schema = CanalConfig.getProperty("greenplum.schema"); 114 | String username = CanalConfig.getProperty("greenplum.username"); 115 | String password = CanalConfig.getProperty("greenplum.password"); 116 | boolean isDruid = Boolean.valueOf(CanalConfig.getProperty("jdbc.druid.enabled")); 117 | Integer max_pool_size = Integer.valueOf(CanalConfig.getProperty("jdbc.druid.max_pool_size")); 118 | Integer init_pool_size = Integer.valueOf(CanalConfig.getProperty("jdbc.druid.init_pool_size")); 119 | GreenplumFactory factory = new GreenplumFactory(); 120 | factory.init(address, database, schema, username, password, isDruid, max_pool_size, init_pool_size); 121 | return factory; 122 | } catch (Exception e) { 123 | // TODO Auto-generated catch block 124 | e.printStackTrace(); 125 | } 126 | return null; 127 | } 128 | /** 129 | * @decription JDBC(MySQL|SQL Server|Oracle等)配置 130 | * @author yi.zhang 131 | * @time 2017年7月31日 下午12:03:45 132 | * @return 133 | */ 134 | public static JDBCFactory jdbc(){ 135 | try { 136 | String driverName = CanalConfig.getProperty("jdbc.driver"); 137 | String url = CanalConfig.getProperty("jdbc.url"); 138 | String username = CanalConfig.getProperty("jdbc.username"); 139 | String password = CanalConfig.getProperty("jdbc.password"); 140 | boolean isDruid = Boolean.valueOf(CanalConfig.getProperty("jdbc.druid.enabled")); 141 | Integer max_pool_size = Integer.valueOf(CanalConfig.getProperty("jdbc.druid.max_pool_size")); 142 | Integer init_pool_size = Integer.valueOf(CanalConfig.getProperty("jdbc.druid.init_pool_size")); 143 | JDBCFactory factory = new JDBCFactory(); 144 | factory.init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 145 | return factory; 146 | } catch (Exception e) { 147 | // TODO Auto-generated catch block 148 | e.printStackTrace(); 149 | } 150 | return null; 151 | } 152 | /** 153 | * @decription Kafka配置 154 | * @author yi.zhang 155 | * @time 2017年7月31日 下午12:06:39 156 | * @return 157 | */ 158 | public static KafkaFactory kafka(){ 159 | try { 160 | String servers = CanalConfig.getProperty("kafka.servers");// 127.0.0.1:9092 161 | boolean isZookeeper = Boolean.valueOf(CanalConfig.getProperty("kafka.zookeeper.enabled")); 162 | String zookeeper_servers = CanalConfig.getProperty("kafka.zookeeper.servers");// 127.0.0.1:9092 163 | String acks = CanalConfig.getProperty("kafka.productor.acks"); 164 | KafkaFactory factory = new KafkaFactory(); 165 | factory.init(servers, isZookeeper, zookeeper_servers, acks); 166 | return factory; 167 | } catch (Exception e) { 168 | // TODO Auto-generated catch block 169 | e.printStackTrace(); 170 | 
} 171 | return null; 172 | } 173 | 174 | } 175 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/MutiCanalFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service; 2 | 3 | import java.net.InetSocketAddress; 4 | import java.net.SocketAddress; 5 | import java.util.ArrayList; 6 | import java.util.Date; 7 | import java.util.List; 8 | import java.util.concurrent.ConcurrentHashMap; 9 | 10 | import org.apache.logging.log4j.LogManager; 11 | import org.apache.logging.log4j.Logger; 12 | 13 | import com.alibaba.fastjson.JSONObject; 14 | import com.alibaba.otter.canal.client.CanalConnector; 15 | import com.alibaba.otter.canal.client.CanalConnectors; 16 | import com.alibaba.otter.canal.protocol.CanalEntry.Column; 17 | import com.alibaba.otter.canal.protocol.CanalEntry.Entry; 18 | import com.alibaba.otter.canal.protocol.CanalEntry.EntryType; 19 | import com.alibaba.otter.canal.protocol.CanalEntry.EventType; 20 | import com.alibaba.otter.canal.protocol.CanalEntry.RowChange; 21 | import com.alibaba.otter.canal.protocol.CanalEntry.RowData; 22 | import com.alibaba.otter.canal.protocol.Message; 23 | import com.share.pojo.MonitorInfo; 24 | import com.share.util.DateUtil; 25 | /** 26 | * @decription Canal服务(MySQL数据库监控) 27 | * @author yi.zhang 28 | * @time 2017年6月1日 上午10:09:03 29 | * @since 1.0 30 | * @jdk 1.8 31 | */ 32 | public class MutiCanalFactory { 33 | private static Logger logger = LogManager.getLogger(MutiCanalFactory.class); 34 | /** 35 | * 监控过滤规则(默认所有操作:.*\\..*) 36 | * EX: 37 | * 1.库db1下所有表:db1\\..* 38 | * 2.库db1/库db2下所有表:db1\\..*,db2\\..* 39 | * 3.库db1下table1表以及库db2下table2表:db1.table1,db2.table2 40 | * 4.以name1开头以及包含name2的所有库表:.*\\.name1.*,.*\\.*.name2.* 41 | */ 42 | private static String CANAL_FILTER_REGEX = ".*\\..*"; 43 | /** 44 | * 多实例列表连接 45 | */ 46 | private static ConcurrentHashMap cache = new ConcurrentHashMap(); 47 | private static int BATCH_SIZE = 1000; 48 | 49 | private String destinations; 50 | private String servers; 51 | private String username; 52 | private String password; 53 | private String filter_regex; 54 | private boolean isZookeeper; 55 | private int batch_size; 56 | 57 | public String getDestinations() { 58 | return destinations; 59 | } 60 | public void setDestinations(String destinations) { 61 | this.destinations = destinations; 62 | } 63 | public String getServers() { 64 | return servers; 65 | } 66 | public void setServers(String servers) { 67 | this.servers = servers; 68 | } 69 | public String getUsername() { 70 | return username; 71 | } 72 | public void setUsername(String username) { 73 | this.username = username; 74 | } 75 | public String getPassword() { 76 | return password; 77 | } 78 | public void setPassword(String password) { 79 | this.password = password; 80 | } 81 | public String getFilter_regex() { 82 | return filter_regex; 83 | } 84 | public void setFilter_regex(String filter_regex) { 85 | this.filter_regex = filter_regex; 86 | } 87 | public boolean isZookeeper() { 88 | return isZookeeper; 89 | } 90 | public void setZookeeper(boolean isZookeeper) { 91 | this.isZookeeper = isZookeeper; 92 | } 93 | public int getBatch_size() { 94 | return batch_size; 95 | } 96 | public void setBatch_size(int batch_size) { 97 | this.batch_size = batch_size; 98 | } 99 | /** 100 | * @description Canal服务配置 101 | * @author yi.zhang 102 | * @time 2017年4月19日 上午10:38:42 103 | * @throws Exception 104 | */ 105 | public void init(String destinations,String 
servers,String username,String password,String filter_regex,boolean isZookeeper,Integer batch_size){ 106 | try { 107 | if(filter_regex!=null){ 108 | CANAL_FILTER_REGEX = filter_regex; 109 | } 110 | if(batch_size!=null){ 111 | BATCH_SIZE = batch_size; 112 | } 113 | if(servers==null||"".equals(servers)){ 114 | return; 115 | } 116 | if(destinations!=null&&!"".equals(destinations)){ 117 | for(String destination:destinations.split(",")){ 118 | if(destination==null||"".equals(destination)){ 119 | continue; 120 | } 121 | CanalConnector connector = null; 122 | if(isZookeeper){ 123 | connector = CanalConnectors.newClusterConnector(servers, destination, username, password); 124 | }else{ 125 | List addresses = new ArrayList(); 126 | for(String address : servers.split(",")){ 127 | String[] ips = address.split(":"); 128 | String ip = ips[0]; 129 | int port=11111; 130 | if(ips.length>1){ 131 | port = Integer.valueOf(ips[1]); 132 | } 133 | addresses.add(new InetSocketAddress(ip, port)); 134 | } 135 | if(addresses!=null&&addresses.size()==1){ 136 | connector = CanalConnectors.newSingleConnector(addresses.get(0), destination, username, password); 137 | }else{ 138 | connector = CanalConnectors.newClusterConnector(addresses, destination, username, password); 139 | } 140 | } 141 | connector.connect(); 142 | connector.subscribe(CANAL_FILTER_REGEX); 143 | connector.rollback(); 144 | cache.put(destination, connector); 145 | } 146 | } 147 | } catch (Exception e) { 148 | logger.error("-----Muti Canal Config init Error-----", e); 149 | } 150 | } 151 | /** 152 | * 关闭服务 153 | */ 154 | public static void close(){ 155 | if(!cache.isEmpty()){ 156 | for (CanalConnector connector : cache.values()) { 157 | connector.disconnect(); 158 | } 159 | } 160 | } 161 | /** 162 | * 提交数据 163 | * @param batchId 164 | */ 165 | public static void ack(CanalConnector connector,long batchId){ 166 | connector.ack(batchId); 167 | } 168 | /** 169 | * 回滚数据 170 | * @param batchId 171 | */ 172 | public static void rollback(CanalConnector connector,long batchId){ 173 | connector.rollback(batchId); 174 | } 175 | 176 | public List service(){ 177 | List data = new ArrayList(); 178 | try { 179 | if(cache==null||cache.isEmpty()){ 180 | init(destinations, servers, username, password, filter_regex, isZookeeper, batch_size); 181 | } 182 | if(!cache.isEmpty()){ 183 | for (CanalConnector connector : cache.values()) { 184 | List list = execute(connector); 185 | if(list!=null&&list.size()>0){ 186 | data.addAll(list); 187 | } 188 | } 189 | } 190 | } catch (Exception e) { 191 | logger.error("--Muti Canal监控失败!",e); 192 | } 193 | return data; 194 | } 195 | /** 196 | * @decription 监控数据 197 | * @author yi.zhang 198 | * @time 2017年6月1日 上午10:10:52 199 | * @return 200 | */ 201 | protected List execute(CanalConnector connector){ 202 | List monitors = new ArrayList(); 203 | Message message = connector.getWithoutAck(BATCH_SIZE); // 获取指定数量的数据 204 | long batchId = message.getId(); 205 | List list = message.getEntries(); 206 | if(list!=null&&list.size()>0){ 207 | for (Entry entry : list) { 208 | if (entry.getEntryType() == EntryType.TRANSACTIONBEGIN || entry.getEntryType() == EntryType.TRANSACTIONEND) { 209 | continue; 210 | } 211 | RowChange event = null; 212 | try { 213 | event = RowChange.parseFrom(entry.getStoreValue()); 214 | } catch (Exception e) { 215 | throw new RuntimeException("ERROR ## parser of eromanga-event has an error , data:" + entry.toString(), e); 216 | } 217 | String schema = entry.getHeader().getSchemaName(); 218 | String table = 
entry.getHeader().getTableName(); 219 | String type = event.hasEventType()?event.getEventType().name():null; 220 | String sql = event.getSql(); 221 | System.out.println("-----{schema:"+schema+",table:"+table+",type:"+type+",sql:"+sql+"}"); 222 | MonitorInfo monitor = new MonitorInfo(); 223 | monitor.setSchema(schema); 224 | monitor.setTable(table); 225 | monitor.setType(type); 226 | List rows = monitor.getRows(); 227 | for (RowData rowData : event.getRowDatasList()) { 228 | MonitorInfo.RowInfo row = monitor.new RowInfo(); 229 | String kid = null; 230 | JSONObject before = row.getBefore(); 231 | JSONObject after = row.getAfter(); 232 | JSONObject change = row.getChange(); 233 | List cbefores = rowData.getBeforeColumnsList(); 234 | List cafters = rowData.getAfterColumnsList(); 235 | for (Column column : cbefores) { 236 | String key = column.getName(); 237 | Object value = column.getValue(); 238 | String ctype = column.getMysqlType().toLowerCase(); 239 | if(ctype.contains("int")){ 240 | if(ctype.contains("bigint")){ 241 | value = Long.valueOf(column.getValue()); 242 | }else{ 243 | value = Integer.valueOf(column.getValue()); 244 | } 245 | } 246 | if(ctype.contains("decimal")||ctype.contains("numeric")||ctype.contains("double")||ctype.contains("float")){ 247 | value = Double.valueOf(column.getValue()); 248 | } 249 | if(ctype.contains("timestamp")||ctype.contains("date")){ 250 | if(ctype.contains("timestamp")){ 251 | value = DateUtil.formatDateTime(column.getValue()); 252 | }else{ 253 | value = DateUtil.formatDate(column.getValue()); 254 | } 255 | } 256 | boolean update = column.getUpdated(); 257 | before.put(key, value); 258 | if(update){ 259 | change.put(key, value); 260 | } 261 | if(column.getIsKey()&&kid==null){ 262 | kid = key; 263 | } 264 | System.out.println("--"+type+"--before----{"+key+ ": " + value + ",update: " + update+","+column.getSqlType()+":"+column.getMysqlType()+":"+column.getLength()+"}"); 265 | } 266 | for (Column column : cafters) { 267 | String key = column.getName(); 268 | Object value = column.getValue(); 269 | String ctype = column.getMysqlType().toLowerCase(); 270 | if(ctype.contains("int")){ 271 | if(ctype.contains("bigint")){ 272 | value = Long.valueOf(column.getValue()); 273 | }else{ 274 | value = Integer.valueOf(column.getValue()); 275 | } 276 | } 277 | if(ctype.contains("decimal")||ctype.contains("numeric")||ctype.contains("double")||ctype.contains("float")){ 278 | value = Double.valueOf(column.getValue()); 279 | } 280 | if(ctype.contains("timestamp")||ctype.contains("date")){ 281 | if(ctype.contains("timestamp")){ 282 | value = DateUtil.formatDateTime(column.getValue()); 283 | }else{ 284 | value = DateUtil.formatDate(column.getValue()); 285 | } 286 | } 287 | boolean update = column.getUpdated(); 288 | after.put(key, value); 289 | if(update){ 290 | change.put(key, value); 291 | } 292 | if(column.getIsKey()&&kid==null){ 293 | kid = key; 294 | } 295 | System.out.println("--"+type+"--after----{"+key+ ": " + value + ",update: " + update+"}"); 296 | } 297 | row.setKid(kid); 298 | if (event.getEventType() == EventType.DELETE) { 299 | Object id = before.get(kid); 300 | sql += "delete from "+table+" where "+kid+"="+(id instanceof String?"'"+id+"'":id)+";"; 301 | } 302 | if (event.getEventType() == EventType.INSERT) { 303 | String keys = ""; 304 | String values = ""; 305 | for (String key : after.keySet()) { 306 | Object value = after.get(key); 307 | if(value instanceof Date){ 308 | value = DateUtil.formatDateTimeStr((Date)value); 309 | } 310 | if("".equals(keys)){ 311 | 
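// First column of the reconstructed INSERT: start the comma-separated column/value lists.
// Illustrative result (hypothetical row data): after = {id: 1, name: 'foo'} eventually yields
//   insert into demo_table(id,name)values(1,'foo');
// String values are single-quoted; Date values are formatted with DateUtil.formatDateTimeStr first.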
keys = key; 312 | values = (value instanceof String?"'"+value+"'":value+""); 313 | }else{ 314 | keys +=',' + key; 315 | values +=',' + (value instanceof String?"'"+value+"'":value+""); 316 | } 317 | } 318 | sql += "insert into "+table+"("+keys+")values("+values+");"; 319 | } 320 | if (event.getEventType() == EventType.UPDATE) { 321 | String set = ""; 322 | for (String key : change.keySet()) { 323 | Object value = after.get(key); 324 | if(value instanceof Date){ 325 | value = DateUtil.formatDateTimeStr((Date)value); 326 | } 327 | if("".equals(set)){ 328 | set = key+"="+(value instanceof String?"'"+value+"'":value+""); 329 | }else{ 330 | set +=',' + key+"="+(value instanceof String?"'"+value+"'":value+""); 331 | } 332 | } 333 | Object id = before.get(kid); 334 | sql += "update "+table+" set "+set+" where "+kid+"="+(id instanceof String?"'"+id+"'":id)+";"; 335 | } 336 | rows.add(row); 337 | } 338 | monitor.setSql(sql); 339 | monitors.add(monitor); 340 | } 341 | } 342 | ack(connector,batchId); 343 | return monitors; 344 | } 345 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/cassandra/CassandraFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.cassandra; 2 | import java.lang.reflect.Field; 3 | import java.net.InetSocketAddress; 4 | import java.util.ArrayList; 5 | import java.util.HashMap; 6 | import java.util.List; 7 | import java.util.Map; 8 | 9 | import org.apache.logging.log4j.LogManager; 10 | import org.apache.logging.log4j.Logger; 11 | 12 | import com.alibaba.fastjson.JSON; 13 | import com.alibaba.fastjson.JSONObject; 14 | import com.datastax.driver.core.Cluster; 15 | import com.datastax.driver.core.Cluster.Builder; 16 | import com.datastax.driver.core.ColumnDefinitions; 17 | import com.datastax.driver.core.PoolingOptions; 18 | import com.datastax.driver.core.ProtocolOptions; 19 | import com.datastax.driver.core.ResultSet; 20 | import com.datastax.driver.core.ResultSetFuture; 21 | import com.datastax.driver.core.Row; 22 | import com.datastax.driver.core.Session; 23 | import com.datastax.driver.core.SocketOptions; 24 | import com.datastax.driver.core.policies.DefaultRetryPolicy; 25 | import com.datastax.driver.mapping.Mapper; 26 | import com.datastax.driver.mapping.Mapper.Option; 27 | import com.datastax.driver.mapping.MappingManager; 28 | 29 | /** 30 | * 描述: Cassandra数据服务封装 31 | * 时间: 2017年11月15日 上午11:24:49 32 | * @author yi.zhang 33 | * @since 1.0 34 | * JDK版本:1.8 35 | */ 36 | @SuppressWarnings("all") 37 | public class CassandraFactory { 38 | private static Logger logger = LogManager.getLogger(); 39 | /** 40 | * 过期时间(单位:秒) 41 | */ 42 | private static int EXPIRE_TIME = 15 * 24 * 60 * 60; 43 | 44 | protected Session session = null; 45 | 46 | protected MappingManager mapping = null; 47 | 48 | private String servers; 49 | private String keyspace; 50 | private String username; 51 | private String password; 52 | 53 | public String getServers() { 54 | return servers; 55 | } 56 | 57 | public void setServers(String servers) { 58 | this.servers = servers; 59 | } 60 | 61 | public String getKeyspace() { 62 | return keyspace; 63 | } 64 | 65 | public void setKeyspace(String keyspace) { 66 | this.keyspace = keyspace; 67 | } 68 | 69 | public String getUsername() { 70 | return username; 71 | } 72 | 73 | public void setUsername(String username) { 74 | this.username = username; 75 | } 76 | 77 | public String getPassword() { 78 | return password; 79 | } 80 | 81 | 
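// Illustrative usage sketch (host, keyspace, table and credentials are hypothetical):
//   CassandraFactory factory = new CassandraFactory();
//   factory.init("127.0.0.1:9042", "demo_keyspace", "cassandra", "cassandra");
//   List rows = factory.executeQuery("select * from demo_table where id = ?", null, 1);
// servers accepts a comma-separated host:port list; the port defaults to 9042 when omitted.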
public void setPassword(String password) { 82 | this.password = password; 83 | } 84 | 85 | /** 86 | * 描述: 初始化配置 87 | * 时间: 2017年11月15日 上午11:25:07 88 | * @author yi.zhang 89 | * @param servers 服务地址 90 | * @param keyspace 命名空间 91 | * @param username 账号 92 | * @param password 密码 93 | */ 94 | public void init(String servers,String keyspace,String username,String password) { 95 | try { 96 | // socket 链接配置 97 | SocketOptions socket = new SocketOptions(); 98 | socket.setKeepAlive(true); 99 | socket.setReceiveBufferSize(1024* 1024); 100 | socket.setSendBufferSize(1024* 1024); 101 | socket.setConnectTimeoutMillis(5 * 1000); 102 | socket.setReadTimeoutMillis(1000); 103 | //设置连接池 104 | PoolingOptions pool = new PoolingOptions(); 105 | // pool.setMaxRequestsPerConnection(HostDistance.LOCAL, 32); 106 | // pool.setMaxRequestsPerConnection(HostDistance.REMOTE, 32); 107 | // pool.setCoreConnectionsPerHost(HostDistance.LOCAL, 2); 108 | // pool.setCoreConnectionsPerHost(HostDistance.REMOTE, 2); 109 | // pool.setMaxConnectionsPerHost(HostDistance.LOCAL, 4); 110 | // pool.setMaxConnectionsPerHost(HostDistance.REMOTE, 4); 111 | pool.setHeartbeatIntervalSeconds(60); 112 | pool.setIdleTimeoutSeconds(120); 113 | pool.setPoolTimeoutMillis(5 * 1000); 114 | List saddress = new ArrayList(); 115 | if (servers != null && !"".equals(servers)) { 116 | for (String server : servers.split(",")) { 117 | String[] address = server.split(":"); 118 | String ip = address[0]; 119 | int port = 9042; 120 | if (address != null && address.length > 1) { 121 | port = Integer.valueOf(address[1]); 122 | } 123 | saddress.add(new InetSocketAddress(ip, port)); 124 | } 125 | } 126 | InetSocketAddress[] addresses = new InetSocketAddress[saddress.size()]; 127 | saddress.toArray(addresses); 128 | 129 | Builder builder = Cluster.builder(); 130 | builder.withSocketOptions(socket); 131 | // 设置压缩方式 132 | builder.withCompression(ProtocolOptions.Compression.LZ4); 133 | // 负载策略 134 | // DCAwareRoundRobinPolicy loadBalance = DCAwareRoundRobinPolicy.builder().withLocalDc("localDc").withUsedHostsPerRemoteDc(2).allowRemoteDCsForLocalConsistencyLevel().build(); 135 | // builder.withLoadBalancingPolicy(loadBalance); 136 | // 重试策略 137 | builder.withRetryPolicy(DefaultRetryPolicy.INSTANCE); 138 | builder.withPoolingOptions(pool); 139 | builder.addContactPointsWithPorts(addresses); 140 | builder.withCredentials(username, password); 141 | Cluster cluster = builder.build(); 142 | if (keyspace != null && !"".equals(keyspace)) { 143 | session = cluster.connect(keyspace); 144 | } else { 145 | session = cluster.connect(); 146 | } 147 | mapping = new MappingManager(session); 148 | } catch (Exception e) { 149 | logger.error("-----Cassandra Config init Error-----", e); 150 | } 151 | } 152 | 153 | /** 154 | * 描述: 保存数据 155 | * 时间: 2017年11月15日 上午11:26:42 156 | * @author yi.zhang 157 | * @param obj 对象 158 | * @return 返回值 159 | */ 160 | public int save(Object obj) { 161 | try { 162 | if(session!=null){ 163 | init(servers, keyspace, username, password); 164 | } 165 | Mapper mapper = mapping.mapper(obj.getClass()); 166 | mapper.save(obj, Option.saveNullFields(true)); 167 | return 1; 168 | } catch (Exception e) { 169 | // TODO Auto-generated catch block 170 | e.printStackTrace(); 171 | } 172 | return -1; 173 | } 174 | 175 | /** 176 | * 描述: 更新数据 177 | * 时间: 2017年11月15日 上午11:26:42 178 | * @author yi.zhang 179 | * @param obj 对象 180 | * @return 返回值 181 | */ 182 | public int update(Object obj) { 183 | try { 184 | if(session!=null){ 185 | init(servers, keyspace, username, password); 
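// Re-initialise the Cluster/Session from the stored connection settings before mapping the entity;
// the update below writes only non-null fields and applies a 15-day TTL (EXPIRE_TIME).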
186 | } 187 | Mapper mapper = mapping.mapper(obj.getClass()); 188 | mapper.save(obj, Option.saveNullFields(false),Option.ttl(EXPIRE_TIME)); 189 | return 1; 190 | } catch (Exception e) { 191 | // TODO Auto-generated catch block 192 | e.printStackTrace(); 193 | } 194 | return -1; 195 | } 196 | 197 | /** 198 | * 描述: 删除数据 199 | * 时间: 2017年11月15日 上午11:26:42 200 | * @author yi.zhang 201 | * @param obj 对象 202 | * @return 返回值 203 | */ 204 | public int delete(Object obj) { 205 | try { 206 | if(session!=null){ 207 | init(servers, keyspace, username, password); 208 | } 209 | Mapper mapper = mapping.mapper(obj.getClass()); 210 | mapper.delete(obj); 211 | return 1; 212 | } catch (Exception e) { 213 | // TODO Auto-generated catch block 214 | e.printStackTrace(); 215 | } 216 | return -1; 217 | } 218 | 219 | /** 220 | * 描述: 数据操作(Insert|Update|Delete) 221 | * 时间: 2017年11月15日 上午11:27:52 222 | * @author yi.zhang 223 | * @param cql cql语句 224 | * @param params 参数 225 | * @return 返回值 226 | */ 227 | public int executeUpdate(String cql, Object... params) { 228 | try { 229 | if(session!=null){ 230 | init(servers, keyspace, username, password); 231 | } 232 | ResultSet rs = session.execute(cql, params); 233 | return rs.getAvailableWithoutFetching(); 234 | } catch (Exception e) { 235 | // TODO Auto-generated catch block 236 | e.printStackTrace(); 237 | } 238 | return -1; 239 | } 240 | 241 | /** 242 | * 描述: 数据库查询(Select) 243 | * 时间: 2017年11月15日 上午11:28:42 244 | * @author yi.zhang 245 | * @param cql cql语句 246 | * @param clazz 映射对象 247 | * @param params 占位符参数 248 | * @return 249 | */ 250 | public List executeQuery(String cql, Class clazz, Object... params) { 251 | try { 252 | if(session!=null){ 253 | init(servers, keyspace, username, password); 254 | } 255 | List list = new ArrayList(); 256 | ResultSet rs = session.execute(cql, params); 257 | ColumnDefinitions rscd = rs.getColumnDefinitions(); 258 | int count = rscd.size(); 259 | Map reflect = new HashMap(); 260 | for (int i = 0; i < count; i++) { 261 | String column = rscd.getName(i); 262 | String tcolumn = column.replaceAll("_", ""); 263 | if (clazz == null) { 264 | reflect.put(column, column); 265 | } else { 266 | Field[] fields = clazz.getDeclaredFields(); 267 | for (Field field : fields) { 268 | String tfield = field.getName(); 269 | if (tcolumn.equalsIgnoreCase(tfield)) { 270 | reflect.put(column, tfield); 271 | break; 272 | } 273 | } 274 | } 275 | } 276 | for (Row row : rs.all()) { 277 | JSONObject obj = new JSONObject(); 278 | for (String column : reflect.keySet()) { 279 | String key = reflect.get(column); 280 | Object value = row.getObject(column); 281 | obj.put(key, value); 282 | } 283 | Object object = obj; 284 | if (clazz != null) { 285 | object = JSON.parseObject(obj.toJSONString(), clazz); 286 | } 287 | list.add(object); 288 | } 289 | return list; 290 | } catch (Exception e) { 291 | // TODO Auto-generated catch block 292 | e.printStackTrace(); 293 | } 294 | return null; 295 | } 296 | 297 | /** 298 | * 描述: 查询数据表字段名(key:字段名,value:字段类型名) 299 | * 时间: 2017年11月15日 上午11:29:32 300 | * @author yi.zhang 301 | * @param table 表名 302 | * @return 303 | */ 304 | public Map queryColumns(String table){ 305 | try { 306 | String sql = "select * from "+table; 307 | ResultSet rs = session.execute(sql); 308 | ColumnDefinitions rscd = rs.getColumnDefinitions(); 309 | int count = rscd.size(); 310 | Map reflect = new HashMap(); 311 | for (int i = 0; i < count; i++) { 312 | String column = rscd.getName(i); 313 | String type = rscd.getType(i).getName().name().toLowerCase(); 314 | 
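// Map each result-set column name to its CQL type name (lower-cased); a hypothetical table
// could yield {id=bigint, name=varchar, created=timestamp}.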
reflect.put(column, type); 315 | } 316 | return reflect; 317 | } catch (Exception e) { 318 | // TODO Auto-generated catch block 319 | e.printStackTrace(); 320 | } 321 | return null; 322 | } 323 | /** 324 | * 描述: 查询数据库表名[未实现] 325 | * 时间: 2017年11月15日 上午11:29:59 326 | * @author yi.zhang 327 | * @return 返回表 328 | */ 329 | @Deprecated 330 | public List queryTables(){ 331 | try { 332 | List tables = new ArrayList(); 333 | String useQuery = "describe tables"; 334 | ResultSet rs = session.execute(useQuery); 335 | for (Row row : rs.all()) { 336 | String table = row.getString(1); 337 | tables.add(table); 338 | } 339 | return tables; 340 | } catch (Exception e) { 341 | // TODO Auto-generated catch block 342 | e.printStackTrace(); 343 | } 344 | return null; 345 | } 346 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/cassandra/CassandraService.java: -------------------------------------------------------------------------------- 1 | package com.share.service.cassandra; 2 | 3 | public class CassandraService { 4 | 5 | } 6 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/AbstractElasticsearchFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch; 2 | 3 | import java.lang.reflect.Field; 4 | import java.util.HashMap; 5 | import java.util.Map; 6 | 7 | import com.alibaba.fastjson.JSON; 8 | import com.alibaba.fastjson.JSONObject; 9 | 10 | public abstract class AbstractElasticsearchFactory implements ElasticsearchFactory{ 11 | protected static String regex = "[-,:,/\"]"; 12 | 13 | protected String clusterName="elasticsearch"; 14 | protected String servers="localhost"; 15 | protected String username; 16 | protected String password; 17 | protected int port; 18 | 19 | public abstract int defaultPort(); 20 | /** 21 | * 描述: 字段映射 22 | * 时间: 2018年1月9日 上午11:36:54 23 | * @author yi.zhang 24 | * @param index 索引 25 | * @param type 类型 26 | * @param clazz 实例 27 | * @return 28 | */ 29 | public abstract String mapping(String index,String type,@SuppressWarnings("rawtypes") Class clazz); 30 | 31 | public AbstractElasticsearchFactory() { 32 | this.port = defaultPort(); 33 | } 34 | public AbstractElasticsearchFactory(String servers) { 35 | this.servers = servers; 36 | this.port = defaultPort(); 37 | } 38 | public AbstractElasticsearchFactory(String servers,int port) { 39 | this.servers = servers; 40 | this.port = port>0?port:defaultPort(); 41 | } 42 | public AbstractElasticsearchFactory(String clusterName, String servers,int port) { 43 | this.clusterName = clusterName; 44 | this.servers = servers; 45 | this.port = port>0?port:defaultPort(); 46 | } 47 | public AbstractElasticsearchFactory(String clusterName, String servers, String username, String password) { 48 | this.clusterName = clusterName; 49 | this.servers = servers; 50 | this.username = username; 51 | this.password = password; 52 | this.port = defaultPort(); 53 | } 54 | public AbstractElasticsearchFactory(String clusterName, String servers, String username, String password,int port) { 55 | this.clusterName = clusterName; 56 | this.servers = servers; 57 | this.username = username; 58 | this.password = password; 59 | this.port = port>0?port:defaultPort(); 60 | } 61 | 62 | public String getClusterName() { 63 | return clusterName; 64 | } 65 | 66 | public void setClusterName(String clusterName) { 67 | this.clusterName = clusterName; 68 | } 69 | 70 
| public String getServers() { 71 | return servers; 72 | } 73 | 74 | public void setServers(String servers) { 75 | this.servers = servers; 76 | } 77 | 78 | public String getUsername() { 79 | return username; 80 | } 81 | 82 | public void setUsername(String username) { 83 | this.username = username; 84 | } 85 | 86 | public String getPassword() { 87 | return password; 88 | } 89 | 90 | public void setPassword(String password) { 91 | this.password = password; 92 | } 93 | 94 | public int getPort() { 95 | return port; 96 | } 97 | 98 | public void setPort(int port) { 99 | this.port = port; 100 | } 101 | 102 | protected Map reflect(@SuppressWarnings("rawtypes") Class clazz,boolean isCustom){ 103 | Map map = new HashMap(); 104 | Field[] fields = clazz.getDeclaredFields(); 105 | for (Field field : fields) { 106 | String name = field.getName(); 107 | if(name.equalsIgnoreCase("serialVersionUID")){ 108 | continue; 109 | } 110 | String type = field.getType().getSimpleName(); 111 | if(type.equalsIgnoreCase("int")||type.equalsIgnoreCase("Integer")){ 112 | String mapping = "{type: 'integer',index:'not_analyzed'}"; 113 | map.put(name, JSON.parseObject(mapping)); 114 | continue; 115 | }else if(type.equalsIgnoreCase("byte")){ 116 | String mapping = "{type: 'byte',index:'not_analyzed'}"; 117 | map.put(name, JSON.parseObject(mapping)); 118 | continue; 119 | }else if(type.equalsIgnoreCase("short")){ 120 | String mapping = "{type: 'short',index:'not_analyzed'}"; 121 | map.put(name, JSON.parseObject(mapping)); 122 | continue; 123 | }else if(type.equalsIgnoreCase("long")){ 124 | String mapping = "{type: 'long',index:'not_analyzed'}"; 125 | map.put(name, JSON.parseObject(mapping)); 126 | continue; 127 | }else if(type.equalsIgnoreCase("boolean")){ 128 | String mapping = "{type: 'boolean',index:'not_analyzed'}"; 129 | map.put(name, JSON.parseObject(mapping)); 130 | continue; 131 | }else if(type.equalsIgnoreCase("float")){ 132 | String mapping = "{type: 'float',index:'not_analyzed'}"; 133 | map.put(name, JSON.parseObject(mapping)); 134 | continue; 135 | }else if(type.equalsIgnoreCase("double")){ 136 | String mapping = "{type: 'double',index:'not_analyzed'}"; 137 | map.put(name, JSON.parseObject(mapping)); 138 | continue; 139 | }else if(type.equalsIgnoreCase("date")||type.equalsIgnoreCase("datetime")){ 140 | String format = "strict_date_optional_time||epoch_millis"; 141 | format += "||basic_date||basic_date_time||basic_date_time_no_millis"; 142 | format += "||basic_time||basic_time_no_millis||basic_t_time||basic_t_time_no_millis"; 143 | format += "||strict_basic_week_date||strict_basic_week_date_time_no_millis"; 144 | format += "||strict_date||strict_date_hour_minute||strict_date_hour_minute_second||strict_date_hour_minute_second_fraction||strict_date_hour_minute_second_millis"; 145 | format += "||strict_date_time||strict_date_time_no_millis"; 146 | format += "||strict_hour_minute||strict_hour_minute_second||strict_hour_minute_second_fraction||strict_hour_minute_second_millis"; 147 | format += "||strict_time||strict_time_no_millis||strict_t_time||strict_t_time_no_millis"; 148 | format += "||strict_week_date||strict_week_date_time||strict_week_date_time_no_millis"; 149 | format += "||strict_year_month_day"; 150 | format += "||yyyy-MM-dd||yyyy-MM-dd HH:mm:ss"; 151 | String mapping = "{type: 'date',index:'not_analyzed',format:'"+format+"'}"; 152 | map.put(name, JSON.parseObject(mapping)); 153 | continue; 154 | }else if(type.equalsIgnoreCase("char")||type.equalsIgnoreCase("Character")){ 155 | String mapping = "{type: 
'string',index:'not_analyzed'}"; 156 | map.put(name, JSON.parseObject(mapping)); 157 | continue; 158 | }else if(type.equalsIgnoreCase("object")){ 159 | String mapping = "{type: 'object'}"; 160 | map.put(name, JSON.parseObject(mapping)); 161 | continue; 162 | }else if(type.equalsIgnoreCase("String[]")){ 163 | String mapping = "{type: 'text'}"; 164 | map.put(name, JSON.parseObject(mapping)); 165 | continue; 166 | }else if(type.equalsIgnoreCase("string")){ 167 | String mapping = "{type: 'string',index:'analyzed',fields:{en:{type:'string',analyzer:'english'},keyword:{type:'keyword',ignore_above:256}"+(isCustom?",custom:{type:'string',analyzer:'es_analyzer'}":"")+"}}"; 168 | map.put(name, JSON.parseObject(mapping)); 169 | continue; 170 | }else{ 171 | String mapping = "{properties: "+JSON.toJSONString(reflect(field.getDeclaringClass(),isCustom))+"}"; 172 | map.put(name, JSON.parseObject(mapping)); 173 | } 174 | } 175 | return map; 176 | } 177 | 178 | protected JSONObject analyzer(String index){ 179 | String char_filter="{symbol_transform:{type:'mapping',mappings:['&=> and ','||=> or ']}}"; 180 | String filter="{default_stopwords:{type:'stop',stopwords:['a','an','and','are','as','at','be','but','by','for','if','in','into','is','it','no','not','of','on','or','such','that','the','their','then','there','these','they','this','to','was','will','with']}}"; 181 | String analyzer="{es_analyzer:{type:'custom',char_filter:['html_strip','symbol_transform'],tokenizer:'standard',filter:['lowercase','default_stopwords']}}"; 182 | String settings="{settings:{analysis:{char_filter:"+JSON.parseObject(char_filter).toJSONString()+",filter:"+JSON.parseObject(filter).toJSONString()+",analyzer:"+JSON.parseObject(analyzer).toJSONString()+"}}}"; 183 | return JSON.parseObject(settings); 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/ElasticsearchFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | /** 6 | * 描述: Elasticsearch接口 7 | * 时间: 2018年1月9日 上午11:21:01 8 | * @author yi.zhang 9 | * @since 1.0 10 | * JDK版本:1.8 11 | */ 12 | public interface ElasticsearchFactory { 13 | /** 14 | * 描述: 保存数据 15 | * 时间: 2018年1月9日 上午11:22:25 16 | * @author yi.zhang 17 | * @param index 索引 18 | * @param type 类型 19 | * @param json 数据对象(json对象或字符串) 20 | * @return 21 | */ 22 | public String insert(String index,String type,Object json); 23 | /** 24 | * 描述: 修改数据 25 | * 时间: 2018年1月9日 上午11:22:25 26 | * @author yi.zhang 27 | * @param index 索引 28 | * @param type 类型 29 | * @param id 唯一标识 30 | * @param json 数据对象(json对象或字符串) 31 | * @return 32 | */ 33 | public String update(String index,String type,String id,Object json); 34 | /** 35 | * 描述: 保存或修改数据 36 | * 时间: 2018年1月9日 上午11:22:25 37 | * @author yi.zhang 38 | * @param index 索引 39 | * @param type 类型 40 | * @param id 唯一标识 41 | * @param json 数据对象(json对象或字符串) 42 | * @return 43 | */ 44 | public String upsert(String index,String type,String id,Object json); 45 | /** 46 | * 描述: 删除数据 47 | * 时间: 2018年1月9日 上午11:22:25 48 | * @author yi.zhang 49 | * @param index 索引 50 | * @param type 类型 51 | * @param id 唯一标识 52 | * @return 53 | */ 54 | public String delete(String index,String type,String id); 55 | /** 56 | * 描述: 批量插入或修改 57 | * 时间: 2018年1月9日 上午11:25:47 58 | * @author yi.zhang 59 | * @param index 索引 60 | * @param type 类型 61 | * @param jsons 数据集合(json对象或字符串) 62 | * @return 
63 | */ 64 | public String bulkUpsert(String index,String type,List jsons); 65 | /** 66 | * 描述: 批量删除 67 | * 时间: 2018年1月9日 上午11:26:42 68 | * @author yi.zhang 69 | * @param index 索引 70 | * @param type 类型 71 | * @param ids 唯一标识集合 72 | * @return 73 | */ 74 | public String bulkDelete(String index,String type,String... ids); 75 | /** 76 | * 描述: 销毁索引 77 | * 时间: 2018年1月9日 上午11:27:42 78 | * @author yi.zhang 79 | * @param indexs 索引(支持通配符) 80 | * @return 81 | */ 82 | public String drop(String indexs); 83 | /** 84 | * 描述: 查询详细信息 85 | * 时间: 2018年1月9日 上午11:28:44 86 | * @author yi.zhang 87 | * @param index 索引 88 | * @param type 类型 89 | * @param id 唯一标识 90 | * @return 91 | */ 92 | public String select(String index,String type,String id); 93 | /** 94 | * 描述: 全文搜索匹配 95 | * 时间: 2018年1月9日 上午11:29:22 96 | * @author yi.zhang 97 | * @param indexs 索引 98 | * @param types 类型 99 | * @param condition 条件字符串 100 | * @return 101 | */ 102 | public String selectAll(String indexs,String types,String condition); 103 | /** 104 | * 描述: 精确字段匹配 105 | * 时间: 2018年1月9日 上午11:30:33 106 | * @author yi.zhang 107 | * @param indexs 索引 108 | * @param types 类型 109 | * @param field 字段 110 | * @param value 字段值 111 | * @return 112 | */ 113 | public String selectMatchAll(String indexs,String types,String field,String value); 114 | /** 115 | * 描述: 精确条件匹配 116 | * 时间: 2018年1月9日 上午11:31:46 117 | * @author yi.zhang 118 | * @param indexs 索引 119 | * @param types 类型 120 | * @param must must条件 121 | * @param should should条件 122 | * @param must_not must_not条件 123 | * @param ranges 范围条件 124 | * @return 125 | */ 126 | public String selectMatchAll(String indexs,String types,Map must,Map should,Map must_not,Map> ranges); 127 | /** 128 | * 描述: 【分页】精确条件【排序】匹配检索 129 | * 时间: 2018年1月9日 上午11:33:14 130 | * @author yi.zhang 131 | * @param indexs 索引 132 | * @param types 类型 133 | * @param must must条件 134 | * @param should should条件 135 | * @param must_not must_not条件 136 | * @param ranges 范围条件 137 | * @param order 排序字段 138 | * @param isAsc 排序类型(true:升序,false:降序) 139 | * @param pageNo 分页编号 140 | * @param pageSize 分页数量 141 | * @return 142 | */ 143 | public String selectMatchAll(String indexs,String types,Map must,Map should,Map must_not,Map> ranges,String order,boolean isAsc,int pageNo,int pageSize); 144 | } 145 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/entity/Result.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.entity; 2 | 3 | import java.util.Date; 4 | 5 | import com.alibaba.fastjson.JSON; 6 | import com.alibaba.fastjson.serializer.SerializerFeature; 7 | /** 8 | * 描述: 返回结果 9 | * 时间: 2018年1月3日 上午11:47:59 10 | * @author yi.zhang 11 | * @since 1.0 12 | * JDK版本:1.8 13 | */ 14 | public class Result{ 15 | public final static int SUCCESS_CODE = 200; 16 | public final static int ERROR_CODE = 500; 17 | public static Error ERROR = new Error(ERROR_CODE,null); 18 | public static Success SUCCESS = new Success(SUCCESS_CODE,null); 19 | private int status; 20 | private Date timestamp = new Date(); 21 | public Result() { 22 | super(); 23 | } 24 | public Result(int status) { 25 | super(); 26 | this.status = status; 27 | } 28 | public int getStatus() { 29 | return status; 30 | } 31 | public void setStatus(int status) { 32 | this.status = status; 33 | } 34 | public Date getTimestamp() { 35 | return timestamp; 36 | } 37 | public void setTimestamp(Date timestamp) { 38 | this.timestamp = timestamp; 39 | } 40 | /** 41 | * 
描述: 失败信息 42 | * 时间: 2018年1月3日 上午11:52:30 43 | * @author yi.zhang 44 | * @since 1.0 45 | * JDK版本:1.8 46 | */ 47 | public static class Error extends Result{ 48 | private String error; 49 | private Object message; 50 | public Error() { 51 | super(ERROR_CODE); 52 | } 53 | public Error(String error) { 54 | super(ERROR_CODE); 55 | this.error = error; 56 | } 57 | public Error(int status,String error) { 58 | super(status); 59 | this.error = error; 60 | } 61 | public Error(String error, Object message) { 62 | super(ERROR_CODE); 63 | this.error = error; 64 | this.message = message; 65 | } 66 | public Error(int status,String error, Object message) { 67 | super(status); 68 | this.error = error; 69 | this.message = message; 70 | } 71 | public String getError() { 72 | return error; 73 | } 74 | public void setError(String error) { 75 | this.error = error; 76 | } 77 | public Object getMessage() { 78 | return message; 79 | } 80 | public void setMessage(Object message) { 81 | this.message = message; 82 | } 83 | public String toString(){ 84 | return JSON.toJSONString(this, new SerializerFeature[]{SerializerFeature.BrowserCompatible}); 85 | } 86 | } 87 | /** 88 | * 描述: 一般成功信息 89 | * 时间: 2018年1月3日 上午11:52:56 90 | * @author yi.zhang 91 | * @since 1.0 92 | * JDK版本:1.8 93 | */ 94 | public static class Success extends Result{ 95 | private Object data; 96 | public Success() { 97 | super(SUCCESS_CODE); 98 | } 99 | public Success(Object data) { 100 | super(SUCCESS_CODE); 101 | this.data = data; 102 | } 103 | public Success(int status,Object data) { 104 | super(status); 105 | this.data = data; 106 | } 107 | public Object getData() { 108 | return data; 109 | } 110 | public void setData(Object data) { 111 | this.data = data; 112 | } 113 | public String toString(){ 114 | return JSON.toJSONString(this, new SerializerFeature[]{SerializerFeature.BrowserCompatible}); 115 | } 116 | } 117 | /** 118 | * 描述: 分页成功信息 119 | * 时间: 2018年1月3日 上午11:53:22 120 | * @author yi.zhang 121 | * @since 1.0 122 | * JDK版本:1.8 123 | */ 124 | public static class PSuccess extends Success{ 125 | private int pageNo; 126 | private int pageSize; 127 | private long total; 128 | public PSuccess() { 129 | super(); 130 | } 131 | public PSuccess(int pageNo, int pageSize, long total,Object data) { 132 | super(data); 133 | this.pageNo = pageNo; 134 | this.pageSize = pageSize; 135 | this.total = total; 136 | } 137 | 138 | public int getPageNo() { 139 | return pageNo; 140 | } 141 | public void setPageNo(int pageNo) { 142 | this.pageNo = pageNo; 143 | } 144 | public int getPageSize() { 145 | return pageSize; 146 | } 147 | public void setPageSize(int pageSize) { 148 | this.pageSize = pageSize; 149 | } 150 | public long getTotal() { 151 | return total; 152 | } 153 | public void setTotal(long total) { 154 | this.total = total; 155 | } 156 | public String toString(){ 157 | return JSON.toJSONString(this, new SerializerFeature[]{SerializerFeature.BrowserCompatible}); 158 | } 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/http/ElasticsearchExtendHttpFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.http; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Date; 5 | import java.util.List; 6 | import java.util.Map; 7 | 8 | import com.alibaba.fastjson.JSON; 9 | import com.alibaba.fastjson.JSONObject; 10 | import com.share.util.HttpUtil; 11 | import com.share.util.StringUtil; 12 | 
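// Illustrative usage sketch (index, type and field names are hypothetical; 9200 assumes the default HTTP port):
//   ElasticsearchExtendHttpFactory es = new ElasticsearchExtendHttpFactory("localhost", 9200);
//   String hits = es.selectTermAll("logs", "doc", "level", "ERROR");
// selectTermAll builds {query:{term:{level.keyword:'ERROR'}}} and POSTs it to /logs/doc/_search.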
13 | public class ElasticsearchExtendHttpFactory extends ElasticsearchHttpFactory{ 14 | public ElasticsearchExtendHttpFactory() { 15 | super(); 16 | } 17 | public ElasticsearchExtendHttpFactory(String servers) { 18 | super(servers); 19 | } 20 | public ElasticsearchExtendHttpFactory(String servers,int port) { 21 | super(servers, port); 22 | } 23 | public ElasticsearchExtendHttpFactory(String clusterName, String servers,int port) { 24 | super(clusterName, servers, port); 25 | } 26 | public ElasticsearchExtendHttpFactory(String clusterName, String servers, String username, String password) { 27 | super(clusterName, servers, username, password); 28 | } 29 | public ElasticsearchExtendHttpFactory(String clusterName, String servers, String username, String password,int port) { 30 | super(clusterName, servers, username, password, port); 31 | } 32 | public String selectTermAll(String indexs, String types, String field, String value) { 33 | if(StringUtil.isEmpty(indexs))indexs="_all"; 34 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty"; 35 | String body = ""; 36 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)&&!(field.matches(regex)||field.matches(value))){ 37 | String query = "{query:{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}}"; 38 | body = JSON.parseObject(query).toJSONString(); 39 | } 40 | String result = base(uri, HttpUtil.METHOD_POST, body); 41 | return result; 42 | } 43 | 44 | public String selectTermAll(String indexs, String types,Map must, Map should, Map must_not, Map> ranges) { 45 | if(StringUtil.isEmpty(indexs))indexs="_all"; 46 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty"; 47 | List must_terms = new ArrayList(); 48 | List should_terms = new ArrayList(); 49 | List must_not_terms = new ArrayList(); 50 | if(must!=null&&must.size()>0){ 51 | for (String field : must.keySet()) { 52 | if(field.matches(regex)){ 53 | continue; 54 | } 55 | Object text = must.get(field); 56 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 57 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 58 | if(value.startsWith("[")&&value.endsWith("]")){ 59 | List child_terms = new ArrayList(); 60 | List values = JSON.parseArray(value, String.class); 61 | for (String _value : values) { 62 | if(!_value.matches(regex)){ 63 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 64 | child_terms.add(JSON.parseObject(match)); 65 | } 66 | } 67 | String match = "{bool:{must:"+JSON.toJSONString(child_terms)+"}}"; 68 | must_terms.add(JSON.parseObject(match)); 69 | }else{ 70 | if(!value.matches(regex)){ 71 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 72 | must_terms.add(JSON.parseObject(match)); 73 | } 74 | } 75 | } 76 | } 77 | } 78 | if(should!=null&&should.size()>0){ 79 | for (String field : should.keySet()) { 80 | if(field.matches(regex)){ 81 | continue; 82 | } 83 | Object text = must.get(field); 84 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 85 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 86 | if(value.startsWith("[")&&value.endsWith("]")){ 87 | List child_terms = new ArrayList(); 88 | List values = JSON.parseArray(value, String.class); 89 | for (String _value : values) { 90 | if(!_value.matches(regex)){ 91 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 92 | 
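// Each accepted element becomes its own term clause on the .keyword sub-field; for a hypothetical
// entry {"status": ["OPEN","CLOSED"]} the expansion produces
//   {bool:{should:[{term:{status.keyword:'OPEN'}},{term:{status.keyword:'CLOSED'}}]}}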
child_terms.add(JSON.parseObject(match)); 93 | } 94 | } 95 | String match = "{bool:{should:"+JSON.toJSONString(child_terms)+"}}"; 96 | must_terms.add(JSON.parseObject(match)); 97 | }else{ 98 | if(!value.matches(regex)){ 99 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 100 | should_terms.add(JSON.parseObject(match)); 101 | } 102 | } 103 | } 104 | } 105 | } 106 | if(must_not!=null&&must_not.size()>0){ 107 | for (String field : must_not.keySet()) { 108 | if(field.matches(regex)){ 109 | continue; 110 | } 111 | Object text = must.get(field); 112 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 113 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 114 | if(value.startsWith("[")&&value.endsWith("]")){ 115 | List child_terms = new ArrayList(); 116 | List values = JSON.parseArray(value, String.class); 117 | for (String _value : values) { 118 | if(!_value.matches(regex)){ 119 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 120 | child_terms.add(JSON.parseObject(match)); 121 | } 122 | } 123 | String match = "{bool:{must_not:"+JSON.toJSONString(child_terms)+"}}"; 124 | must_not_terms.add(JSON.parseObject(match)); 125 | }else{ 126 | if(!value.matches(regex)){ 127 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 128 | must_not_terms.add(JSON.parseObject(match)); 129 | } 130 | } 131 | } 132 | } 133 | } 134 | if(ranges!=null&&ranges.size()>0){ 135 | for (String key : ranges.keySet()) { 136 | if(key.matches(regex)){ 137 | continue; 138 | } 139 | List between = ranges.get(key); 140 | if(between!=null&&!between.isEmpty()){ 141 | Object start = between.get(0); 142 | Object end = between.size()>1?between.get(1):null; 143 | if(start!=null&&end!=null){ 144 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 145 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 146 | if(starttime>endtime){ 147 | Object temp = start; 148 | start = end; 149 | end = temp; 150 | } 151 | } 152 | String range = "{range:{"+key+":{gte:"+start+",lt:"+end+"}}}"; 153 | must_terms.add(JSON.parseObject(range)); 154 | } 155 | }; 156 | } 157 | String query = "{query:{bool:{must:"+JSON.toJSONString(must_terms)+",must_not:"+JSON.toJSONString(must_not_terms)+",should:"+JSON.toJSONString(should_terms)+"}}}"; 158 | String body = JSON.parseObject(query).toJSONString(); 159 | String result = base(uri, HttpUtil.METHOD_POST, body); 160 | return result; 161 | } 162 | public String selectTermAll(String indexs, String types, Map must, Map should, Map must_not, Map> ranges, String order, boolean isAsc, int pageNo, int pageSize) { 163 | if(StringUtil.isEmpty(indexs))indexs="_all"; 164 | pageNo=pageNo<1?1:pageNo; 165 | pageSize=pageSize<1?10:pageSize; 166 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty&size="+pageSize+"&from"+(pageNo-1)*pageSize; 167 | List must_terms = new ArrayList(); 168 | List should_terms = new ArrayList(); 169 | List must_not_terms = new ArrayList(); 170 | if(must!=null&&must.size()>0){ 171 | for (String field : must.keySet()) { 172 | if(field.matches(regex)){ 173 | continue; 174 | } 175 | Object text = must.get(field); 176 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 177 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 178 | if(value.startsWith("[")&&value.endsWith("]")){ 179 | 
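// Array-valued must entry: build one term clause per element and combine them in a nested bool below.
// Elements matching the reserved-character pattern (regex) are skipped.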
List child_terms = new ArrayList(); 180 | List values = JSON.parseArray(value, String.class); 181 | for (String _value : values) { 182 | if(!_value.matches(regex)){ 183 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 184 | child_terms.add(JSON.parseObject(match)); 185 | } 186 | } 187 | String match = "{bool:{must:"+JSON.toJSONString(child_terms)+"}}"; 188 | must_terms.add(JSON.parseObject(match)); 189 | }else{ 190 | if(!value.matches(regex)){ 191 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 192 | must_terms.add(JSON.parseObject(match)); 193 | } 194 | } 195 | } 196 | } 197 | } 198 | if(should!=null&&should.size()>0){ 199 | for (String field : should.keySet()) { 200 | if(field.matches(regex)){ 201 | continue; 202 | } 203 | Object text = must.get(field); 204 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 205 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 206 | if(value.startsWith("[")&&value.endsWith("]")){ 207 | List child_terms = new ArrayList(); 208 | List values = JSON.parseArray(value, String.class); 209 | for (String _value : values) { 210 | if(!_value.matches(regex)){ 211 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 212 | child_terms.add(JSON.parseObject(match)); 213 | } 214 | } 215 | String match = "{bool:{should:"+JSON.toJSONString(child_terms)+"}}"; 216 | must_terms.add(JSON.parseObject(match)); 217 | }else{ 218 | if(!value.matches(regex)){ 219 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 220 | should_terms.add(JSON.parseObject(match)); 221 | } 222 | } 223 | } 224 | } 225 | } 226 | if(must_not!=null&&must_not.size()>0){ 227 | for (String field : must_not.keySet()) { 228 | if(field.matches(regex)){ 229 | continue; 230 | } 231 | Object text = must.get(field); 232 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 233 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 234 | if(value.startsWith("[")&&value.endsWith("]")){ 235 | List child_terms = new ArrayList(); 236 | List values = JSON.parseArray(value, String.class); 237 | for (String _value : values) { 238 | if(!_value.matches(regex)){ 239 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 240 | child_terms.add(JSON.parseObject(match)); 241 | } 242 | } 243 | String match = "{bool:{must_not:"+JSON.toJSONString(child_terms)+"}}"; 244 | must_not_terms.add(JSON.parseObject(match)); 245 | }else{ 246 | if(!value.matches(regex)){ 247 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 248 | must_not_terms.add(JSON.parseObject(match)); 249 | } 250 | } 251 | } 252 | } 253 | } 254 | if(ranges!=null&&ranges.size()>0){ 255 | for (String key : ranges.keySet()) { 256 | if(key.matches(regex)){ 257 | continue; 258 | } 259 | List between = ranges.get(key); 260 | if(between!=null&&!between.isEmpty()){ 261 | Object start = between.get(0); 262 | Object end = between.size()>1?between.get(1):null; 263 | if(start!=null&&end!=null){ 264 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 265 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 266 | if(starttime>endtime){ 267 | Object temp = start; 268 | start = end; 269 | end = temp; 270 | } 271 | } 272 | String range = 
"{range:{"+key+":{gte:"+start+",lt:"+end+"}}}"; 273 | must_terms.add(JSON.parseObject(range)); 274 | } 275 | }; 276 | } 277 | List sorts = new ArrayList(); 278 | sorts.add(JSON.parseObject("{_score:{order:'desc'}}")); 279 | if(!StringUtil.isEmpty(order)){ 280 | sorts.add(JSON.parseObject("{"+order+":{order:'"+(isAsc?"asc":"desc")+"'}}")); 281 | } 282 | String query = "{query:{bool:{must:"+JSON.toJSONString(must_terms)+",must_not:"+JSON.toJSONString(must_not_terms)+",should:"+JSON.toJSONString(should_terms)+"}},sort:"+JSON.toJSONString(sorts)+"}"; 283 | String body = JSON.parseObject(query).toJSONString(); 284 | String result = base(uri, HttpUtil.METHOD_POST, body); 285 | return result; 286 | } 287 | } 288 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/rest/ElasticsearchExtendHighRestFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.rest; 2 | 3 | import java.util.Date; 4 | import java.util.List; 5 | import java.util.Map; 6 | 7 | import org.elasticsearch.action.search.SearchRequest; 8 | import org.elasticsearch.action.search.SearchResponse; 9 | import org.elasticsearch.action.search.SearchType; 10 | import org.elasticsearch.index.query.BoolQueryBuilder; 11 | import org.elasticsearch.index.query.QueryBuilders; 12 | import org.elasticsearch.index.query.RangeQueryBuilder; 13 | import org.elasticsearch.search.aggregations.AggregationBuilders; 14 | import org.elasticsearch.search.builder.SearchSourceBuilder; 15 | import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; 16 | import org.elasticsearch.search.sort.SortBuilders; 17 | import org.elasticsearch.search.sort.SortOrder; 18 | 19 | import com.alibaba.fastjson.JSON; 20 | import com.share.util.StringUtil; 21 | 22 | public class ElasticsearchExtendHighRestFactory extends ElasticsearchHighRestFactory{ 23 | public ElasticsearchExtendHighRestFactory() { 24 | super(); 25 | } 26 | public ElasticsearchExtendHighRestFactory(String servers) { 27 | super(servers); 28 | } 29 | public ElasticsearchExtendHighRestFactory(String servers,int port) { 30 | super(servers, port); 31 | } 32 | public ElasticsearchExtendHighRestFactory(String clusterName, String servers,int port) { 33 | super(clusterName, servers, port); 34 | } 35 | public ElasticsearchExtendHighRestFactory(String clusterName, String servers, String username, String password) { 36 | super(clusterName, servers, username, password); 37 | } 38 | public ElasticsearchExtendHighRestFactory(String clusterName, String servers, String username, String password,int port) { 39 | super(clusterName, servers, username, password, port); 40 | } 41 | public String selectTermAll(String indexs,String types,String field,String value){ 42 | try { 43 | if(StringUtil.isEmpty(indexs))indexs="_all"; 44 | if(xclient==null){ 45 | init(); 46 | } 47 | SearchSourceBuilder search = new SearchSourceBuilder(); 48 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)&&!(field.matches(regex)||field.matches(value))){ 49 | search.query(QueryBuilders.termQuery(field, value)); 50 | } 51 | search.aggregation(AggregationBuilders.terms("data").field(field+".keyword")); 52 | search.explain(false); 53 | SearchRequest request = new SearchRequest(); 54 | request.searchType(SearchType.DFS_QUERY_THEN_FETCH); 55 | request.source(search); 56 | request.indices(indexs.split(",")); 57 | request.types(types.split(",")); 58 | SearchResponse response = 
xclient.search(request); 59 | return response.toString(); 60 | } catch (Exception e) { 61 | // TODO Auto-generated catch block 62 | e.printStackTrace(); 63 | } 64 | return null; 65 | } 66 | public String selectTermAll(String indexs,String types,Map must, Map should, Map must_not, Map> ranges){ 67 | try { 68 | if(StringUtil.isEmpty(indexs))indexs="_all"; 69 | if(xclient==null){ 70 | init(); 71 | } 72 | BoolQueryBuilder boolquery = QueryBuilders.boolQuery(); 73 | HighlightBuilder highlight = new HighlightBuilder(); 74 | if(must!=null&&must.size()>0){ 75 | for (String field : must.keySet()) { 76 | if(field.matches(regex)){ 77 | continue; 78 | } 79 | Object text = must.get(field); 80 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 81 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 82 | if(value.startsWith("[")&&value.endsWith("]")){ 83 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 84 | List values = JSON.parseArray(value, String.class); 85 | for (String _value : values) { 86 | if(!_value.matches(regex)){ 87 | child.should(QueryBuilders.termQuery(field, value)); 88 | } 89 | } 90 | boolquery.must(child); 91 | }else{ 92 | if(!value.matches(regex)){ 93 | boolquery.must(QueryBuilders.termQuery(field, value)); 94 | } 95 | } 96 | } 97 | highlight.field(field); 98 | } 99 | } 100 | if(should!=null&&should.size()>0){ 101 | for (String field : should.keySet()) { 102 | if(field.matches(regex)){ 103 | continue; 104 | } 105 | Object text = must.get(field); 106 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 107 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 108 | if(value.startsWith("[")&&value.endsWith("]")){ 109 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 110 | List values = JSON.parseArray(value, String.class); 111 | for (String _value : values) { 112 | if(!_value.matches(regex)){ 113 | child.should(QueryBuilders.termQuery(field, value)); 114 | } 115 | } 116 | boolquery.should(child); 117 | }else{ 118 | if(!value.matches(regex)){ 119 | boolquery.should(QueryBuilders.termQuery(field, value)); 120 | } 121 | } 122 | } 123 | highlight.field(field); 124 | } 125 | } 126 | if(must_not!=null&&must_not.size()>0){ 127 | for (String field : must_not.keySet()) { 128 | if(field.matches(regex)){ 129 | continue; 130 | } 131 | Object text = must.get(field); 132 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 133 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 134 | if(value.startsWith("[")&&value.endsWith("]")){ 135 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 136 | List values = JSON.parseArray(value, String.class); 137 | for (String _value : values) { 138 | if(!_value.matches(regex)){ 139 | child.should(QueryBuilders.termQuery(field, value)); 140 | } 141 | } 142 | boolquery.mustNot(child); 143 | }else{ 144 | if(!value.matches(regex)){ 145 | boolquery.mustNot(QueryBuilders.termQuery(field, value)); 146 | } 147 | } 148 | } 149 | highlight.field(field); 150 | } 151 | } 152 | if(ranges!=null&&ranges.size()>0){ 153 | for (String key : ranges.keySet()) { 154 | if(key.matches(regex)){ 155 | continue; 156 | } 157 | List between = ranges.get(key); 158 | if(between!=null&&!between.isEmpty()){ 159 | Object start = between.get(0); 160 | Object end = between.size()>1?between.get(1):null; 161 | if(start!=null&&end!=null){ 162 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 163 | Long endtime = end instanceof 
Date?((Date)end).getTime():Long.valueOf(end.toString()); 164 | if(starttime>endtime){ 165 | Object temp = start; 166 | start = end; 167 | end = temp; 168 | } 169 | } 170 | RangeQueryBuilder range = QueryBuilders.rangeQuery(key); 171 | if(start!=null){ 172 | range.gte(start); 173 | } 174 | if(start!=null){ 175 | range.lt(start); 176 | } 177 | boolquery.must(range); 178 | } 179 | }; 180 | } 181 | SearchSourceBuilder search = new SearchSourceBuilder(); 182 | search.query(boolquery); 183 | search.highlighter(highlight); 184 | search.explain(false); 185 | SearchRequest request = new SearchRequest(); 186 | request.searchType(SearchType.DFS_QUERY_THEN_FETCH); 187 | request.source(search); 188 | request.indices(indexs.split(",")); 189 | request.types(types.split(",")); 190 | SearchResponse response = xclient.search(request); 191 | return response.toString(); 192 | } catch (Exception e) { 193 | // TODO Auto-generated catch block 194 | e.printStackTrace(); 195 | } 196 | return null; 197 | } 198 | public String selectTermAll(String indexs, String types, Map must, Map should, Map must_not, Map> ranges, String order, boolean isAsc, int pageNo,int pageSize) { 199 | try { 200 | pageNo=pageNo<1?1:pageNo; 201 | pageSize=pageSize<1?10:pageSize; 202 | if(StringUtil.isEmpty(indexs))indexs="_all"; 203 | if(xclient==null){ 204 | init(); 205 | } 206 | BoolQueryBuilder boolquery = QueryBuilders.boolQuery(); 207 | HighlightBuilder highlight = new HighlightBuilder(); 208 | if(must!=null&&must.size()>0){ 209 | for (String field : must.keySet()) { 210 | if(field.matches(regex)){ 211 | continue; 212 | } 213 | Object text = must.get(field); 214 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 215 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 216 | if(value.startsWith("[")&&value.endsWith("]")){ 217 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 218 | List values = JSON.parseArray(value, String.class); 219 | for (String _value : values) { 220 | if(!_value.matches(regex)){ 221 | child.should(QueryBuilders.termQuery(field, value)); 222 | } 223 | } 224 | boolquery.must(child); 225 | }else{ 226 | if(!value.matches(regex)){ 227 | boolquery.must(QueryBuilders.termQuery(field, value)); 228 | } 229 | } 230 | } 231 | highlight.field(field); 232 | } 233 | } 234 | if(should!=null&&should.size()>0){ 235 | for (String field : should.keySet()) { 236 | if(field.matches(regex)){ 237 | continue; 238 | } 239 | Object text = must.get(field); 240 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 241 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 242 | if(value.startsWith("[")&&value.endsWith("]")){ 243 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 244 | List values = JSON.parseArray(value, String.class); 245 | for (String _value : values) { 246 | if(!_value.matches(regex)){ 247 | child.should(QueryBuilders.termQuery(field, value)); 248 | } 249 | } 250 | boolquery.should(child); 251 | }else{ 252 | if(!value.matches(regex)){ 253 | boolquery.should(QueryBuilders.termQuery(field, value)); 254 | } 255 | } 256 | } 257 | highlight.field(field); 258 | } 259 | } 260 | if(must_not!=null&&must_not.size()>0){ 261 | for (String field : must_not.keySet()) { 262 | if(field.matches(regex)){ 263 | continue; 264 | } 265 | Object text = must.get(field); 266 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 267 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 268 | 
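// Non-string values were JSON-encoded above; bracketed JSON-array values are decoded with
// JSON.parseArray and handled via a nested bool query, while a scalar value becomes a single
// term query attached with mustNot.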
if(value.startsWith("[")&&value.endsWith("]")){ 269 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 270 | List values = JSON.parseArray(value, String.class); 271 | for (String _value : values) { 272 | if(!_value.matches(regex)){ 273 | child.should(QueryBuilders.termQuery(field, value)); 274 | } 275 | } 276 | boolquery.mustNot(child); 277 | }else{ 278 | if(!value.matches(regex)){ 279 | boolquery.mustNot(QueryBuilders.termQuery(field, value)); 280 | } 281 | } 282 | } 283 | highlight.field(field); 284 | } 285 | } 286 | if(ranges!=null&&ranges.size()>0){ 287 | for (String key : ranges.keySet()) { 288 | if(key.matches(regex)){ 289 | continue; 290 | } 291 | List between = ranges.get(key); 292 | if(between!=null&&!between.isEmpty()){ 293 | Object start = between.get(0); 294 | Object end = between.size()>1?between.get(1):null; 295 | if(start!=null&&end!=null){ 296 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 297 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 298 | if(starttime>endtime){ 299 | Object temp = start; 300 | start = end; 301 | end = temp; 302 | } 303 | } 304 | RangeQueryBuilder range = QueryBuilders.rangeQuery(key); 305 | if(start!=null){ 306 | range.gte(start); 307 | } 308 | if(start!=null){ 309 | range.lt(start); 310 | } 311 | boolquery.must(range); 312 | } 313 | }; 314 | } 315 | if(ranges!=null&&ranges.size()>0){ 316 | for (String key : ranges.keySet()) { 317 | if(key.matches(regex)){ 318 | continue; 319 | } 320 | List between = ranges.get(key); 321 | if(between!=null&&!between.isEmpty()){ 322 | Object start = between.get(0); 323 | Object end = between.size()>1?between.get(1):null; 324 | if(start!=null&&end!=null){ 325 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 326 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 327 | if(starttime>endtime){ 328 | Object temp = start; 329 | start = end; 330 | end = temp; 331 | } 332 | } 333 | RangeQueryBuilder range = QueryBuilders.rangeQuery(key); 334 | if(start!=null){ 335 | range.gte(start); 336 | } 337 | if(start!=null){ 338 | range.lt(start); 339 | } 340 | boolquery.must(range); 341 | } 342 | }; 343 | } 344 | SearchSourceBuilder search = new SearchSourceBuilder(); 345 | search.query(boolquery); 346 | search.highlighter(highlight); 347 | search.from((pageNo-1)*pageSize); 348 | search.size(pageSize); 349 | search.sort(SortBuilders.scoreSort()); 350 | if(!StringUtil.isEmpty(order)){ 351 | search.sort(SortBuilders.fieldSort(order).order(isAsc?SortOrder.ASC:SortOrder.DESC)); 352 | } 353 | search.explain(false); 354 | SearchRequest request = new SearchRequest(); 355 | request.searchType(SearchType.DFS_QUERY_THEN_FETCH); 356 | request.source(search); 357 | request.indices(indexs.split(",")); 358 | request.types(types.split(",")); 359 | SearchResponse response = xclient.search(request); 360 | return response.toString(); 361 | } catch (Exception e) { 362 | // TODO Auto-generated catch block 363 | e.printStackTrace(); 364 | } 365 | return null; 366 | } 367 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/rest/ElasticsearchExtendRestFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.rest; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Date; 5 | import java.util.List; 6 | import 
java.util.Map; 7 | 8 | import com.alibaba.fastjson.JSON; 9 | import com.alibaba.fastjson.JSONObject; 10 | import com.share.util.HttpUtil; 11 | import com.share.util.StringUtil; 12 | 13 | public class ElasticsearchExtendRestFactory extends ElasticsearchRestFactory{ 14 | public ElasticsearchExtendRestFactory() { 15 | super(); 16 | } 17 | public ElasticsearchExtendRestFactory(String servers) { 18 | super(servers); 19 | } 20 | public ElasticsearchExtendRestFactory(String servers,int port) { 21 | super(servers, port); 22 | } 23 | public ElasticsearchExtendRestFactory(String clusterName, String servers,int port) { 24 | super(clusterName, servers, port); 25 | } 26 | public ElasticsearchExtendRestFactory(String clusterName, String servers, String username, String password) { 27 | super(clusterName, servers, username, password); 28 | } 29 | public ElasticsearchExtendRestFactory(String clusterName, String servers, String username, String password,int port) { 30 | super(clusterName, servers, username, password, port); 31 | } 32 | public String selectTermAll(String indexs, String types, String field, String value) { 33 | if(StringUtil.isEmpty(indexs))indexs="_all"; 34 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty"; 35 | String body = ""; 36 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)&&!(field.matches(regex)||field.matches(value))){ 37 | String query = "{query:{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}}"; 38 | body = JSON.parseObject(query).toJSONString(); 39 | } 40 | String result = base(uri, HttpUtil.METHOD_POST, body); 41 | return result; 42 | } 43 | 44 | public String selectTermAll(String indexs, String types,Map must, Map should, Map must_not, Map> ranges) { 45 | if(StringUtil.isEmpty(indexs))indexs="_all"; 46 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty"; 47 | List must_terms = new ArrayList(); 48 | List should_terms = new ArrayList(); 49 | List must_not_terms = new ArrayList(); 50 | if(must!=null&&must.size()>0){ 51 | for (String field : must.keySet()) { 52 | if(field.matches(regex)){ 53 | continue; 54 | } 55 | Object text = must.get(field); 56 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 57 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 58 | if(value.startsWith("[")&&value.endsWith("]")){ 59 | List child_terms = new ArrayList(); 60 | List values = JSON.parseArray(value, String.class); 61 | for (String _value : values) { 62 | if(!_value.matches(regex)){ 63 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 64 | child_terms.add(JSON.parseObject(match)); 65 | } 66 | } 67 | String match = "{bool:{must:"+JSON.toJSONString(child_terms)+"}}"; 68 | must_terms.add(JSON.parseObject(match)); 69 | }else{ 70 | if(!value.matches(regex)){ 71 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 72 | must_terms.add(JSON.parseObject(match)); 73 | } 74 | } 75 | } 76 | } 77 | } 78 | if(should!=null&&should.size()>0){ 79 | for (String field : should.keySet()) { 80 | if(field.matches(regex)){ 81 | continue; 82 | } 83 | Object text = must.get(field); 84 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 85 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 86 | if(value.startsWith("[")&&value.endsWith("]")){ 87 | List child_terms = new ArrayList(); 88 | List values = JSON.parseArray(value, String.class); 89 | 
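// Turn each decoded element into its own term clause (elements that match the reserved-character
// pattern are skipped), then wrap them in a bool/should below.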
for (String _value : values) { 90 | if(!_value.matches(regex)){ 91 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 92 | child_terms.add(JSON.parseObject(match)); 93 | } 94 | } 95 | String match = "{bool:{should:"+JSON.toJSONString(child_terms)+"}}"; 96 | must_terms.add(JSON.parseObject(match)); 97 | }else{ 98 | if(!value.matches(regex)){ 99 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 100 | should_terms.add(JSON.parseObject(match)); 101 | } 102 | } 103 | } 104 | } 105 | } 106 | if(must_not!=null&&must_not.size()>0){ 107 | for (String field : must_not.keySet()) { 108 | if(field.matches(regex)){ 109 | continue; 110 | } 111 | Object text = must.get(field); 112 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 113 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 114 | if(value.startsWith("[")&&value.endsWith("]")){ 115 | List child_terms = new ArrayList(); 116 | List values = JSON.parseArray(value, String.class); 117 | for (String _value : values) { 118 | if(!_value.matches(regex)){ 119 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 120 | child_terms.add(JSON.parseObject(match)); 121 | } 122 | } 123 | String match = "{bool:{must_not:"+JSON.toJSONString(child_terms)+"}}"; 124 | must_not_terms.add(JSON.parseObject(match)); 125 | }else{ 126 | if(!value.matches(regex)){ 127 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 128 | must_not_terms.add(JSON.parseObject(match)); 129 | } 130 | } 131 | } 132 | } 133 | } 134 | if(ranges!=null&&ranges.size()>0){ 135 | for (String key : ranges.keySet()) { 136 | if(key.matches(regex)){ 137 | continue; 138 | } 139 | List between = ranges.get(key); 140 | if(between!=null&&!between.isEmpty()){ 141 | Object start = between.get(0); 142 | Object end = between.size()>1?between.get(1):null; 143 | if(start!=null&&end!=null){ 144 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 145 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 146 | if(starttime>endtime){ 147 | Object temp = start; 148 | start = end; 149 | end = temp; 150 | } 151 | } 152 | String range = "{range:{"+key+":{gte:"+start+",lt:"+end+"}}}"; 153 | must_terms.add(JSON.parseObject(range)); 154 | } 155 | }; 156 | } 157 | String query = "{query:{bool:{must:"+JSON.toJSONString(must_terms)+",must_not:"+JSON.toJSONString(must_not_terms)+",should:"+JSON.toJSONString(should_terms)+"}}}"; 158 | String body = JSON.parseObject(query).toJSONString(); 159 | String result = base(uri, HttpUtil.METHOD_POST, body); 160 | return result; 161 | } 162 | public String selectTermAll(String indexs, String types, Map must, Map should, Map must_not, Map> ranges, String order, boolean isAsc, int pageNo, int pageSize) { 163 | if(StringUtil.isEmpty(indexs))indexs="_all"; 164 | pageNo=pageNo<1?1:pageNo; 165 | pageSize=pageSize<1?10:pageSize; 166 | String uri = "/"+indexs+(StringUtil.isEmpty(types)?"":"/"+types)+"/_search?pretty&size="+pageSize+"&from"+(pageNo-1)*pageSize; 167 | List must_terms = new ArrayList(); 168 | List should_terms = new ArrayList(); 169 | List must_not_terms = new ArrayList(); 170 | if(must!=null&&must.size()>0){ 171 | for (String field : must.keySet()) { 172 | if(field.matches(regex)){ 173 | continue; 174 | } 175 | Object text = must.get(field); 176 | String value = text instanceof 
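/*
 * Editor's note (hedged) on the map-based selectTermAll above: the should and must_not loops
 * both read
 *   Object text = must.get(field);
 * which looks like a copy-paste slip; should.get(field) and must_not.get(field) were presumably
 * intended. In the should loop's array branch the nested bool is also added to must_terms rather
 * than should_terms, so array-valued "should" conditions become required instead of optional.
 */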
String ?text.toString():JSON.toJSONString(text); 177 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 178 | if(value.startsWith("[")&&value.endsWith("]")){ 179 | List child_terms = new ArrayList(); 180 | List values = JSON.parseArray(value, String.class); 181 | for (String _value : values) { 182 | if(!_value.matches(regex)){ 183 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 184 | child_terms.add(JSON.parseObject(match)); 185 | } 186 | } 187 | String match = "{bool:{must:"+JSON.toJSONString(child_terms)+"}}"; 188 | must_terms.add(JSON.parseObject(match)); 189 | }else{ 190 | if(!value.matches(regex)){ 191 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 192 | must_terms.add(JSON.parseObject(match)); 193 | } 194 | } 195 | } 196 | } 197 | } 198 | if(should!=null&&should.size()>0){ 199 | for (String field : should.keySet()) { 200 | if(field.matches(regex)){ 201 | continue; 202 | } 203 | Object text = must.get(field); 204 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 205 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 206 | if(value.startsWith("[")&&value.endsWith("]")){ 207 | List child_terms = new ArrayList(); 208 | List values = JSON.parseArray(value, String.class); 209 | for (String _value : values) { 210 | if(!_value.matches(regex)){ 211 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 212 | child_terms.add(JSON.parseObject(match)); 213 | } 214 | } 215 | String match = "{bool:{should:"+JSON.toJSONString(child_terms)+"}}"; 216 | must_terms.add(JSON.parseObject(match)); 217 | }else{ 218 | if(!value.matches(regex)){ 219 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 220 | should_terms.add(JSON.parseObject(match)); 221 | } 222 | } 223 | } 224 | } 225 | } 226 | if(must_not!=null&&must_not.size()>0){ 227 | for (String field : must_not.keySet()) { 228 | if(field.matches(regex)){ 229 | continue; 230 | } 231 | Object text = must.get(field); 232 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 233 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 234 | if(value.startsWith("[")&&value.endsWith("]")){ 235 | List child_terms = new ArrayList(); 236 | List values = JSON.parseArray(value, String.class); 237 | for (String _value : values) { 238 | if(!_value.matches(regex)){ 239 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+_value+"'}}"; 240 | child_terms.add(JSON.parseObject(match)); 241 | } 242 | } 243 | String match = "{bool:{must_not:"+JSON.toJSONString(child_terms)+"}}"; 244 | must_not_terms.add(JSON.parseObject(match)); 245 | }else{ 246 | if(!value.matches(regex)){ 247 | String match = "{term:{"+(field.endsWith(".keyword")?field:field+".keyword")+":'"+value+"'}}"; 248 | must_not_terms.add(JSON.parseObject(match)); 249 | } 250 | } 251 | } 252 | } 253 | } 254 | if(ranges!=null&&ranges.size()>0){ 255 | for (String key : ranges.keySet()) { 256 | if(key.matches(regex)){ 257 | continue; 258 | } 259 | List between = ranges.get(key); 260 | if(between!=null&&!between.isEmpty()){ 261 | Object start = between.get(0); 262 | Object end = between.size()>1?between.get(1):null; 263 | if(start!=null&&end!=null){ 264 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 265 | Long endtime = end instanceof 
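/*
 * Editor's note (hedged) on this paged overload:
 * - The URI built at the top concatenates "&from"+(pageNo-1)*pageSize; Elasticsearch expects
 *   "&from=" + (pageNo-1)*pageSize, so the offset is most likely ignored as written.
 * - As in the unpaged variant, the should/must_not loops read must.get(field) and the should
 *   array branch adds its nested bool to must_terms instead of should_terms.
 * - The range clause below is built by concatenating start/end into a JSON string; if they are
 *   java.util.Date values, their toString() output will not parse, so epoch milliseconds (or a
 *   formatted date string) are presumably expected here.
 */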
Date?((Date)end).getTime():Long.valueOf(end.toString()); 266 | if(starttime>endtime){ 267 | Object temp = start; 268 | start = end; 269 | end = temp; 270 | } 271 | } 272 | String range = "{range:{"+key+":{gte:"+start+",lt:"+end+"}}}"; 273 | must_terms.add(JSON.parseObject(range)); 274 | } 275 | }; 276 | } 277 | List sorts = new ArrayList(); 278 | sorts.add(JSON.parseObject("{_score:{order:'desc'}}")); 279 | if(!StringUtil.isEmpty(order)){ 280 | sorts.add(JSON.parseObject("{"+order+":{order:'"+(isAsc?"asc":"desc")+"'}}")); 281 | } 282 | String query = "{query:{bool:{must:"+JSON.toJSONString(must_terms)+",must_not:"+JSON.toJSONString(must_not_terms)+",should:"+JSON.toJSONString(should_terms)+"}},sort:"+JSON.toJSONString(sorts)+"}"; 283 | String body = JSON.parseObject(query).toJSONString(); 284 | String result = base(uri, HttpUtil.METHOD_POST, body); 285 | return result; 286 | } 287 | } 288 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/spring/ElasticsearchSpringFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.spring; 2 | 3 | import java.util.Properties; 4 | 5 | import org.apache.logging.log4j.LogManager; 6 | import org.apache.logging.log4j.Logger; 7 | import org.springframework.data.elasticsearch.client.TransportClientFactoryBean; 8 | import org.springframework.data.elasticsearch.core.ElasticsearchTemplate; 9 | 10 | import com.share.util.StringUtil; 11 | 12 | public class ElasticsearchSpringFactory { 13 | private static Logger logger = LogManager.getLogger(); 14 | protected ElasticsearchTemplate template; 15 | private String clusterName; 16 | private String servers; 17 | private String username; 18 | private String password; 19 | 20 | public String getClusterName() { 21 | return clusterName; 22 | } 23 | 24 | public void setClusterName(String clusterName) { 25 | this.clusterName = clusterName; 26 | } 27 | 28 | public String getServers() { 29 | return servers; 30 | } 31 | 32 | public void setServers(String servers) { 33 | this.servers = servers; 34 | } 35 | 36 | public String getUsername() { 37 | return username; 38 | } 39 | 40 | public void setUsername(String username) { 41 | this.username = username; 42 | } 43 | 44 | public String getPassword() { 45 | return password; 46 | } 47 | 48 | public void setPassword(String password) { 49 | this.password = password; 50 | } 51 | 52 | /** 53 | * 描述: Elasticsearch服务初始化 54 | * 时间: 2017年11月14日 上午10:55:02 55 | * @author yi.zhang 56 | */ 57 | public void init(String clusterName,String servers,String username,String password){ 58 | try { 59 | TransportClientFactoryBean client = new TransportClientFactoryBean(); 60 | client.setClusterName(clusterName); 61 | String clusterNodes = ""; 62 | for(String server : servers.split(",")){ 63 | String[] address = server.split(":"); 64 | String ip = address[0]; 65 | int port=9300; 66 | if(address.length>1){ 67 | port = Integer.valueOf(address[1]); 68 | } 69 | if(StringUtil.isEmpty(clusterNodes)){ 70 | clusterNodes = ip+":"+port; 71 | }else{ 72 | clusterNodes +=","+ ip+":"+port; 73 | } 74 | } 75 | client.setClusterNodes(clusterNodes); 76 | if(!StringUtil.isEmpty(username)&&!StringUtil.isEmpty(password)){ 77 | Properties properties = new Properties(); 78 | properties.put("xpack.security.user",username+":"+password); 79 | client.setProperties(properties); 80 | } 81 | client.afterPropertiesSet(); 82 | template = new 
ElasticsearchTemplate(client.getObject()); 83 | } catch (Exception e) { 84 | logger.error("-----Elasticsearch Config init Error-----", e); 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/elasticsearch/transport/ElasticsearchExtendTransportFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.elasticsearch.transport; 2 | 3 | import java.util.Date; 4 | import java.util.List; 5 | import java.util.Map; 6 | 7 | import org.elasticsearch.action.search.SearchRequestBuilder; 8 | import org.elasticsearch.action.search.SearchResponse; 9 | import org.elasticsearch.action.search.SearchType; 10 | import org.elasticsearch.index.query.BoolQueryBuilder; 11 | import org.elasticsearch.index.query.QueryBuilders; 12 | import org.elasticsearch.index.query.RangeQueryBuilder; 13 | import org.elasticsearch.search.aggregations.AggregationBuilders; 14 | import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; 15 | import org.elasticsearch.search.sort.SortBuilders; 16 | import org.elasticsearch.search.sort.SortOrder; 17 | 18 | import com.alibaba.fastjson.JSON; 19 | import com.share.util.StringUtil; 20 | 21 | public class ElasticsearchExtendTransportFactory extends ElasticsearchTransportFactory{ 22 | public ElasticsearchExtendTransportFactory() { 23 | super(); 24 | } 25 | public ElasticsearchExtendTransportFactory(String servers) { 26 | super(servers); 27 | } 28 | public ElasticsearchExtendTransportFactory(String servers,int port) { 29 | super(servers, port); 30 | } 31 | public ElasticsearchExtendTransportFactory(String clusterName, String servers,int port) { 32 | super(clusterName, servers, port); 33 | } 34 | public ElasticsearchExtendTransportFactory(String clusterName, String servers, String username, String password) { 35 | super(clusterName, servers, username, password); 36 | } 37 | public ElasticsearchExtendTransportFactory(String clusterName, String servers, String username, String password,int port) { 38 | super(clusterName, servers, username, password, port); 39 | } 40 | public String selectTermAll(String indexs,String types,String field,String value){ 41 | try { 42 | if(client==null){ 43 | init(); 44 | } 45 | SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(",")); 46 | request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); 47 | request.setQuery(QueryBuilders.termQuery(field, value)); 48 | request.highlighter(new HighlightBuilder().field(field)); 49 | request.addAggregation(AggregationBuilders.terms("data").field(field+".keyword")); 50 | request.setExplain(false); 51 | SearchResponse response = request.get(); 52 | return response.toString(); 53 | } catch (Exception e) { 54 | // TODO Auto-generated catch block 55 | e.printStackTrace(); 56 | } 57 | return null; 58 | } 59 | public String selectTermAll(String indexs,String types,Map must,Map should,Map must_not,Map> ranges){ 60 | try { 61 | if(client==null){ 62 | init(); 63 | } 64 | BoolQueryBuilder boolquery = QueryBuilders.boolQuery(); 65 | HighlightBuilder highlight = new HighlightBuilder(); 66 | if(must!=null&&must.size()>0){ 67 | for (String field : must.keySet()) { 68 | if(field.matches(regex)){ 69 | continue; 70 | } 71 | Object text = must.get(field); 72 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 73 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 74 | 
if(value.startsWith("[")&&value.endsWith("]")){ 75 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 76 | List values = JSON.parseArray(value, String.class); 77 | for (String _value : values) { 78 | if(!_value.matches(regex)){ 79 | child.should(QueryBuilders.termQuery(field, value)); 80 | } 81 | } 82 | boolquery.must(child); 83 | }else{ 84 | if(!value.matches(regex)){ 85 | boolquery.must(QueryBuilders.termQuery(field, value)); 86 | } 87 | } 88 | } 89 | highlight.field(field); 90 | } 91 | } 92 | if(should!=null&&should.size()>0){ 93 | for (String field : should.keySet()) { 94 | if(field.matches(regex)){ 95 | continue; 96 | } 97 | Object text = must.get(field); 98 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 99 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 100 | if(value.startsWith("[")&&value.endsWith("]")){ 101 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 102 | List values = JSON.parseArray(value, String.class); 103 | for (String _value : values) { 104 | if(!_value.matches(regex)){ 105 | child.should(QueryBuilders.termQuery(field, value)); 106 | } 107 | } 108 | boolquery.should(child); 109 | }else{ 110 | if(!value.matches(regex)){ 111 | boolquery.should(QueryBuilders.termQuery(field, value)); 112 | } 113 | } 114 | } 115 | highlight.field(field); 116 | } 117 | } 118 | if(must_not!=null&&must_not.size()>0){ 119 | for (String field : must_not.keySet()) { 120 | if(field.matches(regex)){ 121 | continue; 122 | } 123 | Object text = must.get(field); 124 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 125 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 126 | if(value.startsWith("[")&&value.endsWith("]")){ 127 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 128 | List values = JSON.parseArray(value, String.class); 129 | for (String _value : values) { 130 | if(!_value.matches(regex)){ 131 | child.should(QueryBuilders.termQuery(field, value)); 132 | } 133 | } 134 | boolquery.mustNot(child); 135 | }else{ 136 | if(!value.matches(regex)){ 137 | boolquery.mustNot(QueryBuilders.termQuery(field, value)); 138 | } 139 | } 140 | } 141 | highlight.field(field); 142 | } 143 | } 144 | if(ranges!=null&&ranges.size()>0){ 145 | for (String key : ranges.keySet()) { 146 | if(key.matches(regex)){ 147 | continue; 148 | } 149 | List between = ranges.get(key); 150 | if(between!=null&&!between.isEmpty()){ 151 | Object start = between.get(0); 152 | Object end = between.size()>1?between.get(1):null; 153 | if(start!=null&&end!=null){ 154 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 155 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 156 | if(starttime>endtime){ 157 | Object temp = start; 158 | start = end; 159 | end = temp; 160 | } 161 | } 162 | RangeQueryBuilder range = QueryBuilders.rangeQuery(key); 163 | if(start!=null){ 164 | range.gte(start); 165 | } 166 | if(start!=null){ 167 | range.lt(start); 168 | } 169 | boolquery.must(range); 170 | } 171 | } 172 | } 173 | SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(",")); 174 | request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); 175 | request.setQuery(boolquery); 176 | request.highlighter(highlight); 177 | request.setExplain(false); 178 | SearchResponse response = request.get(); 179 | return response.toString(); 180 | } catch (Exception e) { 181 | // TODO Auto-generated catch block 182 | e.printStackTrace(); 183 | } 184 
| return null; 185 | } 186 | public String selectTermAll(String indexs, String types, Map must, Map should, Map must_not, Map> ranges, String order, boolean isAsc, int pageNo,int pageSize) { 187 | if(client==null){ 188 | init(); 189 | } 190 | pageNo=pageNo<1?1:pageNo; 191 | pageSize=pageSize<1?10:pageSize; 192 | BoolQueryBuilder boolquery = QueryBuilders.boolQuery(); 193 | HighlightBuilder highlight = new HighlightBuilder(); 194 | if(must!=null&&must.size()>0){ 195 | for (String field : must.keySet()) { 196 | if(field.matches(regex)){ 197 | continue; 198 | } 199 | Object text = must.get(field); 200 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 201 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 202 | if(value.startsWith("[")&&value.endsWith("]")){ 203 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 204 | List values = JSON.parseArray(value, String.class); 205 | for (String _value : values) { 206 | if(!_value.matches(regex)){ 207 | child.should(QueryBuilders.termQuery(field, value)); 208 | } 209 | } 210 | boolquery.must(child); 211 | }else{ 212 | if(!value.matches(regex)){ 213 | boolquery.must(QueryBuilders.termQuery(field, value)); 214 | } 215 | } 216 | } 217 | highlight.field(field); 218 | } 219 | } 220 | if(should!=null&&should.size()>0){ 221 | for (String field : should.keySet()) { 222 | if(field.matches(regex)){ 223 | continue; 224 | } 225 | Object text = must.get(field); 226 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 227 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 228 | if(value.startsWith("[")&&value.endsWith("]")){ 229 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 230 | List values = JSON.parseArray(value, String.class); 231 | for (String _value : values) { 232 | if(!_value.matches(regex)){ 233 | child.should(QueryBuilders.termQuery(field, value)); 234 | } 235 | } 236 | boolquery.should(child); 237 | }else{ 238 | if(!value.matches(regex)){ 239 | boolquery.should(QueryBuilders.termQuery(field, value)); 240 | } 241 | } 242 | } 243 | highlight.field(field); 244 | } 245 | } 246 | if(must_not!=null&&must_not.size()>0){ 247 | for (String field : must_not.keySet()) { 248 | if(field.matches(regex)){ 249 | continue; 250 | } 251 | Object text = must.get(field); 252 | String value = text instanceof String ?text.toString():JSON.toJSONString(text); 253 | if(!StringUtil.isEmpty(field)&&!StringUtil.isEmpty(value)){ 254 | if(value.startsWith("[")&&value.endsWith("]")){ 255 | BoolQueryBuilder child = QueryBuilders.boolQuery(); 256 | List values = JSON.parseArray(value, String.class); 257 | for (String _value : values) { 258 | if(!_value.matches(regex)){ 259 | child.should(QueryBuilders.termQuery(field, value)); 260 | } 261 | } 262 | boolquery.mustNot(child); 263 | }else{ 264 | if(!value.matches(regex)){ 265 | boolquery.mustNot(QueryBuilders.termQuery(field, value)); 266 | } 267 | } 268 | } 269 | highlight.field(field); 270 | } 271 | } 272 | if(ranges!=null&&ranges.size()>0){ 273 | for (String key : ranges.keySet()) { 274 | if(key.matches(regex)){ 275 | continue; 276 | } 277 | List between = ranges.get(key); 278 | if(between!=null&&!between.isEmpty()){ 279 | Object start = between.get(0); 280 | Object end = between.size()>1?between.get(1):null; 281 | if(start!=null&&end!=null){ 282 | Long starttime = start instanceof Date?((Date)start).getTime():Long.valueOf(start.toString()); 283 | Long endtime = end instanceof Date?((Date)end).getTime():Long.valueOf(end.toString()); 284 | 
if(starttime>endtime){ 285 | Object temp = start; 286 | start = end; 287 | end = temp; 288 | } 289 | } 290 | RangeQueryBuilder range = QueryBuilders.rangeQuery(key); 291 | if(start!=null){ 292 | range.gte(start); 293 | } 294 | if(start!=null){ 295 | range.lt(start); 296 | } 297 | boolquery.must(range); 298 | } 299 | }; 300 | } 301 | 302 | SearchRequestBuilder request = client.prepareSearch(indexs.split(",")).setTypes(types.split(",")); 303 | request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); 304 | request.setQuery(boolquery); 305 | request.highlighter(highlight); 306 | request.addSort(SortBuilders.scoreSort()); 307 | if(!StringUtil.isEmpty(order)){ 308 | request.addSort(SortBuilders.fieldSort(order).order(isAsc?SortOrder.ASC:SortOrder.DESC)); 309 | } 310 | request.setFrom((pageNo-1)*pageSize); 311 | request.setSize(pageSize); 312 | request.setExplain(false); 313 | SearchResponse response = request.get(); 314 | return response.toString(); 315 | } 316 | } 317 | -------------------------------------------------------------------------------- /src/main/java/com/share/service/greenplum/GreenplumFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.greenplum; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.apache.logging.log4j.LogManager; 7 | import org.apache.logging.log4j.Logger; 8 | 9 | import com.share.service.jdbc.JDBCFactory; 10 | 11 | 12 | /** 13 | * 描述: 数据仓库(Greenplum)服务封装 14 | * 时间: 2017年11月15日 上午11:30:36 15 | * @author yi.zhang 16 | * @since 1.0 17 | * JDK版本:1.8 18 | */ 19 | public class GreenplumFactory extends JDBCFactory { 20 | private static Logger logger = LogManager.getLogger(); 21 | public static String GREENPLUM_SCHEMA = null; 22 | private String address; 23 | private String database; 24 | private String schema; 25 | private String username; 26 | private String password; 27 | private boolean isDruid; 28 | private int max_pool_size=100; 29 | private int init_pool_size=10; 30 | 31 | /** 32 | * 描述: 初始化配置 33 | * 时间: 2017年11月15日 上午11:30:53 34 | * @author yi.zhang 35 | * @param address 地址 36 | * @param database 认证数据库 37 | * @param schema 操作数据库 38 | * @param username 用户名 39 | * @param password 密码 40 | * @param isDruid 是否使用Druid 41 | * @param max_pool_size 最大连接池数 42 | * @param init_pool_size 最小连接池 43 | */ 44 | public void init(String address,String database,String schema,String username,String password,boolean isDruid,Integer max_pool_size,Integer init_pool_size){ 45 | try { 46 | String driverName = "org.postgresql.Driver"; 47 | GREENPLUM_SCHEMA = schema; 48 | String url = "jdbc:postgresql://"+address+"/"+database; 49 | super.init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 50 | } catch (Exception e) { 51 | logger.error("-----Greenplum Config init Error-----", e); 52 | } 53 | } 54 | 55 | /** 56 | * 描述: 数据操作(Insert|Update|Delete) 57 | * 时间: 2017年11月15日 上午11:27:52 58 | * @author yi.zhang 59 | * @param sql sql语句 60 | * @param params 参数 61 | * @return 返回值 62 | */ 63 | public int executeUpdate(String sql,Object...params ){ 64 | try { 65 | if(connect==null){ 66 | this.init(address, database, schema, username, password, isDruid, max_pool_size, init_pool_size); 67 | } 68 | return super.executeUpdate(handleSQL(sql), params); 69 | } catch (Exception e) { 70 | // TODO Auto-generated catch block 71 | e.printStackTrace(); 72 | }; 73 | return -1; 74 | } 75 | /** 76 | * 描述: 数据库查询(Select) 77 | * 时间: 2017年11月15日 上午11:28:42 78 | * @author yi.zhang 79 | * @param sql 
sql语句 80 | * @param clazz 映射对象 81 | * @param params 占位符参数 82 | * @return 83 | */ 84 | @SuppressWarnings("rawtypes") 85 | public List executeQuery(String sql,Class clazz,Object...params){ 86 | try { 87 | if(connect==null){ 88 | this.init(address, database, schema, username, password, isDruid, max_pool_size, init_pool_size); 89 | } 90 | return super.executeQuery(handleSQL(sql),clazz, params); 91 | } catch (Exception e) { 92 | // TODO Auto-generated catch block 93 | e.printStackTrace(); 94 | }; 95 | return null; 96 | } 97 | /** 98 | * 描述: 查询数据表字段名(key:字段名,value:字段类型名) 99 | * 时间: 2017年11月15日 上午11:29:32 100 | * @author yi.zhang 101 | * @param table 表名 102 | * @return 103 | */ 104 | public Map queryColumns(String table){ 105 | try { 106 | if(connect==null){ 107 | this.init(address, database, schema, username, password, isDruid, max_pool_size, init_pool_size); 108 | } 109 | if(GREENPLUM_SCHEMA!=null&&!table.contains(GREENPLUM_SCHEMA+".")){ 110 | table = GREENPLUM_SCHEMA+"."+table; 111 | } 112 | return super.queryColumns(table); 113 | } catch (Exception e) { 114 | // TODO Auto-generated catch block 115 | e.printStackTrace(); 116 | } 117 | return null; 118 | } 119 | /** 120 | * 描述: SQL语句处理 121 | * 时间: 2017年11月15日 上午11:34:01 122 | * @author yi.zhang 123 | * @param sql SQL语句 124 | * @return 125 | */ 126 | private String handleSQL(String sql){ 127 | if(GREENPLUM_SCHEMA!=null&&!sql.contains(GREENPLUM_SCHEMA+".")){ 128 | sql= sql.trim(); 129 | if(sql.toLowerCase().startsWith("insert")){ 130 | String temp = sql.substring(0, sql.indexOf("(")); 131 | String table = temp.substring(temp.lastIndexOf(" ")+1); 132 | sql = sql.replaceFirst(table, GREENPLUM_SCHEMA+"."+table); 133 | } 134 | if(sql.toLowerCase().startsWith("update")){ 135 | String temp = sql.substring(0, sql.toLowerCase().indexOf("set")); 136 | String table = temp.substring(temp.indexOf(" ")+1); 137 | sql = sql.replaceFirst(table, GREENPLUM_SCHEMA+"."+table); 138 | } 139 | if(sql.toLowerCase().startsWith("select")||sql.toLowerCase().startsWith("delete")){ 140 | String temp = sql.substring(sql.toLowerCase().indexOf("from")).trim(); 141 | String table = !temp.contains(" ")?temp.substring(0):temp.substring(0,temp.toLowerCase().contains("where")?temp.toLowerCase().indexOf("where"):temp.indexOf(" ")); 142 | sql = sql.replaceFirst(table, GREENPLUM_SCHEMA+"."+table); 143 | } 144 | } 145 | return sql; 146 | } 147 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/jdbc/JDBCFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.jdbc; 2 | 3 | import java.lang.reflect.Field; 4 | import java.sql.Connection; 5 | import java.sql.DriverManager; 6 | import java.sql.PreparedStatement; 7 | import java.sql.ResultSet; 8 | import java.sql.ResultSetMetaData; 9 | import java.sql.SQLException; 10 | import java.util.ArrayList; 11 | import java.util.HashMap; 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | import org.apache.logging.log4j.LogManager; 16 | import org.apache.logging.log4j.Logger; 17 | 18 | import com.alibaba.druid.pool.xa.DruidXADataSource; 19 | import com.alibaba.fastjson.JSON; 20 | import com.alibaba.fastjson.JSONObject; 21 | import com.share.util.StringUtil; 22 | /** 23 | * @decription 数据库(MySQL|SQL Server|Oracle|Postgresql)服务封装 24 | * @author yi.zhang 25 | * @time 2017年6月2日 下午2:14:31 26 | * @since 1.0 27 | * @jdk 1.8 28 | */ 29 | public class JDBCFactory { 30 | private static Logger logger = 
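/*
 * Editor's note (hedged, forward reference to init()/executeUpdate()/queryTables() below):
 * when isDruid is true, init() immediately calls dataSource.getConnection() and keeps that single
 * Connection in `connect`, so max_pool_size/init_pool_size configure a pool that is only ever
 * asked for one connection. A sketch of the presumably intended shape would keep the DataSource
 * and borrow per call, e.g.
 *   try (Connection c = dataSource.getConnection();
 *        PreparedStatement ps = c.prepareStatement(sql)) { ... }
 * which would also close statements and result sets on the error paths that currently leak them.
 * Note as well that queryTables() issues MySQL's "show tables", which the other databases named
 * in the class comment (SQL Server, Oracle, PostgreSQL) do not accept.
 */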
LogManager.getLogger(); 31 | protected Connection connect = null; 32 | private String driverName; 33 | private String url; 34 | private String username; 35 | private String password; 36 | private boolean isDruid; 37 | private int max_pool_size=10; 38 | private int init_pool_size=2; 39 | 40 | public String getDriverName() { 41 | return driverName; 42 | } 43 | public void setDriverName(String driverName) { 44 | this.driverName = driverName; 45 | } 46 | public String getUrl() { 47 | return url; 48 | } 49 | public void setUrl(String url) { 50 | this.url = url; 51 | } 52 | public String getUsername() { 53 | return username; 54 | } 55 | public void setUsername(String username) { 56 | this.username = username; 57 | } 58 | public String getPassword() { 59 | return password; 60 | } 61 | public void setPassword(String password) { 62 | this.password = password; 63 | } 64 | public boolean isDruid() { 65 | return isDruid; 66 | } 67 | public void setDruid(boolean isDruid) { 68 | this.isDruid = isDruid; 69 | } 70 | public int getMax_pool_size() { 71 | return max_pool_size; 72 | } 73 | public void setMax_pool_size(int max_pool_size) { 74 | this.max_pool_size = max_pool_size; 75 | } 76 | public int getInit_pool_size() { 77 | return init_pool_size; 78 | } 79 | public void setInit_pool_size(int init_pool_size) { 80 | this.init_pool_size = init_pool_size; 81 | } 82 | /** 83 | * 描述: 数据库或数据仓库配置 84 | * 时间: 2017年11月15日 上午11:35:08 85 | * @author yi.zhang 86 | * @param driverName 驱动 87 | * @param url URL地址 88 | * @param username 用户名 89 | * @param password 密码 90 | * @param isDruid 是否使用Druid 91 | * @param max_pool_size 最大连接池数 92 | * @param init_pool_size 最小连接池 93 | */ 94 | public void init(String driverName,String url,String username,String password,boolean isDruid,Integer max_pool_size,Integer init_pool_size){ 95 | try { 96 | if(isDruid){ 97 | @SuppressWarnings("resource") 98 | DruidXADataSource dataSource = new DruidXADataSource(); 99 | if(!StringUtil.isEmpty(driverName)){ 100 | dataSource.setDriverClassName(driverName); 101 | } 102 | dataSource.setUrl(url); 103 | dataSource.setUsername(username); 104 | dataSource.setPassword(password); 105 | if(max_pool_size!=null&&max_pool_size>0){ 106 | dataSource.setMaxActive(max_pool_size); 107 | } 108 | if(init_pool_size!=null&&init_pool_size>0){ 109 | dataSource.setInitialSize(init_pool_size); 110 | } 111 | connect = dataSource.getConnection(); 112 | }else{ 113 | Class.forName(driverName); 114 | connect = DriverManager.getConnection(url,username,password); 115 | } 116 | } catch (Exception e) { 117 | logger.error("-----SQL(MySQL|SQL Server|Oracle|Postgresql) Config init Error-----", e); 118 | } 119 | } 120 | /** 121 | * 描述: 数据操作(Insert|Update|Delete) 122 | * 时间: 2017年11月15日 上午11:27:52 123 | * @author yi.zhang 124 | * @param sql sql语句 125 | * @param params 参数 126 | * @return 返回值 127 | */ 128 | public int executeUpdate(String sql,Object...params ){ 129 | try { 130 | if(connect==null){ 131 | init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 132 | } 133 | PreparedStatement ps = connect.prepareStatement(sql); 134 | if(params!=null&¶ms.length>0){ 135 | for(int i=1;i<=params.length;i++){ 136 | Object value = params[i-1]; 137 | ps.setObject(i, value); 138 | } 139 | } 140 | int result = ps.executeUpdate(); 141 | return result; 142 | } catch (Exception e) { 143 | logger.error("-----SQL excute update Error-----", e); 144 | } 145 | return -1; 146 | } 147 | /** 148 | * 描述: 数据库查询(Select) 149 | * 时间: 2017年11月15日 上午11:28:42 150 | * @author yi.zhang 151 | * 
@param sql sql语句 152 | * @param clazz 映射对象 153 | * @param params 占位符参数 154 | * @return 155 | */ 156 | @SuppressWarnings({ "rawtypes", "unchecked" }) 157 | public List executeQuery(String sql,Class clazz,Object...params){ 158 | try { 159 | if(connect==null){ 160 | init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 161 | } 162 | List list=new ArrayList(); 163 | PreparedStatement ps = connect.prepareStatement(sql); 164 | if(params!=null&¶ms.length>0){ 165 | for(int i=1;i<=params.length;i++){ 166 | Object value = params[i-1]; 167 | ps.setObject(i, value); 168 | } 169 | } 170 | ResultSet rs = ps.executeQuery(); 171 | ResultSetMetaData rsmd = rs.getMetaData(); 172 | int count = rsmd.getColumnCount(); 173 | Map reflect = new HashMap(); 174 | for(int i=1;i<=count;i++){ 175 | String column = rsmd.getColumnName(i); 176 | String tcolumn = column.replaceAll("_", ""); 177 | if(clazz==null){ 178 | reflect.put(column, column); 179 | }else{ 180 | Field[] fields = clazz.getDeclaredFields(); 181 | for (Field field : fields) { 182 | String tfield = field.getName(); 183 | if(tcolumn.equalsIgnoreCase(tfield)){ 184 | reflect.put(column, tfield); 185 | break; 186 | } 187 | } 188 | } 189 | } 190 | while(rs.next()){ 191 | JSONObject obj = new JSONObject(); 192 | for(String column:reflect.keySet()){ 193 | String key = reflect.get(column); 194 | Object value = rs.getObject(column); 195 | obj.put(key, value); 196 | } 197 | Object object = obj; 198 | if(clazz!=null){ 199 | object = JSON.parseObject(obj.toJSONString(), clazz); 200 | } 201 | list.add(object); 202 | } 203 | rs.close(); 204 | ps.close(); 205 | return list; 206 | } catch (Exception e) { 207 | logger.error("-----SQL excute query Error-----", e); 208 | } 209 | return null; 210 | } 211 | /** 212 | * 描述: 查询数据表字段名(key:字段名,value:字段类型名) 213 | * 时间: 2017年11月15日 上午11:29:32 214 | * @author yi.zhang 215 | * @param table 表名 216 | * @return 217 | */ 218 | public Map queryColumns(String table){ 219 | try { 220 | if(connect==null){ 221 | init(driverName, url, username, password, isDruid, max_pool_size, init_pool_size); 222 | } 223 | String sql = "select * from "+table; 224 | PreparedStatement ps = connect.prepareStatement(sql); 225 | ResultSet rs = ps.executeQuery(); 226 | ResultSetMetaData rsmd = rs.getMetaData(); 227 | int count = rsmd.getColumnCount(); 228 | Map reflect = new HashMap(); 229 | for(int i=1;i<=count;i++){ 230 | String column = rsmd.getColumnName(i); 231 | String type = rsmd.getColumnTypeName(i); 232 | reflect.put(column, type); 233 | } 234 | rs.close(); 235 | ps.close(); 236 | return reflect; 237 | } catch (Exception e) { 238 | logger.error("-----Columns excute query Error-----", e); 239 | } 240 | return null; 241 | } 242 | /** 243 | * 描述: 查询数据库表名 244 | * 时间: 2017年11月15日 上午11:29:59 245 | * @author yi.zhang 246 | * @return 返回表 247 | */ 248 | public List queryTables(){ 249 | try { 250 | String sql = "show tables"; 251 | PreparedStatement ps = connect.prepareStatement(sql); 252 | ResultSet rs = ps.executeQuery(); 253 | List tables = new ArrayList(); 254 | while(rs.next()){ 255 | String table = rs.getString(1); 256 | tables.add(table); 257 | } 258 | rs.close(); 259 | ps.close(); 260 | return tables; 261 | } catch (SQLException e) { 262 | logger.error("-----Tables excute query Error-----", e); 263 | } 264 | return null; 265 | } 266 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/kafka/KafkaFactory.java: 
-------------------------------------------------------------------------------- 1 | package com.share.service.kafka; 2 | 3 | import java.util.Arrays; 4 | import java.util.Properties; 5 | 6 | import org.apache.kafka.clients.consumer.KafkaConsumer; 7 | import org.apache.kafka.clients.producer.KafkaProducer; 8 | import org.apache.kafka.clients.producer.ProducerRecord; 9 | import org.apache.kafka.common.serialization.StringDeserializer; 10 | import org.apache.kafka.common.serialization.StringSerializer; 11 | import org.apache.logging.log4j.LogManager; 12 | import org.apache.logging.log4j.Logger; 13 | /** 14 | * @decription Kafka服务 15 | * @author yi.zhang 16 | * @time 2017年6月8日 下午2:39:42 17 | * @since 1.0 18 | * @jdk 1.8 19 | */ 20 | public class KafkaFactory { 21 | private static Logger logger = LogManager.getLogger(); 22 | private static String KAFKA_TOPIC = "KAFKA_CANAL"; 23 | public static int KAFKA_CONSUMER_BATCCH_SIZE = 100; 24 | private KafkaProducer producer = null; 25 | private KafkaConsumer consumer = null; 26 | 27 | private String servers; 28 | private boolean isZookeeper; 29 | private String zookeeper_servers; 30 | private String acks; 31 | 32 | public String getServers() { 33 | return servers; 34 | } 35 | public void setServers(String servers) { 36 | this.servers = servers; 37 | } 38 | public boolean isZookeeper() { 39 | return isZookeeper; 40 | } 41 | public void setZookeeper(boolean isZookeeper) { 42 | this.isZookeeper = isZookeeper; 43 | } 44 | public String getZookeeper_servers() { 45 | return zookeeper_servers; 46 | } 47 | public void setZookeeper_servers(String zookeeper_servers) { 48 | this.zookeeper_servers = zookeeper_servers; 49 | } 50 | public String getAcks() { 51 | return acks; 52 | } 53 | public void setAcks(String acks) { 54 | this.acks = acks; 55 | } 56 | /** 57 | * @decription 初始化配置 58 | * @author yi.zhang 59 | * @time 2017年6月2日 下午2:15:57 60 | */ 61 | public void init(String servers,boolean isZookeeper,String zookeeper_servers,String acks) { 62 | try { 63 | Properties productor_config = new Properties(); 64 | if(isZookeeper&&zookeeper_servers!=null){ 65 | productor_config.put("zk.connect", zookeeper_servers); 66 | } 67 | productor_config.put("bootstrap.servers", servers); 68 | // “所有”设置将导致记录的完整提交阻塞,最慢的,但最持久的设置。(The "all" setting we have specified will result in blocking on the full commit of the record, the slowest but most durable setting.) 69 | productor_config.put("acks", acks); 70 | // 如果请求失败,生产者也会自动重试,即使设置成0 the producer can automatically retry. 71 | productor_config.put("retries", 0); 72 | // The producer maintains buffers of unsent records for each partition. 73 | productor_config.put("batch.size", 16384); 74 | // 默认立即发送,这里这是延时毫秒数 75 | productor_config.put("linger.ms", 1); 76 | // 生产者缓冲大小,当缓冲区耗尽后,额外的发送调用将被阻塞。时间超过max.block.ms将抛出TimeoutException 77 | productor_config.put("buffer.memory", 33554432); 78 | // The key.serializer and value.serializer instruct how to turn the key and value objects the user provides with their ProducerRecord into bytes. 
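/*
 * Editor's note (hedged): the zk.connect / zookeeper.connect entries set above are legacy
 * Scala-client settings; the Java KafkaProducer/KafkaConsumer used here read only
 * bootstrap.servers and will merely warn about the unknown keys. The raw KafkaProducer and
 * KafkaConsumer types also drop the <String, String> generics implied by the configured
 * String(De)Serializers, e.g. the typed form would be
 *   producer = new KafkaProducer<String, String>(productor_config);
 *   consumer = new KafkaConsumer<String, String>(consumer_config);
 */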
79 | productor_config.put("key.serializer", StringSerializer.class.getName()); 80 | productor_config.put("value.serializer", StringSerializer.class.getName()); 81 | // 创建kafka的生产者类 82 | producer = new KafkaProducer(productor_config); 83 | Properties consumer_config = new Properties(); 84 | if(isZookeeper&&zookeeper_servers!=null){ 85 | consumer_config.put("zookeeper.connect", zookeeper_servers); 86 | } 87 | consumer_config.put("bootstrap.servers", servers); 88 | // 消费者的组id 89 | consumer_config.put("group.id", "kafka_consumer_group"); 90 | consumer_config.put("enable.auto.commit", true); 91 | consumer_config.put("auto.commit.interval.ms", 10*1000); 92 | // 从poll(拉)的回话处理时长 93 | consumer_config.put("session.timeout.ms", 30*1000); 94 | consumer_config.put("key.deserializer", StringDeserializer.class.getName()); 95 | consumer_config.put("value.deserializer", StringDeserializer.class.getName()); 96 | consumer = new KafkaConsumer(consumer_config); 97 | // 订阅主题列表topic 98 | consumer.subscribe(Arrays.asList(KAFKA_TOPIC)); 99 | } catch (Exception e) { 100 | logger.error("-----Kafka Config init Error-----", e); 101 | } 102 | } 103 | /** 104 | * 关闭服务 105 | */ 106 | public void close(){ 107 | if(producer!=null){ 108 | producer.close(); 109 | } 110 | if(consumer!=null){ 111 | consumer.close(); 112 | } 113 | } 114 | /** 115 | * @decription 生产者推送数据 116 | * @author yi.zhang 117 | * @time 2017年6月8日 下午2:24:05 118 | * @param data 119 | */ 120 | public void send(String data){ 121 | if(producer==null){ 122 | init(servers, isZookeeper, zookeeper_servers, acks); 123 | } 124 | producer.send(new ProducerRecord(KAFKA_TOPIC,data)); 125 | producer.flush(); 126 | } 127 | /** 128 | * @decription Kafka生产者 129 | * @author yi.zhang 130 | * @time 2017年6月8日 下午2:44:01 131 | * @return 132 | */ 133 | public KafkaProducer getProducer() { 134 | if(producer==null){ 135 | init(servers, isZookeeper, zookeeper_servers, acks); 136 | } 137 | return producer; 138 | } 139 | /** 140 | * @decription Kafka消费者 141 | * @author yi.zhang 142 | * @time 2017年6月8日 下午2:44:32 143 | * @return 144 | */ 145 | public KafkaConsumer getConsumer() { 146 | if(consumer==null){ 147 | init(servers, isZookeeper, zookeeper_servers, acks); 148 | } 149 | return consumer; 150 | } 151 | } -------------------------------------------------------------------------------- /src/main/java/com/share/service/mongodb/MongoDBFactory.java: -------------------------------------------------------------------------------- 1 | package com.share.service.mongodb; 2 | 3 | import java.lang.reflect.Field; 4 | import java.util.ArrayList; 5 | import java.util.Date; 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | import java.util.concurrent.TimeUnit; 10 | 11 | import org.apache.logging.log4j.LogManager; 12 | import org.apache.logging.log4j.Logger; 13 | import org.bson.Document; 14 | import org.bson.conversions.Bson; 15 | import org.bson.types.ObjectId; 16 | 17 | import com.alibaba.fastjson.JSON; 18 | import com.alibaba.fastjson.JSONObject; 19 | import com.mongodb.MongoClient; 20 | import com.mongodb.MongoClientOptions; 21 | import com.mongodb.MongoClientOptions.Builder; 22 | import com.mongodb.MongoCredential; 23 | import com.mongodb.ServerAddress; 24 | import com.mongodb.client.FindIterable; 25 | import com.mongodb.client.MongoCollection; 26 | import com.mongodb.client.MongoCursor; 27 | import com.mongodb.client.MongoDatabase; 28 | import com.mongodb.client.MongoIterable; 29 | import com.mongodb.client.model.Filters; 30 | 31 | /** 32 | * @decription 
MongoDB数据服务封装 33 | * @author yi.zhang 34 | * @time 2017年6月2日 下午2:48:49 35 | * @since 1.0 36 | * @jdk 1.8 37 | */ 38 | @SuppressWarnings("all") 39 | public class MongoDBFactory { 40 | private static Logger logger = LogManager.getLogger(); 41 | /** 42 | * 主键ID是否处理(true:处理[id],false:不处理[_id]) 43 | */ 44 | public static boolean ID_HANDLE=false; 45 | /** 46 | * 批量数据大小 47 | */ 48 | public static int BATCH_SIZE = 10000; 49 | /** 50 | * 最大时间(单位:毫秒) 51 | */ 52 | public static int MAX_WAIT_TIME = 24*60*60*1000; 53 | 54 | protected MongoDatabase session = null; 55 | 56 | private String servers; 57 | private String database; 58 | private String schema; 59 | private String username; 60 | private String password; 61 | 62 | public String getServers() { 63 | return servers; 64 | } 65 | 66 | public void setServers(String servers) { 67 | this.servers = servers; 68 | } 69 | 70 | public String getDatabase() { 71 | return database; 72 | } 73 | 74 | public void setDatabase(String database) { 75 | this.database = database; 76 | } 77 | 78 | public String getSchema() { 79 | return schema; 80 | } 81 | 82 | public void setSchema(String schema) { 83 | this.schema = schema; 84 | } 85 | 86 | public String getUsername() { 87 | return username; 88 | } 89 | 90 | public void setUsername(String username) { 91 | this.username = username; 92 | } 93 | 94 | public String getPassword() { 95 | return password; 96 | } 97 | 98 | public void setPassword(String password) { 99 | this.password = password; 100 | } 101 | 102 | /** 103 | * @decription 初始化配置 104 | * @author yi.zhang 105 | * @time 2017年6月2日 下午2:15:57 106 | */ 107 | public void init(String servers,String database,String schema,String username,String password) { 108 | try { 109 | List saddress = new ArrayList(); 110 | if (servers != null && !"".equals(servers)) { 111 | for (String server : servers.split(",")) { 112 | String[] address = server.split(":"); 113 | String ip = address[0]; 114 | int port = 27017; 115 | if (address != null && address.length > 1) { 116 | port = Integer.valueOf(address[1]); 117 | } 118 | saddress.add(new ServerAddress(ip, port)); 119 | } 120 | } 121 | MongoCredential credential = MongoCredential.createScramSha1Credential(username, database,password.toCharArray()); 122 | List credentials = new ArrayList(); 123 | credentials.add(credential); 124 | Builder builder = new MongoClientOptions.Builder(); 125 | builder.maxWaitTime(MAX_WAIT_TIME); 126 | // 通过连接认证获取MongoDB连接 127 | MongoClient client = new MongoClient(saddress, credentials, builder.build()); 128 | // 连接到数据库 129 | session = client.getDatabase(schema); 130 | } catch (Exception e) { 131 | logger.error("-----MongoDB Config init Error-----", e); 132 | } 133 | } 134 | 135 | /** 136 | * @decription 保存数据 137 | * @author yi.zhang 138 | * @time 2017年6月2日 下午6:18:49 139 | * @param table 文档名称(表名) 140 | * @param obj 141 | * @return 142 | */ 143 | public int save(String table, Object obj) { 144 | try { 145 | if(session==null){ 146 | init(servers, database, schema, username, password); 147 | } 148 | MongoCollection collection = session.getCollection(table); 149 | if (collection == null) { 150 | session.createCollection(table); 151 | collection = session.getCollection(table); 152 | } 153 | collection.insertOne(Document.parse(JSON.toJSONString(obj))); 154 | return 1; 155 | } catch (Exception e) { 156 | // TODO Auto-generated catch block 157 | e.printStackTrace(); 158 | } 159 | return -1; 160 | } 161 | 162 | /** 163 | * @decription 更新数据 164 | * @author yi.zhang 165 | * @time 2017年6月2日 下午6:19:08 166 | * @param table 
文档名称(表名) 167 | * @param obj 168 | * @return 169 | */ 170 | public int update(String table, Object obj) { 171 | try { 172 | if(session==null){ 173 | init(servers, database, schema, username, password); 174 | } 175 | MongoCollection collection = session.getCollection(table); 176 | if (collection == null) { 177 | return 0; 178 | } 179 | JSONObject json = JSON.parseObject(JSON.toJSONString(obj)); 180 | Document value = Document.parse(JSON.toJSONString(obj)); 181 | collection.replaceOne(Filters.eq("_id", json.containsKey("_id")?json.get("_id"):json.get("id")), value); 182 | return 1; 183 | } catch (Exception e) { 184 | // TODO Auto-generated catch block 185 | e.printStackTrace(); 186 | } 187 | return -1; 188 | } 189 | 190 | /** 191 | * @decription 删除数据 192 | * @author yi.zhang 193 | * @time 2017年6月2日 下午6:19:25 194 | * @param table 文档名称(表名) 195 | * @param obj 196 | * @return 197 | */ 198 | public int delete(String table, Object obj) { 199 | try { 200 | if(session==null){ 201 | init(servers, database, schema, username, password); 202 | } 203 | MongoCollection collection = session.getCollection(table); 204 | if (collection == null) { 205 | return 0; 206 | } 207 | JSONObject json = JSON.parseObject(JSON.toJSONString(obj)); 208 | collection.findOneAndDelete(Filters.eq("_id", json.containsKey("_id")?json.get("_id"):json.get("id"))); 209 | return 1; 210 | } catch (Exception e) { 211 | // TODO Auto-generated catch block 212 | e.printStackTrace(); 213 | } 214 | return -1; 215 | } 216 | 217 | /** 218 | * @decription 数据库查询 219 | * @author yi.zhang 220 | * @time 2017年6月26日 下午4:12:59 221 | * @param table 文档名称(表名) 222 | * @param clazz 映射对象 223 | * @param params 参数 224 | * @return 225 | */ 226 | public List executeQuery(String table, Class clazz, JSONObject params) { 227 | try { 228 | if(session==null){ 229 | init(servers, database, schema, username, password); 230 | } 231 | MongoCollection collection = session.getCollection(table); 232 | if (collection == null) { 233 | return null; 234 | } 235 | List list = new ArrayList(); 236 | FindIterable documents = null; 237 | if (params != null) { 238 | List filters = new ArrayList(); 239 | for (String key : params.keySet()) { 240 | Object value = params.get(key); 241 | filters.add(Filters.eq(key, value)); 242 | } 243 | documents = collection.find(Filters.and(filters)); 244 | } else { 245 | documents = collection.find(); 246 | } 247 | MongoCursor cursor = documents.batchSize(BATCH_SIZE).noCursorTimeout(true).iterator(); 248 | while (cursor.hasNext()) { 249 | JSONObject obj = new JSONObject(); 250 | Document document = cursor.next(); 251 | for (String column : document.keySet()) { 252 | Object value = document.get(column); 253 | if(value instanceof ObjectId){ 254 | value = document.getObjectId(column).toHexString(); 255 | } 256 | if (clazz == null) { 257 | obj.put(ID_HANDLE?column.replaceFirst("^(\\_?)", ""):column, value); 258 | } else { 259 | String tcolumn = column.replaceAll("_", ""); 260 | Field[] fields = clazz.getDeclaredFields(); 261 | for (Field field : fields) { 262 | String tfield = field.getName(); 263 | if (column.equalsIgnoreCase(tfield) || tcolumn.equalsIgnoreCase(tfield)) { 264 | obj.put(tfield, value); 265 | break; 266 | } 267 | } 268 | } 269 | } 270 | Object object = obj; 271 | if (clazz != null) { 272 | object = JSON.parseObject(obj.toJSONString(), clazz); 273 | } 274 | list.add(object); 275 | } 276 | cursor.close(); 277 | return list; 278 | } catch (Exception e) { 279 | // TODO Auto-generated catch block 280 | e.printStackTrace(); 281 | } 282 | 
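/*
 * Editor's note (hedged): update() and delete() above filter with
 *   Filters.eq("_id", json.containsKey("_id") ? json.get("_id") : json.get("id"))
 * If the stored _id is a Mongo-generated ObjectId but the caller only holds its hex string,
 * this equality never matches; wrapping 24-character hex values, e.g.
 *   Filters.eq("_id", new ObjectId(hexId))
 * is presumably what was intended for that case.
 */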
return null; 283 | } 284 | 285 | /** 286 | * @decription 查询数据表字段名(key:字段名,value:字段类型名) 287 | * @author yi.zhang 288 | * @time 2017年6月30日 下午2:16:02 289 | * @param table 表名 290 | * @return 291 | */ 292 | public Map queryColumns(String table){ 293 | try { 294 | if(session==null){ 295 | init(servers, database, schema, username, password); 296 | } 297 | MongoCollection collection = session.getCollection(table); 298 | if (collection == null) { 299 | return null; 300 | } 301 | Map reflect = new HashMap(); 302 | FindIterable documents = collection.find(); 303 | Document document = documents.first(); 304 | if(document==null){ 305 | return reflect; 306 | } 307 | for (String column : document.keySet()) { 308 | Object value = document.get(column); 309 | String type = "string"; 310 | if(value instanceof Integer){ 311 | type = "int"; 312 | } 313 | if(value instanceof Long){ 314 | type = "long"; 315 | } 316 | if(value instanceof Double){ 317 | type = "double"; 318 | } 319 | if(value instanceof Boolean){ 320 | type = "boolean"; 321 | } 322 | if(value instanceof Date){ 323 | type = "date"; 324 | } 325 | reflect.put(column, type); 326 | } 327 | return reflect; 328 | } catch (Exception e) { 329 | // TODO Auto-generated catch block 330 | e.printStackTrace(); 331 | } 332 | return null; 333 | } 334 | /** 335 | * @decription 查询数据库表名 336 | * @author yi.zhang 337 | * @time 2017年6月30日 下午2:16:02 338 | * @param table 表名 339 | * @return 340 | */ 341 | public List queryTables(){ 342 | try { 343 | if(session==null){ 344 | init(servers, database, schema, username, password); 345 | } 346 | MongoIterable collection = session.listCollectionNames(); 347 | if (collection == null) { 348 | return null; 349 | } 350 | List tables = new ArrayList(); 351 | MongoCursor cursor = collection.iterator(); 352 | while(cursor.hasNext()){ 353 | String table = cursor.next(); 354 | tables.add(table); 355 | } 356 | return tables; 357 | } catch (Exception e) { 358 | // TODO Auto-generated catch block 359 | e.printStackTrace(); 360 | } 361 | return null; 362 | } 363 | } -------------------------------------------------------------------------------- /src/main/java/com/share/swing/CommonFrame.java: -------------------------------------------------------------------------------- 1 | package com.share.swing; 2 | 3 | import java.awt.Color; 4 | import java.awt.Container; 5 | import java.awt.Dimension; 6 | import java.awt.Font; 7 | import java.awt.Toolkit; 8 | import java.awt.event.ActionEvent; 9 | import java.awt.event.ActionListener; 10 | import java.util.Date; 11 | import java.util.concurrent.ConcurrentLinkedQueue; 12 | 13 | import javax.swing.JFrame; 14 | 15 | import com.share.util.DateUtil; 16 | import com.share.util.StringUtil; 17 | /** 18 | * @decription 公共框架 19 | * @author yi.zhang 20 | * @time 2017年7月13日 下午3:27:29 21 | * @since 1.0 22 | * @jdk 1.8 23 | */ 24 | public abstract class CommonFrame extends JFrame implements ActionListener{ 25 | private static final long serialVersionUID = -7858440654032631473L; 26 | public static String title = ResourceHolder.getProperty("system.name"); 27 | public static String version = ResourceHolder.getProperty("system.version"); 28 | public static String decription = ResourceHolder.getProperty("system.decription"); 29 | public static int width = 800; 30 | public static int height = 600; 31 | private static ConcurrentLinkedQueue logger = new ConcurrentLinkedQueue(); 32 | public Container container; 33 | 34 | public CommonFrame(){ 35 | _init(); 36 | } 37 | /** 38 | * @decription 初始化信息 39 | * @author yi.zhang 40 
| * @time 2017年7月13日 下午3:27:57 41 | */ 42 | public void _init(){ 43 | Toolkit kit = Toolkit.getDefaultToolkit(); 44 | Dimension screen = kit.getScreenSize(); 45 | height = Double.valueOf(screen.getHeight()).intValue(); 46 | width = Double.valueOf(screen.getWidth()).intValue(); 47 | // this.setLocationByPlatform(true); 48 | this.setSize(width>800?width*3/4:800, height>600?height*3/4:600); 49 | this.setFont(new Font("Consolas", Font.BOLD, 16)); 50 | this.setLocation((width-this.getSize().width)/2, (height-this.getSize().height)/2); 51 | this.setBackground(Color.GREEN); 52 | this.setTitle("®™"+title+" "+ version +" ©ZhangYi"); 53 | this.setVisible(true); 54 | } 55 | /** 56 | * @decription 处理服务IP与端口 57 | * @author yi.zhang 58 | * @time 2017年7月21日 下午3:33:01 59 | * @param servers 服务拼接地址 60 | * @param port 服务端口 61 | * @return 62 | */ 63 | public String handleServers(String servers,int port){ 64 | String result = ""; 65 | if(port>0&&!StringUtil.isEmpty(servers)){ 66 | if(servers.contains(",")){ 67 | String[] addesses = servers.split(","); 68 | for (String server : addesses) { 69 | String[] adress = server.split(":"); 70 | if(adress.length==1){ 71 | result+=(StringUtil.isEmpty(result)?"":",")+adress[0]+":"+port; 72 | }else{ 73 | result+=(StringUtil.isEmpty(result)?"":",")+server; 74 | } 75 | } 76 | }else{ 77 | result+=servers+":"+port; 78 | } 79 | }else{ 80 | result = servers; 81 | } 82 | return result; 83 | } 84 | /** 85 | * @decription 处理资源类型-> 86 | * 0:Canal服务, 87 | * 1:Elasticsearch服务, 88 | * 2:NoSQL服务[2.1->Cassandra,2.2->MongoDB,2.3->Redis,2.4->Memecached] 89 | * 3:SQL服务[3.1->MySQL,3.2->SQL Server,3.3->Oracle] 90 | * 4:数据仓库(Greenplum) 91 | * 5:消息队列(Kafka) 92 | * @author yi.zhang 93 | * @time 2017年7月21日 下午3:34:29 94 | * @param type 95 | * @return 96 | */ 97 | public String handleType(double type){ 98 | int dtype = Double.valueOf(type).intValue(); 99 | String result= ResourceHolder.getProperty("dstt.ds.type."+type); 100 | if(StringUtil.isEmpty(result)||type==0.1){ 101 | result = ResourceHolder.getProperty("dstt.ds.type."+dtype); 102 | } 103 | return StringUtil.isEmpty(result)?"Unknow":result; 104 | } 105 | /** 106 | * @decription 处理资源类型-> 107 | * 0:Canal服务, 108 | * 1:Elasticsearch服务, 109 | * 2:NoSQL服务[2.1->Cassandra,2.2->MongoDB,2.3->Redis,2.4->Memecached] 110 | * 3:SQL服务[3.1->MySQL,3.2->SQL Server,3.3->Oracle] 111 | * 4:数据仓库(Greenplum) 112 | * 5:消息队列(Kafka) 113 | * @author yi.zhang 114 | * @time 2017年7月21日 下午3:34:29 115 | * @param type 116 | * @return 117 | */ 118 | public double handleType(String value){ 119 | double type = -1; 120 | if(value!=null){ 121 | if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.0"))){ 122 | type = 0; 123 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.0.1"))){ 124 | type = 0.1; 125 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.1"))){ 126 | type = 1; 127 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.2"))){ 128 | type = 2; 129 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.2.1"))){ 130 | type = 2.1; 131 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.2.2"))){ 132 | type = 2.2; 133 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.2.3"))){ 134 | type = 2.3; 135 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.2.4"))){ 136 | type = 2.4; 137 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.3"))){ 138 | type = 3; 139 | }else 
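/*
 * Editor's note (hedged, forward reference to handlePort(double) below): type 2.1 (Cassandra)
 * returns 9402 and type 2.3 (Redis) returns 8888, but the stock defaults are 9042 (CQL native
 * transport) and 6379; 9402 in particular looks like a transposition of 9042, unless these are
 * deliberate site-specific ports.
 */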
if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.3.1"))){ 140 | type = 3.1; 141 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.3.2"))){ 142 | type = 3.2; 143 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.3.3"))){ 144 | type = 3.3; 145 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.4"))){ 146 | type = 4; 147 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.4.1"))){ 148 | type = 4.1; 149 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.5"))){ 150 | type = 5; 151 | }else if(value.equalsIgnoreCase(ResourceHolder.getProperty("dstt.ds.type.5.1"))){ 152 | type = 5.1; 153 | } 154 | } 155 | return type; 156 | } 157 | /** 158 | * @decription 默认端口 159 | * @author yi.zhang 160 | * @time 2017年7月28日 上午11:55:32 161 | * @param type 162 | * @return 163 | */ 164 | public String handlePort(double type){ 165 | String port = ""; 166 | if(type==0)port=""+11111; 167 | if(type==0.1)port=""+2181; 168 | if(type==1)port=""+9300; 169 | if(type==2.1)port=""+9402; 170 | if(type==2.2)port=""+27017; 171 | if(type==2.3)port=""+8888; 172 | if(type==2.4)port=""+11211; 173 | if(type==3.1)port=""+3306; 174 | if(type==3.2)port=""+1433; 175 | if(type==3.3)port=""+1521; 176 | if(type==4.1)port=""+5432; 177 | if(type==5.1)port=""+9092; 178 | return port; 179 | } 180 | @Override 181 | public void actionPerformed(ActionEvent e) { 182 | System.out.println(e.getSource()); 183 | } 184 | 185 | public static void offer(String msg){ 186 | String data = "["+DateUtil.formatDateTimeStr(new Date())+"]"+msg; 187 | logger.offer(data); 188 | } 189 | 190 | public static String poll(){ 191 | return logger.poll(); 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/main/java/com/share/swing/DataInfo.java: -------------------------------------------------------------------------------- 1 | package com.share.swing; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | import java.util.Map; 6 | import java.util.TreeMap; 7 | 8 | @SuppressWarnings("serial") 9 | public class DataInfo implements Serializable{ 10 | /** 11 | * 数据源 12 | */ 13 | private Config source; 14 | /** 15 | * 目标源 16 | */ 17 | private Config target; 18 | /** 19 | * 库表对应关系映射 20 | */ 21 | private Map mapping=new TreeMap(); 22 | /** 23 | * 过滤字段 24 | */ 25 | private List filter_columns; 26 | 27 | public Config getSource() { 28 | return source; 29 | } 30 | 31 | public void setSource(Config source) { 32 | this.source = source; 33 | } 34 | 35 | public Config getTarget() { 36 | return target; 37 | } 38 | 39 | public void setTarget(Config target) { 40 | this.target = target; 41 | } 42 | 43 | public Map getMapping() { 44 | return mapping; 45 | } 46 | 47 | public void setMapping(Map mapping) { 48 | this.mapping = mapping; 49 | } 50 | 51 | public List getFilter_columns() { 52 | return filter_columns; 53 | } 54 | 55 | public void setFilter_columns(List filter_columns) { 56 | this.filter_columns = filter_columns; 57 | } 58 | /** 59 | * @decription 数据配置 60 | * @author yi.zhang 61 | * @time 2017年7月13日 下午5:19:28 62 | * @since 1.0 63 | * @jdk 1.8 64 | */ 65 | public class Config implements Serializable{ 66 | private String servers; 67 | private int port=0; 68 | /** 69 | * 资源类型-> 70 | * 0:Canal服务, 71 | * 1:Elasticsearch服务, 72 | * 2:NoSQL服务[2.1->Cassandra,2.2->MongoDB,2.3->Redis,2.4->Memecached] 73 | * 3:SQL服务[3.1->MySQL,3.2->SQL Server,3.3->Oracle] 74 | * 4:数据仓库(Greenplum) 75 | * 
5:消息队列(Kafka) 76 | */ 77 | private double type = -1; 78 | private String version = ""; 79 | private String username = ""; 80 | private String password = ""; 81 | private String database = ""; 82 | private String schema = ""; 83 | private String keyspace = ""; 84 | private String other = ""; 85 | private int batch_size = 100; 86 | 87 | public String getServers() { 88 | return servers; 89 | } 90 | public void setServers(String servers) { 91 | this.servers = servers; 92 | } 93 | public int getPort() { 94 | return port; 95 | } 96 | public void setPort(int port) { 97 | this.port = port; 98 | } 99 | /** 100 | * 资源类型-> 101 | * 0:Canal服务, 102 | * 1:Elasticsearch服务, 103 | * 2:NoSQL服务[2.1->Cassandra,2.2->MongoDB,2.3->Redis,2.4->Memecached] 104 | * 3:SQL服务[3.1->MySQL,3.2->SQL Server,3.3->Oracle] 105 | * 4:数据仓库(Greenplum) 106 | * 5:消息队列(Kafka) 107 | */ 108 | public double getType() { 109 | return type; 110 | } 111 | /** 112 | * 资源类型-> 113 | * 0:Canal服务, 114 | * 1:Elasticsearch服务, 115 | * 2:NoSQL服务[2.1->Cassandra,2.2->MongoDB,2.3->Redis,2.4->Memecached] 116 | * 3:SQL服务[3.1->MySQL,3.2->SQL Server,3.3->Oracle] 117 | * 4:数据仓库(Greenplum) 118 | * 5:消息队列(Kafka) 119 | */ 120 | public void setType(double type) { 121 | this.type = type; 122 | } 123 | public String getVersion() { 124 | return version; 125 | } 126 | public void setVersion(String version) { 127 | this.version = version; 128 | } 129 | public String getUsername() { 130 | return username; 131 | } 132 | public void setUsername(String username) { 133 | this.username = username; 134 | } 135 | public String getPassword() { 136 | return password; 137 | } 138 | public void setPassword(String password) { 139 | this.password = password; 140 | } 141 | public String getDatabase() { 142 | return database; 143 | } 144 | public void setDatabase(String database) { 145 | this.database = database; 146 | } 147 | public String getSchema() { 148 | return schema; 149 | } 150 | public void setSchema(String schema) { 151 | this.schema = schema; 152 | } 153 | public String getKeyspace() { 154 | return keyspace; 155 | } 156 | public void setKeyspace(String keyspace) { 157 | this.keyspace = keyspace; 158 | } 159 | public String getOther() { 160 | return other; 161 | } 162 | public void setOther(String other) { 163 | this.other = other; 164 | } 165 | public int getBatch_size() { 166 | return batch_size; 167 | } 168 | public void setBatch_size(int batch_size) { 169 | this.batch_size = batch_size; 170 | } 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /src/main/java/com/share/swing/Main.java: -------------------------------------------------------------------------------- 1 | package com.share.swing; 2 | 3 | import java.awt.Color; 4 | import java.awt.Font; 5 | import java.awt.event.ActionEvent; 6 | import java.awt.event.ActionListener; 7 | import java.util.Locale; 8 | 9 | import javax.swing.ButtonGroup; 10 | import javax.swing.DropMode; 11 | import javax.swing.JButton; 12 | import javax.swing.JFrame; 13 | import javax.swing.JLabel; 14 | import javax.swing.JRadioButton; 15 | import javax.swing.JTextPane; 16 | import javax.swing.SwingConstants; 17 | 18 | public class Main extends CommonFrame { 19 | private static final long serialVersionUID = 8927006591622687297L; 20 | 21 | public Main() { 22 | super._init(); 23 | language(Locale.CHINESE); 24 | } 25 | 26 | protected void init(){ 27 | this.setTitle("®™"+title+" "+ version +" ©ZhangYi"); 28 | container = this.getContentPane(); 29 | container.setForeground(Color.BLACK); 30 | 
container.setBackground(new Color(204, 255, 255)); 31 | container.setFont(new Font("仿宋", Font.PLAIN, 12)); 32 | container.setLayout(null); 33 | int cwidth = container.getSize().width>0?container.getSize().width:width; 34 | int cheight = container.getSize().height>0?container.getSize().height:height; 35 | 36 | JLabel jtitle = new JLabel(title); 37 | jtitle.setLabelFor(getContentPane()); 38 | jtitle.setToolTipText(decription); 39 | jtitle.setHorizontalAlignment(SwingConstants.CENTER); 40 | jtitle.setForeground(Color.RED); 41 | jtitle.setBackground(Color.WHITE); 42 | //为标签设置及添加到框架 43 | jtitle.setBounds(cwidth*3/8,98,cwidth/4,81); 44 | jtitle.setFont(new Font("华文新魏", Font.BOLD, 30)); 45 | container.add(jtitle); 46 | 47 | JTextPane textPane = new JTextPane(); 48 | textPane.setDropMode(DropMode.USE_SELECTION); 49 | textPane.setForeground(Color.BLACK); 50 | textPane.setBackground(new Color(245, 255, 250)); 51 | textPane.setEditable(false); 52 | textPane.setFont(new Font("华文宋体", Font.BOLD, 16)); 53 | textPane.setText(decription.replace("\n", "\n\n")); 54 | textPane.setBounds(cwidth*5/16, 200, cwidth*3/8, cheight*3/8); 55 | container.add(textPane); 56 | 57 | JButton button = new JButton(ResourceHolder.getProperty("dstt.btn.next")); 58 | button.setFont(new Font("宋体", Font.BOLD, 14)); 59 | button.addActionListener(new ActionListener() { 60 | public void actionPerformed(ActionEvent e) { 61 | setVisible(false); 62 | new ManageTable(); 63 | } 64 | }); 65 | button.setBounds(cwidth*7/8, cheight*15/16, 93, 30); 66 | container.add(button); 67 | 68 | final ButtonGroup language = new ButtonGroup(); 69 | 70 | final JRadioButton chinese = new JRadioButton(ResourceHolder.getProperty("system.language.zh_CN")); 71 | chinese.setBackground(new Color(224, 255, 255)); 72 | chinese.setHorizontalAlignment(SwingConstants.CENTER); 73 | chinese.setFont(new Font("仿宋", Font.BOLD, 12)); 74 | chinese.setBounds(cwidth*14/16, 16, 75, 23); 75 | language.add(chinese); 76 | final JRadioButton english = new JRadioButton(ResourceHolder.getProperty("system.language.en_US")); 77 | english.setBackground(new Color(224, 255, 255)); 78 | english.setHorizontalAlignment(SwingConstants.CENTER); 79 | english.setFont(new Font("仿宋", Font.BOLD, 12)); 80 | english.setBounds(cwidth*15/16, 16, 75, 23); 81 | language.add(english); 82 | chinese.addActionListener(new ActionListener() { 83 | public void actionPerformed(ActionEvent e) { 84 | language.clearSelection(); 85 | language(Locale.CHINESE); 86 | } 87 | }); 88 | english.addActionListener(new ActionListener() { 89 | public void actionPerformed(ActionEvent e) { 90 | language.clearSelection(); 91 | language(Locale.ENGLISH); 92 | } 93 | }); 94 | if(this.getLocale().equals(Locale.ENGLISH)){ 95 | english.setSelected(true); 96 | }else{ 97 | chinese.setSelected(true); 98 | } 99 | container.add(chinese); 100 | container.add(english); 101 | } 102 | 103 | public void language(Locale locale){ 104 | if(locale==null)locale=Locale.getDefault(); 105 | ResourceHolder.locale=locale; 106 | this.setLocale(locale); 107 | title = ResourceHolder.getProperty("system.name"); 108 | version = ResourceHolder.getProperty("system.version"); 109 | decription = ResourceHolder.getProperty("system.decription"); 110 | this.getContentPane().removeAll(); 111 | init(); 112 | 113 | this.setVisible(true); 114 | this.setResizable(false); 115 | this.validate(); 116 | this.repaint(); 117 | } 118 | public static void main(String[] args) { 119 | JFrame frame = new Main(); 120 | frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); 121 | 
frame.setVisible(true); 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /src/main/java/com/share/swing/ResourceHolder.java: -------------------------------------------------------------------------------- 1 | package com.share.swing; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Locale; 6 | import java.util.Properties; 7 | 8 | import org.apache.log4j.Logger; 9 | 10 | import com.share.util.StringUtil; 11 | 12 | 13 | /** 14 | *
15 |  * Project: data transform
16 |  * Description: reads values from the resource configuration files
17 |  * Author: ZhangYi
18 |  * Date: 2016-01-26 09:54:00
19 |  * Version: dtt_v1.0
20 |  * JDK: 1.7.80
21 |  * 
22 | */ 23 | public class ResourceHolder { 24 | 25 | private static final Logger logger = Logger.getLogger(ResourceHolder.class); 26 | public static Locale locale = Locale.getDefault(); 27 | /** 28 | *
29 | 	 * Description: reads a value from the resource files (messages_en.properties/messages_zh_CN.properties)
30 | 	 * Author: ZhangYi
31 | 	 * Date: 2016-01-26 09:52:03
32 | 	 * Parameters:
33 | 	 * @param key		i18n message key
34 | 	 * (the language is selected by the static field locale: zh_CN or en)
35 | 	 * @return the value for the key (null if the key is missing)
36 | 	 * 
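	 * Illustrative usage (the keys shown come from messages_en.properties in this project):
	 *   ResourceHolder.locale = Locale.ENGLISH;
	 *   ResourceHolder.getProperty("system.name");   // -> "DataSource Transform Tool"
	 *   ResourceHolder.getProperty("dstt.btn.next"); // -> "Next"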
37 | */ 38 | public static String getProperty(String key) { 39 | String value = ""; 40 | String file = "/messages_zh_CN.properties"; 41 | if (locale.getLanguage().contains("en")) { 42 | file = "/messages_en.properties"; 43 | } 44 | try { 45 | InputStream stream = ResourceHolder.class.getResourceAsStream(file); 46 | Properties properties = new Properties(); 47 | properties.load(stream); 48 | value = properties.getProperty(key); 49 | } catch (IOException e) { 50 | logger.error("--资源文件取值失败--", e); 51 | } 52 | return value; 53 | } 54 | 55 | /** 56 | *
57 | 	 * Description: resolves the Locale from a language string
58 | 	 * Author: ZhangYi
59 | 	 * Date: 2015-01-30 13:24:06
60 | 	 * Parameters:
61 | 	 * @param language	language code (e.g. en, zh_CN, zh_HK)
62 | 	 * @return the matching Locale (Locale.CHINESE by default)
63 | 	 * 
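	 * Illustrative usage:
	 *   ResourceHolder.getLocale("en_US")  -> Locale.ENGLISH
	 *   ResourceHolder.getLocale("zh_CN")  -> Locale.SIMPLIFIED_CHINESE
	 *   ResourceHolder.getLocale(null)     -> Locale.CHINESE (default)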
64 | */ 65 | public static Locale getLocale(String language) { 66 | Locale locale = Locale.CHINESE; 67 | if (!StringUtil.isEmpty(language)) { 68 | language = language.toLowerCase(); 69 | if (language.indexOf("en") != -1) { 70 | locale = Locale.ENGLISH; 71 | } 72 | if (language.indexOf("zh_hk") != -1) { 73 | locale = Locale.TRADITIONAL_CHINESE; 74 | } 75 | if (language.indexOf("zh_cn") != -1) { 76 | locale = Locale.SIMPLIFIED_CHINESE; 77 | } 78 | } 79 | return locale; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/main/java/com/share/util/ClazzUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.util; 2 | 3 | import java.lang.reflect.Field; 4 | import java.lang.reflect.Method; 5 | import java.util.Date; 6 | import java.util.LinkedHashMap; 7 | import java.util.Map; 8 | 9 | public class ClazzUtil { 10 | 11 | public Map reflect(Object obj){ 12 | Map map = new LinkedHashMap(); 13 | Class clazz = obj.getClass(); 14 | Field[] fields = clazz.getDeclaredFields(); 15 | for (Field field : fields) { 16 | String name = field.getName(); 17 | if(name.equalsIgnoreCase("serialVersionUID")||name.contains("$this")){ 18 | continue; 19 | } 20 | String type = field.getType().getSimpleName(); 21 | try { 22 | Method method = clazz.getMethod((type.equalsIgnoreCase("boolean")?"is":"get")+name.substring(0, 1).toUpperCase()+ name.substring(1)); 23 | Object value = method.invoke(obj); 24 | if(type.equalsIgnoreCase("String")||type.equalsIgnoreCase("Date")){ 25 | if(type.equalsIgnoreCase("Date")){ 26 | value = DateUtil.formatDateTimeStr((Date)value); 27 | } 28 | value = value.toString(); 29 | } 30 | map.put(name, value); 31 | }catch (Exception e) { 32 | e.printStackTrace(); 33 | } 34 | } 35 | return map; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/java/com/share/util/HttpUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.util; 2 | 3 | import java.io.ByteArrayOutputStream; 4 | import java.io.DataOutputStream; 5 | import java.io.InputStream; 6 | import java.io.UnsupportedEncodingException; 7 | import java.net.Authenticator; 8 | import java.net.HttpURLConnection; 9 | import java.net.PasswordAuthentication; 10 | import java.net.URL; 11 | import java.net.URLDecoder; 12 | import java.net.URLEncoder; 13 | 14 | import org.apache.commons.codec.binary.Base64; 15 | import org.apache.http.Consts; 16 | import org.apache.http.HttpEntity; 17 | import org.apache.http.HttpResponse; 18 | import org.apache.http.client.methods.HttpDelete; 19 | import org.apache.http.client.methods.HttpGet; 20 | import org.apache.http.client.methods.HttpHead; 21 | import org.apache.http.client.methods.HttpOptions; 22 | import org.apache.http.client.methods.HttpPost; 23 | import org.apache.http.client.methods.HttpPut; 24 | import org.apache.http.client.methods.HttpRequestBase; 25 | import org.apache.http.client.methods.HttpTrace; 26 | import org.apache.http.client.utils.HttpClientUtils; 27 | import org.apache.http.entity.ContentType; 28 | import org.apache.http.entity.StringEntity; 29 | import org.apache.http.impl.client.CloseableHttpClient; 30 | import org.apache.http.impl.client.HttpClients; 31 | import org.apache.http.util.EntityUtils; 32 | import org.apache.logging.log4j.LogManager; 33 | import org.apache.logging.log4j.Logger; 34 | 35 | public class HttpUtil { 36 | private static Logger logger = 
LogManager.getLogger(HttpUtil.class); 37 | /** 38 | * http客户端 39 | */ 40 | private static CloseableHttpClient httpClient = HttpClients.createDefault(); 41 | /** 42 | * Get请求 43 | */ 44 | public final static String METHOD_GET = "GET"; 45 | /** 46 | * Post请求 47 | */ 48 | public final static String METHOD_POST = "POST"; 49 | /** 50 | * Head请求 51 | */ 52 | public final static String METHOD_HEAD = "HEAD"; 53 | /** 54 | * Options请求 55 | */ 56 | public final static String METHOD_OPTIONS = "OPTIONS"; 57 | /** 58 | * Put请求 59 | */ 60 | public final static String METHOD_PUT = "PUT"; 61 | /** 62 | * Delete请求 63 | */ 64 | public final static String METHOD_DELETE = "DELETE"; 65 | /** 66 | * Trace请求 67 | */ 68 | public final static String METHOD_TRACE = "TRACE"; 69 | /** 70 | * @param proxyHost 代理地址 71 | * @param port 代理端口 72 | * @param account 认证账号 73 | * @param password 认证密码 74 | */ 75 | public static void auth(String proxyHost,int port,final String account,final String password){ 76 | System.setProperty("https.proxyHost", proxyHost); 77 | System.setProperty("https.proxyPort", port+""); 78 | Authenticator.setDefault(new Authenticator() { 79 | protected PasswordAuthentication getPasswordAuthentication(){ 80 | return new PasswordAuthentication(account, new String(password).toCharArray()); 81 | } 82 | }); 83 | } 84 | /** 85 | * @description 判断服务连通性 86 | * @author yi.zhang 87 | * @time 2017年4月19日 下午6:00:40 88 | * @param url 89 | * @param auth 认证信息(username+":"+password) 90 | * @return (true:连接成功,false:连接失败) 91 | */ 92 | public static boolean checkConnection(String url,String auth){ 93 | boolean flag = false; 94 | try { 95 | HttpURLConnection connection = (HttpURLConnection)new URL(url).openConnection(); 96 | connection.setConnectTimeout(5*1000); 97 | if(auth!=null&&!"".equals(auth)){ 98 | String authorization = "Basic "+new String(Base64.encodeBase64(auth.getBytes())); 99 | connection.setRequestProperty("Authorization", authorization); 100 | } 101 | connection.connect(); 102 | if(connection.getResponseCode()==HttpURLConnection.HTTP_OK){ 103 | flag = true; 104 | } 105 | connection.disconnect(); 106 | }catch (Exception e) { 107 | logger.error("--Server Connect Error !",e); 108 | } 109 | return flag; 110 | } 111 | /** 112 | * @param url 请求URL 113 | * @param method 请求URL 114 | * @param param json参数(post|put) 115 | * @param auth 认证信息(username+":"+password) 116 | * @return 117 | */ 118 | public static String httpRequest(String url,String method,String param,String auth){ 119 | String result = null; 120 | HttpResponse httpResponse = null; 121 | try { 122 | HttpRequestBase http = new HttpGet(url); 123 | if(method.equalsIgnoreCase(METHOD_POST)){ 124 | http = new HttpPost(url); 125 | StringEntity body = new StringEntity(param,ContentType.APPLICATION_JSON); 126 | body.setContentType("application/json"); 127 | ((HttpPost)http).setEntity(body); 128 | }else if(method.equalsIgnoreCase(METHOD_PUT)){ 129 | http = new HttpPut(url); 130 | StringEntity body = new StringEntity(param,ContentType.APPLICATION_JSON); 131 | body.setContentType("application/json"); 132 | ((HttpPut)http).setEntity(body); 133 | }else if(method.equalsIgnoreCase(METHOD_DELETE)){ 134 | http = new HttpDelete(url); 135 | }else if(method.equalsIgnoreCase(METHOD_HEAD)){ 136 | http = new HttpHead(url); 137 | }else if(method.equalsIgnoreCase(METHOD_OPTIONS)){ 138 | http = new HttpOptions(url); 139 | }else if(method.equalsIgnoreCase(METHOD_TRACE)){ 140 | http = new HttpTrace(url); 141 | } 142 | if(auth!=null&&!"".equals(auth)){ 143 | String authorization = 
"Basic "+new String(Base64.encodeBase64(auth.getBytes())); 144 | http.setHeader("Authorization", authorization); 145 | } 146 | httpResponse = httpClient.execute(http); 147 | HttpEntity entity = httpResponse.getEntity(); 148 | result = EntityUtils.toString(entity,Consts.UTF_8); 149 | }catch (Exception e) { 150 | logger.error("--http request error !",e); 151 | result = e.getMessage(); 152 | }finally { 153 | HttpClientUtils.closeQuietly(httpResponse); 154 | } 155 | return result; 156 | } 157 | /** 158 | * @param url 请求URL 159 | * @param method 请求URL 160 | * @param param json参数(post|put) 161 | * @return 162 | */ 163 | public static String urlRequest(String url,String method,String param,String auth){ 164 | String result = null; 165 | try { 166 | HttpURLConnection connection = (HttpURLConnection)new URL(url).openConnection(); 167 | connection.setConnectTimeout(60*1000); 168 | connection.setRequestMethod(method.toUpperCase()); 169 | if(auth!=null&&!"".equals(auth)){ 170 | String authorization = "Basic "+new String(Base64.encodeBase64(auth.getBytes())); 171 | connection.setRequestProperty("Authorization", authorization); 172 | } 173 | if(param!=null&&!"".equals(param)){ 174 | connection.setDoInput(true); 175 | connection.setDoOutput(true); 176 | connection.connect(); 177 | DataOutputStream dos = new DataOutputStream(connection.getOutputStream()); 178 | dos.write(param.getBytes(Consts.UTF_8)); 179 | dos.flush(); 180 | dos.close(); 181 | }else{ 182 | connection.connect(); 183 | } 184 | if(connection.getResponseCode()==HttpURLConnection.HTTP_OK){ 185 | InputStream in = connection.getInputStream(); 186 | ByteArrayOutputStream out = new ByteArrayOutputStream(); 187 | byte[] buff = new byte[1024]; 188 | int len = 0; 189 | while((len=in.read(buff, 0, buff.length))>0){ 190 | out.write(buff, 0, len); 191 | } 192 | byte[] data = out.toByteArray(); 193 | in.close(); 194 | result = data!=null&&data.length>0?new String(data, Consts.UTF_8):null; 195 | }else{ 196 | result = "{\"status\":"+connection.getResponseCode()+",\"msg\":\""+connection.getResponseMessage()+"\"}"; 197 | } 198 | connection.disconnect(); 199 | }catch (Exception e) { 200 | logger.error("--http request error !",e); 201 | } 202 | return result; 203 | } 204 | /** 205 | * @decription URL编码 206 | * @author yi.zhang 207 | * @time 2017年9月15日 下午3:33:38 208 | * @param target 209 | * @return 210 | */ 211 | public static String encode(String target){ 212 | String result = target; 213 | try { 214 | result = URLEncoder.encode(target, Consts.UTF_8.name()); 215 | } catch (UnsupportedEncodingException e) { 216 | logger.error("--http encode error !",e); 217 | } 218 | return result; 219 | } 220 | /** 221 | * @decription URL解码 222 | * @author yi.zhang 223 | * @time 2017年9月15日 下午3:33:38 224 | * @param target 225 | * @return 226 | */ 227 | public static String decode(String target){ 228 | String result = target; 229 | try { 230 | result = URLDecoder.decode(target, Consts.UTF_8.name()); 231 | } catch (UnsupportedEncodingException e) { 232 | logger.error("--http decode error !",e); 233 | } 234 | return result; 235 | } 236 | 237 | public static void main(String[] args) { 238 | String index = "testlog"; 239 | String type = "servicelog"; 240 | String id = ""; 241 | String url = "http://127.0.0.1:9200/"+index+"/"+type; 242 | if(!"".equals(id)){ 243 | url=url+"/"+id; 244 | }else{ 245 | // url=url+"/_search"; 246 | } 247 | String method = "post"; 248 | // String body = "{\"query\":{\"match\":{\"operator\":\"test\"}}}"; 249 | String body = "{\"name\":\"mobile 
music\",\"operator\":\"10000\",\"content\":\"I like music!\",\"createTime\":\"2017-04-20\"}"; 250 | String result = null; 251 | String auth="elastic:elastic"; 252 | result = checkConnection("http://127.0.0.1:9200",auth)+""; 253 | result = httpRequest(url, method, body,null); 254 | System.out.println(result); 255 | System.out.println("---------------------------------------------------------"); 256 | // result = urlRequest(url, method, param); 257 | System.out.println(result); 258 | } 259 | } 260 | 261 | -------------------------------------------------------------------------------- /src/main/java/com/share/util/NumberUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.util; 2 | 3 | public class NumberUtil { 4 | public static boolean isNumber(String target){ 5 | if(target==null||"".equals(target)||"null".equals(target)){ 6 | return false; 7 | } 8 | for(int i=0;i='0'&&c<='9')){ 11 | return false; 12 | } 13 | } 14 | return true; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/main/java/com/share/util/StringUtil.java: -------------------------------------------------------------------------------- 1 | package com.share.util; 2 | 3 | public class StringUtil { 4 | public static boolean isEmpty(String target){ 5 | if(target==null||"".equals(target)||"null".equals(target)){ 6 | return true; 7 | } 8 | return false; 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/main/resources/bin/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | path="${BASH_SOURCE-$0}" 3 | path="$(dirname "${path}")" 4 | path="$(cd "${path}";pwd)" 5 | base=${path}/.. 6 | base_path="$(cd "${base}";pwd)" 7 | 8 | app_name=database-transform-tool 9 | conf=${base_path}/config/config.properties 10 | log=${base_path}/logs/${app_name}.log 11 | pid=${base_path}/data/${app_name}.pid 12 | if [ -f $pid ] ; then 13 | echo "please run stop.sh first,then start.sh" 2>$2 14 | exit 1; 15 | fi 16 | 17 | if [ -n "${app_name}" ] ; then 18 | kid = `ps -ef |grep ${app_name}|grep -v grep|awk '{print $2}'` 19 | echo pid[$kid] from `uname` system process! 20 | fi 21 | 22 | if [ -n $kid ] ; then 23 | echo [`uname`] ${app_name} process [$kid] is Running! 24 | exit 1; 25 | fi 26 | 27 | if [ -f $log ] ; then 28 | rm -rf ${base_path}/logs/* 29 | rm -rf $pid 30 | fi 31 | 32 | if [ ! -d ${base_path}/logs ] ; then 33 | mkdir -p ${base_path}/logs 34 | fi 35 | 36 | if [ ! 
-d ${base_path}/data ] ; then 37 | mkdir -p ${base_path}/data 38 | fi 39 | 40 | if [ "$JAVA_HOME" != "" ]; then 41 | JAVA="$JAVA_HOME/bin/java" 42 | else 43 | JAVA=java 44 | fi 45 | JAVA_ENV="-server -Xms2g -Xmx2g -Xss1m " 46 | JAVA_OPTS="$JAVA_ENV -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+AlwaysPreTouch -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -Djdk.io.permissionsUseCanonicalPath=true -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable.jmx=true -Dlog4j.skipJansi=true -XX:+HeapDumpOnOutOfMemoryError " 47 | 48 | for i in "${base_path}"/lib/*.jar 49 | do 50 | CLASSPATH="$i:$CLASSPATH" 51 | done 52 | 53 | if [ -e $conf -a -d ${base_path}/logs ] 54 | then 55 | echo ------------------------------------------------------------------------------------------- 56 | cd ${base_path} 57 | 58 | for file in "${base_path}"/*.jar 59 | do 60 | file=${file##*/} 61 | filename=${file%.*} 62 | echo -----------------file=${file},filename=${filename}------------------ 63 | if [[ $filename =~ $app_name ]]; then 64 | app=$file 65 | echo app jar:$app 66 | break; 67 | fi 68 | done 69 | 70 | echo ${app_name} Starting ... 71 | $JAVA $JAVA_OPTS -cp .:$app:$CLASSPATH com.ucloudlink.css.Application -spring.config.location=$conf -base.path=${base_path} >$log 2>&1 & 72 | echo $! > $pid 73 | 74 | kid=`ps -ef |grep ${app_name}|grep -v grep|awk '{print $2}'` 75 | if [ -n "$kid" ] ; then 76 | echo ----------------------------${app_name} STARTED SUCCESS------------------------------------ 77 | else 78 | echo ----------------------------${app_name} STARTED ERROR------------------------------------ 79 | fi 80 | echo ------------------------------------------------------------------------------------------- 81 | else 82 | echo "${app_name} config($conf) or logs directory does not exist, please create it first!" 83 | rm -rf $pid 84 | fi
-------------------------------------------------------------------------------- /src/main/resources/bin/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | path="${BASH_SOURCE-$0}" 3 | path="$(dirname "${path}")" 4 | path="$(cd "${path}";pwd)" 5 | base=${path}/.. 6 | base_path="$(cd "${base}";pwd)" 7 | 8 | app_name=database-transform-tool 9 | conf=${base_path}/config/application.properties 10 | log=${base_path}/logs/${app_name}.log 11 | pid=${base_path}/data/${app_name}.pid 12 | 13 | if [ -n "${app_name}" ] ; then 14 | kid=`ps -ef |grep ${app_name}|grep -v grep|awk '{print $2}'` 15 | echo pid[$kid] from `uname` system process! 16 | fi 17 | 18 | if [ -z "$kid" -a -e "$pid" ] ; then 19 | chmod +x $pid 20 | kid=`cat $pid` 21 | echo pid[$kid] from pid file! 22 | fi 23 | 24 | if [ -n "${kid}" ]; 25 | then 26 | echo ${app_name} pid:${kid} 27 | kill -9 ${kid} 28 | echo ----------------------------${app_name} STOPPED SUCCESS------------------------------------ 29 | else 30 | echo "${app_name} pid doesn't exist or has already stopped!" 31 | fi 32 | 33 | if [ -f $pid ]; then 34 | rm -rf $pid 35 | echo "If there is a problem, please check the log!"
36 | fi -------------------------------------------------------------------------------- /src/main/resources/canal.properties: -------------------------------------------------------------------------------- 1 | ######################################################################################### 2 | #################################### Canal Config ################################# 3 | ######################################################################################### 4 | #\u670d\u52a1\u5668\u5b9e\u4f8b\u5217\u8868(\u591a\u5b9e\u4f8b\u4ee5','\u5206\u5272) 5 | canal.destinations=example 6 | canal.servers=127.0.0.1:11111 7 | canal.username=canal 8 | canal.password=canal 9 | canal.batch_size=1000 10 | #\u76d1\u63a7\u8fc7\u6ee4\u89c4\u5219(\u9ed8\u8ba4\u6240\u6709) 11 | canal.filter_regex=.*\\..* 12 | #\u662f\u5426\u4f7f\u7528zookeeper\u5730\u5740\u8bbf\u95ee\u65b9\u5f0f 13 | canal.zookeeper.enabled=false 14 | #\u662f\u5426\u542f\u7528\u6570\u636e\u8f6c\u5316\u65b9\u5f0f 15 | canal.transform.enabled=false 16 | #\u6570\u636e\u8f6c\u5316\u5b58\u50a8\u7c7b\u578b:elasticsearch|cassandra|jdbc(MySQL|SQL Server|Oracle)|greenplum 17 | canal.transform.type=elasticsearch -------------------------------------------------------------------------------- /src/main/resources/config.properties: -------------------------------------------------------------------------------- 1 | ######################################################################################### 2 | ########## Transform Config(\u6570\u636e\u8f6c\u5b58)-->transform_enabled=true ######################### 3 | ######################################################################################### 4 | #1.Elasticsearch\u914d\u7f6e[transform_type=elasticsearch](\u652f\u6301\u96c6\u7fa4\u914d\u7f6e,\u591a\u670d\u52a1\u4ee5','\u5206\u5272) 5 | elasticsearch.cluster.name=elasticsearch 6 | elasticsearch.cluster.servers=localhost:9300 7 | elasticsearch.cluster.username=elastic 8 | elasticsearch.cluster.password=elastic 9 | #2.Cassandra\u914d\u7f6e[transform_type=cassandra](\u652f\u6301\u96c6\u7fa4\u914d\u7f6e,\u591a\u670d\u52a1\u4ee5','\u5206\u5272) 10 | cassandra.servers=localhost:9402 11 | cassandra.keyspace=css_analyse 12 | cassandra.username=cassandra 13 | cassandra.password=cassandra 14 | #3.MongoDB\u914d\u7f6e[transform_type=mongodb](\u652f\u6301\u96c6\u7fa4\u914d\u7f6e,\u591a\u670d\u52a1\u4ee5','\u5206\u5272) 15 | mongodb.servers=localhost:27017 16 | mongodb.database=admin 17 | mongodb.schema=cdr 18 | mongodb.username=root 19 | mongodb.password=root 20 | #4.Greenplum\u914d\u7f6e[transform_type=greenplum](\u6570\u636e\u4ed3\u5e93) 21 | greenplum.address=localhost:5432 22 | greenplum.database=bitest 23 | greenplum.schema=cdr 24 | greenplum.username=bitest 25 | greenplum.password=123456 26 | #5.\u6570\u636e\u5e93\u914d\u7f6e[transform_type=jdbc](\u652f\u6301[MySQL|SQL Server|Oracle]\u7b49\u6570\u636e\u5e93) 27 | jdbc.driver=com.mysql.jdbc.Driver 28 | jdbc.url=jdbc:mysql://localhost:3306/cdr?useUnicode=true&characterEncoding=UTF8 29 | jdbc.username=root 30 | jdbc.password=root 31 | jdbc.druid.enabled=true 32 | jdbc.druid.max_pool_size=100 33 | jdbc.druid.init_pool_size=10 34 | ######################################################################################### 35 | #(\u652f\u6301\u96c6\u7fa4\u914d\u7f6e,\u591a\u670d\u52a1\u4ee5','\u5206\u5272) 36 | kafka.servers=127.0.0.1:9092 37 | #\u662f\u5426\u4f7f\u7528zookeeper\u5730\u5740\u8bbf\u95ee\u65b9\u5f0f 38 | kafka.zookeeper.enabled=false 39 | kafka.zookeeper.servers=127.0.0.1:2181 
40 | #\u751f\u4ea7\u7aef\u6301\u4e45\u8bbe\u7f6e 41 | kafka.productor.acks=all 42 | #\u6d88\u8d39\u7aef\u6279\u91cf\u83b7\u53d6\u6570\u91cf 43 | kafka.consumer.batch_size=1000 -------------------------------------------------------------------------------- /src/main/resources/log4j2.properties: -------------------------------------------------------------------------------- 1 | appenders = console,rolling 2 | 3 | appender.console.type = Console 4 | appender.console.name = System 5 | appender.console.target = SYSTEM_OUT 6 | appender.console.layout.type = PatternLayout 7 | appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss}|%-5p|[%c{1}(%L)-->%M] -- %m%n 8 | 9 | rootLogger.level = info 10 | rootLogger.appenderRefs= console 11 | rootLogger.appenderRef.console.ref = System 12 | 13 | appender.rolling.type = RollingFile 14 | appender.rolling.name = LogFile 15 | appender.rolling.append = false 16 | appender.rolling.layout.type = PatternLayout 17 | appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss}|%-5p|[%c{1}(%L)-->%M] -- %m%n 18 | appender.rolling.fileName = target/logs/sys.log 19 | appender.rolling.filePattern = target/logs/log_[%d{yyyy-MM-dd-HH-mm}]-%i.log.gz 20 | appender.rolling.policies.type = Policies 21 | appender.rolling.policies.time.type = TimeBasedTriggeringPolicy 22 | appender.rolling.policies.time.interval = 24 23 | appender.rolling.policies.time.modulate = true 24 | appender.rolling.policies.size.type = SizeBasedTriggeringPolicy 25 | appender.rolling.policies.size.size = 10MB 26 | appender.rolling.strategy.type = DefaultRolloverStrategy 27 | appender.rolling.strategy.max = 5 28 | 29 | loggers=rolling,other 30 | logger.rolling.name=com.ucloudlink 31 | logger.rolling.level=info 32 | logger.rolling.additivity=true 33 | logger.rolling.appenderRefs= rolling 34 | logger.rolling.appenderRef.rolling.ref = LogFile 35 | logger.other.name=org 36 | logger.other.level=warn 37 | logger.other.additivity=true 38 | logger.other.appenderRefs= other 39 | logger.other.appenderRef.other.ref = LogFile -------------------------------------------------------------------------------- /src/main/resources/messages_en.properties: -------------------------------------------------------------------------------- 1 | system.language.zh_CN=Chinese 2 | system.language.en_US=English 3 | system.name=DataSource Transform Tool 4 | system.version=v1.0 5 | system.decription=Software supports DataSource:\n1.Canal Monitor(MySQL) Syn\n2. Data Syn Transform: \n1)SQL DataSource (MySQL|SQL Server|Oracle) \n2)NoSQL DataSource (Cassandra|MongoDB) \n3)Search Engine (Elasticsearch) \n4)Message Queue (Kafka) etc. 6 | ################################################################################################################### 7 | dstt.btn.continue=Continue 8 | dstt.btn.prev=Prev 9 | dstt.btn.next=Next 10 | dstt.btn.back=Back 11 | dstt.btn.open=Open 12 | dstt.btn.close=Close 13 | dstt.btn.enabled=Enabled 14 | dstt.btn.disabled=Disabled 15 | dstt.btn.submit=Submit 16 | dstt.btn.add=Add 17 | dstt.btn.update=Update 18 | dstt.btn.cancel=Cancel 19 | dstt.btn.remove=Remove 20 | dstt.btn.refresh=Refresh 21 | dstt.btn.syn=Syn 22 | dstt.btn.synall=Syn All 23 | 24 | dstt.service.canal=Canal Monitor 25 | dstt.service.datasyn=Data Syn 26 | dstt.service.datalog=Data Log 27 | 28 | dstt.ds.source=Source 29 | dstt.ds.target=Target 30 | 31 | dstt.ds.seq=No. 
32 | dstt.ds.saddress=Source Adress 33 | dstt.ds.stype=Source Type 34 | dstt.ds.taddress=Target Address 35 | dstt.ds.ttype=Target Type 36 | dstt.ds.action=Action 37 | 38 | dstt.ds.type=Service Type 39 | dstt.ds.type.0=Canal Service 40 | dstt.ds.type.0.1=Zookeeper Service 41 | dstt.ds.type.1=Elasticsearch Service 42 | dstt.ds.type.2=NoSQL Service 43 | dstt.ds.type.2.1=Cassandra 44 | dstt.ds.type.2.2=MongoDB 45 | dstt.ds.type.2.3=Redis 46 | dstt.ds.type.2.4=Memecached 47 | dstt.ds.type.3=SQL Service 48 | dstt.ds.type.3.1=MySQL 49 | dstt.ds.type.3.2=SQL\u3000Server 50 | dstt.ds.type.3.3=Oracle 51 | dstt.ds.type.4=Data Store 52 | dstt.ds.type.4.1=Greenplum 53 | dstt.ds.type.5=Message Queue 54 | dstt.ds.type.5.1=Kafka 55 | 56 | dstt.ds.servers=Servers 57 | dstt.ds.servers.tip=(Support cluster and multi service use ',' split) 58 | dstt.ds.port=Port 59 | dstt.ds.username=Username 60 | dstt.ds.password=Password 61 | dstt.ds.database=Database 62 | dstt.ds.schema=Schema 63 | dstt.ds.keyspace=Keyspace 64 | dstt.ds.mapping=Table Mapping 65 | dstt.ds.filter=Filter Columns 66 | dstt.ds.filter.tip=(Multi columns use ',' split) -------------------------------------------------------------------------------- /src/main/resources/messages_zh_CN.properties: -------------------------------------------------------------------------------- 1 | system.language.zh_CN=\u4e2d\u6587 2 | system.language.en_US=\u82f1\u6587 3 | system.name=\u6570\u636e\u6e90\u8f6c\u6362\u5de5\u5177 4 | system.version=v1.0 5 | system.decription=\u8f6f\u4ef6\u652f\u6301\u6570\u636e\u8d44\u6e90:\n1.Canal\u670d\u52a1\u76d1\u63a7(MySQL)\n2.\u6570\u636e\u540c\u6b65\u8f6c\u6362:\n1)SQL\u6570\u636e\u6e90(MySQL|SQL Server|Oracle\u7b49)\n2)NoSQL\u6570\u636e\u6e90(Cassandra|MongoDB\u7b49)\n3)\u641c\u7d22\u5f15\u64ce(Elasticsearch)\n4)\u6d88\u606f\u961f\u5217(Kafka)\u7b49 6 | ################################################################################################################### 7 | dstt.btn.continue=\u7ee7\u7eed 8 | dstt.btn.prev=\u4e0a\u4e00\u6b65 9 | dstt.btn.next=\u4e0b\u4e00\u6b65 10 | dstt.btn.back=\u8fd4\u56de 11 | dstt.btn.open=\u5f00\u542f 12 | dstt.btn.close=\u5173\u95ed 13 | dstt.btn.enabled=\u542f\u7528 14 | dstt.btn.disabled=\u7981\u7528 15 | dstt.btn.submit=\u63d0\u4ea4 16 | dstt.btn.add=\u589e\u52a0 17 | dstt.btn.update=\u4fee\u6539 18 | dstt.btn.cancel=\u53d6\u6d88 19 | dstt.btn.remove=\u79fb\u9664 20 | dstt.btn.refresh=\u5237\u65b0 21 | dstt.btn.syn=\u540c\u6b65 22 | dstt.btn.synall=\u540c\u6b65\u5168\u90e8 23 | 24 | dstt.service.canal=Canal\u76d1\u63a7 25 | dstt.service.datasyn=\u6570\u636e\u540c\u6b65 26 | dstt.service.datalog=\u65e5\u5fd7 27 | 28 | dstt.ds.source=\u6570\u636e\u6e90 29 | dstt.ds.target=\u76ee\u6807\u6e90 30 | 31 | dstt.ds.seq=\u5e8f\u53f7 32 | dstt.ds.saddress=\u6570\u636e\u5730\u5740 33 | dstt.ds.stype=\u6570\u636e\u7c7b\u578b 34 | dstt.ds.taddress=\u76ee\u6807\u5730\u5740 35 | dstt.ds.ttype=\u76ee\u6807\u7c7b\u578b 36 | dstt.ds.action=\u64cd\u4f5c 37 | 38 | dstt.ds.type=\u670d\u52a1\u7c7b\u578b 39 | dstt.ds.type.0=Canal\u670d\u52a1 40 | dstt.ds.type.0.1=Zookeeper\u670d\u52a1 41 | dstt.ds.type.1=Elasticsearch\u670d\u52a1 42 | dstt.ds.type.2=NoSQL\u670d\u52a1 43 | dstt.ds.type.2.1=Cassandra 44 | dstt.ds.type.2.2=MongoDB 45 | dstt.ds.type.2.3=Redis 46 | dstt.ds.type.2.4=Memecached 47 | dstt.ds.type.3=SQL\u670d\u52a1 48 | dstt.ds.type.3.1=MySQL 49 | dstt.ds.type.3.2=SQL\u3000Server 50 | dstt.ds.type.3.3=Oracle 51 | dstt.ds.type.4=\u6570\u636e\u4ed3\u5e93 52 | dstt.ds.type.4.1=Greenplum 53 | 
dstt.ds.type.5=\u6d88\u606f\u961f\u5217 54 | dstt.ds.type.5.1=Kafka 55 | 56 | dstt.ds.servers=\u670d\u52a1\u5730\u5740 57 | dstt.ds.servers.tip=(\u652f\u6301\u96c6\u7fa4\u914d\u7f6e\u5219\u591a\u670d\u52a1\u4ee5','\u5206\u5272) 58 | dstt.ds.port=\u670d\u52a1\u7aef\u53e3 59 | dstt.ds.username=\u7528 \u6237 \u540d 60 | dstt.ds.password=\u5bc6 \u7801 61 | dstt.ds.database=\u6570 \u636e \u5e93 62 | dstt.ds.schema=Schema 63 | dstt.ds.keyspace=Keyspace 64 | dstt.ds.mapping=\u5e93\u8868\u6620\u5c04 65 | dstt.ds.filter=\u8868\u5b57\u6bb5\u8fc7\u6ee4 66 | dstt.ds.filter.tip=(\u591a\u5b57\u6bb5\u4ee5','\u5206\u5272) --------------------------------------------------------------------------------
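The dstt.ds.type.* keys in the two messages files above back the numeric resource-type codes used throughout the Swing UI (CommonFrame.handleType): a fractional code such as 2.1 resolves to a concrete product label (Cassandra), while its integer part resolves to the service family (NoSQL Service). The snippet below is a minimal, illustrative sketch of that lookup, assuming the project's ResourceHolder and StringUtil classes are on the classpath; the class name TypeLabelDemo and the "Unknown" fallback are only for illustration and are not part of the repository.

    import com.share.swing.ResourceHolder;
    import com.share.util.StringUtil;

    public class TypeLabelDemo {
        public static void main(String[] args) {
            double type = 2.1; // 2.1 -> Cassandra in messages_en.properties / messages_zh_CN.properties
            // Look up the exact code first (dstt.ds.type.2.1), then fall back to the family (dstt.ds.type.2).
            String label = ResourceHolder.getProperty("dstt.ds.type." + type);
            if (StringUtil.isEmpty(label)) {
                label = ResourceHolder.getProperty("dstt.ds.type." + (int) type);
            }
            System.out.println(StringUtil.isEmpty(label) ? "Unknown" : label); // prints "Cassandra"
        }
    }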