├── docs ├── 11-monitoring.md ├── 10-junit-auto-init.md ├── 08-dynamic-sql.md ├── 04-pagnation.md ├── 06-inserting-map-pojo.md ├── 00-start.md ├── 01-dependency.md ├── 07-transaction.md ├── 09-spring-boot-autoconfig.md ├── 03-querying.md ├── 02-datasources.md └── 05-updating.md ├── hydrogen-dao ├── src │ ├── test │ │ ├── resources │ │ │ ├── scripts │ │ │ │ ├── before.sql │ │ │ │ ├── after.sql │ │ │ │ ├── blog.csv │ │ │ │ └── tables.sql │ │ │ ├── application.properties │ │ │ ├── application-h2.properties │ │ │ └── logback.xml │ │ └── java │ │ │ ├── com │ │ │ └── hyd │ │ │ │ ├── dao │ │ │ │ ├── src │ │ │ │ │ └── models │ │ │ │ │ │ ├── BlogRecord.java │ │ │ │ │ │ └── Blog.java │ │ │ │ ├── command │ │ │ │ │ └── builder │ │ │ │ │ │ └── helper │ │ │ │ │ │ ├── CommandBuilderHelperTest.java │ │ │ │ │ │ └── ColumnInfoHelperTest.java │ │ │ │ ├── mate │ │ │ │ │ └── util │ │ │ │ │ │ ├── ClsTest.java │ │ │ │ │ │ ├── HashMapTest.java │ │ │ │ │ │ ├── ScriptExecutorTest.java │ │ │ │ │ │ ├── BatchPipelineTest.java │ │ │ │ │ │ ├── BatchTest.java │ │ │ │ │ │ ├── BeanUtilTest.java │ │ │ │ │ │ ├── LockerTest.java │ │ │ │ │ │ ├── CaseInsensitiveHashMapTest.java │ │ │ │ │ │ └── StrTest.java │ │ │ │ ├── database │ │ │ │ │ └── type │ │ │ │ │ │ └── CamelUnderscoreNameConverterTest.java │ │ │ │ ├── PageTest.java │ │ │ │ ├── dialects │ │ │ │ │ └── h2 │ │ │ │ │ │ ├── Payment.java │ │ │ │ │ │ └── H2FileDBTest.java │ │ │ │ ├── DAOUtils.java │ │ │ │ └── SQLTest.java │ │ │ │ └── daotests │ │ │ │ ├── dialects │ │ │ │ ├── H2Test.java │ │ │ │ └── MySQLTest.java │ │ │ │ ├── DataSourceFactories.java │ │ │ │ ├── benchmark │ │ │ │ ├── RunH2Server.java │ │ │ │ └── InsertBenchmark.java │ │ │ │ ├── springboot │ │ │ │ ├── SpringBootTestApp.java │ │ │ │ └── MultiDataSourceConf.java │ │ │ │ ├── InMemoryTestBase.java │ │ │ │ ├── JUnitRuleTestBase.java │ │ │ │ └── AbstractDaoTest.java │ │ │ └── jdbc │ │ │ └── InsertAffectedRowsTest.java │ └── main │ │ └── java │ │ └── com │ │ └── hyd │ │ └── dao │ │ ├── database │ │ ├── executor │ │ │ ├── ExecuteMode.java │ │ │ ├── ExecutionContext.java │ │ │ └── Executor.java │ │ ├── dialects │ │ │ ├── DefaultDialect.java │ │ │ ├── impl │ │ │ │ ├── MsSqlServerDialect.java │ │ │ │ ├── H2Dialect.java │ │ │ │ ├── OracleDialect.java │ │ │ │ └── MySqlDialect.java │ │ │ ├── Dialects.java │ │ │ └── Dialect.java │ │ ├── ExecutorFactory.java │ │ ├── type │ │ │ ├── ClobUtil.java │ │ │ ├── BlobReader.java │ │ │ └── NameConverter.java │ │ ├── ConnectionHolder.java │ │ ├── FQN.java │ │ ├── JDBCDriver.java │ │ ├── ColumnInfo.java │ │ ├── function │ │ │ └── FunctionHelper.java │ │ ├── NonPooledDataSource.java │ │ ├── RowIterator.java │ │ └── ConnectionContext.java │ │ ├── Table.java │ │ ├── exception │ │ ├── LogException.java │ │ ├── NoPrimaryKeyException.java │ │ └── DataConversionException.java │ │ ├── command │ │ ├── builder │ │ │ ├── CommandBuilder.java │ │ │ ├── UpdateBuilder.java │ │ │ ├── QueryBuilder.java │ │ │ ├── DeleteBuilder.java │ │ │ ├── helper │ │ │ │ ├── ColumnInfoHelper.java │ │ │ │ └── CommandBuilderHelper.java │ │ │ └── InsertBuilder.java │ │ ├── IteratorBatchCommand.java │ │ ├── Command.java │ │ ├── BatchCommand.java │ │ └── MappedCommand.java │ │ ├── mate │ │ └── util │ │ │ ├── Arr.java │ │ │ ├── BeanException.java │ │ │ ├── MapCacheUtils.java │ │ │ ├── Cls.java │ │ │ ├── Closer.java │ │ │ ├── Locker.java │ │ │ ├── BatchPipeline.java │ │ │ ├── Batch.java │ │ │ ├── CSVReader.java │ │ │ ├── CaseInsensitiveHashMap.java │ │ │ ├── DBCPDataSource.java │ │ │ ├── TypeUtil.java │ │ │ ├── 
ScriptExecutor.java │ │ │ └── ResultSetUtil.java │ │ ├── TransactionException.java │ │ ├── spring │ │ ├── SpringConnectionFactory.java │ │ └── SpringAutoConfiguration.java │ │ ├── sp │ │ ├── SpParamType.java │ │ ├── SpParam.java │ │ └── StorageProcedureHelper.java │ │ ├── time │ │ └── TimeFormatters.java │ │ ├── snapshot │ │ ├── Snapshot.java │ │ └── ExecutorInfo.java │ │ ├── Page.java │ │ ├── DAOException.java │ │ ├── repository │ │ └── Repository.java │ │ ├── junit │ │ └── HydrogenDAORule.java │ │ ├── DataSources.java │ │ └── Row.java └── local-lib │ ├── ojdbc6.jar │ ├── sqljdbc.jar │ ├── sqljdbc4.jar │ └── sqljdbc41.jar ├── deploy.cmd.txt ├── .gitignore ├── hydrogen-mate-config.xml └── .editorconfig /docs/11-monitoring.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/10-junit-auto-init.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/scripts/before.sql: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/scripts/after.sql: -------------------------------------------------------------------------------- 1 | drop table blog; 2 | drop table blog2; 3 | -------------------------------------------------------------------------------- /deploy.cmd.txt: -------------------------------------------------------------------------------- 1 | mvn -P maven-central-for-deploy "-Dmaven.test.skip=true" "-DPASS=!" clean deploy 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .idea 3 | *.iml 4 | nb-configuration.xml 5 | pom-private.xml 6 | hydrogen-generator-profiles.json 7 | -------------------------------------------------------------------------------- /hydrogen-dao/local-lib/ojdbc6.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yiding-he/hydrogen-dao/HEAD/hydrogen-dao/local-lib/ojdbc6.jar -------------------------------------------------------------------------------- /hydrogen-dao/local-lib/sqljdbc.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yiding-he/hydrogen-dao/HEAD/hydrogen-dao/local-lib/sqljdbc.jar -------------------------------------------------------------------------------- /hydrogen-dao/local-lib/sqljdbc4.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yiding-he/hydrogen-dao/HEAD/hydrogen-dao/local-lib/sqljdbc4.jar -------------------------------------------------------------------------------- /hydrogen-dao/local-lib/sqljdbc41.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yiding-he/hydrogen-dao/HEAD/hydrogen-dao/local-lib/sqljdbc41.jar -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.datasource.url=jdbc:h2:./target/db/default 2 | spring.datasource.username=sa 3 | 
-------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/executor/ExecuteMode.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.executor; 2 | 3 | public enum ExecuteMode { 4 | Batch, Streaming 5 | } 6 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/application-h2.properties: -------------------------------------------------------------------------------- 1 | spring.datasource.url=jdbc:h2:./target/db/default 2 | spring.datasource.username=sa 3 | 4 | logging.level.com.hyd.dao=debug 5 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/src/models/BlogRecord.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.src.models; 2 | 3 | import java.util.Date; 4 | 5 | public record BlogRecord( 6 | Long id, String title, String content, Date createTime 7 | ) { 8 | 9 | } 10 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/executor/ExecutionContext.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.executor; 2 | 3 | import lombok.Data; 4 | 5 | @Data 6 | public class ExecutionContext { 7 | 8 | private ExecuteMode executeMode = ExecuteMode.Batch; 9 | } 10 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/command/builder/helper/CommandBuilderHelperTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder.helper; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | public class CommandBuilderHelperTest { 6 | 7 | @Test 8 | public void getColumnInfos() { 9 | 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/scripts/blog.csv: -------------------------------------------------------------------------------- 1 | id,title,content,create_time,last_update,hidden 2 | 1,blog1,content of blog1,2018-01-07 11:23:45,2018-01-07 11:23:45,false 3 | 2,blog2,content of blog2,2018-01-07 11:23:45,2018-01-07 11:23:45,false 4 | 3,blog3,content of blog3,2018-01-07 11:23:45,2018-01-07 11:23:45,true -------------------------------------------------------------------------------- /hydrogen-mate-config.xml: -------------------------------------------------------------------------------- 1 | 2 | E:\Projects\my-open-source\hydrogen-dao\spring-boot-demo\src\main\java 3 | com.demo.pojo 4 | E:\Projects\my-open-source\hydrogen-dao\spring-boot-starter\src\test\resources\application.properties 5 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/Table.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import java.lang.annotation.Retention; 4 | import java.lang.annotation.RetentionPolicy; 5 | 6 | /** 7 | * 标识类对应的表明 8 | * 9 | * @author yiding.he 10 | */ 11 | @Retention(RetentionPolicy.RUNTIME) 12 | public @interface Table { 13 | 14 | String name(); 15 | } 16 | -------------------------------------------------------------------------------- 
/hydrogen-dao/src/main/java/com/hyd/dao/exception/LogException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.exception; 2 | 3 | public class LogException extends RuntimeException { 4 | 5 | public LogException(String message) { 6 | super(message); 7 | } 8 | 9 | public LogException(Throwable cause) { 10 | super(cause); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/CommandBuilder.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder; 2 | 3 | import com.hyd.dao.database.ConnectionContext; 4 | 5 | public abstract class CommandBuilder { 6 | 7 | protected final ConnectionContext context; 8 | 9 | public CommandBuilder(ConnectionContext context) { 10 | this.context = context; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/dialects/H2Test.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.dialects; 2 | 3 | import com.hyd.dao.mate.util.DBCPDataSource; 4 | import com.hyd.daotests.AbstractDaoTest; 5 | 6 | import javax.sql.DataSource; 7 | 8 | public class H2Test extends AbstractDaoTest { 9 | 10 | @Override 11 | protected DataSource createDataSource() { 12 | return DBCPDataSource.newH2MemDataSource(); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/DataSourceFactories.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests; 2 | 3 | import javax.sql.DataSource; 4 | 5 | import static com.hyd.dao.mate.util.DBCPDataSource.newMySqlDataSource; 6 | 7 | public class DataSourceFactories { 8 | 9 | public static DataSource mysqlDataSource() { 10 | return newMySqlDataSource( 11 | "localhost", 3306, "demo", "root", "root123", true, "utf8" 12 | ); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/benchmark/RunH2Server.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.benchmark; 2 | 3 | import org.h2.tools.Server; 4 | 5 | import java.sql.SQLException; 6 | 7 | public class RunH2Server { 8 | 9 | public static void main(String[] args) throws SQLException { 10 | Server server = Server.createTcpServer(args); 11 | System.out.println("server port: " + server.getPort()); 12 | server.start(); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/ClsTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.src.models.Blog; 4 | import org.junit.jupiter.api.Test; 5 | 6 | import static org.junit.jupiter.api.Assertions.assertTrue; 7 | 8 | public class ClsTest { 9 | 10 | @Test 11 | public void testHasField() throws Exception { 12 | assertTrue(Cls.hasField(Blog.class, "id")); 13 | assertTrue(Cls.hasField(Blog.class, "title")); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/HashMapTest.java: 
-------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.util.HashMap; 6 | 7 | import static org.junit.jupiter.api.Assertions.assertFalse; 8 | 9 | public class HashMapTest { 10 | 11 | @Test 12 | public void removeNull() throws Exception { 13 | HashMap map = new HashMap<>(); 14 | map.put("1", "2"); 15 | map.remove(null); 16 | assertFalse(map.isEmpty()); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /docs/08-dynamic-sql.md: -------------------------------------------------------------------------------- 1 | hydrogen-dao 提供 `com.hyd.dao.SQL` 类用于组建简单的动态 SQL 语句。下面是一个例子: 2 | 3 | ```java 4 | String name = null; 5 | String maxAge = 50; 6 | 7 | dao.execute(SQL 8 | .Update("user_table") 9 | .Set("last_update", new Date()) 10 | .Where(name != null, "name=?", name) 11 | .And("age <= ?", maxAge)); 12 | ``` 13 | 14 | 上面的例子中,因为 name 值为空,所以最后生成的语句就是 15 | 16 | ```sql 17 | update user_table set last_update = ? where age <= ? 18 | ``` 19 | 20 | 而不会包含 name 条件。 21 | 22 | `SQL` 类的具体使用方法,请参考单元测试。 -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/Arr.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | /** 4 | * @author yidin 5 | */ 6 | public class Arr { 7 | 8 | @SuppressWarnings("unchecked") 9 | public static T[] subarray(T[] arr, int start, int end) { 10 | if (start < 0 || end < 0 || start >= end) { 11 | return (T[]) new Object[0]; 12 | } 13 | 14 | var result = new Object[end - start]; 15 | System.arraycopy(arr, start, result, 0, end - start); 16 | return (T[]) result; 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/TransactionException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | /** 4 | * 与事务处理失败有关的异常 5 | */ 6 | public class TransactionException extends DAOException { 7 | 8 | public TransactionException() { 9 | } 10 | 11 | public TransactionException(String message) { 12 | super(message); 13 | } 14 | 15 | public TransactionException(Throwable cause) { 16 | super(cause); 17 | } 18 | 19 | public TransactionException(String message, Throwable cause) { 20 | super(message, cause); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/dialects/MySQLTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.dialects; 2 | 3 | import com.hyd.daotests.AbstractDaoTest; 4 | import com.hyd.daotests.DataSourceFactories; 5 | import org.apache.commons.dbcp2.BasicDataSource; 6 | 7 | import javax.sql.DataSource; 8 | 9 | public class MySQLTest extends AbstractDaoTest { 10 | 11 | @Override 12 | protected DataSource createDataSource() { 13 | DataSource dataSource = DataSourceFactories.mysqlDataSource(); 14 | ((BasicDataSource)dataSource).setMaxTotal(3); 15 | return dataSource; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/DefaultDialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects; 2 | 3 | 
import java.sql.Connection; 4 | import java.util.function.Predicate; 5 | 6 | public class DefaultDialect implements Dialect { 7 | 8 | public static final DefaultDialect VALUE = new DefaultDialect(); 9 | 10 | @Override 11 | public Predicate getMatcher() { 12 | return c -> true; 13 | } 14 | 15 | @Override 16 | public String wrapRangeQuery(String sql, int startPos, int endPos) { 17 | throw new UnsupportedOperationException(); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/exception/NoPrimaryKeyException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.exception; 2 | 3 | import com.hyd.dao.DAOException; 4 | 5 | /** 6 | * 表示没有找到主键的异常 7 | */ 8 | public class NoPrimaryKeyException extends DAOException { 9 | 10 | public NoPrimaryKeyException() { 11 | } 12 | 13 | public NoPrimaryKeyException(String message) { 14 | super(message); 15 | } 16 | 17 | public NoPrimaryKeyException(Throwable cause) { 18 | super(cause); 19 | } 20 | 21 | public NoPrimaryKeyException(String message, Throwable cause) { 22 | super(message, cause); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/ScriptExecutorTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DAOUtils; 5 | import org.junit.jupiter.api.Test; 6 | 7 | /** 8 | * (description) 9 | * created at 2018/5/22 10 | * 11 | * @author yidin 12 | */ 13 | public class ScriptExecutorTest { 14 | 15 | static { 16 | DAOUtils.setupLocalMySQL(); 17 | } 18 | 19 | @Test 20 | public void testExecute() throws Exception { 21 | DAO dao = DAOUtils.getDAO(); 22 | ScriptExecutor.execute("classpath:/junit-test-scripts/init.sql", dao); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/BeanException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.DAOException; 4 | 5 | /** 6 | * (description) 7 | * created at 2017/11/9 8 | * 9 | * @author yidin 10 | */ 11 | public class BeanException extends DAOException { 12 | 13 | public BeanException() { 14 | } 15 | 16 | public BeanException(String message) { 17 | super(message); 18 | } 19 | 20 | public BeanException(Throwable cause) { 21 | super(cause); 22 | } 23 | 24 | public BeanException(String message, Throwable cause) { 25 | super(message, cause); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/exception/DataConversionException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.exception; 2 | 3 | import com.hyd.dao.DAOException; 4 | 5 | /** 6 | * @author yiding.he 7 | */ 8 | public class DataConversionException extends DAOException { 9 | 10 | public DataConversionException() { 11 | } 12 | 13 | public DataConversionException(String message) { 14 | super(message); 15 | } 16 | 17 | public DataConversionException(Throwable cause) { 18 | super(cause); 19 | } 20 | 21 | public DataConversionException(String message, Throwable cause) { 22 | super(message, cause); 23 | } 24 | } 25 | 
-------------------------------------------------------------------------------- /docs/04-pagnation.md: -------------------------------------------------------------------------------- 1 | `DAO` 对象提供 `queryPage()` 方法来执行分页查询,该方法返回一个 `Page` 对象。`Page` 类是 List 的子类,它多了一个 `total` 属性,表示查询结果的总记录数。 2 | 3 | 在执行 `queryPage()` 方法时,`DAO` 实际执行了两次查询,一次是查询总记录数,一次是查询指定范围的记录列表。 4 | 5 | ```java 6 | // 分页查询 7 | String sql = "select * from users where name like ?"; 8 | int pageSize = 10; // 页大小 9 | int pageIndex = 2; // 页号,0 表示第一页 10 | 11 | // 分页查询。为了获取总记录数,实际上查询了两次 12 | Page page = dao.queryPage( 13 | User.class, sql, pageSize, pageIndex, "Adm%"); 14 | 15 | System.out.println("Total count: " + page.getTotal()); 16 | 17 | for (User user: page) { 18 | System.out.println(user); 19 | } 20 | ``` 21 | 22 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/database/type/CamelUnderscoreNameConverterTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.type; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import static com.hyd.dao.database.type.NameConverter.CAMEL_UNDERSCORE; 6 | import static org.junit.jupiter.api.Assertions.assertEquals; 7 | 8 | public class CamelUnderscoreNameConverterTest { 9 | 10 | @Test 11 | public void testColumnToField() throws Exception { 12 | NameConverter converter = CAMEL_UNDERSCORE; 13 | assertEquals("id", converter.column2Field("ID")); 14 | assertEquals("userName", converter.column2Field("USER_NAME")); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /docs/06-inserting-map-pojo.md: -------------------------------------------------------------------------------- 1 | `DAO.insert()` 方法可以用于将 Pojo/Map 对象插入到数据库表。下面是一个例子: 2 | 3 | ```java 4 | User user = new User(); 5 | user.setId(100); 6 | user.setName("user1"); 7 | dao.insert(user, "T_USERS"); 8 | ``` 9 | 10 | Pojo 属性名和表字段名之间的转换规则,参见[本页](03-querying.md)。 11 | 12 | 如果要插入 Map 对象,其 key 的值必须和字段名一致,不区分大小写(大多数数据库是这样)。下面是一个例子: 13 | 14 | ```java 15 | Map map= new HashMap<>(); 16 | map.put("user_id", 1); 17 | map.put("user_name", "someone"); 18 | dao.insert(map, "T_SYS_USER"); 19 | ``` 20 | 21 | > 注意:当使用这种方式插入记录时,hydrogen-dao 会从数据库查询要插入的表包含哪些字段,用于生成 insert 语句。如果出现数据库兼容性问题以致执行出错,那么建议换用 `dao.execute()` 方法,直接执行 insert 语句。 22 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/ExecutorFactory.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.database.executor.DefaultExecutor; 5 | import com.hyd.dao.database.executor.Executor; 6 | import com.hyd.dao.transaction.TransactionManager; 7 | 8 | /** 9 | * 构造 Executor 对象的工厂。 10 | * 11 | * @author yiding_he 12 | */ 13 | public class ExecutorFactory { 14 | 15 | public static Executor getExecutor(DAO dao) { 16 | final var connectionContext = TransactionManager.getConnectionContext(dao); 17 | return new DefaultExecutor(connectionContext); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/spring/SpringConnectionFactory.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.spring; 2 | 3 | import org.springframework.jdbc.datasource.DataSourceUtils; 4 | import 
org.springframework.transaction.support.TransactionSynchronizationManager; 5 | 6 | import javax.sql.DataSource; 7 | import java.sql.Connection; 8 | 9 | public class SpringConnectionFactory { 10 | 11 | public static Connection getConnection(DataSource dataSource) { 12 | return DataSourceUtils.getConnection(dataSource); 13 | } 14 | 15 | public static boolean isTransactionActive() { 16 | return TransactionSynchronizationManager.isSynchronizationActive(); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/BatchPipelineTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.util.stream.IntStream; 6 | 7 | public class BatchPipelineTest { 8 | 9 | @Test 10 | public void testBatch() throws Exception { 11 | 12 | BatchPipeline bp = new BatchPipeline() 13 | .setBatchSize(8) 14 | .setBatchOperation(System.out::println); 15 | 16 | bp.feed(1, 2, 3, 4, 5, 5, 6, 7, 8, 8, 6, 4, 4, 56, 6, 7, 7, 4); 17 | bp.feed(IntStream.of(6, 5, 4, 5, 6, 7, 8, 89, 9, 90, 8, 7, 6, 5, 4, 3, 22)); 18 | bp.flush(); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/BatchTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.util.Collection; 6 | import java.util.stream.IntStream; 7 | 8 | import static org.junit.jupiter.api.Assertions.assertEquals; 9 | 10 | public class BatchTest { 11 | 12 | @Test 13 | public void testBatch() throws Exception { 14 | Batch 15 | .with(IntStream.of(1, 2, 3, 4, 5)) 16 | .size(3) 17 | .forEachBatch(System.out::println); 18 | 19 | int count = Batch 20 | .with(IntStream.of(1, 2, 3, 4, 5)) 21 | .size(3).sumEachBatch(Collection::size).getResultCount(); 22 | assertEquals(5, count); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/PageTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.alibaba.fastjson2.JSON; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import org.junit.jupiter.api.Test; 6 | 7 | import java.util.Arrays; 8 | 9 | public class PageTest { 10 | 11 | @Test 12 | public void testJackson() throws Exception { 13 | Page page = new Page<>(); 14 | page.setTotal(100); 15 | page.setPageIndex(5); 16 | page.setPageSize(10); 17 | page.addAll(Arrays.asList("1", "2", "3")); 18 | 19 | ObjectMapper objectMapper = new ObjectMapper(); 20 | System.out.println(objectMapper.writeValueAsString(page)); 21 | 22 | System.out.println(JSON.toJSONString(page)); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/scripts/tables.sql: -------------------------------------------------------------------------------- 1 | drop table if exists blog; 2 | drop table if exists blog2; 3 | 4 | CREATE TABLE if not exists blog ( 5 | id INT PRIMARY KEY , 6 | title VARCHAR(100), 7 | create_time TIMESTAMP NOT NULL DEFAULT current_timestamp(), 8 | last_update TIMESTAMP NOT NULL DEFAULT current_timestamp(), 9 | content TEXT, 10 | hidden varchar (5) default 'false' not null 11 | ); 12 | 13 | create table if not exists 
blog2 ( 14 | id INT PRIMARY KEY , 15 | title VARCHAR(100), 16 | createTime TIMESTAMP NOT NULL DEFAULT current_timestamp(), 17 | content TEXT, 18 | hidden varchar (5) default 'false' not null 19 | ); 20 | 21 | delete from blog; 22 | delete from blog2; 23 | 24 | insert into blog2 set id=1, title='title', content='content'; -------------------------------------------------------------------------------- /docs/00-start.md: -------------------------------------------------------------------------------- 1 | # hydrogen-dao 介绍 2 | 3 | hydrogen-dao 是一个轻量级的 JDBC 数据库操作工具,专注于简化数据库的连接管理 SQL 执行。其主要功能有: 4 | 5 | * 连接池管理,状态查看 6 | * 根据条件动态组装 SQL 语句,免除大量的 if-else 7 | * 简化分页查询和批处理 8 | 9 | 大部分功能都由 com.hyd.dao.DAO 提供。 10 | 11 | ### 功能列表 12 | 13 | hydrogen-dao 还提供更多非常方便的操作数据库方式,包括**直接插入 Pojo/Map 对象到数据库** , **构建动态条件的查询** 等。下面从配置开始逐一介绍。 14 | 15 | 1. 添加依赖关系(Maven) 16 | 1. 配置数据源 17 | 1. 查询记录 18 | 1. 分页查询 19 | 1. 执行SQL 20 | 1. 插入 Pojo/Map 21 | 1. 事务处理 22 | 1. 构建带动态条件的语句 23 | 1. Spring Boot 自动初始化 24 | 1. 在 Junit 单元测试中自动进行数据库初始化 25 | 26 | ### 数据库兼容性 27 | 28 | 本人开发经历有限,现在能做到比较好的兼容性的数据库是 Oracle/MySQL/HSQLDB,其他数据库若有不兼容之处,还望不吝指出,我会尽快解决! 29 | 30 | ### 关于 Hibernate 和 MyBatis 等其他数据库访问框架 31 | 32 | hydrogen-dao 与其他数据库访问框架之间相互独立,你可以同时用 hydrogen-dao 和其他数据库访问框架,hydrogen-dao 仍然会按自己的方式正常运作。 33 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/MapCacheUtils.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.util.Collections; 4 | import java.util.LinkedHashMap; 5 | import java.util.Map; 6 | import java.util.Map.Entry; 7 | 8 | public final class MapCacheUtils { 9 | 10 | private MapCacheUtils() { 11 | } 12 | 13 | @SuppressWarnings({"unchecked", "rawtypes"}) 14 | public static Map newLRUCache(final int size, boolean threadSafe) { 15 | LinkedHashMap cache = new LinkedHashMap(size + 1, 0.75F, true) { 16 | 17 | public boolean removeEldestEntry(Entry eldest) { 18 | return this.size() > size; 19 | } 20 | }; 21 | return threadSafe ? 
Collections.synchronizedMap(cache) : cache; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /docs/01-dependency.md: -------------------------------------------------------------------------------- 1 | 2 | ## Maven 配置 3 | 4 | hydrogen-dao 的发布地址: 5 | 6 | https://mvnrepository.com/artifact/com.github.yiding-he/hydrogen-dao 7 | 8 | ### 配置依赖关系: 9 | 10 | 新建一个空的 Maven 项目,在 pom.xml 中添加下面的依赖关系: 11 | 12 | ```xml 13 | 14 | 15 | org.hsqldb 16 | hsqldb 17 | 2.3.4 18 | 19 | 20 | 21 | com.github.yiding-he 22 | hydrogen-dao 23 | ${hydrogen-dao.version} 24 | 25 | 26 | 27 | ch.qos.logback 28 | logback-classic 29 | 1.1.2 30 | 31 | ``` -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/Cls.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | public class Cls { 4 | 5 | public static boolean exists(String typeName) { 6 | return getType(typeName) != null; 7 | } 8 | 9 | public static Class getType(String typeName) { 10 | try { 11 | return Class.forName(typeName); 12 | } catch (ClassNotFoundException e) { 13 | return null; 14 | } 15 | } 16 | 17 | public static boolean hasField(Class type, String field) { 18 | var _type = type; 19 | while (_type != null) { 20 | try { 21 | _type.getDeclaredField(field); 22 | return true; 23 | } catch (NoSuchFieldException e) { 24 | _type = _type.getSuperclass(); 25 | } 26 | } 27 | return false; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*.{txt,java,xml,md,properties}] 4 | charset = utf-8 5 | indent_style = space 6 | end_of_line = lf 7 | 8 | [*.{txt,java,xml}] 9 | indent_size = 2 10 | insert_final_newline = true 11 | trim_trailing_whitespace = true 12 | 13 | [*.md] 14 | insert_final_newline = false 15 | trim_trailing_whitespace = false 16 | 17 | [*.java] 18 | indent_size = 4 19 | ij_java_blank_lines_around_field = 1 20 | ij_java_blank_lines_after_class_header = 1 21 | ij_java_doc_add_blank_line_after_return = true 22 | ij_java_doc_add_blank_line_after_param_comments = true 23 | ij_java_doc_add_blank_line_after_description = true 24 | ij_java_doc_keep_empty_parameter_tag = false 25 | ij_java_doc_keep_empty_return_tag = false 26 | ij_java_doc_keep_empty_throws_tag = false 27 | 28 | [*.properties] 29 | ij_properties_keep_blank_lines = true 30 | ij_properties_spaces_around_key_value_delimiter = true 31 | 32 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/springboot/SpringBootTestApp.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.springboot; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.spring.SpringAutoConfiguration; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.SpringApplication; 7 | import org.springframework.boot.autoconfigure.SpringBootApplication; 8 | import org.springframework.context.annotation.Import; 9 | 10 | import javax.annotation.PostConstruct; 11 | 12 | @SpringBootApplication 13 | @Import(SpringAutoConfiguration.class) 14 | public class SpringBootTestApp { 15 | 16 | @Autowired 17 | private DAO dao; 18 | 19 | public static void main(String[] args) { 20 | 
SpringApplication.run(SpringBootTestApp.class, args); 21 | } 22 | 23 | @PostConstruct 24 | public void test() { 25 | System.out.println("dao = " + dao); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/sp/SpParamType.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.sp; 2 | 3 | /** 4 | * 存储过程参数类型 5 | */ 6 | 7 | public class SpParamType { 8 | 9 | private String name; 10 | 11 | /** 12 | * 表示该参数是输入参数 13 | */ 14 | public static final SpParamType IN = new SpParamType("in"); 15 | 16 | /** 17 | * 表示该参数是输出参数 18 | */ 19 | public static final SpParamType OUT = new SpParamType("out"); 20 | 21 | /** 22 | * 表示该参数既是输入参数也是输出参数 23 | */ 24 | public static final SpParamType IN_OUT = new SpParamType("in_out"); 25 | 26 | public SpParamType(String name) { 27 | this.name = name; 28 | } 29 | 30 | public String getName() { 31 | return name; 32 | } 33 | 34 | public void setName(String name) { 35 | this.name = name; 36 | } 37 | 38 | public String toString() { 39 | return "SpParamType." + name; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/BeanUtilTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.util.Arrays; 6 | import java.util.HashMap; 7 | import java.util.Map; 8 | 9 | import static org.junit.jupiter.api.Assertions.assertEquals; 10 | 11 | public class BeanUtilTest { 12 | 13 | @Test 14 | public void sort() { 15 | Map map1 = new HashMap<>(); 16 | Map map2 = new HashMap<>(); 17 | Map map3 = new HashMap<>(); 18 | 19 | map1.put("name", "user3"); 20 | map2.put("name", "user1"); 21 | map3.put("name", "user2"); 22 | 23 | var maps = Arrays.asList(map1, map2, map3); 24 | BeanUtil.sort(maps, "name", ""); 25 | assertEquals("user1", maps.get(0).get("name")); 26 | assertEquals("user2", maps.get(1).get("name")); 27 | assertEquals("user3", maps.get(2).get("name")); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/jdbc/InsertAffectedRowsTest.java: -------------------------------------------------------------------------------- 1 | package jdbc; 2 | 3 | import org.apache.commons.dbcp2.BasicDataSource; 4 | import org.junit.jupiter.api.Test; 5 | 6 | import java.sql.Connection; 7 | 8 | import static org.junit.jupiter.api.Assertions.assertEquals; 9 | 10 | public class InsertAffectedRowsTest { 11 | 12 | @Test 13 | public void testInsertAffectedRows() throws Exception { 14 | try (BasicDataSource ds = new BasicDataSource()) { 15 | ds.setUrl("jdbc:h2:mem:db1"); 16 | 17 | Connection c = ds.getConnection(); 18 | c.createStatement().execute("create table t1(id int)"); 19 | c.createStatement().execute("create table t2(id int)"); 20 | c.createStatement().execute("insert into t1(id) values(1),(2),(3),(4),(5)"); 21 | 22 | int count = c.createStatement().executeUpdate("insert into t2 select id from t1"); 23 | assertEquals(5, count); 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/time/TimeFormatters.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.time; 2 | 3 | import java.time.format.DateTimeFormatter; 4 | import 
java.util.Map; 5 | 6 | import static com.hyd.dao.mate.util.MapCacheUtils.newLRUCache; 7 | 8 | /** 9 | * DateTimeFormatter 的一个缓存池 10 | */ 11 | public class TimeFormatters { 12 | 13 | /** 14 | * 最多缓存多少个不同格式的 DateTimeFormatter 实例。超过的话按 LRU 策略删除 15 | */ 16 | public static final int DEFAULT_POOL_SIZE = 100; 17 | 18 | private static Map cache = newLRUCache(DEFAULT_POOL_SIZE, true); 19 | 20 | /** 21 | * 万一默认缓存池大小不够,可以在应用启动时通过本方法修改缓存大小 22 | * 23 | * @param newPoolSize 新的缓存大小 24 | */ 25 | public static void adjustPoolSize(int newPoolSize) { 26 | cache = newLRUCache(newPoolSize, true); 27 | } 28 | 29 | public static DateTimeFormatter ofPattern(String pattern) { 30 | return cache.computeIfAbsent(pattern, DateTimeFormatter::ofPattern); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/dialects/h2/Payment.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.dialects.h2; 2 | 3 | import java.util.Date; 4 | 5 | public class Payment { 6 | 7 | private long id; 8 | 9 | private Date payTime; 10 | 11 | private int amount; 12 | 13 | public long getId() { 14 | return id; 15 | } 16 | 17 | public void setId(long id) { 18 | this.id = id; 19 | } 20 | 21 | public Date getPayTime() { 22 | return payTime; 23 | } 24 | 25 | public void setPayTime(Date payTime) { 26 | this.payTime = payTime; 27 | } 28 | 29 | public int getAmount() { 30 | return amount; 31 | } 32 | 33 | public void setAmount(int amount) { 34 | this.amount = amount; 35 | } 36 | 37 | @Override 38 | public String toString() { 39 | return "Payment{" + 40 | "id=" + id + 41 | ", payTime=" + payTime + 42 | ", amount=" + amount + 43 | '}'; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/dialects/h2/H2FileDBTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.dialects.h2; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.mate.util.ScriptExecutor; 6 | import com.mysql.cj.x.protobuf.MysqlxDatatypes; 7 | 8 | import static com.hyd.dao.mate.util.DBCPDataSource.newH2FileDataSource; 9 | 10 | public class H2FileDBTest { 11 | 12 | public static void main(MysqlxDatatypes.Scalar.String[] args) { 13 | DataSources dataSources = DataSources.getInstance(); 14 | dataSources.setDataSource("default", newH2FileDataSource("./target/data/payments", false)); 15 | 16 | DAO dao = new DAO("default"); 17 | ScriptExecutor.execute("classpath:/h2/init-script.sql", dao); 18 | System.out.println("Database initialized."); 19 | 20 | dao.execute("insert into payments set id=?,amount=?", System.currentTimeMillis(), 100); 21 | dao.query(Payment.class, "select * from payments").forEach(System.out::println); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | %d{HH:mm:ss} [%-5level] %logger{20}:%line - %msg%n 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/InMemoryTestBase.java: -------------------------------------------------------------------------------- 1 | package 
com.hyd.daotests; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.database.type.NameConverter; 6 | import com.hyd.dao.mate.util.DBCPDataSource; 7 | import org.apache.commons.dbcp2.BasicDataSource; 8 | import org.junit.jupiter.api.BeforeAll; 9 | 10 | /** 11 | * @author yidin 12 | */ 13 | public abstract class InMemoryTestBase { 14 | 15 | protected static DAO dao; 16 | 17 | protected static DAO dao2; 18 | 19 | protected static BasicDataSource dataSource; 20 | 21 | @BeforeAll 22 | public static void beforeClass() { 23 | DataSources dataSources = DataSources.getInstance(); 24 | 25 | dataSource = DBCPDataSource.newH2MemDataSource(); 26 | dataSources.setDataSource("h20", dataSource); 27 | dataSources.setDataSource("h21", dataSource); 28 | 29 | dataSources.setColumnNameConverter("h21", NameConverter.NONE); 30 | 31 | dao = new DAO("h20"); 32 | dao2 = new DAO("h21"); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /docs/07-transaction.md: -------------------------------------------------------------------------------- 1 | hydrogen-dao 支持跨数据库的事务,只要每个数据库都支持事务即可。 2 | 3 | DAO 对象提供 `runTransaction()` 方法用于执行事务。该方法接受一个 `Runnable` 对象,该对象即代表要执行的事务,其 `run()` 方法中的所有数据库操作(仅限于 hydrogen-dao 的数据库操作,其他框架的操作不受管理)都会作为事务的一部分。 4 | 5 | 当出现错误需要回滚事务时,在 `run()` 方法中抛出 `RuntimeException` 即可。下面是一个例子: 6 | 7 | ```java 8 | final DAO dao = getDAO(); 9 | final User user1 = new User(111L, "user01", "pass01"); 10 | final User user2 = new User(222L, "user02", "pass02"); 11 | 12 | try { 13 | DAO.runTransactionWithException(new Runnable() { 14 | 15 | public void run() { 16 | dao.insert(user1); 17 | dao.insert(user2); 18 | 19 | // 模拟事务执行失败,两个 insert 都会回滚 20 | throw new RuntimeException("Transaction aborted."); 21 | } 22 | }); 23 | } catch (TransactionException e) { 24 | e.printStackTrace(); 25 | } 26 | ``` 27 | 28 | 将事务包装成 `Runnable` 的好处在于让该事务的逻辑变得独立可复用。 29 | 30 | ### 嵌套事务 31 | 32 | hydrogen-dao 支持嵌套事务,但要注意,每层事务都会把持一个数据库连接,直到该事务提交或回滚。因此在数据库连接有限的情况下,请不要执行层次过多的事务;在极端情况下,当事务层次超过连接池的最大连接数时,整个应用都可能阻塞无响应。 -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/type/ClobUtil.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.type; 2 | 3 | import java.io.IOException; 4 | import java.sql.Clob; 5 | import java.sql.SQLException; 6 | 7 | /** 8 | * 从 Clob 读取信息 9 | */ 10 | public class ClobUtil { 11 | 12 | public static String read(Clob clob) throws IOException, SQLException { 13 | var reader = clob.getCharacterStream(); 14 | var result = new char[0]; 15 | var buf = new char[4096]; 16 | int size; 17 | while ((size = reader.read(buf)) != -1) { 18 | var new_result = new char[result.length + size]; 19 | System.arraycopy(result, 0, new_result, 0, result.length); 20 | System.arraycopy(buf, 0, new_result, result.length, size); 21 | result = new_result; 22 | } 23 | return new String(result); 24 | } 25 | 26 | public static void write(Clob clob, String text) throws SQLException, IOException { 27 | var out = clob.setCharacterStream(1); 28 | out.write(text); 29 | out.flush(); 30 | out.close(); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/LockerTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | 4 | import org.junit.jupiter.api.Test; 5 | 6 | 
import java.util.Random; 7 | 8 | public class LockerTest { 9 | 10 | @Test 11 | public void testMultiThread() throws Exception { 12 | class Task implements Runnable { 13 | 14 | final String id; 15 | 16 | Task(String id) { 17 | this.id = id; 18 | } 19 | 20 | @Override 21 | public void run() { 22 | try { 23 | Thread.sleep(1000); 24 | System.out.println("Task " + id + " finished."); 25 | } catch (InterruptedException e) { 26 | e.printStackTrace(); 27 | } 28 | } 29 | } 30 | 31 | String[] ids = new String[] {"1", "2", "3", "4", "5"}; 32 | Random random = new Random(); 33 | for (int i = 0; i < 100; i++) { 34 | String id = ids[random.nextInt(ids.length)]; 35 | new Thread(() -> Locker.lockAndRun(id, new Task(id))).start(); 36 | } 37 | 38 | Thread.sleep(25000); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/JUnitRuleTestBase.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.junit.HydrogenDAORule; 6 | 7 | import javax.sql.DataSource; 8 | import java.util.function.Supplier; 9 | 10 | /** 11 | * @author yidin 12 | */ 13 | public abstract class JUnitRuleTestBase { 14 | 15 | protected DAO dao; 16 | 17 | /** 18 | * get or create data source 19 | */ 20 | protected abstract DataSource getDataSource(); 21 | 22 | { 23 | if (!DataSources.getInstance().contains("default")) { 24 | DataSources.getInstance().setDataSource("default", getDataSource()); 25 | } 26 | this.dao = new DAO("default"); 27 | } 28 | 29 | public HydrogenDAORule hydrogenDAORule = new HydrogenDAORule(getDAOSupplier()); 30 | 31 | protected Supplier getDAOSupplier() { 32 | return this::getDao; 33 | } 34 | 35 | protected DAO getDao() { 36 | return this.dao; 37 | } 38 | 39 | protected void sleep(int millis) { 40 | try { 41 | Thread.sleep(millis); 42 | } catch (InterruptedException e) { 43 | e.printStackTrace(); 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/impl/MsSqlServerDialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects.impl; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.dialects.Dialect; 5 | 6 | import java.sql.Connection; 7 | import java.sql.ResultSet; 8 | import java.sql.SQLException; 9 | import java.util.function.Predicate; 10 | import java.util.regex.Pattern; 11 | 12 | public class MsSqlServerDialect implements Dialect { 13 | 14 | private static final Pattern PRODUCT_NAME_PATTERN = Pattern.compile(".*Microsoft SQL Server.*"); 15 | 16 | @Override 17 | public Predicate getMatcher() { 18 | return c -> { 19 | try { 20 | return PRODUCT_NAME_PATTERN.matcher(c.getMetaData().getDatabaseProductName()).matches(); 21 | } catch (SQLException e) { 22 | throw new DAOException(e); 23 | } 24 | }; 25 | } 26 | 27 | @Override 28 | public String wrapRangeQuery(String sql, int startPos, int endPos) { 29 | return sql + " offset " + startPos + " rows fetch next " + (endPos - startPos) + " rows only"; 30 | } 31 | 32 | @Override 33 | public int resultSetTypeForReading() { 34 | return ResultSet.TYPE_SCROLL_SENSITIVE; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/ConnectionHolder.java: 
-------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import java.sql.Connection; 4 | import java.util.function.Supplier; 5 | 6 | /** 7 | * Used when 8 | * 1. Connection object is not reliable 9 | * 2. Need lazy initialization for connection 10 | * However, default implementation does not reassign {@link #connectionInstance} 11 | */ 12 | public class ConnectionHolder { 13 | 14 | private final Supplier connectionSupplier; 15 | 16 | private Connection connectionInstance; 17 | 18 | public static ConnectionHolder fromStatic(Connection connection) { 19 | return new ConnectionHolder(() -> connection); 20 | } 21 | 22 | public static ConnectionHolder fromSupplier(Supplier connectionSupplier) { 23 | return new ConnectionHolder(connectionSupplier); 24 | } 25 | 26 | private ConnectionHolder(Supplier connectionSupplier) { 27 | this.connectionSupplier = connectionSupplier; 28 | this.connectionInstance = connectionSupplier.get(); 29 | } 30 | 31 | public Connection getConnection() { 32 | if (connectionInstance == null) { 33 | connectionInstance = this.connectionSupplier.get(); 34 | } 35 | return connectionInstance; 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/IteratorBatchCommand.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command; 2 | 3 | import java.util.Iterator; 4 | import java.util.List; 5 | 6 | /** 7 | * 流式批处理命令。需要进行批处理,但是记录数不可预见的情况下使用。例如从文件中读取并导入数据, 8 | * 使用 IteratorBatchCommand 可以节省内存使用。 9 | * 10 | * @author yidin 11 | */ 12 | public class IteratorBatchCommand { 13 | 14 | public static final int DEFAULT_BATCH_SIZE = 100; 15 | 16 | private final String command; 17 | 18 | private Iterator> params; 19 | 20 | private int batchSize = DEFAULT_BATCH_SIZE; 21 | 22 | public IteratorBatchCommand(String command) { 23 | this.command = command; 24 | } 25 | 26 | public IteratorBatchCommand(String command, Iterator> params) { 27 | this.command = command; 28 | this.params = params; 29 | } 30 | 31 | public IteratorBatchCommand(String command, Iterator> params, int batchSize) { 32 | this.command = command; 33 | this.params = params; 34 | this.batchSize = batchSize; 35 | } 36 | 37 | public int getBatchSize() { 38 | return batchSize; 39 | } 40 | 41 | public String getCommand() { 42 | return command; 43 | } 44 | 45 | public Iterator> getParams() { 46 | return params; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/Dialects.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects; 2 | 3 | import com.hyd.dao.database.dialects.impl.H2Dialect; 4 | import com.hyd.dao.database.dialects.impl.MsSqlServerDialect; 5 | import com.hyd.dao.database.dialects.impl.MySqlDialect; 6 | import com.hyd.dao.database.dialects.impl.OracleDialect; 7 | 8 | import java.sql.Connection; 9 | import java.util.ArrayList; 10 | import java.util.List; 11 | 12 | public class Dialects { 13 | 14 | /** 15 | * 注册所有 Dialect,优先级从低到高 16 | */ 17 | private static final List DIALECTS = new ArrayList<>(); 18 | 19 | static { 20 | DIALECTS.add(new DefaultDialect()); 21 | DIALECTS.add(new OracleDialect()); 22 | DIALECTS.add(new MsSqlServerDialect()); 23 | DIALECTS.add(new MySqlDialect()); 24 | DIALECTS.add(new H2Dialect()); 25 | } 26 | 27 | public 
static void registerDialect(Dialect dialect) { 28 | DIALECTS.add(dialect); 29 | } 30 | 31 | public static Dialect getDialect(Connection connection) { 32 | for (var i = DIALECTS.size() - 1; i >= 0; i--) { 33 | var dialect = DIALECTS.get(i); 34 | if (dialect.getMatcher().test(connection)) { 35 | return dialect; 36 | } 37 | } 38 | return new DefaultDialect(); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/Closer.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.sql.ResultSet; 4 | import java.sql.SQLException; 5 | import java.sql.Statement; 6 | 7 | public class Closer { 8 | 9 | public static boolean isClosed(ResultSet rs) { 10 | try { 11 | return rs.isClosed(); 12 | } catch (SQLException e) { 13 | return true; 14 | } 15 | } 16 | 17 | public static void closeResultSet(ResultSet rs) { 18 | if (rs == null || isClosed(rs)) { 19 | return; 20 | } 21 | 22 | try { 23 | var st = rs.getStatement(); 24 | closeStatement(st); 25 | } catch (SQLException e) { 26 | // ignore this 27 | } 28 | 29 | try { 30 | rs.close(); 31 | } catch (SQLException e) { 32 | // ignore this 33 | } 34 | } 35 | 36 | private static void closeStatement(Statement st) throws SQLException { 37 | if (st != null && !st.isClosed()) { 38 | try { 39 | var conn = st.getConnection(); 40 | if (conn != null) { 41 | conn.close(); 42 | } 43 | } catch (SQLException e) { 44 | // ignore this 45 | } 46 | st.close(); 47 | } 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/snapshot/Snapshot.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.snapshot; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Collections; 5 | import java.util.List; 6 | import java.util.Map; 7 | import java.util.concurrent.ConcurrentHashMap; 8 | 9 | /** 10 | * 包含当前连接数据的快照(仅当使用本地连接池时可用) 11 | */ 12 | public class Snapshot { 13 | 14 | /** 15 | * 数据源 -> 快照 16 | */ 17 | private static final Map instances = new ConcurrentHashMap<>(); 18 | 19 | /** 20 | * 当前正在执行数据库命令的 Executor 列表 21 | */ 22 | private final List executorInfoList = Collections.synchronizedList(new ArrayList<>()); 23 | 24 | /** 25 | * 获得指定数据源的一个快照 26 | * 27 | * @param dsName 数据源名称 28 | * 29 | * @return 对应的快照 30 | */ 31 | public static Snapshot getInstance(String dsName) { 32 | return instances.computeIfAbsent(dsName, __dsName -> new Snapshot()); 33 | } 34 | 35 | //////////////////////////////////////////////////////////////// 36 | 37 | void addExecutorInfo(ExecutorInfo info) { 38 | executorInfoList.add(info); 39 | } 40 | 41 | public ExecutorInfo[] getExecutorInfos() { 42 | return executorInfoList.toArray(new ExecutorInfo[0]); 43 | } 44 | 45 | void removeInfo(ExecutorInfo executorInfo) { 46 | executorInfoList.remove(executorInfo); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/springboot/MultiDataSourceConf.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.springboot; 2 | 3 | import com.hyd.dao.DAO; 4 | import org.springframework.boot.context.properties.ConfigurationProperties; 5 | import org.springframework.boot.jdbc.DataSourceBuilder; 6 | import org.springframework.context.annotation.Bean; 7 | import 
org.springframework.context.annotation.Configuration; 8 | 9 | import javax.sql.DataSource; 10 | 11 | @Configuration 12 | public class MultiDataSourceConf { 13 | 14 | @Bean("ds1") 15 | @ConfigurationProperties(prefix = "spring.datasource.ds1") 16 | public DataSource dataSource1() { 17 | return DataSourceBuilder.create().build(); 18 | } 19 | 20 | @Bean("ds2") 21 | @ConfigurationProperties(prefix = "spring.datasource.ds2") 22 | public DataSource dataSource2() { 23 | return DataSourceBuilder.create().build(); 24 | } 25 | 26 | @Bean("ds3") 27 | @ConfigurationProperties(prefix = "spring.datasource.ds3") 28 | public DataSource dataSource3() { 29 | return DataSourceBuilder.create().build(); 30 | } 31 | 32 | @Bean 33 | public DAO dao1() { 34 | return new DAO("ds1"); 35 | } 36 | 37 | @Bean 38 | public DAO dao2() { 39 | return new DAO("ds2"); 40 | } 41 | 42 | @Bean 43 | public DAO dao3() { 44 | return new DAO("ds3"); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /docs/09-spring-boot-autoconfig.md: -------------------------------------------------------------------------------- 1 | 对于 Spring Boot 项目,hydrogen-dao 提供了开箱即用的 DAO 对象。你可以在 2 | `application.properties` 当中配置单个或多个数据源,然后得到可用的 DAO 对象。 3 | 4 | ## 添加依赖关系 5 | 6 | ```xml 7 | 8 | com.github.yiding-he 9 | spring-boot-starter-hydrogen-dao 10 | 3.5.0 11 | 12 | ``` 13 | 14 | ## 单数据源 15 | 16 | 在 Spring Boot 配置中加入配置如下例子所示: 17 | 18 | ```properties 19 | spring.datasource.url=jdbc:h2:./target/db/default 20 | spring.datasource.username=sa 21 | ``` 22 | 23 | 然后就可以直接在任何 Spring bean 当中使用了: 24 | 25 | ```java 26 | @Service 27 | public class UserService { 28 | 29 | @Autowired 30 | private DAO dao; 31 | 32 | // ... 33 | } 34 | ``` 35 | 36 | ## 多数据源 37 | 38 | 你需要分别为每个数据源配置不同的 @Bean,然后在 `DataSources` 类中使用,下面是一个例子: 39 | 40 | ```java 41 | @Bean 42 | @ConfigurationProperties("spring.datasource.ds1") 43 | public DataSource ds1(DataSources dataSources) { 44 | DataSource dataSource = 45 | org.springframework.boot.jdbc.DataSourceBuilder.create().build(); 46 | dataSources.setDataSource("ds1", dataSource); 47 | return dataSource; 48 | } 49 | ``` 50 | 51 | ```java 52 | @Autowired 53 | private DataSources dataSources; 54 | 55 | public void showTables() { 56 | DAO ds1 = this.dataSources.getDAO("ds1"); 57 | ds1.query("show tables").forEach(System.out::println); 58 | } 59 | ``` 60 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/impl/H2Dialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects.impl; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.FQN; 5 | import com.hyd.dao.database.dialects.Dialect; 6 | 7 | import java.sql.Connection; 8 | import java.sql.SQLException; 9 | import java.util.function.Predicate; 10 | import java.util.regex.Pattern; 11 | 12 | public class H2Dialect implements Dialect { 13 | 14 | private static final Pattern PRODUCT_NAME_PATTERN = Pattern.compile(".*(HSQL|H2).*"); 15 | 16 | @Override 17 | public Predicate getMatcher() { 18 | return c -> { 19 | try { 20 | return PRODUCT_NAME_PATTERN.matcher(c.getMetaData().getDatabaseProductName()).matches(); 21 | } catch (SQLException e) { 22 | throw new DAOException(e); 23 | } 24 | }; 25 | } 26 | 27 | @Override 28 | public String wrapRangeQuery(String sql, int startPos, int endPos) { 29 | return null; 30 | } 31 | 32 | @Override 33 | public MetaNameConvention 
getMetaNameConvention() { 34 | return MetaNameConvention.Uppercase; 35 | } 36 | 37 | @Override 38 | public String fixCatalog(String connectionCatalog, FQN fqn) { 39 | return null; // 一律返回 null 40 | } 41 | 42 | @Override 43 | public String identityQuoter() { 44 | return ""; // 不作引用 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/CaseInsensitiveHashMapTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.util.Map; 6 | 7 | import static org.junit.jupiter.api.Assertions.*; 8 | 9 | 10 | public class CaseInsensitiveHashMapTest { 11 | 12 | @Test 13 | public void getSet() throws Exception { 14 | CaseInsensitiveHashMap map = new CaseInsensitiveHashMap<>(); 15 | 16 | map.put("1", "2"); 17 | assertEquals("2", map.get("1")); 18 | 19 | assertNull(map.get("%*&%$%^%^#%^$#^$%#^%$#^%$#^")); 20 | 21 | map.put("AAAaaa", "BBBbbb"); 22 | assertEquals("BBBbbb", map.get("AAAAAA")); 23 | assertEquals("BBBbbb", map.get("aaaAAA")); 24 | 25 | assertTrue(map.containsKey("AAAaaa")); 26 | assertTrue(map.containsKey("AAAAAA")); 27 | assertTrue(map.containsKey("aaaaaa")); 28 | assertFalse(map.containsKey("bbbbbb")); 29 | 30 | // keySet 只能返回原始 key 31 | assertTrue(map.keySet().contains("AAAaaa")); 32 | assertFalse(map.keySet().contains("aaaaaa")); 33 | 34 | // entrySet 只能返回原始 key 35 | assertTrue(map.entrySet().stream().anyMatch(e -> e.getKey().equals("AAAaaa"))); 36 | assertTrue(map.entrySet().stream().noneMatch(e -> e.getKey().equals("aaaaaa"))); 37 | 38 | map.putAll(Map.of("AAAaaa", "CCCCCC")); 39 | assertEquals("CCCCCC", map.get("AAAAAA")); 40 | assertEquals("CCCCCC", map.get("AAAaaa")); 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/type/BlobReader.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.type; 2 | 3 | import java.io.IOException; 4 | import java.sql.Blob; 5 | import java.sql.SQLException; 6 | 7 | /** 8 | * 从 Blob 读取信息 9 | */ 10 | public class BlobReader { 11 | 12 | /** 13 | * 从 BLOB 对象中读取字符串 14 | * 15 | * @param blob BLOB 对象 16 | * @param charset 编码 17 | * 18 | * @return 读取到的字符串 19 | * 20 | * @throws SQLException 如果数据库访问 BLOB 失败 21 | * @throws IOException 如果从流中读取内容失败 22 | */ 23 | public static String readString(Blob blob, String charset) throws SQLException, IOException { 24 | var result = readBytes(blob); 25 | return new String(result, charset); 26 | } 27 | 28 | /** 29 | * 从 BLOB 对象中读取字节数组 30 | * 31 | * @param blob BLOB 对象 32 | * 33 | * @return 字节数组 34 | * 35 | * @throws SQLException 如果数据库访问 BLOB 失败 36 | * @throws IOException 如果从流中读取内容失败 37 | */ 38 | public static byte[] readBytes(Blob blob) throws SQLException, IOException { 39 | var stream = blob.getBinaryStream(); 40 | var result = new byte[0]; 41 | var buf = new byte[4096]; 42 | int size; 43 | while ((size = stream.read(buf)) != -1) { 44 | var new_result = new byte[result.length + size]; 45 | System.arraycopy(result, 0, new_result, 0, result.length); 46 | System.arraycopy(buf, 0, new_result, result.length, size); 47 | result = new_result; 48 | } 49 | return result; 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/src/models/Blog.java: 
-------------------------------------------------------------------------------- 1 | package com.hyd.dao.src.models; 2 | 3 | import java.util.Date; 4 | 5 | public class Blog { 6 | 7 | private Long id; 8 | 9 | private String title; 10 | 11 | private String content; 12 | 13 | private Date createTime; 14 | 15 | public Blog() { 16 | } 17 | 18 | public Blog(Long id, String title, String content, Date createTime) { 19 | this.id = id; 20 | this.title = title; 21 | this.content = content; 22 | this.createTime = createTime; 23 | } 24 | 25 | public Long getId() { 26 | return this.id; 27 | } 28 | 29 | public void setId(Long id) { 30 | this.id = id; 31 | } 32 | 33 | public String getTitle() { 34 | return this.title; 35 | } 36 | 37 | public void setTitle(String title) { 38 | this.title = title; 39 | } 40 | 41 | public String getContent() { 42 | return this.content; 43 | } 44 | 45 | public void setContent(String content) { 46 | this.content = content; 47 | } 48 | 49 | public Date getCreateTime() { 50 | return this.createTime; 51 | } 52 | 53 | public void setCreateTime(Date createTime) { 54 | this.createTime = createTime; 55 | } 56 | 57 | @Override 58 | public String toString() { 59 | return "Blog{" + 60 | "id=" + id + 61 | ", title='" + title + '\'' + 62 | ", content='" + content + '\'' + 63 | ", createTime=" + createTime + 64 | '}'; 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/sp/SpParam.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.sp; 2 | 3 | /** 4 | * 存储过程和 function 的调用参数 5 | */ 6 | 7 | public class SpParam { 8 | 9 | private SpParamType type; // 参数的输入/输出类型 10 | 11 | private String name; // 自定义的 Oracle 数据类型名称 12 | 13 | private int sqlType; // 参数的数据类型。参考 java.sql.Types 14 | 15 | private Object value; // 参数值。如果参数不是输入参数,则 value 的值被忽略。 16 | 17 | public SpParam(SpParamType type, int sqlType, Object value) { 18 | this.type = type; 19 | this.sqlType = sqlType; 20 | this.value = value; 21 | } 22 | 23 | public SpParam(SpParamType type, String name, int sqlType, Object value) { 24 | this.type = type; 25 | this.name = name; 26 | this.sqlType = sqlType; 27 | this.value = value; 28 | } 29 | 30 | public String getName() { 31 | return name; 32 | } 33 | 34 | public void setName(String name) { 35 | this.name = name; 36 | } 37 | 38 | public SpParamType getType() { 39 | return type; 40 | } 41 | 42 | public int getSqlType() { 43 | return sqlType; 44 | } 45 | 46 | public void setSqlType(int sqlType) { 47 | this.sqlType = sqlType; 48 | } 49 | 50 | public void setType(SpParamType type) { 51 | this.type = type; 52 | } 53 | 54 | public Object getValue() { 55 | return value; 56 | } 57 | 58 | public void setValue(Object value) { 59 | this.value = value; 60 | } 61 | 62 | public String toString() { 63 | return "{" + getType() + ":" + getValue() + "}"; 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/type/NameConverter.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.type; 2 | 3 | import com.hyd.dao.mate.util.Str; 4 | 5 | /** 6 | * 名称转换规则。 7 | * TODO 重构这个类的使用方式,明确定义 column name 的风格和 field name 的风格 8 | */ 9 | public interface NameConverter { 10 | 11 | NameConverter NONE = new NoneNameConverter(); 12 | 13 | NameConverter CAMEL_UNDERSCORE = new CamelUnderscoreNameConverter(); 14 | 15 | NameConverter DEFAULT 
= CAMEL_UNDERSCORE; 16 | 17 | ////////////////////////////////////////////////////////////// 18 | 19 | // 这个方法暂时没用到,因为目前属性名转字段名的方法是 20 | // 先分析表字段,再转成属性名,再在类的属性中寻找匹配 21 | String field2Column(String prop); 22 | 23 | String column2Field(String column); 24 | 25 | ////////////////////////////////////////////////////////////// 26 | 27 | /** 28 | * 不做任何转换 29 | */ 30 | class NoneNameConverter implements NameConverter { 31 | 32 | @Override 33 | public String field2Column(String prop) { 34 | return prop; 35 | } 36 | 37 | @Override 38 | public String column2Field(String column) { 39 | return column; 40 | } 41 | } 42 | 43 | /** 44 | * 属性名为驼峰风格(如"userName"),字段名为下划线隔开(如"user_name") 45 | */ 46 | class CamelUnderscoreNameConverter implements NameConverter { 47 | 48 | @Override 49 | public String field2Column(String prop) { 50 | return Str.propertyToColumn(prop); 51 | } 52 | 53 | @Override 54 | public String column2Field(String column) { 55 | return Str.columnToProperty(column); 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/Command.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command; 2 | 3 | import java.util.List; 4 | 5 | /** 6 | * 对 PreparedStatement 语句和参数的封装 7 | */ 8 | public class Command { 9 | 10 | /** 11 | * 包含 "?" 参数占位符的 SQL 语句 12 | */ 13 | private String statement; 14 | 15 | /** 16 | * 与参数占位符对应的参数值列表 17 | */ 18 | private List params; 19 | 20 | /** 21 | * 缺省构造函数 22 | */ 23 | public Command() { 24 | } 25 | 26 | /** 27 | * 构造函数 28 | * 29 | * @param statement SQL 语句 30 | * @param params 参数 31 | */ 32 | public Command(String statement, List params) { 33 | this.statement = statement; 34 | this.params = params; 35 | } 36 | 37 | /** 38 | * 获得 SQL 语句 39 | * 40 | * @return SQL 语句 41 | */ 42 | public String getStatement() { 43 | return statement; 44 | } 45 | 46 | /** 47 | * 设置 SQL 语句 48 | * 49 | * @param statement SQL 语句 50 | */ 51 | public void setStatement(String statement) { 52 | this.statement = statement; 53 | } 54 | 55 | /** 56 | * 获得参数 57 | * 58 | * @return 参数 59 | */ 60 | public List getParams() { 61 | return params; 62 | } 63 | 64 | /** 65 | * 设置参数 66 | * 67 | * @param params 参数 68 | */ 69 | public void setParams(List params) { 70 | this.params = params; 71 | } 72 | 73 | @Override 74 | public String toString() { 75 | return "Command{" + 76 | "statement='" + statement + '\'' + 77 | ", params=" + params + 78 | '}'; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/DAOUtils.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.hyd.dao.database.JDBCDriver; 4 | import com.hyd.dao.database.NonPooledDataSource; 5 | 6 | import javax.sql.DataSource; 7 | 8 | /** 9 | * @author yiding.he 10 | */ 11 | public class DAOUtils { 12 | 13 | private static final DataSources dataSources = DataSources.getInstance(); 14 | 15 | public static DAO getDAO() { 16 | if (dataSources.isEmpty()) { 17 | dataSources.setDataSource(DataSources.DEFAULT_DATA_SOURCE_NAME, createDataSource()); 18 | } 19 | 20 | return new DAO(DataSources.DEFAULT_DATA_SOURCE_NAME); 21 | } 22 | 23 | private static DataSource createDataSource() { 24 | String url = System.getProperty("jdbc.url"); 25 | JDBCDriver driver = JDBCDriver.getDriverByUrl(url); 26 | 27 | if (driver == null) { 28 | throw new DAOException("Driver not 
found for " + url); 29 | } 30 | 31 | return new NonPooledDataSource( 32 | driver.getAvailableDriver().getCanonicalName(), 33 | url, 34 | System.getProperty("jdbc.username"), 35 | System.getProperty("jdbc.password") 36 | ); 37 | } 38 | 39 | public static void setupDataSource(String url, String username, String password) { 40 | System.setProperty("jdbc.url", url); 41 | if (username != null) { 42 | System.setProperty("jdbc.username", username); 43 | } 44 | if (password != null) { 45 | System.setProperty("jdbc.password", password); 46 | } 47 | } 48 | 49 | public static void setupLocalMySQL() { 50 | setupDataSource("jdbc:mysql://localhost/test", "root", "root123"); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /docs/03-querying.md: -------------------------------------------------------------------------------- 1 | 使用 DAO 对象来进行查询的方式有以下几种。 2 | 3 | ### 简单的带参数查询 4 | ```java 5 | // 简单查询 6 | List<Row> users = dao.query( 7 | "select * from users where id in(?,?,?)", 8 | 1, 2, 3 9 | ); 10 | ``` 11 | 12 | 使用简单的带参数查询,只需要调用 query() 方法,在 SQL 中用问号"?"指定参数位置,并在后面写上对应数量的参数值即可。只要你写过 JDBC 调用 PreparedStatement 来执行查询,就会很熟悉这种方式。 13 | 14 | query() 方法返回一个 Row 对象列表,Row 是 Map 的子类,它的 key 不区分大小写。 15 | 16 | ### 查询结果包装成 Pojo 17 | 18 | 你可以指定将查询结果包装成什么样的 Pojo 对象。hydrogen-dao 有一套固定的命名转换规则,将查询结果的字段名匹配到 Pojo 类的属性名上。下面是几个例子: 19 | 20 | 21 | 字段名 |属性名 22 | ---|--- 23 | user_name |userName 24 | address |address 25 | _my_member_id | MyMemberId 26 | class | CLASS 27 | 28 | 29 | 注意,当使用了像 `abstract`/`private`/`protected`/`static`/`void`/`interface`/`enum`/`class` 等 Java 关键字来做字段名时,因为这些名字不可能转为 Pojo 类的属性名,hydrogen-dao 将其转换为大写。如果你有一条查询语句返回了一个名为 `class` 的字段,而你想用 Pojo 来接收它,你可以在 Pojo 中定义一个名为 "`CLASS`" 的属性。 30 | 31 | ```java 32 | // 查询结果包装成 Pojo 33 | List<User> users = dao.query(User.class, 34 | "select * from users where id in(?,?,?)", 35 | 1, 2, 3); 36 | ``` 37 | 38 | ### 带参数名的查询 39 | 40 | 有些童鞋不喜欢 "?"
作为占位符,希望每个参数都有名字。hydrogen-dao 提供一个叫 MappedCommand 的类,下面是一个例子: 41 | 42 | ```java 43 | MappedCommand mappedCommand = new MappedCommand( 44 | "select * from USER where USERNAME=#username# and ROLE in (#role#)") 45 | .setParam("username", "admin") 46 | .setParam("role", new int[]{1, 2, 3, 4, 5, 6}); 47 | 48 | List users = dao.query(User.class, mappedCommand); 49 | ``` 50 | 51 | ### 只取第一条查询结果 52 | 53 | DAO 提供 queryFirst() 方法,其参数与 query() 方法类同,不过返回值是单个的 Row 对象或 Pojo 对象。该方法只返回查询结果中的第一条记录。 54 | 55 | > 注意:不管查询本身是返回多条记录还是单条记录,queryFirst() 方法都只返回第一条找到的记录。 -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/snapshot/ExecutorInfo.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.snapshot; 2 | 3 | import java.util.Date; 4 | 5 | /** 6 | * 保存 Executor 执行信息。因为直接调用 Executor 获取数据可能会遇到阻塞,因此 7 | * 在 Executor 中包含一个 ExecutorInfo 对象以避免阻塞。当 8 | */ 9 | public class ExecutorInfo { 10 | 11 | private String lastCommand; // 最近执行的 SQL 语句 12 | 13 | private Date lastExecuteTime; // 最近执行 SQL 语句的时间 14 | 15 | private boolean closed; // Executor 是否已关闭 16 | 17 | private Snapshot snapshot; // 关联的快照(用于主动移除自己) 18 | 19 | public ExecutorInfo(String dsName) { 20 | this.snapshot = Snapshot.getInstance(dsName); 21 | this.snapshot.addExecutorInfo(this); 22 | } 23 | 24 | public Snapshot getSnapshot() { 25 | return snapshot; 26 | } 27 | 28 | public void setSnapshot(Snapshot snapshot) { 29 | this.snapshot = snapshot; 30 | } 31 | 32 | public boolean isClosed() { 33 | return closed; 34 | } 35 | 36 | public void setClosed(boolean closed) { 37 | this.closed = closed; 38 | this.snapshot.removeInfo(this); 39 | } 40 | 41 | public String getLastCommand() { 42 | return lastCommand; 43 | } 44 | 45 | public void setLastCommand(String lastCommand) { 46 | this.lastCommand = lastCommand; 47 | } 48 | 49 | public Date getLastExecuteTime() { 50 | return lastExecuteTime; 51 | } 52 | 53 | public void setLastExecuteTime(Date lastExecuteTime) { 54 | this.lastExecuteTime = lastExecuteTime; 55 | } 56 | 57 | @Override 58 | public String toString() { 59 | return "ExecutorInfo{" + 60 | "lastCommand='" + lastCommand + '\'' + 61 | ", lastExecuteTime=" + lastExecuteTime + 62 | ", closed=" + closed + 63 | '}'; 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /docs/02-datasources.md: -------------------------------------------------------------------------------- 1 | hydrogen-dao 需要一个 JDBC 数据源来实现数据库访问。只需要标准的 `javax.sql.DataSource` 对象即可。 2 | 3 | hydrogen-dao 支持同时管理多个数据源,不论它们各自属于什么类型的数据库。 4 | 5 | ### 创建 Datasources 对象 6 | 7 | `com.hyd.dao.DataSources` 是管理数据源的类,并且是单例的。 8 | 9 | ```java 10 | DataSources datasources = DataSources.getInstance(); 11 | ``` 12 | 13 | ### 创建数据源 14 | 15 | 接下来,你可以用任何数据库连接池类库(DBCP,c3p0,等等)创建一个包装好的 `javax.sql.DataSource` 对象,然后将其配置到 DataSources 里。例如配置一个 DBCP 的数据源: 16 | 17 | ```java 18 | DataSource ds = new org.apache.commons.dbcp.BasicDataSource(); 19 | ... 20 | datasources.setDataSource("db1", ds); 21 | ``` 22 | 23 | ### 获取 DAO 对象 24 | 25 | 配置好数据源后,就可以调用 DataSources 的 getDAO() 方法来获得 DAO 对象了。 26 | 27 | ```java 28 | DAO dao = datasources.getDAO("db1"); // 这个 DAO 对象的所有操作都是针对 db1 数据库 29 | ``` 30 | 31 | ### 完整例子 32 | 33 | 下面是一个完整的例子: 34 | 35 | ```java 36 | 37 | // 1. 
创建一个数据源 38 | BasicDataSource dataSource = new BasicDataSource(); 39 | dataSource.setDriverClassName("org.hsqldb.jdbc.JDBCDriver"); 40 | dataSource.setUrl("jdbc:hsqldb:mem:demodb"); 41 | dataSource.setUsername("SA"); 42 | 43 | // 2. 将 DataSource 对象注册到 com.hyd.dao.DataSources 对象中 44 | // 这两步通常会在 Spring 当中以配置的方式完成。 45 | // DataSources 对象应该是全局唯一的。 46 | DataSources.getInstance().setDataSource("demodb1", dataSource); 47 | 48 | // 3. 创建 DAO 对象 49 | DAO dao = new DAO("demodb1"); 50 | 51 | ``` 52 | 53 | 如果你用的是 Spring,则配置起来是这个样子: 54 | 55 | ```java 56 | @Configuration 57 | public class DbConfiguration { 58 | 59 | // 假设配置好了一个数据源 60 | @Bean 61 | public DataSources dataSources(DataSource dataSource) { 62 | DataSources dataSources = DataSources.getInstance(); 63 | dataSources.setDataSource("db1", dataSource); 64 | return dataSources; 65 | } 66 | } 67 | 68 | @Component 69 | public class UserService { 70 | 71 | public User findUser(Long userId) { 72 | return new DAO("db1").queryFirst(User.class, "select * from users where id=?", userId); 73 | } 74 | } 75 | ``` -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/Locker.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.util.Map; 4 | import java.util.concurrent.locks.Lock; 5 | import java.util.concurrent.locks.ReentrantLock; 6 | import java.util.function.Supplier; 7 | 8 | /** 9 | * Lock and synchronization by string key. 10 | * 11 | * @author yidin 12 | */ 13 | public class Locker { 14 | 15 | public static final int CACHE_SIZE = 10000; 16 | 17 | private static final Map<String, Lock> CACHE = MapCacheUtils.newLRUCache(CACHE_SIZE, false); 18 | 19 | private static final Lock CACHE_LOCK = new ReentrantLock(); 20 | 21 | public static void lockAndRun(String key, Runnable runnable) { 22 | lockAndRun(getCacheLock(key), runnable); 23 | } 24 | 25 | public static <T> T lockAndRun(String key, Supplier<T> supplier) { 26 | return lockAndRun(getCacheLock(key), supplier); 27 | } 28 | 29 | private static Lock getCacheLock(String key) { 30 | Lock[] lock = new Lock[]{CACHE.get(key)}; 31 | if (lock[0] == null) { 32 | lockAndRun(CACHE_LOCK, () -> { 33 | lock[0] = CACHE.get(key); 34 | if (lock[0] == null) { 35 | lock[0] = new ReentrantLock(); 36 | CACHE.put(key, lock[0]); 37 | } 38 | }); 39 | } 40 | return lock[0]; 41 | } 42 | 43 | private static void lockAndRun(Lock lock, Runnable runnable) { 44 | try { 45 | lock.lock(); 46 | runnable.run(); 47 | } finally { 48 | lock.unlock(); 49 | } 50 | } 51 | 52 | private static <T> T lockAndRun(Lock lock, Supplier<T> supplier) { 53 | try { 54 | lock.lock(); 55 | return supplier.get(); 56 | } finally { 57 | lock.unlock(); 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/mate/util/StrTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import java.time.Year; 6 | import java.util.Collections; 7 | import java.util.HashMap; 8 | import java.util.Map; 9 | 10 | import static org.junit.jupiter.api.Assertions.assertEquals; 11 | 12 | public class StrTest { 13 | 14 | @Test 15 | public void eval() { 16 | Map<String, Object> emptyMap = Collections.emptyMap(); 17 | Map<String, Object> variableMap = new HashMap<>(){{ 18 | put("name", "HydrogenDAO"); 19 | }}; 20 | 21 | assertEquals("Hello", Str.eval("Hello", emptyMap)); 22 | assertEquals("Hello, HydrogenDAO", Str.eval("Hello, {name}", variableMap));
23 | assertEquals("Hello, ", Str.eval("Hello, {}", variableMap)); 24 | assertEquals("Hello, \\{HydrogenDAO", Str.eval("Hello, \\{{name}", variableMap)); 25 | assertEquals("Hello, \\{}HydrogenDAO", Str.eval("Hello, \\{}{name}", variableMap)); 26 | assertEquals("Hello, \\{123}HydrogenDAO", Str.eval("Hello, \\{123}{name}", variableMap)); 27 | assertEquals("Hello, \\{\\}HydrogenDAO", Str.eval("Hello, \\{\\}{name}", variableMap)); 28 | assertEquals("Hello, \\{123\\}HydrogenDAO", Str.eval("Hello, \\{123\\}{name}", variableMap)); 29 | 30 | variableMap.put("year", Year.now()); 31 | assertEquals("Hello, HydrogenDAO, now is 2020.", 32 | Str.eval("Hello, {name}, now is {year}.", variableMap)); 33 | } 34 | 35 | @Test 36 | public void testUnderscore2Property() throws Exception { 37 | assertEquals("", Str.underscore2Property("")); 38 | assertEquals("123", Str.underscore2Property("123")); 39 | assertEquals("123456", Str.underscore2Property("123_456")); 40 | assertEquals("abcDef", Str.underscore2Property("abc_def")); 41 | assertEquals("abcDef", Str.underscore2Property("abc_def_")); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /docs/05-updating.md: -------------------------------------------------------------------------------- 1 | `DAO.execute()` 方法可以用来执行所有的 `insert`/`update`/`delete` 语句,以及 DDL 语句(如 `create table`)。 2 | 3 | 只要不是在事务中执行,`execute()` 方法都会自动提交。 4 | 5 | 下面是执行 SQL 的几种方式: 6 | 7 | ### 1. 直接执行语句并带参数 8 | 9 | ```java 10 | dao.execute("insert into USER(id, username) values(?,?)", 1, "admin"); 11 | ``` 12 | 13 | ### 2. 执行 `Command` 对象 14 | 15 | `com.hyd.dao.database.commandbuilder.Command` 对象包含了要执行的语句和相关参数。`Command` 类的目的是将语句和参数绑在一起传递。 16 | 17 | ```java 18 | Command command = new Command(); 19 | command.setStatement("insert into USER(id, username) values(?,?)"); 20 | command.setParams(Arrays.asList(1, "admin")); 21 | dao.execute(command); 22 | ``` 23 | 24 | ### 3. 执行 `MappedCommand` 对象 25 | 26 | `com.hyd.dao.MappedCommand` 对象支持为参数起名。目前只有 `MappedCommand` 支持这种方式,其他方式执行查询或变更,都是用 "?" 
作为参数占位符。 27 | 28 | ```java 29 | MappedCommand cmd = new MappedCommand("insert into USER(username) values (#username#)"); 30 | cmd.setParam("username", "user1"); 31 | dao.execute(cmd); 32 | ``` 33 | 34 | ### 4 根据参数动态生成的条件 35 | 36 | com.hyd.dao.SQL 类帮助生成一个包含动态条件的语句。当条件不满足时,相关的条件不会出现在 SQL 语句中,并继续保证 SQL 的合法性。具体使用方法请参考 SQL 类的单元测试。 37 | 38 | ```java 39 | dao.execute(SQL 40 | .Update("USER") 41 | .Set("ROLE=?", roleId) 42 | .Where("USERID>?", 10) 43 | .And(username != null, "USERNAME=?", username) // 如果 username 为 null,则本条件不会生成 44 | ); 45 | ``` 46 | 47 | ### 5 批处理:执行 com.hyd.dao.BatchCommand 对象 48 | 49 | BatchCommand 对象用于执行批处理语句,比如批量插入: 50 | 51 | ```java 52 | BatchCommand bc = new BatchCommand("insert into USER(id, username) values(?,?)"); 53 | bc.addParams(1, "user1"); 54 | bc.addParams(2, "user2"); 55 | bc.addParams(3, "user3"); 56 | dao.execute(bc); 57 | ``` 58 | 59 | > 注意:批处理不是当成事务来执行的。每次调用 `execute()` 方法执行批处理时,其执行效果取决于 `java.sql.Statement#executeBatch` 方法的执行效果。如果要将批处理作为事务执行,请参考[事务处理](07-transaction.md)。 60 | 61 | ### 6 批处理的返回值 62 | 63 | `execute(BatchCommand)` 方法的返回值等于每条执行的语句所变更的记录数的总和。例如第一条语句更新了 3 条记录,第二条更新了 7 条记录,那么 `execute()` 方法将返回 10。 -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/SQLTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.hyd.dao.command.Command; 4 | import org.junit.jupiter.api.Test; 5 | 6 | import java.util.Arrays; 7 | 8 | import static com.hyd.dao.SQL.Select; 9 | 10 | public class SQLTest { 11 | 12 | private static void output(SQL.Generatable generatable) { 13 | Command command = generatable.toCommand(); 14 | System.out.println(command.getStatement()); 15 | System.out.println(command.getParams()); 16 | } 17 | 18 | @Test 19 | public void testJoin() throws Exception { 20 | String name = null; 21 | Select select = Select("*") 22 | .From("table1 t1") 23 | .InnerJoin("Table2 t2 on t1.id=t2.id+?", 10) 24 | .IfNotEmpty(name, (_select, _name) -> 25 | _select.Where("t2.name=?", _name.toUpperCase())); 26 | output(select); 27 | } 28 | 29 | @Test 30 | public void testJoin2() throws Exception { 31 | Select select = Select("*") 32 | .From("table1 t1") 33 | .InnerJoin("Table2 t2 on t1.id=t2.id+?", 10) 34 | .Where("t2.name=?", "aaa") 35 | .Where("t2.name=?", "bbb") 36 | .Where("t2.name=?", "ccc"); 37 | 38 | output(select); 39 | } 40 | 41 | @Test 42 | public void testIn() throws Exception { 43 | Select select = Select("*") 44 | .From("table1 t1") 45 | .Where("name in ?", Arrays.asList("name1", "name2", "name3")) 46 | .Or("id in ?", "id1", "id2", "id3"); 47 | 48 | output(select); 49 | } 50 | 51 | @Test 52 | public void testChildStatements() throws Exception { 53 | output(Select("*").From("t1") 54 | .Where("col1 in", Select("pid").From("t2").Where("t2.name=?", "aaa")) 55 | .And("col2 not in ", Select("qid").From("t3").Where("t3.xxx in ?", "111", "222", "333")) 56 | .OrderBy("col3 desc") 57 | .Limit(100) 58 | ); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/spring/SpringAutoConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.spring; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.log.Logger; 6 | import org.springframework.beans.BeansException; 7 | import org.springframework.beans.factory.config.BeanPostProcessor; 8 | import 
org.springframework.boot.autoconfigure.AutoConfigureAfter; 9 | import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; 10 | import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; 11 | import org.springframework.context.annotation.Bean; 12 | 13 | import javax.sql.DataSource; 14 | 15 | /** 16 | * 自动根据 spring.datasource 配置初始化 DAO 对象 17 | * 使用方法:在适当的地方加入 @Import(com.hyd.dao.spring.SpringAutoConfiguration.class) 即可。 18 | * 多数据源:这里会自动根据数据源对象的 beanName 注册到 DataSources 中,以便后面创建各自不同的 DAO 对象。 19 | * 20 | * @author yidin 21 | */ 22 | @AutoConfigureAfter(DataSourceAutoConfiguration.class) 23 | public class SpringAutoConfiguration { 24 | 25 | private static final Logger LOG = Logger.getLogger(SpringAutoConfiguration.class); 26 | 27 | @Bean("dao") 28 | @ConditionalOnMissingBean(name = "dao") 29 | public DAO dao() { 30 | return new DAO(DataSources.DEFAULT_DATA_SOURCE_NAME); 31 | } 32 | 33 | @Bean 34 | public BeanPostProcessor dataSourcePostProcessor() { 35 | return new BeanPostProcessor() { 36 | @Override 37 | public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { 38 | if (bean instanceof DataSource ds) { 39 | var instance = DataSources.getInstance(); 40 | if (instance.isEmpty()) { 41 | instance.setDataSource(DataSources.DEFAULT_DATA_SOURCE_NAME, ds); 42 | } 43 | instance.setDataSource(beanName, ds); 44 | LOG.debug("Recognized data source '" + beanName + "' as " + ds); 45 | } 46 | return bean; 47 | } 48 | }; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/UpdateBuilder.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder; 2 | 3 | import com.hyd.dao.SQL; 4 | import com.hyd.dao.command.Command; 5 | import com.hyd.dao.command.builder.helper.CommandBuilderHelper; 6 | import com.hyd.dao.database.ColumnInfo; 7 | import com.hyd.dao.database.ConnectionContext; 8 | import com.hyd.dao.database.FQN; 9 | import com.hyd.dao.database.type.NameConverter; 10 | 11 | import java.util.List; 12 | 13 | public class UpdateBuilder extends CommandBuilder { 14 | 15 | public UpdateBuilder(ConnectionContext context) { 16 | super(context); 17 | } 18 | 19 | /** 20 | * 生成根据主键值更新字段 Update 命令 21 | * 22 | * @param tableName 表名 23 | * @param object 包含主键值和要更新的字段值的对象 24 | * 25 | * @return 生成的命令 26 | */ 27 | public Command buildByKey(String tableName, Object object) { 28 | if (object == null) { 29 | throw new NullPointerException("object is null"); 30 | } 31 | 32 | final NameConverter nameConverter = context.getNameConverter(); 33 | final FQN fqn = new FQN(context, tableName); 34 | final List infos = CommandBuilderHelper.getColumnInfos(fqn, context); 35 | final SQL.Update update = new SQL.Update(tableName); 36 | 37 | for (ColumnInfo info : infos) { 38 | String columnName = context.getDialect().quote(info.getColumnName()); 39 | Object param = CommandBuilderHelper.generateParamValue(object, info, nameConverter); 40 | 41 | if (info.isPrimary()) { 42 | if (param == null) { 43 | throw new IllegalStateException("Update command missing param value for column " + columnName); 44 | } 45 | update.Where(columnName + "=?", param); 46 | } else { 47 | update.Set(param != null, columnName, param); 48 | } 49 | } 50 | 51 | if (!update.hasConditions()) { 52 | throw new IllegalStateException( 53 | "Update command has no condition, dangerous operation prohibited."); 54 | } 55 | 56 | 
return update.toCommand(); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/FQN.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.mate.util.Str; 5 | import lombok.EqualsAndHashCode; 6 | 7 | import java.sql.SQLException; 8 | 9 | /** 10 | * FQN of a table. 11 | * 12 | * @author yiding.he 13 | */ 14 | @EqualsAndHashCode 15 | public class FQN { 16 | 17 | private final String schema; 18 | 19 | private final String name; 20 | 21 | private final String quotedName; 22 | 23 | public FQN(ConnectionContext context, String fqn) { 24 | if (Str.isEmpty(fqn)) { 25 | throw new IllegalArgumentException("FQN parameter cannot be empty"); 26 | } 27 | 28 | this.schema = parseSchema(context, fqn); 29 | this.name = fqn.contains(".") ? Str.subStringAfterLast(fqn, ".") : fqn; 30 | this.quotedName = context.getDialect().quote(this.schema, this.name); 31 | } 32 | 33 | private String parseSchema(ConnectionContext context, String fqn) { 34 | try { 35 | // 如果表名带 "." 则取前面部分,否则从 Connection 对象中取,如果取不到则使用 "%" 36 | if (fqn.contains(".")) { 37 | return Str.subStringBeforeLast(fqn, "."); 38 | } else { 39 | return Str.fromCandidates( 40 | context.getDriverConnection().getSchema(), 41 | context.getDriverConnection().getCatalog(), 42 | "%" 43 | ); 44 | } 45 | } catch (SQLException e) { 46 | throw new DAOException(e); 47 | } 48 | } 49 | 50 | public String getSchema(String defaultValue) { 51 | return Str.defaultIfEmpty(schema, defaultValue); 52 | } 53 | 54 | public String getName(String defaultValue) { 55 | return Str.defaultIfEmpty(name, defaultValue); 56 | } 57 | 58 | public String getSchema() { 59 | return schema; 60 | } 61 | 62 | public String getName() { 63 | return name; 64 | } 65 | 66 | public String getQuotedName() { 67 | return quotedName; 68 | } 69 | 70 | public String getFullName() { 71 | return Str.appendIfNotEmpty(schema, ".", "") + name; 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/impl/OracleDialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects.impl; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.dialects.Dialect; 5 | import com.hyd.dao.database.type.NameConverter; 6 | import com.hyd.dao.mate.util.ResultSetUtil; 7 | 8 | import java.sql.Connection; 9 | import java.sql.ResultSet; 10 | import java.sql.SQLException; 11 | import java.util.function.Predicate; 12 | import java.util.regex.Pattern; 13 | 14 | public class OracleDialect implements Dialect { 15 | 16 | private static final Pattern PRODUCT_NAME_PATTERN = Pattern.compile(".*Oracle.*"); 17 | 18 | public static final int TYPE_CURSOR = -10; 19 | 20 | @Override 21 | public String identityQuoter() { 22 | return "\""; 23 | } 24 | 25 | @Override 26 | public Predicate getMatcher() { 27 | return connection -> { 28 | try { 29 | var databaseProductName = connection.getMetaData().getDatabaseProductName(); 30 | return PRODUCT_NAME_PATTERN.matcher(databaseProductName).matches(); 31 | } catch (SQLException e) { 32 | throw new DAOException(e); 33 | } 34 | }; 35 | } 36 | 37 | @Override 38 | public String wrapRangeQuery(String sql, int startPos, int endPos) { 39 | var _startPos = startPos + 1; 40 | var sql_prefix = "select * from ( 
select pagination_wrapper.*, rownum " + 41 | ResultSetUtil.PAGINATION_WRAPPER_COLUMN_NAME + " from ("; 42 | var sql_suffix = ") pagination_wrapper) where " + 43 | ResultSetUtil.PAGINATION_WRAPPER_COLUMN_NAME + " between " + _startPos + " and " + endPos; 44 | return sql_prefix + sql + sql_suffix; 45 | } 46 | 47 | @Override 48 | public Object parseCallableStatementResult(int sqlType, Object value) { 49 | try { 50 | if (sqlType == TYPE_CURSOR) { 51 | var rs1 = (ResultSet) value; 52 | return ResultSetUtil.readResultSet(rs1, null, NameConverter.DEFAULT, -1, -1); 53 | } else { 54 | return Dialect.super.parseCallableStatementResult(sqlType, value); 55 | } 56 | } catch (Exception e) { 57 | throw DAOException.wrap(e); 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/JDBCDriver.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.mate.util.Cls; 4 | 5 | import java.sql.Driver; 6 | 7 | /** 8 | * 根据 JDBC URL 来猜测对应的 Driver 类 9 | * 10 | * @author yiding.he@gmail.com 11 | */ 12 | public enum JDBCDriver { 13 | 14 | MySQL("jdbc:mysql:", "com.mysql.cj.jdbc.Driver", "com.mysql.jdbc.Driver"), 15 | Oracle("jdbc:oracle:", "oracle.jdbc.OracleDriver"), 16 | H2("jdbc:h2:", "org.h2.Driver"), 17 | HSQLDB("jdbc:hsqldb:", "org.hsqldb.jdbc.JDBCDriver"), 18 | SQLServer("jdbc:sqlserver:", "com.microsoft.sqlserver.jdbc.SQLServerDriver"), 19 | DB2("jdbc:db2:", "com.ibm.db2.jcc.DB2Driver"), 20 | SQLiteJDBC("jdbc:sqlite:", "org.sqlite.JDBC"), 21 | PostgreSQL("jdbc:postgresql:", "org.postgresql.Driver"), 22 | ODBC("jdbc:odbc:", "sun.jdbc.odbc.JdbcOdbcDriver"), 23 | 24 | /////////////////////////////////////////////// 25 | ; 26 | 27 | private final String schemaPrefix; 28 | 29 | private final String[] driverClasses; 30 | 31 | private Class availableDriver; 32 | 33 | JDBCDriver(String schemaPrefix, String... 
driverClasses) { 34 | this.schemaPrefix = schemaPrefix; 35 | this.driverClasses = driverClasses; 36 | } 37 | 38 | public String getSchemaPrefix() { 39 | return schemaPrefix; 40 | } 41 | 42 | public String[] getDriverClasses() { 43 | return driverClasses; 44 | } 45 | 46 | public Class getAvailableDriver() { 47 | return this.availableDriver; 48 | } 49 | 50 | // 检查 JDBC Class 是否存在 51 | @SuppressWarnings("unchecked") 52 | public boolean isAvailable() { 53 | for (String driverClass : driverClasses) { 54 | Class type = Cls.getType(driverClass); 55 | if (type != null && Driver.class.isAssignableFrom(type)) { 56 | availableDriver = (Class) type; 57 | return true; 58 | } 59 | } 60 | return false; 61 | } 62 | 63 | public static JDBCDriver getDriverByUrl(String jdbcUrl) { 64 | for (JDBCDriver driver : values()) { 65 | if (jdbcUrl.startsWith(driver.schemaPrefix) && driver.isAvailable()) { 66 | return driver; 67 | } 68 | } 69 | 70 | return null; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/Page.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Collection; 5 | import java.util.Collections; 6 | import java.util.List; 7 | 8 | /** 9 | * 分页查询结果 10 | */ 11 | public class Page { 12 | 13 | private int total; // 总记录数 14 | 15 | private int pageIndex; 16 | 17 | private int pageSize; 18 | 19 | private List list; 20 | 21 | public Page() { 22 | } 23 | 24 | public Page(Collection c) { 25 | this(c, 0, 0, 0); 26 | } 27 | 28 | public Page(int total, int pageIndex, int pageSize) { 29 | this(Collections.emptyList(), total, pageIndex, pageSize); 30 | } 31 | 32 | public Page(Collection c, int pageIndex, int pageSize) { 33 | this(c, c.size(), pageIndex, pageSize); 34 | } 35 | 36 | public Page(Collection c, int total, int pageIndex, int pageSize) { 37 | this.list = new ArrayList<>(c); 38 | this.total = total; 39 | this.pageIndex = pageIndex; 40 | this.pageSize = pageSize; 41 | } 42 | 43 | public int getTotal() { 44 | return total; 45 | } 46 | 47 | public void setTotal(int total) { 48 | this.total = total; 49 | } 50 | 51 | public int getPageIndex() { 52 | return pageIndex; 53 | } 54 | 55 | public void setPageIndex(int pageIndex) { 56 | this.pageIndex = pageIndex; 57 | } 58 | 59 | public int getPageSize() { 60 | return pageSize; 61 | } 62 | 63 | public void setPageSize(int pageSize) { 64 | this.pageSize = pageSize; 65 | } 66 | 67 | public int getTotalPage() { 68 | return this.pageSize == 0? 0: ((this.total + this.pageSize - 1) / this.pageSize); 69 | } 70 | 71 | public int size() { 72 | return this.list == null ? 0 : this.list.size(); 73 | } 74 | 75 | public void addAll(Collection collection) { 76 | if (list != null) { 77 | list.addAll(collection); 78 | } else { 79 | list = new ArrayList<>(collection); 80 | } 81 | } 82 | 83 | public boolean isEmpty() { 84 | return list == null || list.isEmpty(); 85 | } 86 | 87 | public T get(int index) { 88 | return list == null ? 
null : list.get(index); 89 | } 90 | 91 | public List getList() { 92 | return list; 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/QueryBuilder.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.SQL; 5 | import com.hyd.dao.command.Command; 6 | import com.hyd.dao.database.ColumnInfo; 7 | import com.hyd.dao.database.ConnectionContext; 8 | import com.hyd.dao.database.FQN; 9 | import com.hyd.dao.database.type.NameConverter; 10 | 11 | import java.util.List; 12 | 13 | import static com.hyd.dao.command.builder.helper.CommandBuilderHelper.*; 14 | 15 | /** 16 | * 构建查询语句的类 17 | * 18 | * @author yiding.he 19 | */ 20 | public final class QueryBuilder extends CommandBuilder { 21 | 22 | public QueryBuilder(ConnectionContext context) { 23 | super(context); 24 | } 25 | 26 | /** 27 | * 根据主键值构建查询语句 28 | */ 29 | public Command buildByKey(String tableName, Object primaryKey) throws DAOException { 30 | final FQN fqn = new FQN(context, tableName); 31 | final List infos = getColumnInfos(fqn, context); 32 | final SQL.Select select = new SQL.Select("*").From(fqn.getQuotedName()); 33 | 34 | for (ColumnInfo info : infos) { 35 | if (info.isPrimary()) { 36 | select.And(context.getDialect().quote(info.getColumnName()) + "=?", primaryKey); 37 | break; 38 | } 39 | } 40 | 41 | if (!select.hasConditions()) { 42 | throw new DAOException("Primary key not found in table \"" + tableName + "\""); 43 | } 44 | 45 | return select.toCommand(); 46 | } 47 | 48 | /** 49 | * 根据 obj 对象构建查询语句 50 | */ 51 | public Command build(String tableName, Object obj) { 52 | final FQN fqn = new FQN(context, tableName); 53 | final SQL.Select select = new SQL.Select("*").From(fqn.getQuotedName()); 54 | 55 | final NameConverter nameConverter = context.getNameConverter(); 56 | final List infos = obj == null ? 
57 | getColumnInfos(fqn, context) : 58 | filterColumnsByType(getColumnInfos(fqn, context), obj.getClass(), nameConverter); 59 | 60 | if (obj != null) { 61 | infos.forEach(info -> { 62 | Object value = generateParamValue(obj, info, nameConverter); 63 | if (value != null) { 64 | select.And(context.getDialect().quote(info.getColumnName()) + "=?", value); 65 | } 66 | }); 67 | } 68 | 69 | return select.toCommand(); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/BatchPipeline.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Collection; 5 | import java.util.List; 6 | import java.util.function.Consumer; 7 | import java.util.stream.BaseStream; 8 | import java.util.stream.Stream; 9 | 10 | public class BatchPipeline { 11 | 12 | private final List buffer = new ArrayList<>(); 13 | 14 | private Consumer> batchOperation; 15 | 16 | private int batchSize = 1; 17 | 18 | private boolean ignoreNullItem = true; 19 | 20 | public BatchPipeline setBatchSize(int batchSize) { 21 | if (batchSize <= 0) { 22 | throw new IllegalArgumentException("Batch size " + batchSize + " should be positive."); 23 | } 24 | this.batchSize = batchSize; 25 | return this; 26 | } 27 | 28 | public BatchPipeline setBatchOperation(Consumer> batchOperation) { 29 | this.batchOperation = batchOperation; 30 | return this; 31 | } 32 | 33 | public BatchPipeline setIgnoreNullItem(boolean ignoreNullItem) { 34 | this.ignoreNullItem = ignoreNullItem; 35 | return this; 36 | } 37 | 38 | public int getBatchSize() { 39 | return batchSize; 40 | } 41 | 42 | public boolean isIgnoreNullItem() { 43 | return ignoreNullItem; 44 | } 45 | 46 | public void flush() { 47 | if (buffer.isEmpty()) { 48 | return; 49 | } 50 | 51 | var list = List.copyOf(buffer); 52 | buffer.clear(); 53 | if (batchOperation != null) { 54 | batchOperation.accept(list); 55 | } 56 | } 57 | 58 | public synchronized void feed(T item) { 59 | if (item == null && ignoreNullItem) { 60 | return; 61 | } 62 | 63 | buffer.add(item); 64 | 65 | if (buffer.size() >= this.batchSize) { 66 | flush(); 67 | } 68 | } 69 | 70 | @SafeVarargs 71 | public final void feed(T... 
items) { 72 | for (var item : items) { 73 | feed(item); 74 | } 75 | } 76 | 77 | public void feed(Stream itemStream) { 78 | itemStream.forEach(this::feed); 79 | } 80 | 81 | public void feed(BaseStream itemStream) { 82 | var iterator = itemStream.iterator(); 83 | while (iterator.hasNext()) { 84 | var item = iterator.next(); 85 | feed(item); 86 | } 87 | } 88 | 89 | public void feed(Collection itemCollection) { 90 | itemCollection.forEach(this::feed); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/Batch.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Collection; 5 | import java.util.List; 6 | import java.util.function.Consumer; 7 | import java.util.function.Function; 8 | import java.util.stream.BaseStream; 9 | import java.util.stream.Stream; 10 | 11 | /** 12 | * 将流转为批处理 13 | */ 14 | public class Batch { 15 | 16 | public static final int DEFAULT_BATCH_SIZE = 100; 17 | 18 | public static Batch with(Stream stream) { 19 | return new Batch<>(stream); 20 | } 21 | 22 | public static > Batch with(BaseStream stream) { 23 | return new Batch<>(stream); 24 | } 25 | 26 | public static Batch with(Collection collection) { 27 | return new Batch<>(collection.stream()); 28 | } 29 | 30 | ////////////////////////////////////////////////////////////// 31 | 32 | private final BaseStream stream; 33 | 34 | private int batchSize = DEFAULT_BATCH_SIZE; 35 | 36 | private int resultCount = 0; 37 | 38 | public Batch(BaseStream stream) { 39 | this.stream = stream; 40 | } 41 | 42 | public Batch size(int batchSize) { 43 | this.batchSize = batchSize; 44 | return this; 45 | } 46 | 47 | public void forEachBatch(Consumer> consumer) { 48 | List buffer = new ArrayList<>(); 49 | 50 | var iterator = stream.iterator(); 51 | while (iterator.hasNext()) { 52 | var t = iterator.next(); 53 | buffer.add(t); 54 | if (buffer.size() >= batchSize) { 55 | consumer.accept(new ArrayList<>(buffer)); 56 | buffer.clear(); 57 | } 58 | } 59 | 60 | if (buffer.size() > 0) { 61 | consumer.accept(buffer); 62 | } 63 | } 64 | 65 | public Batch sumEachBatch(Function, Integer> func) { 66 | List list = new ArrayList<>(); 67 | 68 | var iterator = stream.iterator(); 69 | while (iterator.hasNext()) { 70 | var t = iterator.next(); 71 | list.add(t); 72 | if (list.size() >= batchSize) { 73 | resultCount += func.apply(new ArrayList<>(list)); 74 | list.clear(); 75 | } 76 | } 77 | 78 | if (list.size() > 0) { 79 | resultCount += func.apply(list); 80 | } 81 | 82 | return this; 83 | } 84 | 85 | public int getResultCount() { 86 | return resultCount; 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/DAOException.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.hyd.dao.command.Command; 4 | 5 | import java.sql.SQLException; 6 | import java.util.Collections; 7 | import java.util.List; 8 | 9 | /** 10 | * DAO 的基础异常类 11 | */ 12 | @SuppressWarnings("unchecked") 13 | public class DAOException extends RuntimeException { 14 | 15 | private final Command command; // 异常相关 SQL 16 | 17 | public Command getCommand() { 18 | return command; 19 | } 20 | 21 | public static DAOException wrap(Throwable t) { 22 | return t instanceof DAOException ? 
(DAOException) t : new DAOException(t); 23 | } 24 | 25 | public DAOException() { 26 | this.command = null; 27 | } 28 | 29 | public DAOException(String message) { 30 | super(message); 31 | this.command = null; 32 | } 33 | 34 | public DAOException(Throwable cause) { 35 | super(cause); 36 | this.command = null; 37 | } 38 | 39 | public DAOException(String message, Throwable cause) { 40 | super(message, cause); 41 | this.command = null; 42 | } 43 | 44 | public DAOException(String message, Command command) { 45 | super(message); 46 | this.command = command; 47 | } 48 | 49 | public DAOException(String message, Throwable cause, Command command) { 50 | super(message, cause); 51 | this.command = command; 52 | } 53 | 54 | public DAOException(String message, String sql, List params) { 55 | this(message, null, sql, params); 56 | } 57 | 58 | public DAOException(String message, Exception e, String sql, List params) { 59 | super(message, e); 60 | List _params = params == null ? Collections.emptyList() : (List) params; 61 | this.command = new Command(sql, _params); 62 | } 63 | 64 | public int getSqlErrorNumber() { 65 | if (getCause() instanceof SQLException) { 66 | return ((SQLException) getCause()).getErrorCode(); 67 | } 68 | return -1; 69 | } 70 | 71 | @Override 72 | public String toString() { 73 | if (this.command == null) { 74 | return super.toString(); 75 | } else { 76 | String cause = ""; 77 | if (getCause() instanceof SQLException) { 78 | cause = getCause().toString().trim(); 79 | } 80 | 81 | return super.toString().trim() + 82 | "\n --SQL : " + this.command.getStatement() + 83 | "\n --Params: " + this.command.getParams() + 84 | "\n --Cause : " + cause + "\n"; 85 | 86 | // 上面两个地方用了 trim() 是因为 Oracle 的异常信息字符串最后会有一个换行 87 | } 88 | 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/CSVReader.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.Row; 5 | 6 | import java.io.*; 7 | import java.nio.charset.Charset; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | import java.util.Objects; 11 | 12 | /** 13 | * 从 csv 文件中读取内容到 Row 列表 14 | */ 15 | public class CSVReader { 16 | 17 | /** 18 | * 从 csv 文件读取内容到 Row 列表 19 | * 20 | * @param path 资源路径 21 | * @param charset 编码 22 | * 23 | * @return 读取结果 24 | * 25 | * @throws DAOException 如果读取失败 26 | */ 27 | public static List read(String path, String charset) throws DAOException { 28 | 29 | InputStream inputStream; 30 | if (path.startsWith("classpath:")) { 31 | inputStream = CSVReader.class.getResourceAsStream(path.substring("classpath:".length())); 32 | } else { 33 | try { 34 | inputStream = new FileInputStream(path); 35 | } catch (FileNotFoundException e) { 36 | throw new DAOException(e); 37 | } 38 | } 39 | 40 | return read(inputStream, charset); 41 | } 42 | 43 | public static List read(File file, String charset) throws DAOException { 44 | try { 45 | return read(new FileInputStream(file), Charset.forName(charset)); 46 | } catch (FileNotFoundException e) { 47 | throw new DAOException(e); 48 | } 49 | } 50 | 51 | public static List read(InputStream inputStream, String charset) throws DAOException { 52 | return read(inputStream, Charset.forName(charset)); 53 | } 54 | 55 | public static List read(InputStream inputStream, Charset charset) throws DAOException { 56 | Objects.requireNonNull(inputStream, "input stream is null"); 57 | 
try (inputStream; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, charset))) { 58 | return convertLines(reader.lines().toList()); 59 | } catch (IOException e) { 60 | throw new DAOException(e); 61 | } 62 | } 63 | 64 | private static List convertLines(List lines) { 65 | String[] columns = lines.get(0).split(","); 66 | List rows = new ArrayList<>(); 67 | 68 | for (int i = 1; i < lines.size(); i++) { 69 | String line = lines.get(i); 70 | String[] values = line.split(","); 71 | Row row = new Row(); 72 | 73 | for (int j = 0; j < columns.length; j++) { 74 | String column = columns[j]; 75 | String value = values[j]; 76 | row.put(column, value); 77 | } 78 | 79 | rows.add(row); 80 | } 81 | 82 | return rows; 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/impl/MySqlDialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects.impl; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.ColumnInfo; 5 | import com.hyd.dao.database.FQN; 6 | import com.hyd.dao.database.dialects.Dialect; 7 | import com.hyd.dao.database.executor.ExecuteMode; 8 | import com.hyd.dao.mate.util.Str; 9 | 10 | import java.sql.Connection; 11 | import java.sql.SQLException; 12 | import java.sql.Statement; 13 | import java.sql.Types; 14 | import java.util.function.Predicate; 15 | import java.util.regex.Pattern; 16 | 17 | public class MySqlDialect implements Dialect { 18 | 19 | private static final Pattern PRODUCT_NAME_PATTERN = Pattern.compile(".*MySQL.*"); 20 | 21 | @Override 22 | public Predicate getMatcher() { 23 | return c -> { 24 | try { 25 | return PRODUCT_NAME_PATTERN.matcher(c.getMetaData().getDatabaseProductName()).matches(); 26 | } catch (SQLException e) { 27 | throw new DAOException(e); 28 | } 29 | }; 30 | } 31 | 32 | @Override 33 | public String wrapRangeQuery(String sql, int startPos, int endPos) { 34 | int size = endPos - startPos; 35 | return "select range_wrapper.* from (" + sql + ") range_wrapper limit " + startPos + ", " + size; 36 | } 37 | 38 | @Override 39 | public String wrapCountQuery(String sql) { 40 | return "select count(*) cnt from (" + sql + ") count_sql_wrapper"; 41 | } 42 | 43 | @Override 44 | public String identityQuoter() { 45 | return "`"; 46 | } 47 | 48 | @Override 49 | public String getJavaTypeByDatabase(ColumnInfo columnInfo) { 50 | if (columnInfo.getDataType() == Types.DECIMAL) { 51 | return "Double"; 52 | } else { 53 | return "String"; 54 | } 55 | } 56 | 57 | // MySQL 遇到 catalog 为空时会强行取 Connection 的 catalog 而忽略 schema, 58 | // 所以这里当 FQN 指定了 schema 时,要用它来取代 catalog 59 | @Override 60 | public String fixCatalog(String connectionCatalog, FQN fqn) { 61 | return Str.defaultIfEmpty(fqn.getSchema(), connectionCatalog); 62 | } 63 | 64 | @Override 65 | public void setupStatement(Statement statement, ExecuteMode executeMode) throws SQLException { 66 | if (executeMode == ExecuteMode.Streaming) { 67 | // https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-implementation-notes.html 68 | // Chapter "ResultSet": 69 | // "If you are working with ResultSets that have a large number of rows or large values 70 | // and cannot allocate heap space in your JVM for the memory required, you can tell the driver 71 | // to stream the results back one row at a time." 
72 | statement.setFetchSize(Integer.MIN_VALUE); 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/DeleteBuilder.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.SQL; 5 | import com.hyd.dao.command.Command; 6 | import com.hyd.dao.database.ColumnInfo; 7 | import com.hyd.dao.database.ConnectionContext; 8 | import com.hyd.dao.database.FQN; 9 | import com.hyd.dao.database.dialects.Dialect; 10 | import com.hyd.dao.database.type.NameConverter; 11 | 12 | import java.util.List; 13 | 14 | import static com.hyd.dao.command.builder.helper.CommandBuilderHelper.generateParamValue; 15 | import static com.hyd.dao.command.builder.helper.CommandBuilderHelper.getColumnInfos; 16 | 17 | /** 18 | * 生成 delete 语句 19 | */ 20 | public final class DeleteBuilder extends CommandBuilder { 21 | 22 | public DeleteBuilder(ConnectionContext context) { 23 | super(context); 24 | } 25 | 26 | /** 27 | * 从 object 中提取参数 28 | */ 29 | public Command build(String tableName, Object object) throws DAOException { 30 | if (object == null) { 31 | throw new NullPointerException("object is null"); 32 | } 33 | 34 | final FQN fqn = new FQN(context, tableName); 35 | final NameConverter nameConverter = context.getNameConverter(); 36 | final List infos = getColumnInfos(fqn, context); 37 | final Dialect dialect = context.getDialect(); 38 | 39 | SQL.Delete delete = new SQL.Delete(fqn.getQuotedName()); 40 | for (ColumnInfo info : infos) { 41 | String columnName = dialect.quote(info.getColumnName()); 42 | Object param = generateParamValue(object, info, nameConverter); 43 | if (param != null) { 44 | delete.And(columnName + "=?", param); 45 | } 46 | } 47 | 48 | if (!delete.hasConditions()) { 49 | throw new IllegalStateException( 50 | "Delete command has no condition, dangerous operation prohibited."); 51 | } 52 | 53 | return delete.toCommand(); 54 | } 55 | 56 | /** 57 | * 根据主键值构造参数(仅支持单个字段主键) 58 | */ 59 | public Command buildByKey(String tableName, Object key) throws DAOException { 60 | if (key == null) { 61 | throw new NullPointerException("key is null"); 62 | } 63 | 64 | final FQN fqn = new FQN(context, tableName); 65 | final List infos = getColumnInfos(fqn, context); 66 | final Dialect dialect = context.getDialect(); 67 | 68 | SQL.Delete delete = new SQL.Delete(fqn.getQuotedName()); 69 | for (ColumnInfo info : infos) { 70 | if (info.isPrimary()) { 71 | String columnName = dialect.quote(info.getColumnName()); 72 | delete.Where(columnName + "=?", key); 73 | break; 74 | } 75 | } 76 | 77 | return delete.toCommand(); 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/repository/Repository.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.repository; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.command.builder.DeleteBuilder; 5 | import com.hyd.dao.command.builder.InsertBuilder; 6 | import com.hyd.dao.command.builder.QueryBuilder; 7 | import com.hyd.dao.command.builder.UpdateBuilder; 8 | import com.hyd.dao.database.ConnectionContext; 9 | import com.hyd.dao.transaction.TransactionManager; 10 | 11 | import java.util.List; 12 | import java.util.function.Function; 13 | 14 | public class Repository { 15 | 16 | private final DAO dao; 17 | 18 | private 
final String tableName; 19 | 20 | private final Class type; 21 | 22 | public Repository(Class type, DAO dao, String tableName) { 23 | this.type = type; 24 | this.dao = dao; 25 | this.tableName = tableName; 26 | } 27 | 28 | private E withConnectionContext(Function f) { 29 | var context = TransactionManager.getConnectionContext(this.dao); 30 | try { 31 | return f.apply(context); 32 | } finally { 33 | context.closeIfAutoCommit(); 34 | } 35 | } 36 | 37 | public T queryById(Object singlePrimaryKey) { 38 | var command = withConnectionContext( 39 | context -> new QueryBuilder(context).buildByKey(tableName, singlePrimaryKey) 40 | ); 41 | return dao.queryFirst(type, command); 42 | } 43 | 44 | public List queryByInstance(T t) { 45 | var command = withConnectionContext( 46 | context -> new QueryBuilder(context).build(tableName, t) 47 | ); 48 | return dao.query(type, command); 49 | } 50 | 51 | public int deleteById(Object singlePrimaryKey) { 52 | var command = withConnectionContext( 53 | context -> new DeleteBuilder(context).buildByKey(tableName, singlePrimaryKey) 54 | ); 55 | return dao.execute(command); 56 | } 57 | 58 | public int deleteByInstance(T t) { 59 | if (t == null) { 60 | return 0; 61 | } 62 | 63 | var command = withConnectionContext( 64 | context -> new DeleteBuilder(context).build(tableName, t) 65 | ); 66 | return dao.execute(command); 67 | } 68 | 69 | public int insertInstance(T t) { 70 | var command = withConnectionContext( 71 | context -> new InsertBuilder(context).build(tableName, t) 72 | ); 73 | return dao.execute(command); 74 | } 75 | 76 | public int insertBatch(List list) { 77 | var command = withConnectionContext( 78 | context -> new InsertBuilder(context).buildBatch(tableName, list) 79 | ); 80 | return dao.execute(command); 81 | } 82 | 83 | public int updateById(T t) { 84 | var command = withConnectionContext( 85 | context -> new UpdateBuilder(context).buildByKey(tableName, t) 86 | ); 87 | return dao.execute(command); 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/junit/HydrogenDAORule.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.junit; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.Row; 5 | import com.hyd.dao.mate.util.CSVReader; 6 | import com.hyd.dao.mate.util.ScriptExecutor; 7 | 8 | import java.io.File; 9 | import java.util.*; 10 | import java.util.function.Supplier; 11 | 12 | /** 13 | * 用于单元测试的 Rule 14 | * 15 | * @author yidin 16 | */ 17 | public class HydrogenDAORule { 18 | 19 | private static final String SCRIPT_FOLDER = "scripts"; 20 | 21 | private final Supplier daoSupplier; 22 | 23 | private final String scriptFolder; 24 | 25 | private final Map> prepareData = new HashMap<>(); 26 | 27 | public HydrogenDAORule(Supplier daoSupplier) { 28 | this.daoSupplier = daoSupplier; 29 | this.scriptFolder = SCRIPT_FOLDER; 30 | init(); 31 | } 32 | 33 | public HydrogenDAORule(Supplier daoSupplier, String scriptFolder) { 34 | this.daoSupplier = daoSupplier; 35 | this.scriptFolder = scriptFolder; 36 | init(); 37 | } 38 | 39 | private void init() { 40 | 41 | // 搜索文件列表 42 | var csvFiles = scanCsvFiles(); 43 | 44 | // 将文件内容插入到数据库 45 | for (var csvFile : csvFiles) { 46 | var fileName = csvFile.getName(); 47 | var tableName = fileName.substring(0, fileName.length() - 4); 48 | var rows = CSVReader.read(csvFile, "UTF-8"); 49 | prepareData.put(tableName, rows); 50 | } 51 | } 52 | 53 | /** 54 | * 检查 {@link #scriptFolder} 目录下是否有 csv 
文件 55 | * 56 | * @return csv 文件列表 57 | */ 58 | private List scanCsvFiles() { 59 | var classPath = System.getProperty("java.class.path", "."); 60 | var classPathElements = classPath.split(File.pathSeparator); 61 | List csvFiles = new ArrayList<>(); 62 | 63 | for (var pathElement : classPathElements) { 64 | var file = new File(pathElement); 65 | if (file.exists() && file.isDirectory()) { 66 | var csvFolder = new File(file, scriptFolder); 67 | if (csvFolder.exists() && csvFolder.isDirectory()) { 68 | var files = csvFolder.listFiles(f -> f.getName().toLowerCase().endsWith(".csv")); 69 | if (files != null) { 70 | csvFiles.addAll(Arrays.asList(files)); 71 | } 72 | } 73 | } 74 | } 75 | 76 | return csvFiles; 77 | } 78 | 79 | private void insertData(DAO dao) { 80 | prepareData.forEach((tableName, rows) -> dao.insert(rows, tableName)); 81 | } 82 | 83 | public void before() { 84 | var dao = daoSupplier.get(); 85 | 86 | ScriptExecutor.execute("classpath:/" + scriptFolder + "/tables.sql", dao); 87 | insertData(dao); 88 | ScriptExecutor.execute("classpath:/" + scriptFolder + "/before.sql", dao); 89 | } 90 | 91 | public void after() { 92 | var dao = daoSupplier.get(); 93 | ScriptExecutor.execute("classpath:/" + scriptFolder + "/after.sql", dao); 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/BatchCommand.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command; 2 | 3 | 4 | import com.hyd.dao.DAO; 5 | import com.hyd.dao.database.ColumnInfo; 6 | 7 | import java.util.ArrayList; 8 | import java.util.Arrays; 9 | import java.util.List; 10 | 11 | /** 12 | *

A batch command holds one SQL statement together with a list of parameter groups.
 13 |  * For example:
 14 |  * BatchCommand cmd = new BatchCommand(
 15 |  *     "delete from tt_test where id>=? and id<=?"
 16 |  * );
 17 |  * cmd.addParams("3", "4");
 18 |  * cmd.addParams("100", "200");
 19 |  * cmd.addParams("201", "205");
 20 |  * dao.execute(cmd);
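 *     // each addParams(...) call above contributes one group of bound values for the same
 *     // statement, so the delete runs for the ranges (3,4), (100,200) and (201,205)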
 21 |  * 
22 | */ 23 | public class BatchCommand { 24 | 25 | public static final BatchCommand EMPTY = new BatchCommand(""); 26 | 27 | ///////////////////////////////////////////////////////////////// 28 | 29 | private final String command; 30 | 31 | private List> params = new ArrayList>(); 32 | 33 | private ColumnInfo[] columnInfos; // 参数对应的字段信息,有助于生成 null 参数,非必须 34 | 35 | public ColumnInfo[] getColumnInfos() { 36 | return columnInfos; 37 | } 38 | 39 | public void setColumnInfos(ColumnInfo[] columnInfos) { 40 | this.columnInfos = columnInfos; 41 | } 42 | 43 | public void setColumnInfos(List columnInfos) { 44 | setColumnInfos(columnInfos.toArray(new ColumnInfo[0])); 45 | } 46 | 47 | /** 48 | * 构造函数 49 | * 50 | * @param command SQL 语句 51 | */ 52 | public BatchCommand(String command) { 53 | this.command = command; 54 | } 55 | 56 | /** 57 | * 构造函数 58 | * 59 | * @param command SQL 语句 60 | * @param params 批量参数值 61 | */ 62 | public BatchCommand(String command, List> params) { 63 | this.command = command; 64 | this.params = params; 65 | } 66 | 67 | /** 68 | * 获得 SQL 语句 69 | * 70 | * @return SQL 语句 71 | */ 72 | public String getCommand() { 73 | return command; 74 | } 75 | 76 | /** 77 | * 获得所有的参数组 78 | * 79 | * @return 所有参数组 80 | */ 81 | public List> getParams() { 82 | return params; 83 | } 84 | 85 | /** 86 | * 添加一组参数 87 | * 88 | * @param params 一组参数 89 | */ 90 | @SuppressWarnings("unchecked") 91 | public void addParams(Object... params) { 92 | if (params.length == 1 && params[0] instanceof List) { 93 | List list = (List) params[0]; 94 | for (Object o : list) { 95 | if (o == DAO.SYSDATE) { 96 | throw new IllegalArgumentException("DAO.SYSDATE cannot be used in batch command."); 97 | } 98 | } 99 | this.params.add(list); 100 | } else { 101 | for (Object o : params) { 102 | if (o == DAO.SYSDATE) { 103 | throw new IllegalArgumentException("DAO.SYSDATE cannot be used in batch command."); 104 | } 105 | } 106 | this.params.add(Arrays.asList(params)); 107 | } 108 | } 109 | 110 | } 111 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/benchmark/InsertBenchmark.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests.benchmark; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.mate.util.DBCPDataSource; 6 | import com.hyd.dao.repository.Repository; 7 | import lombok.Data; 8 | import org.apache.commons.dbcp2.BasicDataSource; 9 | import org.junit.jupiter.api.BeforeEach; 10 | import org.junit.jupiter.api.Test; 11 | 12 | import java.util.Date; 13 | 14 | import static com.hyd.dao.DataSources.DEFAULT_DATA_SOURCE_NAME; 15 | 16 | // 性能测试的目的是衡量代码效率,所以只测试单核性能 17 | public class InsertBenchmark { 18 | 19 | public static final String DROP_TABLE = "drop table if exists table1"; 20 | 21 | public static final String CREATE_TABLE = "create table table1(" + 22 | "num_value1 int, " + 23 | "num_value2 int, " + 24 | "num_value3 int, " + 25 | "str_value1 varchar(100), " + 26 | "str_value2 varchar(100), " + 27 | "str_value3 varchar(100), " + 28 | "date_value1 timestamp, " + 29 | "date_value2 timestamp, " + 30 | "date_value3 timestamp" + 31 | ")"; 32 | 33 | public static final String INSERT = "insert into table1" + 34 | "(num_value1,num_value2,num_value3,str_value1,str_value2,str_value3,date_value1,date_value2,date_value3)" + 35 | "values(?,?,?,?,?,?,?,?,?)"; 36 | 37 | @Data 38 | public static class Bean { 39 | 40 | private Long numValue1; 41 | 42 | private Long 
numValue2; 43 | 44 | private Long numValue3; 45 | 46 | private String strValue1; 47 | 48 | private String strValue2; 49 | 50 | private String strValue3; 51 | 52 | private Date dateValue1; 53 | 54 | private Date dateValue2; 55 | 56 | private Date dateValue3; 57 | } 58 | 59 | private DAO dao; 60 | 61 | { 62 | BasicDataSource dataSource = DBCPDataSource.newH2MemDataSource(); 63 | DataSources.getInstance().setDataSource(DEFAULT_DATA_SOURCE_NAME, dataSource); 64 | this.dao = new DAO(DEFAULT_DATA_SOURCE_NAME); 65 | } 66 | 67 | @BeforeEach 68 | public void init() { 69 | this.dao.execute(DROP_TABLE); 70 | this.dao.execute(CREATE_TABLE); 71 | } 72 | 73 | ////////////////////////////////////////////////////////////// 74 | 75 | @Test 76 | public void testInsertBySQL() throws Exception { 77 | Date now = new Date(); 78 | for (int i = 0; i < 100000; i++) { 79 | this.dao.execute(INSERT, 1, 2, 3, "1", "2", "3", now, now, now); 80 | } 81 | } 82 | 83 | @Test 84 | public void testInsertByBean() throws Exception { 85 | Date now = new Date(); 86 | Bean bean = new Bean(); 87 | bean.setNumValue1(1L); 88 | bean.setNumValue2(2L); 89 | bean.setNumValue3(3L); 90 | bean.setStrValue1("1"); 91 | bean.setStrValue2("2"); 92 | bean.setStrValue3("3"); 93 | bean.setDateValue1(now); 94 | bean.setDateValue2(now); 95 | bean.setDateValue3(now); 96 | 97 | Repository beanRepository = new Repository<>(Bean.class, dao, "table1"); 98 | for (int i = 0; i < 100000; i++) { 99 | beanRepository.insertInstance(bean); 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/ColumnInfo.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | /** 4 | * 用来描述库表字段信息的类 5 | */ 6 | @SuppressWarnings("unused") 7 | public class ColumnInfo { 8 | 9 | /** 10 | * 字段名 11 | */ 12 | private String columnName; 13 | 14 | /** 15 | * 数据类型,参考 {@link java.sql.Types} 16 | */ 17 | private int dataType; 18 | 19 | /** 20 | * 是否是主键 21 | */ 22 | private boolean primary; 23 | 24 | /** 25 | * 是否自动增长 26 | */ 27 | private boolean autoIncrement; 28 | 29 | /** 30 | * 注释 31 | */ 32 | private String comment; 33 | 34 | /** 35 | * 最大大小 36 | */ 37 | private int size; 38 | 39 | /** 40 | * 是否可以为空 41 | */ 42 | private boolean nullable; 43 | 44 | public boolean isNullable() { 45 | return nullable; 46 | } 47 | 48 | public void setNullable(boolean nullable) { 49 | this.nullable = nullable; 50 | } 51 | 52 | public boolean isAutoIncrement() { 53 | return autoIncrement; 54 | } 55 | 56 | public void setAutoIncrement(boolean autoIncrement) { 57 | this.autoIncrement = autoIncrement; 58 | } 59 | 60 | /** 61 | * 获取字段最大长度 62 | * 63 | * @return 字段最大长度 64 | */ 65 | public int getSize() { 66 | return size; 67 | } 68 | 69 | /** 70 | * 设置字段最大长度 71 | * 72 | * @param size 字段最大长度 73 | */ 74 | public void setSize(int size) { 75 | this.size = size; 76 | } 77 | 78 | /** 79 | * 获取注释 80 | * 81 | * @return 注释 82 | */ 83 | public String getComment() { 84 | return comment; 85 | } 86 | 87 | /** 88 | * 设置注释 89 | * 90 | * @param comment 注释 91 | */ 92 | public void setComment(String comment) { 93 | this.comment = comment; 94 | } 95 | 96 | /** 97 | * 获取字段是不是主键 98 | * 99 | * @return 如果字段是主键,则返回 true,否则返回 false 100 | */ 101 | public boolean isPrimary() { 102 | return primary; 103 | } 104 | 105 | /** 106 | * 设置字段的主键类型 107 | * 108 | * @param primary true 表示该字段是主键。false 则表示不是。 109 | */ 110 | public void setPrimary(boolean primary) { 111 | 
this.primary = primary; 112 | } 113 | 114 | /** 115 | * 获得字段名 116 | * 117 | * @return 字段名 118 | */ 119 | public String getColumnName() { 120 | return columnName; 121 | } 122 | 123 | /** 124 | * 设置字段名 125 | * 126 | * @param columnName 字段名 127 | */ 128 | public void setColumnName(String columnName) { 129 | this.columnName = columnName; 130 | } 131 | 132 | /** 133 | * 获得字段数据类型 134 | * 135 | * @return 字段数据类型。具体的值参考 {@link java.sql.Types} 136 | */ 137 | public int getDataType() { 138 | return dataType; 139 | } 140 | 141 | /** 142 | * 设置字段数据类型 143 | * 144 | * @param dataType 字段数据类型。具体的值参考 {@link java.sql.Types} 145 | */ 146 | public void setDataType(int dataType) { 147 | this.dataType = dataType; 148 | } 149 | 150 | @Override 151 | public String toString() { 152 | return "ColumnInfo{" + 153 | "columnName='" + columnName + '\'' + 154 | ", dataType=" + dataType + 155 | ", primary=" + primary + 156 | ", autoIncrement=" + autoIncrement + 157 | ", comment='" + comment + '\'' + 158 | ", size=" + size + 159 | ", nullable=" + nullable + 160 | '}'; 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/function/FunctionHelper.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.function; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.mate.util.ResultSetUtil; 5 | import com.hyd.dao.sp.SpParam; 6 | import com.hyd.dao.sp.SpParamType; 7 | import com.hyd.dao.sp.StorageProcedureHelper; 8 | 9 | import java.sql.CallableStatement; 10 | import java.sql.Connection; 11 | import java.sql.SQLException; 12 | import java.util.Comparator; 13 | import java.util.HashMap; 14 | import java.util.stream.Collectors; 15 | import java.util.stream.Stream; 16 | 17 | /** 18 | * 数据库 function 处理帮助类 19 | */ 20 | public class FunctionHelper { 21 | 22 | public static CallableStatement createCallableStatement( 23 | String name, int resultType, SpParam[] params, Connection connection) throws SQLException { 24 | var call_str = generateFunctionStatement(name, params); 25 | var cs = connection.prepareCall(call_str); 26 | setupFunctionParams(params, resultType, cs); 27 | return cs; 28 | } 29 | 30 | private static void setupFunctionParams(SpParam[] params, int resultType, CallableStatement cs) throws SQLException { 31 | cs.registerOutParameter(1, resultType); 32 | for (var i = 0; i < params.length; i++) { 33 | var param = params[i]; 34 | if (param.getType() == SpParamType.IN || param.getType() == SpParamType.IN_OUT) { 35 | cs.setObject(i + 2, param.getValue()); 36 | } 37 | if (param.getType() == SpParamType.OUT || param.getType() == SpParamType.IN_OUT) { 38 | cs.registerOutParameter(i + 2, param.getSqlType()); 39 | } 40 | } 41 | } 42 | 43 | private static String generateFunctionStatement(String name, SpParam[] params) { 44 | return "{? = call " + name + "(" + 45 | Stream.of(params).map(p -> "?").collect(Collectors.joining(",")) + 46 | ")}"; 47 | } 48 | 49 | public static SpParam[] createFunctionParams(String name, Object[] params, Connection connection) throws Exception { 50 | var metaData = connection.getMetaData(); 51 | 52 | var schema = name.contains(".") ? 53 | name.split("\\.")[0].toUpperCase() : metaData.getUserName().toUpperCase(); 54 | 55 | var lastPartOfName = name.contains(".") ? 
name.substring(name.lastIndexOf(".") + 1) : name; 56 | 57 | var rs = metaData.getProcedureColumns(null, schema, lastPartOfName.toUpperCase(), "%"); 58 | var functionColumns = ResultSetUtil.readResultSet(rs); 59 | 60 | if (functionColumns.isEmpty()) { 61 | throw new DAOException("存储过程 " + name + " 没有找到任何参数。"); 62 | } 63 | 64 | // 按照 sequence 的值对 map 数组进行排序 65 | functionColumns.sort(Comparator.comparing(m -> m.getIntegerObject("sequence"))); 66 | 67 | var result = new SpParam[functionColumns.size()]; 68 | 69 | for (var i = 0; i < functionColumns.size(); i++) { 70 | var row = functionColumns.get(i); 71 | // function_columns 的第一行是方法的返回值 72 | // 注意,params 的长度可能小于 function 参数列表的长度,这时候假设多余的参数是有缺省值的。 73 | var param_value = (i > 0 && i <= params.length) ? params[i - 1] : null; 74 | result[i] = createSpParam(row, param_value); 75 | } 76 | 77 | return result; 78 | } 79 | 80 | private static SpParam createSpParam(HashMap row, Object param_value) { 81 | var param_type = StorageProcedureHelper.SP_PARAM_TYPES.get(((Double) row.get("column_type")).intValue()); 82 | var data_type = ((Double) row.get("data_type")).intValue(); 83 | return new SpParam(param_type, data_type, param_value); 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/NonPooledDataSource.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.DAOException; 4 | 5 | import javax.sql.DataSource; 6 | import java.io.PrintWriter; 7 | import java.sql.*; 8 | import java.util.logging.Logger; 9 | 10 | /** 11 | * 实现一个最简单的数据源 12 | * created at 2015/3/12 13 | * 14 | * @author Yiding 15 | */ 16 | @SuppressWarnings({"unused", "RedundantThrows"}) 17 | public class NonPooledDataSource implements DataSource { 18 | 19 | private String driverClassName; 20 | 21 | private String url; 22 | 23 | private String username; 24 | 25 | private String password; 26 | 27 | private PrintWriter logWriter; 28 | 29 | public NonPooledDataSource() { 30 | } 31 | 32 | public NonPooledDataSource(String driverClassName, String url) { 33 | this.driverClassName = driverClassName; 34 | this.url = url; 35 | 36 | initDriver(); 37 | } 38 | 39 | public NonPooledDataSource(String driverClassName, String url, String username, String password) { 40 | this.driverClassName = driverClassName; 41 | this.url = url; 42 | this.username = username; 43 | this.password = password; 44 | 45 | initDriver(); 46 | } 47 | 48 | public String getDriverClassName() { 49 | return driverClassName; 50 | } 51 | 52 | public void setDriverClassName(String driverClassName) { 53 | this.driverClassName = driverClassName; 54 | } 55 | 56 | public String getUrl() { 57 | return url; 58 | } 59 | 60 | public void setUrl(String url) { 61 | this.url = url; 62 | } 63 | 64 | public String getUsername() { 65 | return username; 66 | } 67 | 68 | public void setUsername(String username) { 69 | this.username = username; 70 | } 71 | 72 | public String getPassword() { 73 | return password; 74 | } 75 | 76 | public void setPassword(String password) { 77 | this.password = password; 78 | } 79 | 80 | private void initDriver() { 81 | 82 | try { 83 | Class type = Class.forName(this.driverClassName); 84 | Driver driver = (Driver) type.getConstructor().newInstance(); 85 | DriverManager.registerDriver(driver); 86 | } catch (Exception e) { 87 | throw new DAOException("Error registering JDBC driver", e); 88 | } 89 | } 90 | 91 | public Connection 
getConnection() throws SQLException { 92 | return DriverManager.getConnection(this.url, this.username, this.password); 93 | } 94 | 95 | public Connection getConnection(String username, String password) throws SQLException { 96 | return DriverManager.getConnection(this.url, username, password); 97 | } 98 | 99 | public PrintWriter getLogWriter() throws SQLException { 100 | return this.logWriter; 101 | } 102 | 103 | public void setLogWriter(PrintWriter out) throws SQLException { 104 | this.logWriter = out; 105 | } 106 | 107 | public int getLoginTimeout() throws SQLException { 108 | throw new UnsupportedOperationException("Not supported by NonPooledDataSource"); 109 | } 110 | 111 | public Logger getParentLogger() throws SQLFeatureNotSupportedException { 112 | return null; 113 | } 114 | 115 | public void setLoginTimeout(int seconds) throws SQLException { 116 | throw new UnsupportedOperationException("Not supported by NonPooledDataSource"); 117 | } 118 | 119 | public T unwrap(Class iface) throws SQLException { 120 | throw new SQLException("NonPooledDataSource is not a wrapper."); 121 | } 122 | 123 | public boolean isWrapperFor(Class iface) throws SQLException { 124 | return false; 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/MappedCommand.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command; 2 | 3 | import java.lang.reflect.Array; 4 | import java.util.*; 5 | import java.util.regex.Matcher; 6 | import java.util.regex.Pattern; 7 | 8 | /** 9 | * 带参数的 SQL 语句及参数。SQL 语句中的参数以 #xxx# 的格式存在。例如: 10 | * 11 | *
 12 |  *     String sql = "select * from USER where USERNAME=#username# and ROLE in (#roles#)";
 13 |  *     Map<String, Object> params = new HashMap<String, Object>();
 14 |  *     params.put("username", "admin");
 15 |  *     params.put("roles", new int[]{1, 2, 3, 4});
 16 |  *     dao.query(new MappedCommand(sql, params));
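 *
 *     // toCommand() (implemented below) expands the named parameters into plain JDBC form:
 *     //   select * from USER where USERNAME=? and ROLE in (?,?,?,?)
 *     //   with parameter list ["admin", 1, 2, 3, 4]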
 17 |  * 
18 | * 19 | * created at 2015/3/6 20 | * 21 | * @author Yiding 22 | */ 23 | public class MappedCommand { 24 | 25 | private String statement; 26 | 27 | private Map params; 28 | 29 | public MappedCommand() { 30 | this.params = new HashMap(); 31 | } 32 | 33 | public MappedCommand(String statement) { 34 | this.statement = statement; 35 | this.params = new HashMap(); 36 | } 37 | 38 | public MappedCommand(String statement, Map params) { 39 | this.statement = statement; 40 | this.params = params; 41 | } 42 | 43 | public String getStatement() { 44 | return statement; 45 | } 46 | 47 | public MappedCommand setStatement(String statement) { 48 | this.statement = statement; 49 | return this; 50 | } 51 | 52 | public Map getParams() { 53 | return params; 54 | } 55 | 56 | public MappedCommand setParams(Map params) { 57 | this.params = params; 58 | return this; 59 | } 60 | 61 | public MappedCommand setParam(String name, Object value) { 62 | this.params.put(name, value); 63 | return this; 64 | } 65 | 66 | public MappedCommand setParam(String name, Object... values) { 67 | this.params.put(name, values); 68 | return this; 69 | } 70 | 71 | public Command toCommand() { 72 | Command command = new Command(); 73 | Pattern pattern = Pattern.compile("#(\\S+)#"); 74 | Matcher matcher = pattern.matcher(this.statement); 75 | 76 | StringBuffer sb = new StringBuffer(); 77 | List paramList = new ArrayList(); 78 | 79 | while (matcher.find()) { 80 | String name = matcher.group(1); 81 | Object value = this.params.get(name); 82 | 83 | if (value.getClass().isArray()) { 84 | int length = Array.getLength(value); 85 | String holders = ""; 86 | for (int i = 0; i < length; i++) { 87 | paramList.add(Array.get(value, i)); 88 | holders += "?,"; 89 | } 90 | if (holders.endsWith(",")) { 91 | holders = holders.substring(0, holders.length() - 1); 92 | } 93 | matcher.appendReplacement(sb, holders); 94 | } else if (value instanceof Collection) { 95 | String holders = ""; 96 | for (Object item : (Collection) value) { 97 | paramList.add(item); 98 | holders += "?,"; 99 | } 100 | if (holders.endsWith(",")) { 101 | holders = holders.substring(0, holders.length() - 1); 102 | } 103 | matcher.appendReplacement(sb, holders); 104 | } else { 105 | paramList.add(value); 106 | matcher.appendReplacement(sb, "?"); 107 | } 108 | } 109 | 110 | matcher.appendTail(sb); 111 | command.setParams(paramList); 112 | command.setStatement(sb.toString()); 113 | return command; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/CaseInsensitiveHashMap.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import java.io.Serial; 4 | import java.util.HashMap; 5 | import java.util.HashSet; 6 | import java.util.Map; 7 | import java.util.Set; 8 | import java.util.stream.Collectors; 9 | 10 | /** 11 | * 对 key 忽略大小写的 HashMap 12 | */ 13 | public class CaseInsensitiveHashMap extends HashMap { 14 | 15 | @Serial 16 | private static final long serialVersionUID = 1L; 17 | 18 | // lowercase -> original 19 | private final Map originalKeys = new HashMap<>(); 20 | 21 | /** 22 | * 根据 key 获取值 23 | * 24 | * @param key 键 25 | * 26 | * @return 相对应的值 27 | * 28 | * @throws IllegalArgumentException 如果 key 不是一个字符串 29 | */ 30 | public V get(String key) { 31 | var originalKey = originalKeys.get(key.toLowerCase()); 32 | if (originalKey == null) { 33 | return null; 34 | } else { 35 | return super.get(originalKey); 36 | } 37 | 
} 38 | 39 | /** 40 | * 设置值 41 | * 42 | * @param key 键 43 | * @param value 值 44 | * 45 | * @throws IllegalArgumentException 如果 key 不是一个字符串 46 | */ 47 | public V put(String key, V value) { 48 | var lcKey = key.toLowerCase(); 49 | var originalKey = originalKeys.get(lcKey); 50 | if (originalKey != null) { 51 | super.remove(originalKey); 52 | } 53 | 54 | originalKeys.put(lcKey, key); 55 | return super.put(key, value); 56 | } 57 | 58 | /** 59 | * 检查 key 是否存在 60 | * 61 | * @param key 要检查的 key 62 | * 63 | * @return 如果存在则返回 true 64 | */ 65 | @Override 66 | public boolean containsKey(Object key) { 67 | if (!(key instanceof String)) { 68 | throw new IllegalArgumentException("Key must be a string."); 69 | } 70 | 71 | var lcKey = ((String) key).toLowerCase(); 72 | return originalKeys.containsKey(lcKey); 73 | } 74 | 75 | @Override 76 | public V remove(Object key) { 77 | if (!(key instanceof String)) { 78 | throw new IllegalArgumentException("Key must be a string."); 79 | } 80 | 81 | var lcKey = ((String) key).toLowerCase(); 82 | var originalKey = originalKeys.get(lcKey); 83 | originalKeys.remove(lcKey); 84 | return super.remove(originalKey); 85 | } 86 | 87 | @Override 88 | public boolean remove(Object key, Object value) { 89 | if (!(key instanceof String)) { 90 | throw new IllegalArgumentException("Key must be a string."); 91 | } 92 | 93 | var lcKey = ((String) key).toLowerCase(); 94 | var originalKey = originalKeys.get(lcKey); 95 | var found = super.remove(originalKey, value); 96 | if (found) { 97 | originalKeys.remove(lcKey); 98 | } 99 | return found; 100 | } 101 | 102 | @Override 103 | public void putAll(Map m) { 104 | for (Map.Entry entry : m.entrySet()) { 105 | put(entry.getKey(), entry.getValue()); 106 | } 107 | } 108 | 109 | @Override 110 | public void clear() { 111 | super.clear(); 112 | originalKeys.clear(); 113 | } 114 | 115 | @Override 116 | public Set keySet() { 117 | return new HashSet<>(originalKeys.values()); 118 | } 119 | 120 | @Override 121 | public Set> entrySet() { 122 | return originalKeys.entrySet().stream().map(entry -> 123 | new SimpleEntry<>(entry.getValue(), super.get(entry.getValue())) 124 | ).collect(Collectors.toSet()); 125 | } 126 | 127 | public String getOriginalKey(String key) { 128 | return originalKeys.get(key.toLowerCase()); 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/dao/command/builder/helper/ColumnInfoHelperTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder.helper; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DataSources; 5 | import com.hyd.dao.Row; 6 | import com.hyd.dao.database.ColumnInfo; 7 | import com.hyd.dao.database.ConnectionContext; 8 | import com.hyd.dao.database.FQN; 9 | import com.hyd.dao.mate.util.DBCPDataSource; 10 | import com.hyd.dao.mate.util.ResultSetUtil; 11 | import org.apache.commons.dbcp2.BasicDataSource; 12 | import org.junit.jupiter.api.Test; 13 | 14 | import java.sql.Connection; 15 | import java.sql.ResultSet; 16 | import java.util.HashMap; 17 | import java.util.List; 18 | 19 | public class ColumnInfoHelperTest { 20 | 21 | @Test 22 | public void testForMySQL() throws Exception { 23 | String url = "jdbc:mysql://localhost/?serverTimezone=UTC"; 24 | BasicDataSource dataSource = DBCPDataSource.newMySqlDataSource(url, "root", "root123"); 25 | Connection connection = dataSource.getConnection(); 26 | ConnectionContext context = ConnectionContext.create(connection); 
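// ColumnInfoHelper.getColumnInfo (see ColumnInfoHelper further below) reads column metadata for the
// named table through dialect-aware DatabaseMetaData calls and caches the ColumnInfo list per FQN;
// the "demo.blog" table and the root/root123 credentials are just assumptions of this local test.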
27 | 28 | List columnInfo = ColumnInfoHelper.getColumnInfo(new FQN(context, "demo.blog"), connection); 29 | columnInfo.forEach(System.out::println); 30 | } 31 | 32 | 33 | @Test 34 | public void testForH2() throws Exception { 35 | BasicDataSource dataSource = DBCPDataSource.newH2MemDataSource(); 36 | DataSources dataSources = DataSources.getInstance(); 37 | dataSources.setDataSource("h2", dataSource); 38 | 39 | DAO dao = new DAO("h2"); 40 | List schemas = dao.query("show schemas"); 41 | schemas.forEach(System.out::println); 42 | 43 | dao.execute("create table table1(id int primary key, name varchar(100))"); 44 | dao.execute("create table \"table2\"(id int primary key, name varchar(100))"); 45 | dao.execute("insert into table1 set id=?, name=?", 0, "Hello, world"); 46 | dao.execute("insert into \"table2\" set id=?, name=?", 0, "Hello, world"); 47 | 48 | System.out.println("//////////////////////////////////////////////////////////////"); 49 | dao.query("select * from table1").forEach(System.out::println); 50 | System.out.println("//////////////////////////////////////////////////////////////"); 51 | dao.query("select * from \"table2\"").forEach(System.out::println); 52 | System.out.println("//////////////////////////////////////////////////////////////"); 53 | 54 | dataSources.withConnection("h2", connection -> { 55 | try { 56 | ResultSet columns = connection.getMetaData().getColumns(connection.getCatalog(), "PUBLIC", "TABLE1", "%"); 57 | List maps = ResultSetUtil.readResultSet(columns); 58 | if (!maps.isEmpty()) { 59 | for (HashMap map : maps) { 60 | System.out.println(map); 61 | } 62 | } else { 63 | System.err.println("No column found for table1"); 64 | } 65 | } catch (Exception e) { 66 | e.printStackTrace(); 67 | } 68 | }); 69 | 70 | System.out.println("//////////////////////////////////////////////////////////////"); 71 | 72 | dataSources.withConnection("h2", connection -> { 73 | try { 74 | ConnectionContext context = ConnectionContext.create(connection); 75 | FQN table1 = new FQN(context, "table1"); 76 | List columnInfos = CommandBuilderHelper.getColumnInfos(table1, context); 77 | System.out.println("Columns count of table1: " + columnInfos.size()); 78 | 79 | for (ColumnInfo columnInfo : columnInfos) { 80 | System.out.println(columnInfo); 81 | } 82 | } catch (Exception e) { 83 | e.printStackTrace(); 84 | } 85 | }); 86 | 87 | System.out.println("//////////////////////////////////////////////////////////////"); 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/DBCPDataSource.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | 4 | import org.apache.commons.dbcp2.BasicDataSource; 5 | 6 | import javax.sql.DataSource; 7 | 8 | /** 9 | * 创建基于 DBCP 的 DataSource 对象 10 | * created at 2014/12/26 11 | * 12 | * @author Yiding 13 | */ 14 | public final class DBCPDataSource { 15 | 16 | private DBCPDataSource() { 17 | 18 | } 19 | 20 | public static BasicDataSource newDataSource( 21 | String driverClass, String url, String username, String password) { 22 | 23 | var ds = new BasicDataSource(); 24 | ds.setDriverClassName(driverClass); 25 | ds.setUrl(url); 26 | ds.setUsername(username); 27 | ds.setPassword(password); 28 | return ds; 29 | } 30 | 31 | public static BasicDataSource newH2MemDataSource() { 32 | var ds = new BasicDataSource(); 33 | ds.setDriverClassName("org.h2.Driver"); 34 | ds.setUrl("jdbc:h2:mem:db1"); 35 | return 
ds; 36 | } 37 | 38 | public static BasicDataSource newH2ServerDataSource(String host, int port, String database) { 39 | var ds = new BasicDataSource(); 40 | ds.setDriverClassName("org.h2.Driver"); 41 | ds.setUrl("jdbc:h2:tcp://" + host + ":" + port + "/" + database + ";IFNOTEXISTS=true"); 42 | return ds; 43 | } 44 | 45 | public static BasicDataSource newH2FileDataSource(String filePath, boolean onlyIfExists) { 46 | var ds = new BasicDataSource(); 47 | ds.setDriverClassName("org.h2.Driver"); 48 | ds.setUrl("jdbc:h2:" + filePath + (onlyIfExists ? ";IFEXISTS=TRUE" : "")); 49 | return ds; 50 | } 51 | 52 | public static BasicDataSource newOracleDataSource( 53 | String host, int port, String sid, String username, String password) { 54 | 55 | var ds = new BasicDataSource(); 56 | ds.setDriverClassName("oracle.jdbc.OracleDriver"); 57 | ds.setUrl("jdbc:oracle:thin:@" + host + ":" + port + ":" + sid); 58 | ds.setUsername(username); 59 | ds.setPassword(password); 60 | 61 | return ds; 62 | } 63 | 64 | public static BasicDataSource newMySqlDataSource( 65 | String url, String username, String password) { 66 | 67 | var ds = new BasicDataSource(); 68 | ds.setDriverClassName("com.mysql.jdbc.Driver"); 69 | ds.setUrl(url); 70 | ds.setUsername(username); 71 | ds.setPassword(password); 72 | return ds; 73 | } 74 | 75 | public static BasicDataSource newMySqlDataSource( 76 | String host, int port, String database, String username, String password, 77 | boolean useUnicode, String charEncoding) { 78 | 79 | var ds = new BasicDataSource(); 80 | ds.setDriverClassName("com.mysql.jdbc.Driver"); 81 | ds.setUrl("jdbc:mysql://" + host + ":" + port + "/" + database + 82 | "?serverTimezone=UTC&useUnicode=" + useUnicode + "&characterEncoding=" + charEncoding); 83 | ds.setUsername(username); 84 | ds.setPassword(password); 85 | return ds; 86 | } 87 | 88 | public static BasicDataSource newRemoteHsqldbDataSource( 89 | String host, int port, String database, String username, String password) { 90 | 91 | var ds = new BasicDataSource(); 92 | ds.setDriverClassName("org.hsqldb.jdbc.JDBCDriver"); 93 | ds.setUrl("jdbc:hsqldb:hsql://" + host + ":" + port + "/" + database); 94 | ds.setUsername(username); 95 | ds.setPassword(password); 96 | return ds; 97 | } 98 | 99 | public static DataSource newSqlServerDataSource(String host, int port, String database, String username, String password) { 100 | var ds = new BasicDataSource(); 101 | ds.setDriverClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); 102 | ds.setUrl("jdbc:sqlserver://" + host + ":" + port + ";databaseName=" + database); 103 | ds.setUsername(username); 104 | ds.setPassword(password); 105 | return ds; 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/DataSources.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.hyd.dao.database.ExecutorFactory; 4 | import com.hyd.dao.database.type.NameConverter; 5 | 6 | import javax.sql.DataSource; 7 | import java.sql.Connection; 8 | import java.sql.SQLException; 9 | import java.util.HashMap; 10 | import java.util.Map; 11 | import java.util.concurrent.ConcurrentHashMap; 12 | import java.util.function.Consumer; 13 | 14 | /** 15 | * 管理数据源配置,允许在运行时动态添加数据源 16 | * 17 | * @author yiding.he 18 | */ 19 | public class DataSources { 20 | 21 | @FunctionalInterface 22 | public interface DataSourceConsumer { 23 | 24 | void accept(DataSource dataSource) throws SQLException; 25 | } 26 
| 27 | private static final DataSources INSTANCE = new DataSources(); 28 | 29 | public static final String DEFAULT_DATA_SOURCE_NAME = "default"; 30 | 31 | /** 32 | * dsName -> DataSource 33 | */ 34 | private Map dataSources = new ConcurrentHashMap<>(); 35 | 36 | /** 37 | * dsName -> ExecutorFactory 38 | * TODO: clean useless variables 39 | */ 40 | private final Map executorFactories = new HashMap<>(); 41 | 42 | /** 43 | * dsName -> NameConverter 44 | * TODO: clean useless variables 45 | */ 46 | private final Map columnNameConverters = new HashMap<>(); 47 | 48 | public static DataSources getInstance() { 49 | return INSTANCE; 50 | } 51 | 52 | private DataSources() { 53 | 54 | } 55 | 56 | /** 57 | * 删除指定的数据源 58 | * 59 | * @param dataSourceName 数据源名称 60 | * @param finalization 删除后要对数据源做什么操作(例如关闭) 61 | */ 62 | public void remove(String dataSourceName, DataSourceConsumer finalization) throws DAOException { 63 | DataSource dataSource = dataSources.get(dataSourceName); 64 | 65 | if (dataSource != null) { 66 | dataSources.remove(dataSourceName); 67 | executorFactories.remove(dataSourceName); 68 | try { 69 | finalization.accept(dataSource); 70 | } catch (Exception e) { 71 | throw DAOException.wrap(e); 72 | } 73 | } 74 | } 75 | 76 | public void closeAll(DataSourceConsumer finalization) { 77 | this.dataSources.keySet().forEach(dataSourceName -> remove(dataSourceName, finalization)); 78 | } 79 | 80 | public Map getDataSources() { 81 | return dataSources; 82 | } 83 | 84 | public void setDataSources(Map dataSources) { 85 | this.dataSources = dataSources; 86 | } 87 | 88 | public void setDataSource(String dataSourceName, DataSource dataSource) { 89 | this.dataSources.put(dataSourceName, dataSource); 90 | } 91 | 92 | public DataSource getDataSource(String dataSourceName) { 93 | return this.dataSources.get(dataSourceName); 94 | } 95 | 96 | public void setColumnNameConverter(String dataSourceName, NameConverter nameConverter) { 97 | this.columnNameConverters.put(dataSourceName, nameConverter); 98 | } 99 | 100 | public boolean contains(String dsName) { 101 | return this.dataSources.containsKey(dsName); 102 | } 103 | 104 | /** 105 | * 操作数据库,连接然后自动关闭连接。 106 | * 107 | * @param dataSourceName 数据源名称 108 | * @param connectionConsumer 要进行的操作 109 | * 110 | * @throws SQLException 如果操作数据库失败 111 | */ 112 | public void withConnection(String dataSourceName, Consumer connectionConsumer) throws SQLException { 113 | 114 | if (!dataSources.containsKey(dataSourceName)) { 115 | throw new DAOException("Data source '" + dataSourceName + "' not found."); 116 | } 117 | 118 | try (Connection connection = dataSources.get(dataSourceName).getConnection()) { 119 | connectionConsumer.accept(connection); 120 | } 121 | } 122 | 123 | public boolean isEmpty() { 124 | return this.dataSources.isEmpty(); 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/helper/ColumnInfoHelper.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder.helper; 2 | 3 | import com.github.benmanes.caffeine.cache.Cache; 4 | import com.github.benmanes.caffeine.cache.Caffeine; 5 | import com.hyd.dao.DAOException; 6 | import com.hyd.dao.database.ColumnInfo; 7 | import com.hyd.dao.database.FQN; 8 | import com.hyd.dao.database.dialects.Dialect; 9 | import com.hyd.dao.database.dialects.Dialects; 10 | import com.hyd.dao.log.Logger; 11 | 12 | import javax.sql.PooledConnection; 13 | import 
java.sql.Connection; 14 | import java.sql.DatabaseMetaData; 15 | import java.sql.ResultSet; 16 | import java.sql.SQLException; 17 | import java.time.Duration; 18 | import java.util.ArrayList; 19 | import java.util.List; 20 | 21 | public class ColumnInfoHelper { 22 | 23 | private static final Logger log = Logger.getLogger(ColumnInfoHelper.class); 24 | 25 | public static final String NULLABLE = "1"; 26 | 27 | /** 28 | * 缓存表的列信息。当数据库变更后,需要清空缓存,或等待缓存超时 29 | */ 30 | private static Cache> columnInfoCache = Caffeine.newBuilder() 31 | .maximumSize(1000) 32 | .expireAfterAccess(Duration.ofHours(1)) 33 | .build(); 34 | 35 | /** 36 | * 以自定义配置来初始化缓存 37 | */ 38 | public static synchronized void resetColumnInfoCache(Caffeine> builder) { 39 | columnInfoCache = builder.build(); 40 | } 41 | 42 | /** 43 | * 清空缓存。当数据库变更后若不想重启应用,则调用此方法。 44 | */ 45 | public static synchronized void cleanUpCache() { 46 | columnInfoCache.cleanUp(); 47 | } 48 | 49 | public static List getColumnInfo(FQN fqn, Connection connection) { 50 | return columnInfoCache.get(fqn, _fqn -> { 51 | log.debug("Reading columns of table " + fqn + "..."); 52 | 53 | try { 54 | Connection conn = connection; 55 | if (connection instanceof PooledConnection) { 56 | conn = ((PooledConnection) connection).getConnection(); 57 | } 58 | 59 | Dialect dialect = Dialects.getDialect(conn); 60 | 61 | Dialect.ColumnMetaFields columnMeta = dialect.getColumnMetaFields(); 62 | DatabaseMetaData dbMeta = conn.getMetaData(); 63 | 64 | String catalog = dialect.fixCatalog(conn.getCatalog(), fqn); 65 | String schema = fqn.getSchema(); 66 | String fixedName = dialect.fixMetaName(fqn.getName()); 67 | 68 | try ( 69 | ResultSet columns = dbMeta.getColumns(catalog, schema, fixedName, "%"); 70 | ResultSet keys = dbMeta.getPrimaryKeys(catalog, schema, fixedName) 71 | ) { 72 | 73 | List keyNames = new ArrayList<>(); 74 | while (keys.next()) { 75 | keyNames.add(keys.getString(columnMeta.columnNameField())); 76 | } 77 | 78 | List infos = new ArrayList<>(); 79 | while (columns.next()) { 80 | String columnName = columns.getString(columnMeta.columnNameField()); 81 | boolean primaryKey = keyNames.contains(columnName); 82 | 83 | ColumnInfo info = new ColumnInfo(); 84 | info.setColumnName(columnName); 85 | info.setDataType(Integer.parseInt(columns.getString(columnMeta.dataTypeField()))); 86 | info.setPrimary(primaryKey); 87 | info.setComment(columns.getString(columnMeta.remarksField())); 88 | info.setSize(columns.getInt(columnMeta.columnSizeField())); 89 | info.setNullable(NULLABLE.equals(columns.getString(columnMeta.nullableField()))); 90 | infos.add(info); 91 | } 92 | 93 | return infos; 94 | } 95 | } catch (SQLException e) { 96 | throw new DAOException(e); 97 | } 98 | }); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/RowIterator.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.Row; 5 | import com.hyd.dao.database.type.NameConverter; 6 | import com.hyd.dao.database.type.TypeConverter; 7 | import com.hyd.dao.mate.util.ResultSetUtil; 8 | 9 | import java.io.Closeable; 10 | import java.io.IOException; 11 | import java.sql.ResultSet; 12 | import java.sql.SQLException; 13 | import java.util.Iterator; 14 | import java.util.function.Consumer; 15 | import java.util.function.Function; 16 | 17 | import static com.hyd.dao.mate.util.Closer.closeResultSet; 18 | 19 | 
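// A note on the column-info cache above: Caffeine's Cache.cleanUp() only performs pending maintenance,
// it does not evict entries that have not yet expired. A sketch of forcing a full refresh after a
// schema change (hypothetical call, shown only for illustration) would be:
//
//     columnInfoCache.invalidateAll();   // discard every cached column list immediately
//
// or rebuild the cache with new settings through resetColumnInfoCache(...).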
/** 20 | *

An iterator over query results. When a query returns a large result set and there is not enough memory to cache it all, use DAO.queryIterator, 21 |  * which returns an iterator that fetches one row at a time. Be sure to close it once processing is finished.

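 *
 * A minimal usage sketch (assuming a DAO#queryIterator overload that takes only the SQL text):
 *
 *     try (RowIterator rows = dao.queryIterator("select * from blog")) {
 *         for (Row row : rows) {
 *             // handle one row at a time instead of loading the whole result set
 *         }
 *     }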
22 | */ 23 | public class RowIterator implements Closeable, Iterable { 24 | 25 | private final ResultSet rs; 26 | 27 | private NameConverter nameConverter; 28 | 29 | private Consumer rowPreProcessor; 30 | 31 | private boolean closed; 32 | 33 | public RowIterator(ResultSet rs) { 34 | this(rs, null); 35 | } 36 | 37 | public RowIterator(ResultSet rs, Consumer rowPreProcessor) { 38 | this.rs = rs; 39 | this.rowPreProcessor = rowPreProcessor; 40 | 41 | if (this.rs == null) { 42 | closed = true; 43 | } 44 | } 45 | 46 | public void setRowPreProcessor(Consumer rowPreProcessor) { 47 | this.rowPreProcessor = rowPreProcessor; 48 | } 49 | 50 | public void setNameConverter(NameConverter nameConverter) { 51 | this.nameConverter = nameConverter; 52 | } 53 | 54 | /** 55 | * 获得是否还有查询结果 56 | * 57 | * @return 如果还有查询结果则返回 true,并移至下一行 58 | */ 59 | public boolean next() { 60 | 61 | if (closed) { 62 | return false; 63 | } 64 | 65 | try { 66 | if (rs.isClosed()) { 67 | return false; 68 | } 69 | 70 | var next = rs.next(); 71 | if (!next) { 72 | close(); 73 | } 74 | return next; 75 | } catch (SQLException e) { 76 | throw new DAOException("failed to read next record", e); 77 | } 78 | } 79 | 80 | /** 81 | * 获得查询结果中的当前行 82 | * 83 | * @return 当前行的内容 84 | */ 85 | public Row getRow() { 86 | try { 87 | var row = ResultSetUtil.readRow(rs); 88 | if (this.rowPreProcessor != null) { 89 | this.rowPreProcessor.accept(row); 90 | } 91 | return row; 92 | } catch (IOException | SQLException e) { 93 | throw new DAOException("failed to read record", e); 94 | } 95 | } 96 | 97 | @Override 98 | public void close() { 99 | 100 | if (closed) { 101 | return; 102 | } 103 | 104 | try { 105 | closeResultSet(rs); 106 | } finally { 107 | closed = true; 108 | } 109 | } 110 | 111 | @Override 112 | public Iterator iterator() { 113 | return iterator(Row.class); 114 | } 115 | 116 | public void forEach(Class type, Consumer action) { 117 | iterator(type).forEachRemaining(action); 118 | } 119 | 120 | @SuppressWarnings("unchecked") 121 | private Iterator iterator(Class type) { 122 | Function converter = row -> { 123 | if (type.isAssignableFrom(Row.class)) { 124 | return (T) row; 125 | } else { 126 | try { 127 | return (T) TypeConverter.convertRow(type, row, nameConverter); 128 | } catch (Throwable e) { 129 | throw DAOException.wrap(e); 130 | } 131 | } 132 | }; 133 | 134 | return new Iterator<>() { 135 | private T next = null; 136 | 137 | private void fetchNext() { 138 | if (RowIterator.this.next()) { 139 | this.next = converter.apply(RowIterator.this.getRow()); 140 | } else { 141 | this.next = null; 142 | } 143 | } 144 | 145 | { 146 | fetchNext(); 147 | } 148 | 149 | @Override 150 | public boolean hasNext() { 151 | return this.next != null; 152 | } 153 | 154 | @Override 155 | public T next() { 156 | var result = this.next; 157 | fetchNext(); 158 | return result; 159 | } 160 | }; 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/TypeUtil.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.type.BlobReader; 5 | import com.hyd.dao.database.type.ClobUtil; 6 | import com.hyd.dao.time.UniTime; 7 | 8 | import java.lang.reflect.Field; 9 | import java.math.BigDecimal; 10 | import java.sql.Blob; 11 | import java.sql.Clob; 12 | import java.sql.Timestamp; 13 | import java.sql.Types; 14 | import java.text.ParseException; 15 | 
import java.text.SimpleDateFormat; 16 | import java.time.temporal.Temporal; 17 | import java.util.Date; 18 | 19 | /** 20 | * 处理数据库中的值类型的类 21 | */ 22 | public class TypeUtil { 23 | 24 | public static final String[] DATE_PATTERNS = { 25 | "yyyy-MM-dd HH:mm:ss", 26 | "yyyy-MM-dd HH:mm:ss.SSS", 27 | "yyyy-MM-dd", 28 | }; 29 | 30 | /** 31 | * 根据字段数据类型将数据库中的值转化为 Java 类型,用于对简单查询结果的转换 32 | * 转换结果将放入 Row 对象,以帮助进行进一步转换。 33 | *

34 | * 数字类型 -> BigDecimal 35 | * CLOB -> String 36 | * BLOB -> byte[] 37 | * 其他类型保持原样 38 | * 39 | * @param columnType 值的 SQL 类型 40 | * @param value 值 41 | * 42 | * @return 转化后的类型 43 | * 44 | * @throws DAOException 如果读取 LOB 失败 45 | */ 46 | public static Object convertDatabaseValue(int columnType, Object value) { 47 | try { 48 | if (value == null) { 49 | return null; 50 | } else if (isNumericType(columnType)) { 51 | return (value instanceof BigDecimal ? value : new BigDecimal(value.toString())); 52 | } else if (isDateType(columnType)) { 53 | return toDate(value); 54 | } else if (value instanceof Clob) { 55 | return ClobUtil.read((Clob) value); 56 | } else if (value instanceof Blob) { 57 | return BlobReader.readBytes((Blob) value); 58 | } 59 | return value; 60 | } catch (Exception e) { 61 | throw DAOException.wrap(e); 62 | } 63 | } 64 | 65 | private static boolean isNumericType(int columnType) { 66 | return columnType == Types.NUMERIC || columnType == Types.INTEGER 67 | || columnType == Types.BIGINT || columnType == Types.REAL 68 | || columnType == Types.DECIMAL || columnType == Types.FLOAT 69 | || columnType == Types.DOUBLE; 70 | } 71 | 72 | public static boolean isDateType(int columnType) { 73 | return columnType == Types.DATE || columnType == Types.TIME || columnType == Types.TIMESTAMP; 74 | } 75 | 76 | private static Date toDate(Object value) { 77 | 78 | if (value instanceof Date) { 79 | return (Date) value; 80 | } 81 | 82 | var type = value.getClass(); 83 | 84 | if (type == String.class) { 85 | return toDateFromString(value.toString()); 86 | } else if (value instanceof Temporal) { 87 | return UniTime.fromTemporal((Temporal) value).toDate(); 88 | } 89 | 90 | try { 91 | return (Date) type.getDeclaredMethod("dateValue").invoke(value); 92 | } catch (NoSuchMethodException e) { 93 | throw new IllegalStateException("Value of type " + type + " cannot be cast to Date"); 94 | } catch (Exception e) { 95 | throw new IllegalStateException(e); 96 | } 97 | } 98 | 99 | private static Date toDateFromString(String s) { 100 | for (var pattern : DATE_PATTERNS) { 101 | try { 102 | return new SimpleDateFormat(pattern).parse(s); 103 | } catch (ParseException e) { 104 | // ignore 105 | } 106 | } 107 | 108 | throw new IllegalStateException("Unable to parse date string '" + s + "'"); 109 | } 110 | 111 | /** 112 | * 对 SQL 的参数进行转换 113 | * 114 | * @param obj 参数值 115 | * 116 | * @return 修复后的参数 117 | */ 118 | public static Object convertParamValue(Object obj) { 119 | if (obj == null) { 120 | return ""; 121 | } else if (obj.getClass().isEnum()) { 122 | return ((Enum) obj).name(); 123 | } else if (obj.getClass().equals(Date.class)) { 124 | return new Timestamp(((Date) obj).getTime()); // 将 Date 转化为 TimeStamp,以避免时间丢失 125 | } else { 126 | return obj; 127 | } 128 | } 129 | 130 | public static Field getFieldIgnoreCase(Class type, String fieldName) { 131 | if (type == Object.class) { 132 | return null; 133 | } 134 | 135 | var fields = type.getDeclaredFields(); 136 | for (var field : fields) { 137 | if (field.getName().equalsIgnoreCase(fieldName)) { 138 | return field; 139 | } 140 | } 141 | 142 | return getFieldIgnoreCase(type.getSuperclass(), fieldName); 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/InsertBuilder.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DAOException; 5 | 
import com.hyd.dao.SQL; 6 | import com.hyd.dao.command.BatchCommand; 7 | import com.hyd.dao.command.Command; 8 | import com.hyd.dao.database.ColumnInfo; 9 | import com.hyd.dao.database.ConnectionContext; 10 | import com.hyd.dao.database.FQN; 11 | import com.hyd.dao.database.type.NameConverter; 12 | 13 | import java.util.Collection; 14 | import java.util.List; 15 | 16 | import static com.hyd.dao.command.builder.helper.CommandBuilderHelper.*; 17 | 18 | /** 19 | * 创建 insert 语句 20 | */ 21 | public final class InsertBuilder extends CommandBuilder { 22 | 23 | public InsertBuilder(ConnectionContext context) { 24 | super(context); 25 | } 26 | 27 | /** 28 | * 构造一个批处理命令。注意:批处理命令 SQL 语句中有哪些参数,是根据第一个要插入的记录生成的。 29 | * 这时候不能因为记录的某个属性值为 null 就不将该属性加入 SQL,因为后面的其他记录的属性值可能不是 null。 30 | * 31 | * @param tableName 表名 32 | * @param objects 要插入的记录对象 33 | * 34 | * @return 批处理插入命令 35 | */ 36 | public BatchCommand buildBatch(String tableName, Collection objects) { 37 | 38 | if (objects == null || objects.isEmpty()) { 39 | return BatchCommand.EMPTY; 40 | } 41 | 42 | final NameConverter nameConverter = context.getNameConverter(); 43 | final FQN fqn = new FQN(context, tableName); 44 | final Object sample = objects.iterator().next(); 45 | final List infos = getBatchColumnInfo(context, tableName, sample); 46 | final SQL.Insert insert = new SQL.Insert(fqn.getQuotedName()); 47 | 48 | for (ColumnInfo info : infos) { 49 | boolean isUsingSysdate = info.getDataType() == DAO.SYSDATE_TYPE; 50 | String columnName; 51 | 52 | if (isUsingSysdate) { 53 | columnName = context.getDialect().currentTimeExpression(); 54 | } else { 55 | columnName = context.getDialect().quote(info.getColumnName()); 56 | } 57 | 58 | insert.Values(columnName, new Object()); 59 | } 60 | 61 | // 生成命令 62 | BatchCommand bc = new BatchCommand(insert.toCommand().getStatement()); 63 | bc.setColumnInfos(infos); 64 | 65 | for (Object object : objects) { 66 | bc.addParams(generateParams(infos, object, nameConverter)); 67 | } 68 | return bc; 69 | } 70 | 71 | // 获取要批量插入的表字段信息 72 | private static List getBatchColumnInfo( 73 | ConnectionContext context, String tableName, Object sample 74 | ) { 75 | 76 | NameConverter nameConverter = context.getNameConverter(); 77 | FQN fqn = new FQN(context, tableName); 78 | List originColInfos = getColumnInfos(fqn, context); 79 | List infos = filterColumnsByType(originColInfos, sample.getClass(), nameConverter); 80 | 81 | List list = generateParams(infos, sample, nameConverter); 82 | for (int i = 0, listSize = list.size(); i < listSize; i++) { 83 | Object propertyValue = list.get(i); 84 | if (propertyValue == DAO.SYSDATE) { 85 | infos.get(i).setDataType(DAO.SYSDATE_TYPE); 86 | } 87 | } 88 | 89 | return infos; 90 | } 91 | 92 | /** 93 | * 构造一条插入命令 94 | * 95 | * @param tableName 表名 96 | * @param object 要插入的对象 97 | * 98 | * @return 插入命令 99 | * 100 | * @throws DAOException 如果获取数据库信息失败 101 | */ 102 | public Command build(String tableName, Object object) throws DAOException { 103 | FQN fqn = new FQN(context, tableName); 104 | NameConverter nameConverter = context.getNameConverter(); 105 | List infos = filterColumnsByType(getColumnInfos(fqn, context), object.getClass(), nameConverter); 106 | List params = generateParams(infos, object, nameConverter); 107 | return buildCommand(tableName, infos, params); 108 | } 109 | 110 | /** 111 | * 构造一条插入命令 112 | * 113 | * @param tableName 表名 114 | * @param infos 表的字段信息 115 | * @param params 参数值 116 | * 117 | * @return 插入命令 118 | */ 119 | private Command buildCommand(String tableName, List infos, List params) 
{ 120 | final FQN fqn = new FQN(context, tableName); 121 | final SQL.Insert insert = new SQL.Insert(fqn.getQuotedName()); 122 | 123 | for (int i = 0; i < infos.size(); i++) { 124 | Object value = params.get(i); 125 | if (value == null) { 126 | continue; 127 | } 128 | 129 | String columnName = context.getDialect().quote(infos.get(i).getColumnName()); 130 | insert.Values(columnName, value); 131 | } 132 | 133 | return insert.toCommand(); 134 | } 135 | 136 | } 137 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/ScriptExecutor.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.DAOException; 5 | import com.hyd.dao.log.Logger; 6 | 7 | import java.io.File; 8 | import java.io.FileInputStream; 9 | import java.io.FileNotFoundException; 10 | import java.io.InputStream; 11 | import java.nio.charset.Charset; 12 | import java.nio.charset.StandardCharsets; 13 | import java.util.Scanner; 14 | import java.util.concurrent.atomic.AtomicInteger; 15 | 16 | /** 17 | * 用来执行 SQL 文件的类。 18 | *

19 | * 要求: 20 | * 1、只支持创建表、视图、索引等单条语句,不支持创建存储过程; 21 | * 2、SQL 语句可以多行,但必须以分号结尾; 22 | * 3、注释占整行,必须以 // 或 -- 开头,不支持 SQL 行尾加上注释。 23 | * 24 | * @author yidin 25 | */ 26 | public class ScriptExecutor { 27 | 28 | private static final Logger LOG = Logger.getLogger(ScriptExecutor.class); 29 | 30 | private static final String CLASSPATH = "classpath:"; 31 | 32 | public static void execute(File file, DAO dao, Charset charset) { 33 | if (!file.exists() || !file.isFile()) { 34 | throw new DAOException("Invalid file '" + file.getAbsolutePath() + "'"); 35 | } else { 36 | try { 37 | InputStream inputStream = new FileInputStream(file); 38 | execute(inputStream, dao, charset); 39 | } catch (FileNotFoundException e) { 40 | throw new DAOException(e); 41 | } 42 | } 43 | } 44 | 45 | public static void execute(String resourcePath, DAO dao) { 46 | execute(resourcePath, dao, StandardCharsets.UTF_8); 47 | } 48 | 49 | public static void execute(String resourcePath, DAO dao, String charset) { 50 | execute(resourcePath, dao, Charset.forName(charset)); 51 | } 52 | 53 | public static void execute(String path, DAO dao, Charset charset) { 54 | 55 | LOG.info(() -> "Executing script '" + path + "'..."); 56 | InputStream inputStream; 57 | 58 | if (path.startsWith(CLASSPATH)) { 59 | inputStream = ScriptExecutor.class 60 | .getResourceAsStream(path.substring(CLASSPATH.length())); 61 | } else { 62 | try { 63 | inputStream = new FileInputStream(path); 64 | } catch (FileNotFoundException e) { 65 | throw new DAOException(e); 66 | } 67 | } 68 | 69 | execute(inputStream, dao, charset); 70 | } 71 | 72 | public static void execute(InputStream is, DAO dao, Charset charset) { 73 | 74 | if (is == null) { 75 | throw new DAOException("Invalid input stream"); 76 | } 77 | 78 | AtomicInteger counter = new AtomicInteger(); 79 | 80 | try { 81 | executeStatements(is, dao, charset, counter); 82 | LOG.info(() -> counter.get() + " statements executed successfully."); 83 | } catch (RuntimeException e) { 84 | LOG.error(() -> counter.get() + " statements executed before exception."); 85 | throw e; 86 | } 87 | } 88 | 89 | private static void executeStatements( 90 | InputStream is, DAO dao, Charset charset, AtomicInteger counter) { 91 | 92 | String line; 93 | StringBuilder statement = new StringBuilder(); 94 | try (Scanner scanner = new Scanner(is, charset.name())) { 95 | while (scanner.hasNextLine()) { 96 | line = scanner.nextLine().trim(); 97 | 98 | // 整行为注释内容,略过 99 | if (line.startsWith("//") || line.startsWith("--")) { 100 | continue; 101 | } 102 | 103 | // 对于可能出现的行尾注释,如果 "--" 是出现在单引号内部,则不视为注释 104 | line = fixInlineComments(line).trim(); 105 | 106 | statement.append(" ").append(line); 107 | 108 | if (line.endsWith(";")) { 109 | executeStatement(dao, statement.toString()); 110 | counter.incrementAndGet(); 111 | statement = new StringBuilder(); 112 | } 113 | } 114 | } 115 | 116 | String finalStatement = statement.toString(); 117 | if (finalStatement.trim().length() > 0) { 118 | executeStatement(dao, finalStatement); 119 | counter.incrementAndGet(); 120 | } 121 | } 122 | 123 | private static String fixInlineComments(String line) { 124 | int index = line.indexOf("--"); 125 | while (index != -1) { 126 | if (isComment(line, index)) { 127 | return line.substring(0, index); 128 | } 129 | index = line.indexOf("--", index + 2); 130 | } 131 | return line; 132 | } 133 | 134 | private static boolean isComment(String line, int index) { 135 | int count = Str.countMatches(line.substring(0, index), "'"); 136 | return count % 2 == 0; 137 | } 138 | 139 | private static 
void executeStatement(DAO dao, String statement) { 140 | dao.execute(statement); 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/mate/util/ResultSetUtil.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.mate.util; 2 | 3 | import com.hyd.dao.Page; 4 | import com.hyd.dao.Row; 5 | import com.hyd.dao.database.type.NameConverter; 6 | import com.hyd.dao.database.type.TypeConverter; 7 | 8 | import java.io.IOException; 9 | import java.sql.ResultSet; 10 | import java.sql.ResultSetMetaData; 11 | import java.sql.SQLException; 12 | import java.sql.SQLFeatureNotSupportedException; 13 | import java.util.ArrayList; 14 | import java.util.List; 15 | import java.util.Map; 16 | import java.util.stream.Collectors; 17 | 18 | /** 19 | * 用于处理 ResultSet 的辅助类 20 | */ 21 | @SuppressWarnings({"unchecked"}) 22 | public final class ResultSetUtil { 23 | 24 | public static final String PAGINATION_WRAPPER_COLUMN_NAME = "pagination_wrapper_column_name"; 25 | 26 | private ResultSetUtil() { 27 | 28 | } 29 | 30 | /** 31 | * 查询 ResultSet 中有哪些字段 32 | * 33 | * @param rs ResultSet 对象 34 | * 35 | * @return rs 包含的字段名 36 | * 37 | * @throws SQLException 如果获取失败 38 | */ 39 | public static List getColumnNames(ResultSet rs) throws SQLException { 40 | int column_count = rs.getMetaData().getColumnCount(); 41 | List result = new ArrayList<>(); 42 | for (int i = 0; i < column_count; i++) { 43 | result.add(rs.getMetaData().getColumnName(i + 1)); 44 | } 45 | return result; 46 | } 47 | 48 | /** 49 | * 将查询结果包装为 HashMap 50 | * 51 | * @param rs 已经移至当前行的查询结果 52 | * 53 | * @return 包装好的查询结果 54 | * 55 | * @throws java.sql.SQLException 如果查询失败 56 | * @throws java.io.IOException 如果获取值失败 57 | */ 58 | public static Row readRow(ResultSet rs) throws SQLException, IOException { 59 | Row row = new Row(); 60 | ResultSetMetaData meta = rs.getMetaData(); 61 | for (int i = 0; i < meta.getColumnCount(); i++) { 62 | String colName = meta.getColumnLabel(i + 1); 63 | int columnType = meta.getColumnType(i + 1); 64 | Object o = rs.getObject(i + 1); 65 | Object value = TypeUtil.convertDatabaseValue(columnType, o); 66 | row.put(colName, value); 67 | } 68 | return row; 69 | } 70 | 71 | /** 72 | * 读取查询结果并包装 73 | * 74 | * @param rs 查询结果 75 | * @param clazz 包装类。如果为空则表示用 Map 包装。 76 | * @param startPosition 开始位置(0 表示第一条记录) 77 | * @param endPosition 结束位置(不包含) 78 | * 79 | * @return 包装好的查询结果。如果 startPosition < 0 或 endPosition < 0 则表示返回所有的查询结果 80 | * 81 | * @throws java.sql.SQLException 如果查询失败 82 | */ 83 | public static List readResultSet( 84 | ResultSet rs, Class clazz, NameConverter nameConverter, 85 | int startPosition, int endPosition) throws Exception { 86 | 87 | ArrayList result = new ArrayList<>(); 88 | 89 | // startPosition 是指向要读取的第一条记录之前的位置 90 | if (startPosition > 0) { 91 | rs.absolute(startPosition); 92 | } else if (startPosition == 0) { 93 | resetRsPosition(rs); 94 | } 95 | 96 | int counter = startPosition; 97 | while (rs.next() && (startPosition < 0 || endPosition < 0 || counter < endPosition)) { 98 | Map row = readRow(rs); 99 | 100 | // 如果是包含分页字段,则去掉 101 | row.remove(PAGINATION_WRAPPER_COLUMN_NAME); 102 | 103 | result.add(row); 104 | counter++; 105 | } 106 | 107 | return clazz == null ? 
result : TypeConverter.convert(clazz, result, nameConverter); 108 | } 109 | 110 | // 将 ResultSet 扫描位置重置为第0位 111 | private static void resetRsPosition(ResultSet rs) throws SQLException { 112 | try { 113 | if (rs.getType() == ResultSet.TYPE_FORWARD_ONLY) { 114 | return; 115 | } 116 | rs.beforeFirst(); 117 | } catch (SQLFeatureNotSupportedException e) { 118 | // just ignore it 119 | } 120 | } 121 | 122 | /** 123 | * 获取分页查询结果 124 | * 125 | * @param rs 查询结果 126 | * @param clazz 包装类 127 | * @param pageSize 页大小。如果小于 0 则表示取所有记录 128 | * @param pageIndex 页号 129 | * 130 | * @return 查询结果 131 | * 132 | * @throws java.sql.SQLException 如果查询失败 133 | */ 134 | public static Page readPageResultSet( 135 | ResultSet rs, Class clazz, NameConverter nameConverter, 136 | int pageSize, int pageIndex) throws Exception { 137 | 138 | Page result = new Page(); 139 | 140 | int startPos = pageSize < 0 ? -1 : pageIndex * pageSize; 141 | int endPos = startPos + pageSize; 142 | 143 | result.addAll(readResultSet(rs, clazz, nameConverter, startPos, endPos)); 144 | 145 | return result; 146 | } 147 | 148 | public static List readResultSet(ResultSet rs) throws Exception { 149 | List list = readResultSet(rs, null, NameConverter.DEFAULT, -1, -1); 150 | return list.stream().map(o -> (Row)o).collect(Collectors.toList()); 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/executor/Executor.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.executor; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.Page; 5 | import com.hyd.dao.Row; 6 | import com.hyd.dao.command.BatchCommand; 7 | import com.hyd.dao.command.Command; 8 | import com.hyd.dao.command.IteratorBatchCommand; 9 | import com.hyd.dao.database.ConnectionContext; 10 | import com.hyd.dao.database.RowIterator; 11 | import com.hyd.dao.database.dialects.Dialect; 12 | import com.hyd.dao.database.type.NameConverter; 13 | import com.hyd.dao.snapshot.ExecutorInfo; 14 | 15 | import java.sql.Connection; 16 | import java.sql.SQLException; 17 | import java.util.List; 18 | import java.util.function.Consumer; 19 | 20 | /** 21 | * 执行数据库操作的接口 22 | *
<p>
23 | * Executor 持有 Connection 对象。如果没有在事务中,Executor 对象是一次性的, 24 | * 执行完第一个 SQL 命令后就会关闭连接(或返还给连接池),且不能再被使用; 25 | * 而如果在事务中,Executor 对象会被 TransactionManager 缓存起来,直到事务结束。 26 | * 27 | * @author yiding_he 28 | */ 29 | @SuppressWarnings("rawtypes") 30 | public abstract class Executor { 31 | 32 | protected ExecutorInfo info; // 当前状态 33 | 34 | protected ConnectionContext context; // 连接上下文 35 | 36 | protected ExecutionContext executionContext = new ExecutionContext(); // 本次执行上下文 37 | 38 | public Executor(ConnectionContext context) { 39 | this.info = new ExecutorInfo(context.getDataSourceName()); 40 | this.context = context; 41 | } 42 | 43 | public ConnectionContext getContext() { 44 | return context; 45 | } 46 | 47 | protected Dialect getDialect() { 48 | return context.getDialect(); 49 | } 50 | 51 | public void setAutoCommit(boolean autoCommit) { 52 | try { 53 | this.context.getConnection().setAutoCommit(autoCommit); 54 | } catch (SQLException e) { 55 | throw DAOException.wrap(e); 56 | } 57 | } 58 | 59 | /** 60 | * 判断连接是否已经关闭 61 | * 62 | * @return 如果链接已经关闭,则返回 true 63 | */ 64 | public abstract boolean isClosed(); 65 | 66 | /** 67 | * 执行 sql 语句 68 | * 69 | * @param sql 要执行的语句 70 | * @param params 参数 71 | * 72 | * @return 受影响的行数 73 | */ 74 | public abstract int execute(String sql, List params); 75 | 76 | /** 77 | * 执行 SQL 命令 78 | * 79 | * @param command SQL 命令 80 | * 81 | * @return 受影响的行数 82 | */ 83 | public int execute(Command command) { 84 | return execute(command.getStatement(), command.getParams()); 85 | } 86 | 87 | /** 88 | * 执行批量 SQL 语句 89 | * 90 | * @param command 批量 SQL 语句 91 | * 92 | * @return 受影响的行数 93 | */ 94 | public abstract int execute(BatchCommand command); 95 | 96 | /** 97 | * 流式执行批量 SQL 语句 98 | * 99 | * @param command 批量 SQL 语句 100 | * 101 | * @return 受影响的行数 102 | */ 103 | public abstract int execute(IteratorBatchCommand command); 104 | 105 | /** 106 | * 查询分页 107 | * 108 | * @param clazz 包装类 109 | * @param sql 查询语句 110 | * @param params 参数 111 | * @param pageSize 分页大小 112 | * @param pageIndex 页号(从0开始) 113 | * 114 | * @return 查询的当前页 115 | */ 116 | public abstract Page queryPage(Class clazz, String sql, List params, int pageSize, int pageIndex); 117 | 118 | /** 119 | * @param clazz 包装类 120 | * @param sql 查询语句 121 | * @param params 参数 122 | * @param startPosition 获取查询结果的开始位置(包含) 123 | * @param endPosition 获取查询结果的结束位置(不包含) 124 | * 125 | * @return 查询结果。如果 startPosition < 0 或 endPosition < 0 则表示返回所有的查询结果 126 | */ 127 | public abstract List query(Class clazz, String sql, List params, int startPosition, int endPosition); 128 | 129 | /** 130 | * 调用存储过程并返回结果 131 | * 132 | * @param name 存储过程名称 133 | * @param params 参数 134 | * 135 | * @return 调用结果 136 | */ 137 | public abstract List call(String name, Object[] params); 138 | 139 | /** 140 | * 调用 Oracle 存储过程 141 | * 参见 {@link com.hyd.dao.DAO#callFunction} 142 | */ 143 | public abstract List callFunction(String name, Object[] params); 144 | 145 | /** 146 | * 执行查询,返回迭代器 147 | * 148 | * @param sql 要执行的查询语句 149 | * @param params 查询参数 150 | * @param preProcessor 对 Row 对象的预处理 151 | * 152 | * @return 用于获得查询结果的迭代器 153 | */ 154 | public abstract RowIterator queryIterator(String sql, List params, Consumer preProcessor); 155 | 156 | ////////////////////////////////////////////////////////////// 157 | 158 | public ExecutorInfo getInfo() { 159 | return info; 160 | } 161 | 162 | protected NameConverter getNameConverter() { 163 | return this.context.getNameConverter(); 164 | } 165 | 166 | protected Connection getConnection() { 167 | return 
this.context.getConnection(); 168 | } 169 | 170 | /** 171 | * 关闭 executor 对象,如果当前不处于事务当中。 172 | */ 173 | public void finish() { 174 | this.context.closeIfAutoCommit(); 175 | if (info != null) { 176 | info.setClosed(true); 177 | } 178 | } 179 | 180 | } 181 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/ConnectionContext.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.database.dialects.Dialect; 5 | import com.hyd.dao.database.dialects.Dialects; 6 | import com.hyd.dao.database.type.NameConverter; 7 | import com.hyd.dao.log.Logger; 8 | import lombok.Builder; 9 | import lombok.Getter; 10 | 11 | import javax.sql.PooledConnection; 12 | import java.sql.Connection; 13 | import java.sql.SQLException; 14 | 15 | /** 16 | * 17 | */ 18 | @Builder 19 | @Getter 20 | public class ConnectionContext { 21 | 22 | private static final Logger LOG = Logger.getLogger(ConnectionContext.class); 23 | 24 | private final String dataSourceName; 25 | 26 | private final ConnectionHolder connectionHolder; 27 | 28 | private final NameConverter nameConverter; 29 | 30 | private final String databaseProductName; 31 | 32 | private final String databaseVersion; 33 | 34 | private final Dialect dialect; 35 | 36 | private boolean disposed; 37 | 38 | public static ConnectionContext create( 39 | String dataSourceName, ConnectionHolder connectionHolder, NameConverter nameConverter) { 40 | try { 41 | Connection connection = connectionHolder.getConnection(); 42 | String databaseProductName = connection.getMetaData().getDatabaseProductName(); 43 | String databaseProductVersion = connection.getMetaData().getDatabaseProductVersion(); 44 | return ConnectionContext.builder() 45 | .databaseProductName(databaseProductName) 46 | .databaseVersion(databaseProductVersion) 47 | .dataSourceName(dataSourceName) 48 | .connectionHolder(connectionHolder) 49 | .nameConverter(nameConverter) 50 | .dialect(Dialects.getDialect(connection)) 51 | .build(); 52 | } catch (SQLException e) { 53 | throw new DAOException(e); 54 | } 55 | } 56 | 57 | public static ConnectionContext create(Connection connection) { 58 | return create("", ConnectionHolder.fromStatic(connection), NameConverter.DEFAULT); 59 | } 60 | 61 | private void validateDisposeStatus() { 62 | if (this.disposed) { 63 | throw new IllegalStateException("ConnectionContext is disposed."); 64 | } 65 | } 66 | 67 | /** 68 | * 从 ConnectionHolder 获得连接对象,然后尝试得到原始的连接。 69 | */ 70 | public Connection getDriverConnection() { 71 | try { 72 | Connection connection = this.connectionHolder.getConnection(); 73 | return connection instanceof PooledConnection ? 
((PooledConnection) connection).getConnection() : connection; 74 | } catch (SQLException e) { 75 | throw new DAOException(e); 76 | } 77 | } 78 | 79 | /** 80 | * 从 ConnectionHolder 直接获得连接对象。 81 | */ 82 | public Connection getConnection() { 83 | return this.connectionHolder.getConnection(); 84 | } 85 | 86 | /** 87 | * 提交事务并关闭连接,仅当在事务结束时调用 88 | */ 89 | public void commit() { 90 | try { 91 | validateDisposeStatus(); 92 | Connection connection = connectionHolder.getConnection(); 93 | if (!connection.getAutoCommit() && !connection.isClosed()) { 94 | connection.commit(); 95 | connection.close(); 96 | } 97 | disposed = true; 98 | LOG.debug("Connection committed."); 99 | } catch (SQLException e) { 100 | LOG.error("Error committing database connection, dataSource=" + this.dataSourceName, e); 101 | } 102 | } 103 | 104 | /** 105 | * 回滚事务并关闭连接,仅当在事务结束时调用 106 | */ 107 | public void rollback() { 108 | try { 109 | validateDisposeStatus(); 110 | Connection connection = connectionHolder.getConnection(); 111 | if (!connection.getAutoCommit() && !connection.isClosed()) { 112 | connection.rollback(); 113 | connection.close(); 114 | } 115 | disposed = true; 116 | LOG.debug("Connection rolled back."); 117 | } catch (SQLException e) { 118 | LOG.error("Error rolling back database connection, dataSource=" + this.dataSourceName, e); 119 | } 120 | } 121 | 122 | /** 123 | * 关闭连接,仅当不在事务中时调用 124 | */ 125 | public void closeIfAutoCommit() { 126 | try { 127 | Connection connection = connectionHolder.getConnection(); 128 | if (connection.getAutoCommit()) { 129 | close(); 130 | } 131 | } catch (SQLException e) { 132 | LOG.error("Error closing database connection, dataSource=" + this.dataSourceName, e); 133 | } 134 | } 135 | 136 | /** 137 | * 关闭连接 138 | */ 139 | private void close() { 140 | try { 141 | validateDisposeStatus(); 142 | Connection connection = connectionHolder.getConnection(); 143 | if (!connection.isClosed()) { 144 | connection.close(); 145 | } 146 | disposed = true; 147 | LOG.debug("Connection closed."); 148 | } catch (SQLException e) { 149 | LOG.error("Error closing database connection, dataSource=" + this.dataSourceName, e); 150 | } 151 | } 152 | } 153 | -------------------------------------------------------------------------------- /hydrogen-dao/src/test/java/com/hyd/daotests/AbstractDaoTest.java: -------------------------------------------------------------------------------- 1 | package com.hyd.daotests; 2 | 3 | import com.hyd.dao.*; 4 | import com.hyd.dao.junit.HydrogenDAORule; 5 | import com.hyd.dao.src.models.Blog; 6 | import com.hyd.dao.src.models.BlogRecord; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | 11 | import javax.sql.DataSource; 12 | import java.util.List; 13 | import java.util.concurrent.atomic.AtomicInteger; 14 | 15 | import static com.hyd.dao.DataSources.DEFAULT_DATA_SOURCE_NAME; 16 | import static org.junit.jupiter.api.Assertions.*; 17 | 18 | public abstract class AbstractDaoTest { 19 | 20 | protected DAO dao; 21 | 22 | protected HydrogenDAORule rule; 23 | 24 | protected DAO getDao() { 25 | return dao; 26 | } 27 | 28 | protected abstract DataSource createDataSource(); 29 | 30 | @BeforeEach 31 | public void init() { 32 | if (!DataSources.getInstance().contains(DEFAULT_DATA_SOURCE_NAME)) { 33 | DataSources.getInstance().setDataSource(DEFAULT_DATA_SOURCE_NAME, createDataSource()); 34 | } 35 | this.dao = new DAO(DEFAULT_DATA_SOURCE_NAME); 36 | this.rule = new HydrogenDAORule(this::getDao); 37 | 
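/* HydrogenDAORule hooks test-data setup into each test: before() is expected to create and populate the blog tables that the assertions below rely on (three rows), and after() tears them down again in fin(). */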
this.rule.before(); 38 | } 39 | 40 | @AfterEach 41 | public void fin() { 42 | this.rule.after(); 43 | } 44 | 45 | @Test 46 | public void query() throws Exception { 47 | List rows = getDao().query("select * from blog"); 48 | assertFalse(rows.isEmpty()); 49 | rows.forEach(System.out::println); 50 | } 51 | 52 | @Test 53 | public void queryObject() throws Exception { 54 | List blogs = getDao().query(Blog.class, "select * from blog"); 55 | assertFalse(blogs.isEmpty()); 56 | 57 | assertNotNull(blogs.get(0).getId()); 58 | assertNotNull(blogs.get(0).getContent()); 59 | assertNotNull(blogs.get(0).getCreateTime()); 60 | assertNotNull(blogs.get(0).getTitle()); 61 | } 62 | 63 | @Test 64 | public void testQueryPage() throws Exception { 65 | Page page = getDao().queryPage(Blog.class, "select * from blog", 2, 0); 66 | assertNotNull(page); 67 | assertFalse(page.isEmpty()); 68 | assertNotNull(page.get(0)); 69 | assertEquals(2, page.getTotalPage()); 70 | assertEquals(3, page.getTotal()); 71 | } 72 | 73 | @Test 74 | public void testQueryIterator() throws Exception { 75 | AtomicInteger counter = new AtomicInteger(); 76 | try (var rows = getDao().queryIterator("select * from blog")) { 77 | rows.forEach(row -> { 78 | counter.incrementAndGet(); 79 | System.out.println(row); 80 | }); 81 | } 82 | assertEquals(3, counter.get()); 83 | } 84 | 85 | @Test 86 | public void testQueryIteratorBean() { 87 | AtomicInteger counter = new AtomicInteger(); 88 | try (var rows = getDao().queryIterator("select * from blog")) { 89 | rows.forEach(Blog.class, blog -> { 90 | counter.incrementAndGet(); 91 | assertNotNull(blog.getId()); 92 | }); 93 | } 94 | assertEquals(3, counter.get()); 95 | } 96 | 97 | @Test 98 | public void testInsertNullContent() throws Exception { 99 | getDao().execute("insert into blog(id,title,content)values(?,?,?)", 666, "no-content", null); 100 | Blog blog = getDao().queryFirst(Blog.class, "select * from blog where id=?", 666); 101 | assertNotNull(blog); 102 | assertNull(blog.getContent()); 103 | } 104 | 105 | @Test 106 | public void queryMap() throws Exception { 107 | List rows = getDao().query("select * from blog"); 108 | assertFalse(rows.isEmpty()); 109 | assertNotNull(rows.get(0).get("id")); 110 | } 111 | 112 | @Test 113 | public void testDelete() { 114 | assertNotNull(getDao().queryFirst("select * from blog where id=?", 1)); 115 | getDao().execute("delete from blog where id=?", 1); 116 | assertNull(getDao().queryFirst("select * from blog where id=?", 1)); 117 | } 118 | 119 | 120 | @Test 121 | public void testRunTransactionCommit() throws Exception { 122 | DAO dao = getDao(); 123 | DAO.runTransaction(() -> { 124 | assertNotNull(dao.queryFirst("select * from blog where id=?", 1)); 125 | dao.execute("delete from blog where id=?", 1); 126 | }); 127 | assertNull(dao.queryFirst("select * from blog where id=?", 1)); 128 | } 129 | 130 | @Test 131 | public void testRunTransactionRollback() throws Exception { 132 | DAO dao = getDao(); 133 | try { 134 | DAO.runTransaction(() -> { 135 | assertNotNull(dao.queryFirst("select * from blog where id=?", 1)); 136 | dao.execute("delete from blog where id=?", 1); 137 | throw new RuntimeException("FAKE ERROR"); 138 | }); 139 | } catch (TransactionException e) { 140 | e.printStackTrace(); 141 | } finally { 142 | assertNotNull(dao.queryFirst("select * from blog where id=?", 1)); 143 | } 144 | } 145 | 146 | @Test 147 | public void testQueryRecord() { 148 | var dao = getDao(); 149 | var record = dao.queryFirst(BlogRecord.class, "select * from blog where id=1"); 150 | 
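/* queryFirst can also map a row straight onto a Java record such as BlogRecord; the record components are presumably filled by column name through the configured NameConverter. */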
assertNotNull(record); 151 | assertEquals(1L, record.id()); 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/database/dialects/Dialect.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.database.dialects; 2 | 3 | import com.hyd.dao.database.ColumnInfo; 4 | import com.hyd.dao.database.FQN; 5 | import com.hyd.dao.database.executor.ExecuteMode; 6 | import com.hyd.dao.mate.util.Str; 7 | import com.hyd.dao.mate.util.TypeUtil; 8 | 9 | import java.sql.*; 10 | import java.util.function.Predicate; 11 | import java.util.stream.Collectors; 12 | import java.util.stream.Stream; 13 | 14 | public interface Dialect { 15 | 16 | /** 17 | * 当查询元数据时,要以什么方式转换对象名 18 | * 某些数据库例如 H2,创建表时,表名可以用小写,但创建出来的表是大写的 19 | */ 20 | enum MetaNameConvention { 21 | Uppercase, Lowercase, Unchanged 22 | } 23 | 24 | /** 25 | * JDBC Driver 当中如何命名表元数据字段。 26 | * 万一有哪个数据库驱动存在不同的命名,可以针对该驱动 27 | * 覆写 {@link Dialect#getColumnMetaFields()} 方法。 28 | */ 29 | interface ColumnMetaFields { 30 | 31 | String columnNameField(); 32 | 33 | String columnSizeField(); 34 | 35 | String nullableField(); 36 | 37 | String dataTypeField(); 38 | 39 | String typeNameField(); 40 | 41 | String remarksField(); 42 | } 43 | 44 | ////////////////////////////////////////////////////////////// 45 | 46 | Predicate getMatcher(); 47 | 48 | String wrapRangeQuery(String sql, int startPos, int endPos); 49 | 50 | default String wrapCountQuery(String sql) { 51 | return "select count(*) cnt from (" + sql + ")"; 52 | } 53 | 54 | default ColumnMetaFields getColumnMetaFields() { 55 | return new ColumnMetaFields() { 56 | @Override 57 | public String columnNameField() { 58 | return "COLUMN_NAME"; 59 | } 60 | 61 | @Override 62 | public String columnSizeField() { 63 | return "COLUMN_SIZE"; 64 | } 65 | 66 | @Override 67 | public String nullableField() { 68 | return "NULLABLE"; 69 | } 70 | 71 | @Override 72 | public String dataTypeField() { 73 | return "DATA_TYPE"; 74 | } 75 | 76 | @Override 77 | public String typeNameField() { 78 | return "TYPE_NAME"; 79 | } 80 | 81 | @Override 82 | public String remarksField() { 83 | return "REMARKS"; 84 | } 85 | }; 86 | } 87 | 88 | default String currentTimeExpression() { 89 | return "current_timestamp"; // SQL 92 spec 90 | } 91 | 92 | default String identityQuoter() { 93 | return "\""; 94 | } 95 | 96 | default String quote(String schema, String name) { 97 | return quote((Str.isEmpty(schema) ? 
"" : schema + ".") + name); 98 | } 99 | 100 | default String quote(String objectName) { 101 | return Stream.of(objectName.split("\\.")) 102 | .map(n -> identityQuoter() + n + identityQuoter()) 103 | .collect(Collectors.joining(".")); 104 | } 105 | 106 | default Object parseCallableStatementResult(int sqlType, Object value) { 107 | return TypeUtil.convertDatabaseValue(sqlType, value); 108 | } 109 | 110 | default int resultSetTypeForReading() { 111 | return ResultSet.TYPE_FORWARD_ONLY; 112 | } 113 | 114 | default void setupStatement(Statement statement, ExecuteMode executeMode) throws SQLException { 115 | 116 | } 117 | 118 | default String getJavaType(ColumnInfo columnInfo) { 119 | 120 | int dataType = columnInfo.getDataType(); 121 | int size = columnInfo.getSize(); 122 | 123 | switch (dataType) { 124 | case Types.VARCHAR: 125 | case Types.CHAR: 126 | case Types.LONGVARCHAR: 127 | return "String"; 128 | case Types.BIT: 129 | return "Boolean"; 130 | case Types.NUMERIC: 131 | return "BigDecimal"; 132 | case Types.TINYINT: 133 | return "Integer"; 134 | case Types.SMALLINT: 135 | return "Short"; 136 | case Types.INTEGER: 137 | return size < 10 ? "Integer" : "Long"; 138 | case Types.BIGINT: 139 | return "Long"; 140 | case Types.REAL: 141 | case Types.FLOAT: 142 | return "Float"; 143 | case Types.DOUBLE: 144 | return "Double"; 145 | case Types.VARBINARY: 146 | case Types.BINARY: 147 | return "byte[]"; 148 | case Types.DATE: 149 | case Types.TIME: 150 | case Types.TIMESTAMP: 151 | return "Date"; 152 | default: 153 | return getJavaTypeByDatabase(columnInfo); 154 | } 155 | } 156 | 157 | default String getJavaTypeByDatabase(ColumnInfo columnInfo) { 158 | return "String"; 159 | } 160 | 161 | default MetaNameConvention getMetaNameConvention() { 162 | return MetaNameConvention.Unchanged; 163 | } 164 | 165 | default String fixMetaName(String metaName) { 166 | switch (getMetaNameConvention()) { 167 | case Lowercase: 168 | return metaName.toLowerCase(); 169 | case Uppercase: 170 | return metaName.toUpperCase(); 171 | case Unchanged: 172 | return metaName; 173 | } 174 | throw new RuntimeException("Shouldn't be here"); 175 | } 176 | 177 | default String fixCatalog(String connectionCatalog, FQN fqn) { 178 | return null; 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/command/builder/helper/CommandBuilderHelper.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.command.builder.helper; 2 | 3 | import com.hyd.dao.DAO; 4 | import com.hyd.dao.database.ColumnInfo; 5 | import com.hyd.dao.database.ConnectionContext; 6 | import com.hyd.dao.database.FQN; 7 | import com.hyd.dao.database.type.NameConverter; 8 | import com.hyd.dao.exception.DataConversionException; 9 | import com.hyd.dao.mate.util.BeanUtil; 10 | import com.hyd.dao.mate.util.Cls; 11 | import com.hyd.dao.mate.util.Str; 12 | 13 | import java.lang.reflect.Field; 14 | import java.math.BigDecimal; 15 | import java.sql.Types; 16 | import java.util.ArrayList; 17 | import java.util.List; 18 | import java.util.Locale; 19 | import java.util.Map; 20 | 21 | /** 22 | * 用于构造 SQL 命令的帮助类,隐藏不同数据库之间的区别 23 | */ 24 | public class CommandBuilderHelper { 25 | 26 | /** 27 | * 获得指定库表的字段信息 28 | * 29 | * @param fqn 表名信息 30 | * 31 | * @return 表的字段信息 32 | */ 33 | public static List getColumnInfos(FQN fqn, ConnectionContext context) { 34 | return ColumnInfoHelper.getColumnInfo(fqn, context.getConnection()); 35 | } 36 | 37 | /** 38 | 
* 生成 SQL 语句参数 39 | * 40 | * @param infos 字段信息 41 | * @param object 提供字段的参数值的对象 42 | * 43 | * @return 生成的 SQL 语句参数 44 | */ 45 | public static List generateParams(List infos, Object object, NameConverter nameConverter) { 46 | List params = new ArrayList<>(); 47 | for (ColumnInfo info : infos) { 48 | if (info.getDataType() != DAO.SYSDATE_TYPE) { 49 | params.add(generateParamValue(object, info, nameConverter)); 50 | } 51 | } 52 | return params; 53 | } 54 | 55 | /** 56 | * 根据 bean 类型过滤字段列表,删除类型中没有定义的字段 57 | */ 58 | public static List filterColumnsByType(List original, Class type, NameConverter nameConverter) { 59 | // 如果类型不是 POJO 而是 Map 则无需过滤,原样返回 60 | if (Map.class.isAssignableFrom(type)) { 61 | return new ArrayList<>(original); 62 | } else { 63 | List infoList = new ArrayList<>(); 64 | for (ColumnInfo info : original) { 65 | String field = nameConverter.column2Field(info.getColumnName()); 66 | if (Cls.hasField(type, field)) { 67 | infoList.add(info); 68 | } 69 | } 70 | return infoList; 71 | } 72 | } 73 | 74 | /** 75 | * 根据字段信息,从对象中取得相应的属性值 76 | * 77 | * @param object 对象 78 | * @param info 字段信息 79 | * 80 | * @return 属性值。如果获取失败或需要跳过该字段则返回 null 81 | */ 82 | public static Object generateParamValue(Object object, ColumnInfo info, NameConverter nameConverter) { 83 | String fieldName = nameConverter.column2Field(info.getColumnName()); 84 | 85 | String strValue; 86 | Object value; 87 | 88 | // 如果 object 是一个 Map,则根据字段名取值;否则根据属性名取值。 89 | if (object instanceof Map) { 90 | Map map = (Map) object; 91 | value = map.get(info.getColumnName()); 92 | if (value == null) { 93 | value = map.get(info.getColumnName().toUpperCase(Locale.ENGLISH)); 94 | } 95 | if (value == null) { 96 | value = map.get(info.getColumnName().toLowerCase()); 97 | } 98 | } else { 99 | Field field = getObjectField(object, fieldName); 100 | if (field == null) { 101 | return null; 102 | } 103 | 104 | value = BeanUtil.getValue(object, fieldName); 105 | } 106 | 107 | if (value == null) { 108 | return null; 109 | } 110 | 111 | strValue = Str.valueOf(value); 112 | 113 | // 获取返回值 114 | switch (info.getDataType()) { 115 | case Types.NUMERIC: // 1. 如果是数字类型的字段,则根据 strValue 进行转换; 116 | case Types.DECIMAL: 117 | case Types.BIGINT: 118 | case Types.DOUBLE: 119 | case Types.FLOAT: 120 | case Types.INTEGER: 121 | if (Str.isEmptyString(strValue)) { 122 | return null; 123 | } else { 124 | try { 125 | return new BigDecimal(strValue); 126 | } catch (NumberFormatException e) { 127 | throw new DataConversionException( 128 | "Conversion from value '" + strValue + "' to column " + info + " failed.", e); 129 | } 130 | } 131 | 132 | case Types.DATE: // 2. 如果是日期类型的字段,则直接从 Map 或 Bean 中获取; 133 | case Types.TIME: 134 | case Types.TIMESTAMP: 135 | if (object instanceof Map) { 136 | return value; 137 | } else { 138 | return BeanUtil.getValue(object, fieldName); 139 | } 140 | 141 | case Types.BLOB: // 3. LOB 类型可以传入原值 142 | case Types.CLOB: 143 | case Types.NCLOB: 144 | return value; 145 | default: // 4. 
其他类型则直接使用 string_value。 146 | return strValue; 147 | } 148 | } 149 | 150 | private static Field getObjectField(Object object, String fieldName) { 151 | Field field = null; 152 | Class type = object.getClass(); 153 | 154 | while (field == null && type != null) { 155 | try { 156 | field = type.getDeclaredField(fieldName); 157 | } catch (Exception e) { 158 | type = type.getSuperclass(); 159 | } 160 | } 161 | 162 | return field; 163 | } 164 | 165 | } 166 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/sp/StorageProcedureHelper.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao.sp; 2 | 3 | import com.hyd.dao.DAOException; 4 | import com.hyd.dao.Row; 5 | import com.hyd.dao.mate.util.ResultSetUtil; 6 | import com.hyd.dao.mate.util.Str; 7 | 8 | import java.math.BigDecimal; 9 | import java.sql.CallableStatement; 10 | import java.sql.Connection; 11 | import java.sql.SQLException; 12 | import java.util.Comparator; 13 | import java.util.HashMap; 14 | import java.util.List; 15 | import java.util.Map; 16 | import java.util.stream.Collectors; 17 | import java.util.stream.Stream; 18 | 19 | /** 20 | * 执行存储过程帮助类 21 | */ 22 | public final class StorageProcedureHelper { 23 | 24 | public static final Map SP_PARAM_TYPES = new HashMap<>(); 25 | 26 | static { 27 | SP_PARAM_TYPES.put(1, SpParamType.IN); 28 | SP_PARAM_TYPES.put(2, SpParamType.IN_OUT); 29 | SP_PARAM_TYPES.put(4, SpParamType.OUT); 30 | } 31 | 32 | private StorageProcedureHelper() { 33 | 34 | } 35 | 36 | /** 37 | * 创建一个 CallableStatement 38 | * 39 | * @param name 存储过程名 40 | * @param params 调用参数 41 | * @param connection 数据库连接 42 | * 43 | * @return CallableStatement 对象 44 | * @throws SQLException 如果创建失败 45 | */ 46 | public static CallableStatement createCallableStatement( 47 | String name, SpParam[] params, Connection connection) throws SQLException { 48 | var call_str = generateCallStatement(name, params); 49 | var cs = connection.prepareCall(call_str); 50 | setupParams(params, cs); 51 | return cs; 52 | } 53 | 54 | /** 55 | * 设置存储过程的调用参数,以及注册返回值 56 | * 57 | * @param params 参数 58 | * @param cs 要执行的 CallableStatement 59 | * 60 | * @throws SQLException 如果设置参数失败 61 | */ 62 | private static void setupParams(SpParam[] params, CallableStatement cs) throws SQLException { 63 | for (var i = 0; i < params.length; i++) { 64 | var param = params[i]; 65 | if ((param.getType() == SpParamType.IN || param.getType() == SpParamType.IN_OUT) 66 | && param.getValue() != null) { 67 | cs.setObject(i + 1, param.getValue()); 68 | } 69 | if (param.getType() == SpParamType.OUT || param.getType() == SpParamType.IN_OUT) { 70 | if (Str.isEmptyString(param.getName())) { 71 | cs.registerOutParameter(i + 1, param.getSqlType()); 72 | } else { 73 | cs.registerOutParameter(i + 1, param.getSqlType(), param.getName()); 74 | } 75 | } 76 | } 77 | } 78 | 79 | /** 80 | * 生成一个调用存储过程的语句,格式类似于“{call XXX(?,?,?)}” 81 | * 82 | * @param name 存储过程名 83 | * @param params 参数 84 | * 85 | * @return 调用存储过程的语句 86 | */ 87 | private static String generateCallStatement(String name, SpParam... 
params) { 88 | return "{call " + name + "(" + 89 | Stream.of(params).map(p -> "?").collect(Collectors.joining(",")) + 90 | ")}"; 91 | } 92 | 93 | /** 94 | * 创建存储过程调用参数 95 | * 96 | * @param name 存储过程名称 97 | * @param params 参数值 98 | * @param conn 数据库连接(执行完后不会关闭) 99 | * 100 | * @return 存储过程调用参数 101 | * @throws SQLException 如果获取存储过程信息失败 102 | */ 103 | public static SpParam[] createSpParams(String name, Object[] params, Connection conn) throws SQLException { 104 | try { 105 | var rows = getSpParamDefinitions(conn, name); 106 | 107 | var sp_params = new SpParam[rows.size()]; 108 | var param_counter = 0; 109 | 110 | for (var i = 0; i < rows.size(); i++) { 111 | var row = rows.get(i); 112 | var data_type = getIntegerValue(row, "data_type"); 113 | var column_type = getIntegerValue(row, "column_type"); 114 | 115 | var type = SP_PARAM_TYPES.get(column_type); 116 | Object value; 117 | if (type != SpParamType.OUT) { 118 | value = params[param_counter]; 119 | param_counter++; 120 | } else { 121 | value = null; 122 | } 123 | sp_params[i] = new SpParam(type, data_type, value); 124 | } 125 | 126 | return sp_params; 127 | } catch (SQLException e) { 128 | throw e; 129 | } catch (Exception e) { 130 | throw new DAOException(e); 131 | } 132 | } 133 | 134 | private static int getIntegerValue(HashMap row, String colName) { 135 | var dataType = row.get(colName); 136 | 137 | if (dataType != null) { 138 | return new BigDecimal(dataType.toString()).intValue(); 139 | } else { 140 | throw new DAOException("Unknown procedure parameter type: " + row); 141 | } 142 | } 143 | 144 | /** 145 | * 查询存储过程参数信息 146 | * 147 | * @param conn 数据库连接 148 | * @param spName 存储过程名称 149 | * 150 | * @return 参数信息 151 | * @throws Exception 如果获取存储过程信息失败 152 | */ 153 | private static List getSpParamDefinitions(Connection conn, String spName) throws Exception { 154 | var metaData = conn.getMetaData(); 155 | 156 | String schema; 157 | var fixedSpName = spName; 158 | 159 | if (!spName.contains(".")) { 160 | schema = metaData.getUserName().toUpperCase(); 161 | } else { 162 | schema = spName.split("\\.")[0].toUpperCase(); 163 | fixedSpName = spName.substring(spName.lastIndexOf(".") + 1); 164 | } 165 | 166 | try (var procedures = metaData.getProcedureColumns(null, schema, fixedSpName.toUpperCase(), "%")) { 167 | var rows = ResultSetUtil.readResultSet(procedures); 168 | rows.sort(Comparator.comparing(m -> m.getIntegerObject("sequence"))); 169 | return rows; 170 | } 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /hydrogen-dao/src/main/java/com/hyd/dao/Row.java: -------------------------------------------------------------------------------- 1 | package com.hyd.dao; 2 | 3 | import com.hyd.dao.log.Logger; 4 | import com.hyd.dao.mate.util.CaseInsensitiveHashMap; 5 | 6 | import java.text.ParseException; 7 | import java.text.SimpleDateFormat; 8 | import java.util.Date; 9 | import java.util.Map; 10 | 11 | /** 12 | * 表示查询结果中的一行。在所有的方法中,字段名不分大小写。 13 | * 14 | * @author yiding.he 15 | */ 16 | public class Row extends CaseInsensitiveHashMap implements Map { 17 | 18 | /** 19 | * 缺省日期格式 20 | */ 21 | public static final String DEFAULT_DATE_PATTERN = "yyyy-MM-dd HH:mm:ss"; 22 | 23 | private static final Logger LOG = Logger.getLogger(Row.class); 24 | 25 | public Double getDoubleObject(String key) { 26 | var value = get(key); 27 | if (value == null) { 28 | return null; 29 | } else if (value instanceof String) { 30 | return Double.parseDouble((String) value); 31 | } else if (value instanceof Double) { 32 | return 
(Double) value; 33 | } else if (value instanceof Date) { 34 | return (double) ((Date) value).getTime(); 35 | } else { 36 | return Double.parseDouble(value.toString()); 37 | } 38 | } 39 | 40 | public double getDouble(String key, double defaultValue) { 41 | try { 42 | var value = getDoubleObject(key); 43 | return value == null ? defaultValue : value; 44 | } catch (Exception e) { 45 | LOG.warn(e.getMessage(), e); 46 | return defaultValue; 47 | } 48 | } 49 | 50 | /** 51 | * 以 Long 类型获取字段的值 52 | * 53 | * @param key 字段名 54 | * 55 | * @return 字段值 56 | */ 57 | public Long getLongObject(String key) { 58 | var value = get(key); 59 | if (value == null) { 60 | return null; 61 | } else if (value instanceof String) { 62 | return Long.parseLong((String) value); 63 | } else if (value instanceof Double) { 64 | return ((Double) value).longValue(); 65 | } else if (value instanceof Date) { 66 | return ((Date) value).getTime(); 67 | } else { 68 | return Long.parseLong(value.toString()); 69 | } 70 | } 71 | 72 | /** 73 | * 以 long 类型获取字段的值 74 | * 75 | * @param key 字段名 76 | * @param defaultValue 缺省值 77 | * 78 | * @return 字段值。如果值不存在,则返回 defaultValue 79 | */ 80 | public long getLong(String key, long defaultValue) { 81 | try { 82 | var l = getLongObject(key); 83 | return l == null ? defaultValue : l; 84 | } catch (Exception e) { 85 | LOG.warn(e.getMessage(), e); 86 | return defaultValue; 87 | } 88 | } 89 | 90 | /** 91 | * 以 Integer 类型获取字段的值 92 | * 93 | * @param key 字段名 94 | * 95 | * @return 字段的值 96 | */ 97 | public Integer getIntegerObject(String key) { 98 | var value = get(key); 99 | if (value == null) { 100 | return null; 101 | } else if (value instanceof String) { 102 | return Integer.parseInt((String) value); 103 | } else if (value instanceof Double) { 104 | return ((Double) value).intValue(); 105 | } else if (value instanceof Date) { 106 | return (int) ((Date) value).getTime(); 107 | } else { 108 | return Integer.parseInt(value.toString()); 109 | } 110 | } 111 | 112 | /** 113 | * 以 int 类型获取字段的值 114 | * 115 | * @param key 字段名 116 | * @param defaultValue 缺省值 117 | * 118 | * @return 字段值。如果值不存在,则返回 defaultValue 119 | */ 120 | public int getInteger(String key, int defaultValue) { 121 | try { 122 | var i = getIntegerObject(key); 123 | return i == null ? defaultValue : i; 124 | } catch (Exception e) { 125 | LOG.warn(e.getMessage(), e); 126 | return defaultValue; 127 | } 128 | } 129 | 130 | /** 131 | * 以 Date 类型获取字段的值。 132 | *
<ul> 133 | * <li>如果字段是日期类型,则直接返回其值;</li> 134 | * <li>如果字段是字符串类型,则按照 {@link #DEFAULT_DATE_PATTERN} 来解析;</li> 135 | * <li>如果字段是数字类型,则将其看作是自 1970 年 1 月 1 日以来的毫秒数来解析。</li> 136 | * </ul>
137 | * 138 | * @param key 字段名 139 | * 140 | * @return 字段的值 141 | */ 142 | public Date getDate(String key) { 143 | return getDate(key, DEFAULT_DATE_PATTERN); 144 | } 145 | 146 | /** 147 | * 以 Date 类型获取字段的值 148 | *
<ul> 149 | * <li>如果字段是日期类型,则直接返回其值;</li> 150 | * <li>如果字段是字符串类型,则按照 pattern 参数来解析;</li> 151 | * <li>如果字段是数字类型,则将其看作是自 1970 年 1 月 1 日以来的毫秒数来解析。</li> 152 | * </ul>
153 | * 154 | * @param key 字段名 155 | * @param pattern 如果字段是字符串类型,则表示日期格式 156 | * 157 | * @return 字段的值 158 | */ 159 | public Date getDate(String key, String pattern) { 160 | var value = get(key); 161 | if (value == null) { 162 | return null; 163 | } else if (value instanceof String) { 164 | try { 165 | return parseDate((String) value, pattern); 166 | } catch (ParseException e) { 167 | throw new DAOException(e); 168 | } 169 | } else if (value instanceof Double) { 170 | return new Date(((Double) value).longValue()); 171 | } else if (value instanceof Date) { 172 | return (Date) value; 173 | } else { 174 | try { 175 | return parseDate(value.toString(), pattern); 176 | } catch (ParseException e) { 177 | throw new DAOException(e); 178 | } 179 | } 180 | } 181 | 182 | private Date parseDate(String dateStr, String pattern) throws ParseException { 183 | return new SimpleDateFormat(pattern).parse(dateStr); 184 | } 185 | 186 | public String getString(String key) { 187 | var value = get(key); 188 | if (value == null) { 189 | return null; 190 | } else { 191 | return value.toString(); 192 | } 193 | } 194 | } 195 | --------------------------------------------------------------------------------
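The sketch below is an illustrative summary, not code from the repository: it strings together the APIs shown in the sources above (registering a data source, running a script with ScriptExecutor, a transactional write, and reading typed values from a Row). The UsageSketch class name, the in-memory H2 URL, and the use of classpath:/scripts/tables.sql as the schema script are assumptions chosen for the example, and the return type of queryFirst (a Row) is inferred from how the test code above uses it.

import com.hyd.dao.DAO;
import com.hyd.dao.DataSources;
import com.hyd.dao.Row;
import com.hyd.dao.mate.util.ScriptExecutor;
import org.h2.jdbcx.JdbcDataSource;

public class UsageSketch {

    public static void main(String[] args) throws Exception {
        // Register a DataSource under the default name and open a DAO on it,
        // mirroring AbstractDaoTest.init(). The in-memory H2 URL is an assumption.
        JdbcDataSource ds = new JdbcDataSource();
        ds.setURL("jdbc:h2:mem:demo;DB_CLOSE_DELAY=-1");
        ds.setUser("sa");
        ds.setPassword("");
        DataSources.getInstance().setDataSource(DataSources.DEFAULT_DATA_SOURCE_NAME, ds);
        DAO dao = new DAO(DataSources.DEFAULT_DATA_SOURCE_NAME);

        // Run a schema script from the classpath. Per ScriptExecutor's contract,
        // each statement must end with ';' and comments must occupy a whole line.
        ScriptExecutor.execute("classpath:/scripts/tables.sql", dao);

        // Parameterized write inside a transaction; a RuntimeException thrown from
        // the block rolls the whole transaction back (compare testRunTransactionRollback).
        DAO.runTransaction(() -> {
            dao.execute("insert into blog(id, title, content) values(?, ?, ?)",
                1, "hello", "first post");
        });

        // Query results come back as Row objects: case-insensitive maps with typed getters.
        Row first = dao.queryFirst("select * from blog where id = ?", 1);
        System.out.println(first.getLong("id", -1) + ": " + first.getString("title"));
    }
}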