├── examples ├── zeko-vertx-query │ ├── vertx-run.sh │ ├── .editorconfig │ ├── src │ │ └── main │ │ │ └── kotlin │ │ │ └── com │ │ │ └── zeko │ │ │ └── example │ │ │ └── MainVerticle.kt │ └── pom.xml └── sql │ ├── zeko_test_role.sql │ ├── zeko_test_user.sql │ ├── zeko_test_address.sql │ └── zeko_test_user_has_role.sql ├── docs └── assets │ ├── zeko-test-schema.jpg │ └── zeko-rabbit.svg ├── src ├── main │ └── kotlin │ │ └── io │ │ └── zeko │ │ └── model │ │ ├── IgniteSelect.kt │ │ ├── DataHelper.kt │ │ ├── DataMapperVertx.kt │ │ ├── DateTimeHelper.kt │ │ ├── declarations │ │ └── extensions.kt │ │ ├── Select.kt │ │ ├── ResultSetHelper.kt │ │ ├── MapperConfig.kt │ │ └── DataMapper.kt └── test │ └── kotlin │ └── io │ └── zeko │ └── model │ ├── DataHelperSpec.kt │ ├── DataMapperSpec.kt │ └── DataMapperPOJOSpec.kt ├── .gitignore ├── README.md ├── LICENSE └── pom.xml /examples/zeko-vertx-query/vertx-run.sh: -------------------------------------------------------------------------------- 1 | mvn clean compile vertx:run 2 | 3 | -------------------------------------------------------------------------------- /docs/assets/zeko-test-schema.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/darkredz/Zeko-Data-Mapper/HEAD/docs/assets/zeko-test-schema.jpg -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/IgniteSelect.kt: -------------------------------------------------------------------------------- 1 | package io.zeko.model 2 | 3 | class IgniteSelect: Select { 4 | constructor(asChar: String = "="): super("\"", asChar, true) 5 | } 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Maven ### 2 | target/ 3 | pom.xml.tag 4 | pom.xml.releaseBackup 5 | pom.xml.versionsBackup 6 | pom.xml.next 7 | release.properties 8 | 
dependency-reduced-pom.xml 9 | buildNumber.properties 10 | .mvn/timing.properties 11 | .mvn/wrapper/maven-wrapper.jar 12 | -------------------------------------------------------------------------------- /examples/zeko-vertx-query/.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: https://EditorConfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | charset = utf-8 7 | indent_style = space 8 | indent_size = 4 9 | trim_trailing_whitespace = true 10 | end_of_line = lf 11 | insert_final_newline = true 12 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/DataHelper.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2017 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 
19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import io.vertx.core.json.JsonArray 24 | import io.vertx.core.json.JsonObject 25 | import java.util.LinkedHashMap 26 | 27 | /** 28 | * Helper class to convert hash map into vertx JsonArray 29 | */ 30 | class DataHelper { 31 | companion object { 32 | fun toJsonArray(rs: List>): JsonArray { 33 | val arr = JsonArray() 34 | for (map in rs) { 35 | arr.add(JsonObject.mapFrom(map)) 36 | } 37 | return arr 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/test/kotlin/io/zeko/model/DataHelperSpec.kt: -------------------------------------------------------------------------------- 1 | package io.zeko.model 2 | 3 | import java.util.LinkedHashMap 4 | import org.spekframework.spek2.Spek 5 | import org.spekframework.spek2.style.specification.describe 6 | import org.spekframework.spek2.style.gherkin.Feature 7 | import kotlin.test.assertEquals 8 | 9 | class DataHelperSpec : Spek({ 10 | 11 | describe("An array of 2 HashMap objects") { 12 | val all = ArrayList>() 13 | all.add(linkedMapOf( 14 | "id" to 123, 15 | "name" to "Leng", 16 | "age" to 12, 17 | "credit" to 856.87 18 | )) 19 | 20 | all.add(linkedMapOf( 21 | "id" to 124, 22 | "name" to "Superman", 23 | "age" to 55, 24 | "credit" to 986.08 25 | )) 26 | 27 | context("convert result to json array") { 28 | val result = DataHelper.toJsonArray(all) 29 | it("should not be empty") { 30 | assertEquals(false, result.isEmpty) 31 | } 32 | it("should have 2 elements") { 33 | val size = result.size() 34 | assertEquals(2, size) 35 | } 36 | it("should object with all the fields set") { 37 | assertEquals(123, result.getJsonObject(0).getInteger("id")) 38 | assertEquals("Leng", result.getJsonObject(0).getString("name")) 39 | assertEquals(12, result.getJsonObject(0).getInteger("age")) 40 | assertEquals(856.87, result.getJsonObject(0).getDouble("credit")) 41 | } 42 | } 43 | } 44 | }) 45 | 
-------------------------------------------------------------------------------- /examples/sql/zeko_test_role.sql: -------------------------------------------------------------------------------- 1 | -- MySQL dump 10.13 Distrib 5.7.17, for macos10.12 (x86_64) 2 | -- 3 | -- Host: 127.0.0.1 Database: zeko_test 4 | -- ------------------------------------------------------ 5 | -- Server version 5.7.17 6 | 7 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; 8 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; 9 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; 10 | /*!40101 SET NAMES utf8 */; 11 | /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 12 | /*!40103 SET TIME_ZONE='+00:00' */; 13 | /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; 14 | /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; 15 | /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; 16 | /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; 17 | 18 | -- 19 | -- Table structure for table `role` 20 | -- 21 | 22 | DROP TABLE IF EXISTS `role`; 23 | /*!40101 SET @saved_cs_client = @@character_set_client */; 24 | /*!40101 SET character_set_client = utf8 */; 25 | CREATE TABLE `role` ( 26 | `id` int(11) NOT NULL AUTO_INCREMENT, 27 | `role_name` varchar(45) DEFAULT NULL, 28 | `type` tinyint(4) DEFAULT NULL, 29 | PRIMARY KEY (`id`) 30 | ) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8; 31 | /*!40101 SET character_set_client = @saved_cs_client */; 32 | 33 | -- 34 | -- Dumping data for table `role` 35 | -- 36 | 37 | LOCK TABLES `role` WRITE; 38 | /*!40000 ALTER TABLE `role` DISABLE KEYS */; 39 | INSERT INTO `role` VALUES (1,'admin',1),(2,'super admin',1),(3,'normal user',2),(4,'moderator',3),(5,'super moderator',3); 40 | /*!40000 ALTER TABLE `role` ENABLE KEYS */; 41 | UNLOCK TABLES; 42 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 43 | 44 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 45 
| /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 46 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 47 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 48 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 49 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 50 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 51 | 52 | -- Dump completed on 2018-12-18 22:30:47 53 | -------------------------------------------------------------------------------- /examples/sql/zeko_test_user.sql: -------------------------------------------------------------------------------- 1 | -- MySQL dump 10.13 Distrib 5.7.17, for macos10.12 (x86_64) 2 | -- 3 | -- Host: 127.0.0.1 Database: zeko_test 4 | -- ------------------------------------------------------ 5 | -- Server version 5.7.17 6 | 7 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; 8 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; 9 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; 10 | /*!40101 SET NAMES utf8 */; 11 | /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 12 | /*!40103 SET TIME_ZONE='+00:00' */; 13 | /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; 14 | /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; 15 | /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; 16 | /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; 17 | 18 | -- 19 | -- Table structure for table `user` 20 | -- 21 | 22 | DROP TABLE IF EXISTS `user`; 23 | /*!40101 SET @saved_cs_client = @@character_set_client */; 24 | /*!40101 SET character_set_client = utf8 */; 25 | CREATE TABLE `user` ( 26 | `id` int(11) NOT NULL AUTO_INCREMENT, 27 | `name` varchar(45) DEFAULT NULL, 28 | `age` tinyint(4) DEFAULT NULL, 29 | `email` varchar(45) DEFAULT NULL, 30 | PRIMARY KEY (`id`) 31 | ) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8; 32 | /*!40101 SET character_set_client = 
@saved_cs_client */; 33 | 34 | -- 35 | -- Dumping data for table `user` 36 | -- 37 | 38 | LOCK TABLES `user` WRITE; 39 | /*!40000 ALTER TABLE `user` DISABLE KEYS */; 40 | INSERT INTO `user` VALUES (1,'Leng',31,'asd@gmail.com'),(2,'John',22,'lon@email.com'),(3,'Joey',43,'tutu@gmail.com'); 41 | /*!40000 ALTER TABLE `user` ENABLE KEYS */; 42 | UNLOCK TABLES; 43 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 44 | 45 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 46 | /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 47 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 48 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 49 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 50 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 51 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 52 | 53 | -- Dump completed on 2018-12-18 22:30:47 54 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/DataMapperVertx.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2017 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import io.vertx.core.json.JsonArray 24 | import io.vertx.core.json.JsonObject 25 | 26 | class DataMapperVertx: DataMapper() { 27 | 28 | fun mapJsonObjects(allTableInfo: LinkedHashMap, arr: List, delimiter: String): ArrayList>? { //ArrayList>? { 29 | if (arr.size == 0) { 30 | return arrayListOf() 31 | } 32 | return super.map(allTableInfo, arr, delimiter) 33 | } 34 | 35 | fun mapJsonArray(allTableInfo: LinkedHashMap, arr: JsonArray, delimiter: String): ArrayList>? { //ArrayList>? { 36 | if (arr.size() == 0) { 37 | return arrayListOf() 38 | } 39 | return super.map(allTableInfo, (arr.list as List), delimiter) 40 | } 41 | 42 | override fun convertToMap(oriData: Any?): LinkedHashMap? { 43 | var dataNew = LinkedHashMap() 44 | 45 | if (oriData is JsonObject) { 46 | dataNew = oriData.map as LinkedHashMap 47 | } else { 48 | dataNew = oriData as LinkedHashMap 49 | } 50 | return dataNew 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /examples/sql/zeko_test_address.sql: -------------------------------------------------------------------------------- 1 | -- MySQL dump 10.13 Distrib 5.7.17, for macos10.12 (x86_64) 2 | -- 3 | -- Host: 127.0.0.1 Database: zeko_test 4 | -- ------------------------------------------------------ 5 | -- Server version 5.7.17 6 | 7 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; 8 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; 9 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; 10 | /*!40101 SET NAMES utf8 */; 11 | /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 12 | /*!40103 SET TIME_ZONE='+00:00' */; 13 | /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; 14 | /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; 15 | /*!40101 SET 
@OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; 16 | /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; 17 | 18 | -- 19 | -- Table structure for table `address` 20 | -- 21 | 22 | DROP TABLE IF EXISTS `address`; 23 | /*!40101 SET @saved_cs_client = @@character_set_client */; 24 | /*!40101 SET character_set_client = utf8 */; 25 | CREATE TABLE `address` ( 26 | `id` int(11) NOT NULL AUTO_INCREMENT, 27 | `street1` varchar(45) DEFAULT NULL, 28 | `street2` varchar(45) DEFAULT NULL, 29 | `user_id` int(11) NOT NULL, 30 | PRIMARY KEY (`id`), 31 | KEY `fk_address_user_idx` (`user_id`), 32 | CONSTRAINT `fk_address_user` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION 33 | ) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8; 34 | /*!40101 SET character_set_client = @saved_cs_client */; 35 | 36 | -- 37 | -- Dumping data for table `address` 38 | -- 39 | 40 | LOCK TABLES `address` WRITE; 41 | /*!40000 ALTER TABLE `address` DISABLE KEYS */; 42 | INSERT INTO `address` VALUES (1,'Jalan 123','Taman Tun',1),(2,'Jalan Gembira','Taman OUG',2),(3,'Jalan Bunga','Taman Negara',1); 43 | /*!40000 ALTER TABLE `address` ENABLE KEYS */; 44 | UNLOCK TABLES; 45 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 46 | 47 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 48 | /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 49 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 50 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 51 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 52 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 53 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 54 | 55 | -- Dump completed on 2018-12-18 22:30:48 56 | -------------------------------------------------------------------------------- /examples/sql/zeko_test_user_has_role.sql: -------------------------------------------------------------------------------- 1 | -- MySQL dump 10.13 Distrib 5.7.17, for macos10.12 
(x86_64) 2 | -- 3 | -- Host: 127.0.0.1 Database: zeko_test 4 | -- ------------------------------------------------------ 5 | -- Server version 5.7.17 6 | 7 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; 8 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; 9 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; 10 | /*!40101 SET NAMES utf8 */; 11 | /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 12 | /*!40103 SET TIME_ZONE='+00:00' */; 13 | /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; 14 | /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; 15 | /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; 16 | /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; 17 | 18 | -- 19 | -- Table structure for table `user_has_role` 20 | -- 21 | 22 | DROP TABLE IF EXISTS `user_has_role`; 23 | /*!40101 SET @saved_cs_client = @@character_set_client */; 24 | /*!40101 SET character_set_client = utf8 */; 25 | CREATE TABLE `user_has_role` ( 26 | `user_id` int(11) NOT NULL, 27 | `role_id` int(11) NOT NULL, 28 | PRIMARY KEY (`user_id`,`role_id`), 29 | KEY `fk_user_has_role_role1_idx` (`role_id`), 30 | KEY `fk_user_has_role_user1_idx` (`user_id`), 31 | CONSTRAINT `fk_user_has_role_role1` FOREIGN KEY (`role_id`) REFERENCES `role` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION, 32 | CONSTRAINT `fk_user_has_role_user1` FOREIGN KEY (`user_id`) REFERENCES `user` (`id`) ON DELETE NO ACTION ON UPDATE NO ACTION 33 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 34 | /*!40101 SET character_set_client = @saved_cs_client */; 35 | 36 | -- 37 | -- Dumping data for table `user_has_role` 38 | -- 39 | 40 | LOCK TABLES `user_has_role` WRITE; 41 | /*!40000 ALTER TABLE `user_has_role` DISABLE KEYS */; 42 | INSERT INTO `user_has_role` VALUES (2,1),(1,2),(2,3),(1,4); 43 | /*!40000 ALTER TABLE `user_has_role` ENABLE KEYS */; 44 | UNLOCK TABLES; 45 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 
46 | 47 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 48 | /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 49 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 50 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 51 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 52 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 53 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 54 | 55 | -- Dump completed on 2018-12-18 22:30:47 56 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/DateTimeHelper.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2018 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 
19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import org.joda.time.DateTimeZone 24 | import org.joda.time.LocalDateTime 25 | import org.joda.time.LocalDate 26 | import org.joda.time.LocalTime 27 | import org.joda.time.DateTime 28 | import org.joda.time.base.BaseLocal 29 | import org.joda.time.format.ISODateTimeFormat 30 | 31 | class DateTimeHelper { 32 | companion object { 33 | fun toDateTimeStrUTC(value: BaseLocal): Any { 34 | val dt = when (value) { 35 | is LocalDateTime -> value.toString(ISODateTimeFormat.dateTime()) + "Z" 36 | is LocalDate -> value.toString(ISODateTimeFormat.date()) 37 | is LocalTime -> value.toString(ISODateTimeFormat.time()) 38 | else -> value 39 | } 40 | return dt 41 | } 42 | 43 | fun toDateTimeStrZone(value: BaseLocal, tzFrom: DateTimeZone?, tzTo: DateTimeZone?): Any { 44 | val dt = when (value) { 45 | is LocalDateTime -> DateTime(value.toDateTime(tzFrom).millis).toDateTime(tzTo).toString("yyyy-MM-dd'T'HH:mm:ss.SSSZZ") 46 | is LocalDate -> value.toString(ISODateTimeFormat.date()) 47 | is LocalTime -> value.toString(ISODateTimeFormat.time()) 48 | else -> value 49 | } 50 | return dt 51 | } 52 | 53 | fun toUnixTimeMilis(value: BaseLocal): Any { 54 | val dt = when (value) { 55 | is LocalDateTime -> value.toDateTime(DateTimeZone.UTC).millis 56 | else -> value 57 | } 58 | return dt 59 | } 60 | 61 | fun toUnixTime(value: BaseLocal): Any { 62 | val milis = toUnixTimeMilis(value) 63 | if (milis is Long) { 64 | return milis / 1000 65 | } 66 | return milis 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/declarations/extensions.kt: -------------------------------------------------------------------------------- 1 | package io.zeko.model.declarations 2 | 3 | import com.github.jasync.sql.db.ResultSet 4 | import io.zeko.model.ResultSetHelper 5 | import io.zeko.model.SelectInfo 6 | import io.vertx.core.json.JsonArray 7 | import org.joda.time.DateTimeZone 8 | 
import org.joda.time.base.BaseLocal 9 | import java.util.LinkedHashMap 10 | 11 | 12 | fun io.vertx.ext.sql.ResultSet.toMaps(columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 13 | return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo) 14 | } 15 | 16 | fun io.vertx.ext.sql.ResultSet.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 17 | return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo) 18 | } 19 | 20 | fun java.sql.ResultSet.toMaps(columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 21 | return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo) 22 | } 23 | 24 | fun java.sql.ResultSet.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 25 | return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo) 26 | } 27 | 28 | fun ResultSet.toMaps(columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 29 | return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo) 30 | } 31 | 32 | fun ResultSet.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 33 | return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo) 34 | } 35 | 36 | fun List.toMaps(columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? 
= null): List> { 37 | return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo) 38 | } 39 | 40 | fun List.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 41 | return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo) 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/Select.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2018 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 
19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import java.util.LinkedHashMap 24 | 25 | data class SelectInfo(val columns: List, val sqlFields: String) 26 | 27 | open class Select { 28 | protected val fieldsToSelect by lazy { 29 | LinkedHashMap>() 30 | } 31 | protected var currentTable: String = "" 32 | 33 | var espChar: String 34 | get() = field 35 | var asChar: String 36 | get() = field 37 | var espTableName: Boolean 38 | get() = field 39 | 40 | constructor(espChar: String = "`", asChar: String = "=", espTableName: Boolean = false) { 41 | this.espChar = espChar 42 | this.asChar = asChar 43 | this.espTableName = espTableName 44 | } 45 | 46 | constructor(espChar: String = "`", espTableName: Boolean = false) { 47 | this.espChar = espChar 48 | this.asChar = "=" 49 | this.espTableName = espTableName 50 | } 51 | 52 | open fun table(name: String): Select { 53 | currentTable = name 54 | return this 55 | } 56 | 57 | open fun fields(vararg names: String): Select { 58 | fieldsToSelect[currentTable] = names as Array 59 | return this 60 | } 61 | 62 | fun prepare(): SelectInfo { 63 | val selectFields = mutableListOf() 64 | val columns = mutableListOf() 65 | 66 | for ((tbl, cols) in fieldsToSelect) { 67 | for (colName in cols) { 68 | if (colName.indexOf("=") != -1) { 69 | val parts = colName.split(asChar) 70 | val partField = parts[0].trim() 71 | var tblLinkedCol: String 72 | if (!espTableName) { 73 | tblLinkedCol = partField 74 | } else { 75 | val fieldParts = partField.split(".") 76 | val tblLinked = fieldParts[0] 77 | tblLinkedCol = "${espChar}${tblLinked}${espChar}.${fieldParts[1]}" 78 | } 79 | val selfCol = parts[1].trim() 80 | if (tbl == "") { 81 | selectFields.add("$colName") 82 | } else { 83 | val aliasName = "$tbl-$selfCol" 84 | columns.add(aliasName) 85 | selectFields.add("$tblLinkedCol as $espChar$aliasName$espChar") 86 | } 87 | } else { 88 | if (tbl == "") { 89 | selectFields.add("$colName") 90 | } else { 91 | val aliasName = "$tbl-$colName" 92 | 
columns.add(aliasName) 93 | val tblFinal = if (espTableName) "$espChar$tbl$espChar" else tbl 94 | selectFields.add("$tblFinal.$colName as $espChar$aliasName$espChar") 95 | } 96 | } 97 | } 98 | } 99 | 100 | val sqlFields = selectFields.joinToString(", ") 101 | return SelectInfo(columns, sqlFields) 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /docs/assets/zeko-rabbit.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/ResultSetHelper.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2018 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 
19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import com.github.jasync.sql.db.ResultSet 24 | import io.vertx.core.json.JsonArray 25 | import org.joda.time.DateTimeZone 26 | import org.joda.time.LocalDateTime 27 | import org.joda.time.LocalDate 28 | import org.joda.time.LocalTime 29 | import org.joda.time.base.BaseLocal 30 | import java.util.LinkedHashMap 31 | 32 | class ResultSetHelper { 33 | companion object { 34 | 35 | fun toMaps(rows: List, columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone?, tzTo: DateTimeZone?): List> { 36 | val results = ArrayList>(rows.size) 37 | for (row in rows) { 38 | results.add(convertRowToMap(row.toList(), columns, timeProcessor, tzFrom, tzTo)) 39 | } 40 | return results 41 | } 42 | 43 | fun toMaps(resultSet: io.vertx.ext.sql.ResultSet, columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 44 | val results = ArrayList>(resultSet.results.size) 45 | val rows = resultSet.results 46 | for (row in rows) { 47 | results.add(convertRowToMap(row.toList(), columns, timeProcessor, tzFrom, tzTo)) 48 | } 49 | return results 50 | } 51 | 52 | fun toMaps(rows: ResultSet, columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List> { 53 | val results = ArrayList>(rows.size) 54 | for (row in rows) { 55 | results.add(convertRowToMap(row.toList(), columns, timeProcessor, tzFrom, tzTo)) 56 | } 57 | return results 58 | } 59 | 60 | fun toMaps(rs: java.sql.ResultSet, columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? 
= null): List> { 61 | val results = java.util.ArrayList>() 62 | val md = rs.metaData 63 | val totalColumns = md.columnCount 64 | 65 | while (rs.next()) { 66 | val row = arrayListOf() 67 | for (i in 1..totalColumns) { 68 | row.add(rs.getObject(i)) 69 | } 70 | results.add(convertRowToMap(row, columns, timeProcessor, tzFrom, tzTo)) 71 | } 72 | return results 73 | } 74 | 75 | fun convertRowToMap(row: List, columns: List, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): LinkedHashMap { 76 | val obj = java.util.LinkedHashMap() 77 | 78 | for ((i, value) in row.withIndex()) { 79 | val colName = columns[i] 80 | if (timeProcessor != null) { 81 | obj[colName] = when (value) { 82 | is LocalDateTime -> timeProcessor(value, tzFrom, tzTo) 83 | is LocalDate -> timeProcessor(value, tzFrom, tzTo) 84 | is LocalTime -> timeProcessor(value, tzFrom, tzTo) 85 | else -> value 86 | } 87 | } else { 88 | obj[colName] = when (value) { 89 | is LocalDateTime -> DateTimeHelper.toDateTimeStrUTC(value) 90 | is LocalDate -> DateTimeHelper.toDateTimeStrUTC(value) 91 | is LocalTime -> DateTimeHelper.toDateTimeStrUTC(value) 92 | else -> value 93 | } 94 | } 95 | } 96 | return obj 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/test/kotlin/io/zeko/model/DataMapperSpec.kt: -------------------------------------------------------------------------------- 1 | package io.zeko.model 2 | 3 | import io.vertx.core.json.JsonArray 4 | import java.util.LinkedHashMap 5 | import org.spekframework.spek2.Spek 6 | import org.spekframework.spek2.style.specification.describe 7 | import kotlin.test.assertEquals 8 | 9 | class DataMapperSpec : Spek({ 10 | 11 | describe("A query result with two table joins (1:m and m:m), user has multiple addresses and has many roles") { 12 | 13 | val all = ArrayList>() 14 | all.add(linkedMapOf( 15 | "user-id" to 1, 16 | "user-name" to "Leng", 
package io.zeko.model

import io.vertx.core.json.JsonArray
import java.util.LinkedHashMap
import org.spekframework.spek2.Spek
import org.spekframework.spek2.style.specification.describe
import kotlin.test.assertEquals

/**
 * Verifies [DataMapper] against a flattened 3-table join result:
 * user 1:m address and user m:m role. Two users; the first user has two
 * addresses and one role, the second has one address and one role.
 *
 * NOTE(review): generic type parameters were reconstructed after being stripped by markup;
 * unsafe casts were changed to `as?` so the existing `!= null` assertions are meaningful.
 */
class DataMapperSpec : Spek({

    describe("A query result with two table joins (1:m and m:m), user has multiple addresses and has many roles") {

        // Flattened rows exactly as a SQL join with aliased columns would return them.
        val all = ArrayList<LinkedHashMap<String, Any>>()
        all.add(linkedMapOf<String, Any>(
            "user-id" to 1,
            "user-name" to "Leng",
            "user-role_id" to 2,
            "role-id" to 2,
            "role-role_name" to "Super Admin",
            "role-user_id" to 1, //this is selected as alias, not actual field in table. To be used with the mapper
            "address-id" to 128,
            "address-user_id" to 1,
            "address-street1" to "Some block",
            "address-street2" to "in the street"
        ))

        all.add(linkedMapOf<String, Any>(
            "user-id" to 1,
            "user-name" to "Leng",
            "user-role_id" to 2,
            "role-id" to 2,
            "role-role_name" to "Super Admin",
            "role-user_id" to 1,
            "address-id" to 129,
            "address-user_id" to 1,
            "address-street1" to "Company Block",
            "address-street2" to "in the CBD"
        ))

        all.add(linkedMapOf<String, Any>(
            "user-id" to 2,
            "user-name" to "Superman",
            "user-role_id" to 2,
            "role-id" to 2,
            "role-role_name" to "Super Admin",
            "role-user_id" to 2,
            "address-id" to 131,
            "address-user_id" to 2,
            "address-street1" to "A capsule",
            "address-street2" to "in the yard"
        ))

        // Relationship config: role and address both nest under user.
        val table = linkedMapOf<String, TableInfo>()
        table["user"] = TableInfo(key = "id")
        table["role"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_many = true)
        table["address"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_one = true, remove = listOf("user_id"))

        val mapper = DataMapper()
        val result = mapper.map(table, all)

        context("mapping of result") {
            it("should not be null") {
                assertEquals(false, result == null)
            }

            if (result != null) {
                it("should not be empty") {
                    assertEquals(false, result.isEmpty())
                }
                it("should have 2 elements") {
                    // 3 joined rows collapse into 2 distinct users
                    assertEquals(2, result.size)
                }

                it("the first object in the array mapped should have all the user fields selected") {
                    assertEquals(1, result[0]["id"])
                    assertEquals("Leng", result[0]["name"])
                    assertEquals(2, result[0]["role_id"])
                }

                val roles = result[0]["role"] as? List<Map<String, Any>>

                it("should also have a list of nested role objects") {
                    assertEquals(true, roles != null)
                }

                if (roles != null) {
                    it("should have one role") {
                        assertEquals(true, roles.size == 1)
                    }

                    val role = roles[0]

                    it("should have role name, role id, user id matched") {
                        assertEquals(2, role["id"])
                        assertEquals("Super Admin", role["role_name"])
                        assertEquals(1, role["user_id"])
                    }
                }


                val addresses = result[0]["address"] as? List<Map<String, Any>>

                it("should also have a list of nested address objects") {
                    assertEquals(true, addresses != null)
                }

                if (addresses != null) {
                    it("should have 2 addresses") {
                        assertEquals(true, addresses.size == 2)
                    }

                    it("should have 1st address street1 and street2 matched") {
                        assertEquals(128, addresses[0]["id"])
                        assertEquals("Some block", addresses[0]["street1"])
                        assertEquals("in the street", addresses[0]["street2"])
                    }

                    it("should have 2nd address street1 and street2 matched") {
                        assertEquals(129, addresses[1]["id"])
                        assertEquals("Company Block", addresses[1]["street1"])
                        assertEquals("in the CBD", addresses[1]["street2"])
                    }
                }

                // for 2nd user object
                it("the 2nd object in the array mapped should have all the user fields selected") {
                    assertEquals(2, result[1]["id"])
                    assertEquals("Superman", result[1]["name"])
                    assertEquals(2, result[1]["role_id"])
                }

                val roles2 = result[1]["role"] as? List<Map<String, Any>>

                it("should also have a list of nested role objects") {
                    assertEquals(true, roles2 != null)
                }

                if (roles2 != null) {
                    it("should have one role") {
                        assertEquals(true, roles2.size == 1)
                    }

                    val role2 = roles2[0]

                    it("should have role name, role id, user id matched") {
                        assertEquals(2, role2["id"])
                        assertEquals("Super Admin", role2["role_name"])
                        assertEquals(2, role2["user_id"])
                    }
                }


                val addresses2 = result[1]["address"] as? List<Map<String, Any>>

                it("should also have a list of nested address objects") {
                    assertEquals(true, addresses2 != null)
                }

                if (addresses2 != null) {
                    it("should have 1 address") {
                        assertEquals(true, addresses2.size == 1)
                    }

                    it("should have 1st address street1 and street2 matched") {
                        assertEquals(131, addresses2[0]["id"])
                        assertEquals("A capsule", addresses2[0]["street1"])
                        assertEquals("in the yard", addresses2[0]["street2"])
                    }
                }
            }
        }

    }
})

6 | 7 | Maven Central 8 | 9 | 10 | Apache License 2 11 | 12 | 13 | Awesome Kotlin Badge 14 | 15 |

A lightweight, fast and simple data mapper library in Kotlin that helps to map the result of queries from your normalized dataset (from RDBMS, SQLite, CSV or any source) back into relational mapped hash maps

ps: Zeko turns out to be rabbit in Croatian, thus the logo :D

[Show your ❤ with a ★](https://github.com/darkredz/Zeko-Data-Mapper/stargazers)

### To use, add these to your maven pom.xml

```xml
<dependency>
    <groupId>io.zeko</groupId>
    <artifactId>zeko-data-mapper</artifactId>
    <version>1.6.7</version>
</dependency>
```

## Features
- No configuration files, no XML, no YAML, no annotations, lightweight, easy to use
- Fast startup & performance
- No Reflections. Great for [GraalVM](https://www.graalvm.org/) and [Quarkus](https://quarkus.io/)

## Basic usage
Use this library to denormalize your data from a plaintext CSV, in-memory data, RDBMS tables, etc.
Instead of using an ORM, do whatever you need to query & retrieve your normalized data, and map the data back
into a relational nested object which makes sense to your system.

### Configuring relationship of the normalized data
Given a MySQL database with tables of user, role and address, where each user should have one or more addresses and can have multiple roles. Each role can be assigned to more than one user.
So user-role is a many to many (M:M) relationship, user-address is 1:M relationship 44 | 45 | ![alt relationship between tables](./docs/assets/zeko-test-schema.jpg "Relationship between tables") 46 | 47 | 48 | ```kotlin 49 | val table = linkedMapOf() 50 | table["user"] = TableInfo(key = "id") 51 | table["role"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_many = true) 52 | table["address"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_one = true) 53 | ``` 54 | 55 | ### Using DataMapper class 56 | DataMapper::map accepts the mapper config of your tables which it uses to understand the relationship of the flatten data structure. 57 | Pass in the results of your sql query with the field names (with table name as prefix) to mapped the result. 58 | More example can be found in [DataMapperSpec](https://github.com/darkredz/Zeko-Data-Mapper/blob/master/src/test/kotlin/com.zeko.model/DataMapperSpec.kt) 59 | 60 | Example: 61 | ```kotlin 62 | val rawResults = ArrayList>() 63 | rawResults.add(linkedMapOf( 64 | "user-id" to 1, 65 | "user-name" to "Leng", 66 | "role-id" to 2, 67 | "role-role_name" to "Super Admin", 68 | "role-user_id" to 1, //this is selected as alias, not actual field in table. To be used with the mapper 69 | "address-id" to 128, 70 | "address-user_id" to 1, 71 | "address-street1" to "Some block", 72 | "address-street2" to "in the street" 73 | )) 74 | ``` 75 | Map the raw results into a denormalized structure 76 | ```kotlin 77 | val mapper = DataMapper() 78 | val result = mapper.map(tables, rawResults) 79 | val json = Json.encodePrettily(result) 80 | println(json) 81 | ``` 82 | 83 | map() method accepts a third argument which is to specify the delimiter of the column name, eg. 
user-id 84 | By default it uses "-" but you can always change it when calling map() 85 | ```kotlin 86 | mapper.map(tables, rawResults, "-") 87 | 88 | ``` 89 | 90 | This will give you a list of users with role and address nested under user's role and address field. 91 | ```json 92 | [ { 93 | "id" : 1, 94 | "name" : "Leng", 95 | "role" : [ { 96 | "id" : 2, 97 | "user_id" : 1, 98 | "role_name" : "super admin" 99 | } ], 100 | "address" : [ { 101 | "id" : 1, 102 | "user_id" : 1, 103 | "street1" : "Jalan 123", 104 | "street2" : "Taman Tun" 105 | }, { 106 | "id" : 3, 107 | "user_id" : 1, 108 | "street1" : "Jalan Bunga", 109 | "street2" : "Taman Negara" 110 | } ] 111 | } ] 112 | ``` 113 | 114 | Notice that user_id field is in the role and address data which is quite redundant in many cases. 115 | To automatically remove those linked field from the mapped result, set the list of fields in TableInfo remove property. 116 | ```kotlin 117 | val table = linkedMapOf() 118 | table["user"] = TableInfo(key = "id") 119 | table["role"] = TableInfo(remove = listOf("user_id"), key = "id", move_under = "user", foreign_key = "user_id", many_to_many = true) 120 | table["address"] = TableInfo(remove = listOf("user_id"), key = "id", move_under = "user", foreign_key = "user_id", many_to_one = true) 121 | ``` 122 | 123 | This will give you a result of 124 | ```json 125 | [ { 126 | "id" : 1, 127 | "name" : "Leng", 128 | "role" : [ { 129 | "id" : 2, 130 | "role_name" : "super admin" 131 | } ], 132 | "address" : [ { 133 | "id" : 1, 134 | "street1" : "Jalan 123", 135 | "street2" : "Taman Tun" 136 | }, { 137 | "id" : 3, 138 | "street1" : "Jalan Bunga", 139 | "street2" : "Taman Negara" 140 | } ] 141 | } ] 142 | ``` 143 | 144 | Use MapperConfig class to conveniently set relationship without figuring out the order of nested data and removal of redundant field. 
145 | The same relationship just now can be defined as the following with MapperConfig 146 | ```kotlin 147 | // id is the default primary key of all tables, so you don't have to define for every table 148 | // true auto removes the linked fields such as user_id here 149 | 150 | val tables = MapperConfig("id", true) 151 | .table("user") 152 | .table("role").manyToMany(true).moveUnder("user").foreignKey("user_id") 153 | .table("address").manyToOne(true).moveUnder("user").foreignKey("user_id") 154 | ``` 155 | 156 | ### Example usage with [Jasync-sql](https://github.com/jasync-sql/jasync-sql) queries 157 | For the DB schema above, you can find the example Zeko usage with [Jasync-sql here](https://github.com/darkredz/Zeko-Data-Mapper/blob/dev/examples/zeko-vertx-query/src/main/kotlin/com/zeko/example/MainVerticle.kt#L35). 158 | The SQL structure and data for the sample app is [provided](https://github.com/darkredz/Zeko-Data-Mapper/tree/dev/examples/sql) too. 159 | 160 | Jasync-sql is a port of [mauricio/postgresql-async](https://github.com/mauricio/postgresql-async), to Kotlin. 
161 | 162 | ### Example usage with [Vert.x Common-sql](http://vertx.io/docs/vertx-sql-common/kotlin) queries 163 | If you use Vert.x common sql interface, such as the async [MySQL and postgresql client](https://vertx.io/docs/vertx-mysql-postgresql-client/java/) 164 | which uses Mauricio Linhares [async driver](https://github.com/mauricio/postgresql-async), you will find that the result return is typed as List of JsonArray 165 | 166 | Example using Zeko with vertx common sql results can be [found here](https://github.com/darkredz/Zeko-Data-Mapper/blob/dev/examples/zeko-vertx-query/src/main/kotlin/com/zeko/example/MainVerticle.kt#L112) 167 | 168 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/MapperConfig.kt: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2017 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 
19 | */ 20 | 21 | package io.zeko.model 22 | 23 | /** 24 | * MapperConfig is a wrapper for the table relationship info (hash map) to be pass into a DataMapper instance for mapping objects 25 | * 26 | * @property defaultPrimaryKey Default primary such as ID field 27 | * @property autoRemoveLinkKey Indicate to automatically remove key that links two object/table together 28 | * @property latestTable latest table that is added to the configuration via table() 29 | * @property tableInfo Underlying configuration hashmap that stores the table name and its relationship/field information 30 | */ 31 | class MapperConfig(defaultPrimaryKey: String, autoRemoveLinkKey: Boolean) { 32 | 33 | var tableInfo = LinkedHashMap() 34 | var latestTable = "" 35 | var defaultPrimaryKey = defaultPrimaryKey 36 | var autoRemoveLinkKey = autoRemoveLinkKey 37 | 38 | /** 39 | * @param defaultPrimaryKey set default primary such as common id field 40 | */ 41 | fun defaultPrimaryKeyTo(defaultPrimaryKey: String): MapperConfig { 42 | this.defaultPrimaryKey = defaultPrimaryKey 43 | return this 44 | } 45 | 46 | /** 47 | * @param autoRemoveLinkKey set true to remove foreign key that is linking the two tables 48 | */ 49 | fun shouldRemoveLinkKey(autoRemoveLinkKey: Boolean): MapperConfig { 50 | this.autoRemoveLinkKey = autoRemoveLinkKey 51 | return this 52 | } 53 | 54 | fun table(tableName: String): MapperConfig { 55 | return table(tableName, null) 56 | } 57 | 58 | fun table(tableName: String, defaultPrimaryKey: String?): MapperConfig { 59 | this.tableInfo[tableName] = TableInfo("id", null, null, null, false, false, false, false, false, null) 60 | this.latestTable = tableName 61 | if (defaultPrimaryKey != null) { 62 | this.primaryKey(defaultPrimaryKey) 63 | } 64 | else if (this.defaultPrimaryKey != null) { 65 | this.primaryKey(this.defaultPrimaryKey) 66 | } 67 | return this 68 | } 69 | 70 | fun currentTable(): TableInfo { 71 | return this.tableInfo[this.latestTable] as TableInfo 72 | } 73 | 74 | fun 
removeLinkKey(): MapperConfig { 75 | this.remove(this.currentTable().foreign_key as String) 76 | return this 77 | } 78 | 79 | fun remove(fieldName: Any): MapperConfig { 80 | val table = this.currentTable() 81 | if (table.remove == null) { 82 | table.remove = ArrayList() 83 | } 84 | if (fieldName is List<*>) { 85 | (table.remove as ArrayList).addAll((fieldName as List)) 86 | } else if (fieldName is String) { 87 | if (!(table.remove as ArrayList).contains(fieldName)) { 88 | (table.remove as ArrayList).add(fieldName) 89 | } 90 | } 91 | return this 92 | } 93 | 94 | fun primaryKey(fieldName: String): MapperConfig { 95 | val table = this.currentTable() 96 | table.key = fieldName 97 | return this 98 | } 99 | 100 | fun rename(tableName: String): MapperConfig { 101 | val table = this.currentTable() 102 | table.rename = tableName 103 | return this 104 | } 105 | 106 | fun foreignKey(fieldName: String): MapperConfig { 107 | val table = this.currentTable() 108 | table.foreign_key = fieldName 109 | if (this.autoRemoveLinkKey) { 110 | return this.removeLinkKey() 111 | } 112 | return this 113 | } 114 | 115 | fun oneToOne(bool: Boolean): MapperConfig { 116 | val table = this.currentTable() 117 | table.one_to_one = bool 118 | return this 119 | } 120 | 121 | fun manyToMany(bool: Boolean): MapperConfig { 122 | val table = this.currentTable() 123 | table.many_to_many = bool 124 | return this 125 | } 126 | 127 | fun oneToMany(bool: Boolean): MapperConfig { 128 | val table = this.currentTable() 129 | table.one_to_many = bool 130 | return this 131 | } 132 | 133 | fun manyToOne(bool: Boolean): MapperConfig { 134 | val table = this.currentTable() 135 | table.many_to_one = bool 136 | return this 137 | } 138 | 139 | fun moveUnder(fieldName: String): MapperConfig { 140 | val table = this.currentTable() 141 | table.move_under = fieldName 142 | return this 143 | } 144 | 145 | fun mapTo(dataClassHandler: ((dataMap: Map) -> Any)?): MapperConfig { 146 | val table = this.currentTable() 147 | 
table.dataClassHandler = dataClassHandler 148 | return this 149 | } 150 | 151 | fun toArrayMap(): LinkedHashMap { 152 | this.tableInfo = this.sortTableInfo(this.tableInfo) 153 | return this.tableInfo 154 | } 155 | 156 | fun toTableInfo(): LinkedHashMap { 157 | this.tableInfo = this.sortTableInfo(this.tableInfo) 158 | return this.tableInfo 159 | } 160 | 161 | fun sortTableInfo(tables: LinkedHashMap): LinkedHashMap { 162 | val tableList = tables.keys.asIterable() 163 | val firstTable = tableList.first() 164 | var selfTblIndex = -1 165 | 166 | for (tbl in tableList) { 167 | selfTblIndex++ 168 | var moveUnderTbl = tables[tbl]?.move_under 169 | 170 | //no need to sort if linked table is the root table 171 | if (moveUnderTbl != null && moveUnderTbl != firstTable) { 172 | if (tables[tbl]?.rename != null) { 173 | moveUnderTbl = tables[tbl]?.rename 174 | } 175 | 176 | val linkTblIndex = tableList.indexOf(moveUnderTbl) //\array_search(moveUnderTbl, tableList) 177 | 178 | //if move_under table was before the table(self) to link to, then resort, move it before 179 | if (selfTblIndex > linkTblIndex && linkTblIndex > -1) { 180 | //first table remains the same 181 | val tables2 = LinkedHashMap() 182 | tables2[tbl] = tables[tbl] as TableInfo 183 | tables2.putAll(tables) 184 | 185 | val tables3 = LinkedHashMap() 186 | tables3[firstTable] = tables[firstTable] as TableInfo 187 | tables3.putAll(tables2) 188 | 189 | return this.sortTableInfo(tables3) 190 | } 191 | } 192 | } 193 | return tables 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /examples/zeko-vertx-query/src/main/kotlin/com/zeko/example/MainVerticle.kt: -------------------------------------------------------------------------------- 1 | package com.zeko.example 2 | 3 | import com.github.jasync.sql.db.ResultSet 4 | import com.github.jasync.sql.db.mysql.MySQLConnectionBuilder 5 | import io.vertx.ext.web.Route 6 | import io.vertx.ext.web.Router 7 | import 
package com.zeko.example

import com.github.jasync.sql.db.ResultSet
import com.github.jasync.sql.db.mysql.MySQLConnectionBuilder
import io.vertx.ext.web.Route
import io.vertx.ext.web.Router
import io.vertx.ext.web.RoutingContext
import io.vertx.kotlin.coroutines.CoroutineVerticle
import io.vertx.kotlin.coroutines.awaitEvent
import io.vertx.kotlin.coroutines.dispatcher
import kotlinx.coroutines.launch
import com.zeko.model.*
import com.zeko.model.Select
import io.vertx.core.json.Json
import io.vertx.core.json.JsonObject
import io.vertx.core.json.JsonArray
import io.vertx.ext.asyncsql.MySQLClient
import org.joda.time.*
import java.util.LinkedHashMap
import org.joda.time.base.BaseLocal

/**
 * Example verticle demonstrating Zeko Data Mapper with three query styles:
 * raw jasync-sql SQL, jasync-sql + [Select] builder, and Vert.x common-sql.
 *
 * NOTE(review): generic type parameters were reconstructed after being stripped by markup.
 */
class MainVerticle : CoroutineVerticle() {

    override suspend fun start() {
        val router = Router.router(vertx)
        router.get("/jasync-raw-sql").coroutineHandler { ctx -> jasyncRawSql(ctx) }
        router.get("/jasync-easy").coroutineHandler { ctx -> jasyncEasy(ctx) }
        router.get("/common-sql").coroutineHandler { ctx -> commonSql(ctx) }

        vertx.createHttpServer()
            .requestHandler(router)
            .listen(8080)
    }

    /** Queries via jasync-sql with hand-written aliased SQL, then maps the flat rows. */
    suspend fun jasyncRawSql(ctx: RoutingContext) {
        val sql = """
            select
                user.id as `user-id`,
                user.name as `user-name`,
                role.id as `role-id`,
                user.id as `role-user_id`,
                role.role_name as `role-role_name`,
                address.id as `address-id`,
                user.id as `address-user_id`,
                address.street1 as `address-street1`,
                address.street2 as `address-street2`
            from user
            left join address on address.user_id = user.id
            left outer join user_has_role on user_has_role.user_id = user.id
            left outer join role on role.id = user_has_role.role_id
        """.trimIndent()

        // Connection to MySQL DB.
        // NOTE(review): a new pool is created per request for demo simplicity;
        // a real app should create it once and reuse it.
        val connection = MySQLConnectionBuilder.createConnectionPool("jdbc:mysql://localhost:3306/zeko_test?user=root&password=root")
        val resFuture = connection.sendPreparedStatement(sql)
        val queryResult = resFuture.get()
        connection.disconnect().get()

        val rows = queryResult.rows
        val results = rows.toMaps(listOf("user-id", "user-name", "role-id", "role-user_id", "role-role_name", "address-id", "address-user_id", "address-street1", "address-street2"))

        val tables = linkedMapOf<String, TableInfo>()
        tables["user"] = TableInfo(key = "id")
        tables["role"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_many = true, remove = listOf("user_id"))
        tables["address"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_one = true, remove = listOf("user_id"))

        val mapper = DataMapper()
        val mappedResults = mapper.map(tables, results)
        val json = Json.encodePrettily(mappedResults)

        ctx.response().end(json)
    }

    /** Same query as [jasyncRawSql] but with the Select builder and MapperConfig. */
    suspend fun jasyncEasy(ctx: RoutingContext) {
        val select = Select()
            .table("user").fields("id", "name")
            .table("role").fields("id", "role_name", "user.id = user_id")
            .table("address").fields("id", "street1", "street2", "user.id = user_id")

        val (columns, sqlFields) = select.prepare()

        val sql = """
            select $sqlFields
            from user
            left join address on address.user_id = user.id
            left outer join user_has_role on user_has_role.user_id = user.id
            left outer join role on role.id = user_has_role.role_id
        """.trimIndent()

        // Connection to MySQL DB (per-request pool; see note in jasyncRawSql)
        val connection = MySQLConnectionBuilder.createConnectionPool("jdbc:mysql://localhost:3306/zeko_test?user=root&password=root")
        val resFuture = connection.sendPreparedStatement(sql)
        val queryResult = resFuture.get()
        connection.disconnect().get()

        val rows = queryResult.rows
        val results = rows.toMaps(columns)

        val tables = MapperConfig("id", true)
            .table("user")
            .table("role").manyToMany(true).moveUnder("user").foreignKey("user_id")
            .table("address").manyToOne(true).moveUnder("user").foreignKey("user_id")

        val mapper = DataMapper()
        val mappedResults = mapper.map(tables, results)
        val json = Json.encodePrettily(mappedResults)

        ctx.response().end(json)
    }


    /** Same mapping driven by the Vert.x common-sql async MySQL client. */
    suspend fun commonSql(ctx: RoutingContext) {
        val clientConf = JsonObject().put("host", "localhost")
            .put("port", 3306)
            .put("database", "zeko_test")
            .put("username", "root")
            .put("password", "root")

        val client = MySQLClient.createShared(vertx, clientConf)

        val select = Select()
            .table("user").fields("id", "name")
            .table("role").fields("id", "role_name", "user.id = user_id")
            .table("address").fields("id", "street1", "street2", "user.id = user_id")

        val (columns, sqlFields) = select.prepare()

        val sql = """
            select $sqlFields
            from user
            left join address on address.user_id = user.id
            left outer join user_has_role on user_has_role.user_id = user.id
            left outer join role on role.id = user_has_role.role_id
        """.trimIndent()

        client.getConnection { res ->
            if (res.succeeded()) {
                val connection = res.result()

                connection.query(sql) {
                    if (it.succeeded()) {
                        val rows = it.result().results
                        val results = rows.toMaps(columns)

                        val tables = MapperConfig("id", true)
                            .table("user")
                            .table("role").manyToMany(true).moveUnder("user").foreignKey("user_id")
                            .table("address").manyToOne(true).moveUnder("user").foreignKey("user_id")

                        val mapper = DataMapper()
                        val mappedResults = mapper.map(tables, results)

                        val json = Json.encodePrettily(mappedResults)
                        ctx.response().end(json)
                        connection.close()
                    } else {
                        // Fix: surface query failures instead of NPE-ing on it.result(),
                        // and close the connection on the failure path too
                        connection.close()
                        ctx.fail(it.cause())
                    }
                }
            } else {
                // Fix: a failed connection previously left the HTTP request hanging forever
                ctx.fail(res.cause())
            }
        }
    }

    /**
     * An extension method for simplifying coroutines usage with Vert.x Web routers
     */
    fun Route.coroutineHandler(fn: suspend (RoutingContext) -> Unit) {
        handler { ctx ->
            launch(ctx.vertx().dispatcher()) {
                try {
                    fn(ctx)
                } catch (e: Exception) {
                    ctx.fail(e)
                }
            }
        }
    }

    /**
     * Extension method to jasync ResultSet toMaps, columns taken from a prepared [SelectInfo]
     */
    fun ResultSet.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List<LinkedHashMap<String, Any?>> {
        return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo)
    }

    /** Extension method to jasync ResultSet toMaps with explicit column names */
    fun ResultSet.toMaps(columns: List<String>, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List<LinkedHashMap<String, Any?>> {
        return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo)
    }

    /**
     * Extension method to vertx common-sql results which returns List of JsonArray in a query
     */
    fun List<JsonArray>.toMaps(sel: SelectInfo, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List<LinkedHashMap<String, Any?>> {
        return ResultSetHelper.toMaps(this, sel.columns, timeProcessor, tzFrom, tzTo)
    }

    /** Extension method to vertx common-sql results with explicit column names */
    fun List<JsonArray>.toMaps(columns: List<String>, timeProcessor: ((BaseLocal, DateTimeZone?, DateTimeZone?) -> Any)? = null, tzFrom: DateTimeZone? = null, tzTo: DateTimeZone? = null): List<LinkedHashMap<String, Any?>> {
        return ResultSetHelper.toMaps(this, columns, timeProcessor, tzFrom, tzTo)
    }
}
131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | io.vertx 141 | vertx-mysql-postgresql-client 142 | ${vertx.version} 143 | 144 | 145 | 146 | 147 | org.jetbrains.spek 148 | spek-api 149 | ${spek.version} 150 | test 151 | 152 | 153 | org.jetbrains.spek 154 | spek-junit-platform-engine 155 | ${spek.version} 156 | test 157 | 158 | 159 | org.junit.jupiter 160 | junit-jupiter-api 161 | ${jupiter.version} 162 | test 163 | 164 | 165 | 166 | 167 | ${project.basedir}/src/main/kotlin 168 | src/test/kotlin 169 | 170 | 171 | 172 | maven-surefire-plugin 173 | 2.22.0 174 | 175 | 176 | org.junit.platform 177 | junit-platform-surefire-provider 178 | 1.2.0 179 | 180 | 181 | org.jetbrains.spek 182 | spek-junit-platform-engine 183 | ${spek.version} 184 | 185 | 186 | org.junit.jupiter 187 | junit-jupiter-engine 188 | ${jupiter.version} 189 | 190 | 191 | 192 | 193 | **/*Spec.* 194 | 195 | 196 | 197 | 198 | 199 | io.reactiverse 200 | vertx-maven-plugin 201 | 1.0.18 202 | 203 | 204 | vmp 205 | 206 | initialize 207 | package 208 | 209 | 210 | 211 | 212 | 213 | false 214 | 215 | 216 | 217 | 218 | kotlin-maven-plugin 219 | org.jetbrains.kotlin 220 | ${kotlin.version} 221 | 222 | 223 | 224 | compile 225 | 226 | compile 227 | 228 | 229 | 230 | ${project.basedir}/src/main/kotlin 231 | 232 | 233 | 234 | enable 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/test/kotlin/io/zeko/model/DataMapperPOJOSpec.kt: -------------------------------------------------------------------------------- 1 | package io.zeko.model 2 | 3 | import io.vertx.core.json.Json 4 | import io.vertx.core.json.JsonArray 5 | import java.util.LinkedHashMap 6 | import org.spekframework.spek2.Spek 7 | import org.spekframework.spek2.style.specification.describe 8 | import org.spekframework.spek2.style.gherkin.Feature 9 | import kotlin.test.assertEquals 10 | import kotlin.test.assertTrue 11 | 12 | class User : Entity { 13 | constructor(map: Map) : super(map) 14 | constructor(vararg props: Pair) : super(*props) 15 | var id: Int? by map 16 | var name: String? by map 17 | var roleId: Int? by map 18 | var role: List? by map 19 | var address: List
? by map 20 | var customerData: CustomerData? by map 21 | } 22 | 23 | class Role : Entity { 24 | constructor(map: Map) : super(map) 25 | constructor(vararg props: Pair) : super(*props) 26 | val id: Int? by map 27 | val roleName: String? by map 28 | val userId: Int? by map 29 | } 30 | 31 | class Address : Entity { 32 | constructor(map: Map) : super(map) 33 | constructor(vararg props: Pair) : super(*props) 34 | var id: Int? by map 35 | var userId: Int? by map 36 | var street1: String? by map 37 | var street2: String? by map 38 | } 39 | 40 | class CustomerData : Entity { 41 | constructor(map: Map) : super(map) 42 | constructor(vararg props: Pair) : super(*props) 43 | val id: Int? by map 44 | var userId: Int? by map 45 | val totalSpent: Double? by map 46 | val refund: List? by map 47 | } 48 | 49 | class Refund : Entity { 50 | constructor(map: Map) : super(map) 51 | constructor(vararg props: Pair) : super(*props) 52 | val id: Int? by map 53 | val customerDataId: Int? by map 54 | val itemName: String? by map 55 | val quantity: Int? by map 56 | } 57 | 58 | class DataMapperPOJOSpec : Spek({ 59 | 60 | describe("A query result with two table joins (1:m and m:m), user has multiple addresses and has many roles") { 61 | 62 | val all = ArrayList>() 63 | all.add(linkedMapOf( 64 | "user-id" to 1, 65 | "user-name" to "Leng", 66 | "user-role_id" to 2, 67 | "role-id" to 2, 68 | "role-role_name" to "Super Admin", 69 | "role-user_id" to 1, //this is selected as alias, not actual field in table. 
To be used with the mapper 70 | "address-id" to 128, 71 | "address-user_id" to 1, 72 | "address-street1" to "Some block", 73 | "address-street2" to "in the street", 74 | "customer_data-id" to 99, 75 | "customer_data-user_id" to 1, 76 | "customer_data-total_spent" to 58209.50, 77 | "refund-id" to 150, 78 | "refund-item_name" to "Product One", 79 | "refund-quantity" to 10, 80 | "refund-customer_data_id" to 150 81 | )) 82 | 83 | all.add(linkedMapOf( 84 | "user-id" to 1, 85 | "user-name" to "Leng", 86 | "user-role_id" to 2, 87 | "role-id" to 2, 88 | "role-role_name" to "Super Admin", 89 | "role-user_id" to 1, 90 | "address-id" to 129, 91 | "address-user_id" to 1, 92 | "address-street1" to "Company Block", 93 | "address-street2" to "in the CBD", 94 | "customer_data-id" to 99, 95 | "customer_data-user_id" to 1, 96 | "customer_data-total_spent" to 58209.50, 97 | "refund-id" to 150, 98 | "refund-item_name" to "Product One", 99 | "refund-quantity" to 10, 100 | "refund-customer_data_id" to 150 101 | )) 102 | 103 | all.add(linkedMapOf( 104 | "user-id" to 2, 105 | "user-name" to "Superman", 106 | "user-role_id" to 2, 107 | "role-id" to 2, 108 | "role-role_name" to "Super Admin", 109 | "role-user_id" to 2, 110 | "address-id" to 131, 111 | "address-user_id" to 2, 112 | "address-street1" to "A capsule", 113 | "address-street2" to "in the yard", 114 | "customer_data-id" to 100, 115 | "customer_data-user_id" to 2, 116 | "customer_data-total_spent" to 88.00, 117 | )) 118 | 119 | all.add(linkedMapOf( 120 | "user-id" to 3, 121 | "user-name" to "Batman", 122 | "user-role_id" to 3, 123 | "role-id" to 3, 124 | "role-role_name" to "User", 125 | "role-user_id" to 3, 126 | "address-id" to 133, 127 | "address-user_id" to 3, 128 | "address-street1" to "Market St", 129 | "address-street2" to "Some where", 130 | "customer_data-id" to 125, 131 | "customer_data-user_id" to 3, 132 | "customer_data-total_spent" to 678.30, 133 | "refund-id" to 150, 134 | "refund-item_name" to "Product One", 135 | 
"refund-quantity" to 10, 136 | "refund-customer_data_id" to 125, 137 | )) 138 | 139 | 140 | all.add(linkedMapOf( 141 | "user-id" to 3, 142 | "user-name" to "Batman", 143 | "user-role_id" to 3, 144 | "role-id" to 3, 145 | "role-role_name" to "User", 146 | "role-user_id" to 3, 147 | "address-id" to 133, 148 | "address-user_id" to 3, 149 | "address-street1" to "Market St", 150 | "address-street2" to "Some where", 151 | "customer_data-id" to 125, 152 | "customer_data-user_id" to 3, 153 | "customer_data-total_spent" to 678.30, 154 | "refund-id" to 151, 155 | "refund-item_name" to "Product OneDotOne", 156 | "refund-quantity" to 11, 157 | "refund-customer_data_id" to 125, 158 | )) 159 | 160 | all.add(linkedMapOf( 161 | "user-id" to 4, 162 | "user-name" to "Joker", 163 | "user-role_id" to 3, 164 | "role-id" to 3, 165 | "role-role_name" to "User", 166 | "role-user_id" to 4, 167 | "address-id" to 155, 168 | "address-user_id" to 4, 169 | "address-street1" to "Market St", 170 | "address-street2" to "Some where", 171 | "customer_data-id" to 177, 172 | "customer_data-user_id" to 4, 173 | "customer_data-total_spent" to 112.00, 174 | "refund-id" to 151, 175 | "refund-item_name" to "Product MMM", 176 | "refund-quantity" to 22, 177 | "refund-customer_data_id" to 177, 178 | )) 179 | 180 | val table = linkedMapOf() 181 | table["user"] = TableInfo(key = "id", dataClassHandler = { User(it) }) 182 | table["role"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_many = true, dataClassHandler = { Role(it) }) 183 | table["address"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", many_to_one = true, remove = listOf("user_id"), dataClassHandler = { Address(it) }) 184 | table["refund"] = TableInfo(key = "id", move_under = "customer_data", foreign_key = "customer_data_id", many_to_one = true, dataClassHandler = { Refund(it) }) 185 | table["customer_data"] = TableInfo(key = "id", move_under = "user", foreign_key = "user_id", one_to_one = true, 
remove = listOf("user_id"), dataClassHandler = { CustomerData(it) }) 186 | 187 | val mapper = DataMapper() 188 | val result = mapper.mapStruct(table, all) as List 189 | 190 | println(Json.encodePrettily(result)) 191 | 192 | context("mapping of result") { 193 | it("should not be null") { 194 | assertEquals(false, result == null) 195 | } 196 | 197 | if (result != null) { 198 | it("should not be empty") { 199 | assertEquals(false, result.isEmpty()) 200 | } 201 | it("should have 4 elements") { 202 | assertEquals(4, result.size) 203 | } 204 | 205 | it("should be User instances") { 206 | assertTrue(User::class.java == result[0]::class.java) 207 | assertTrue(User::class.java == result[1]::class.java) 208 | assertTrue(User::class.java == result[2]::class.java) 209 | } 210 | 211 | it("the first object in the array mapped should have all the user fields selected") { 212 | assertEquals(1, result[0].id) 213 | assertEquals("Leng", result[0].name) 214 | assertEquals(2, result[0].roleId) 215 | } 216 | 217 | val roles = result[0].role 218 | 219 | it("should also have a list of nested role objects") { 220 | assertEquals(true, roles != null) 221 | } 222 | 223 | if (roles != null) { 224 | it("should have one role") { 225 | assertEquals(true, roles.size == 1) 226 | } 227 | 228 | val role = roles[0] 229 | 230 | it("should be a Role instance") { 231 | assertTrue(Role::class.java == role::class.java) 232 | } 233 | 234 | it("should have role name, role id, user id matched") { 235 | assertEquals(2, role.id) 236 | assertEquals("Super Admin", role.roleName) 237 | assertEquals(1, role.userId) 238 | } 239 | } 240 | 241 | 242 | val addresses = result[0].address 243 | 244 | it("should also have a list of nested address objects") { 245 | assertEquals(true, addresses != null) 246 | } 247 | 248 | if (addresses != null) { 249 | it("should have 2 addresses") { 250 | assertEquals(true, addresses.size == 2) 251 | } 252 | 253 | it("should be Address instances") { 254 | assertTrue(Address::class.java == 
addresses[0]::class.java) 255 | assertTrue(Address::class.java == addresses[1]::class.java) 256 | } 257 | 258 | it("should have 1st address street1 and street2 matched") { 259 | assertEquals(128, addresses[0].id) 260 | assertEquals("Some block", addresses[0].street1) 261 | assertEquals("in the street", addresses[0].street2) 262 | } 263 | 264 | it("should have 2nd address street1 and street2 matched") { 265 | assertEquals(129, addresses[1].id) 266 | assertEquals("Company Block", addresses[1].street1) 267 | assertEquals("in the CBD", addresses[1].street2) 268 | } 269 | } 270 | 271 | // for 2nd user object 272 | it("the 2nd object in the array mapped should have all the user fields selected") { 273 | assertEquals(2, result[1].id) 274 | assertEquals("Superman", result[1].name) 275 | assertEquals(2, result[1].roleId) 276 | } 277 | 278 | val roles2 = result[1].role 279 | 280 | it("should also have a list of nested role objects") { 281 | assertEquals(true, roles2 != null) 282 | } 283 | 284 | if (roles2 != null) { 285 | it("should have one role") { 286 | assertEquals(true, roles2.size == 1) 287 | } 288 | 289 | val role2 = roles2[0] 290 | 291 | it("should be a Role instance") { 292 | assertTrue(Role::class.java == role2::class.java) 293 | } 294 | 295 | it("should have role name, role id, user id matched") { 296 | assertEquals(2, role2.id) 297 | assertEquals("Super Admin", role2.roleName) 298 | assertEquals(2, role2.userId) 299 | } 300 | } 301 | 302 | 303 | val addresses2 = result[1].address 304 | 305 | it("should also have a list of nested address objects") { 306 | assertEquals(true, addresses2 != null) 307 | } 308 | 309 | if (addresses2 != null) { 310 | it("should have 1 address") { 311 | assertEquals(true, addresses2.size == 1) 312 | } 313 | 314 | it("should be Address instances") { 315 | assertTrue(Address::class.java == addresses2[0]::class.java) 316 | } 317 | 318 | it("should have 1st address street1 and street2 matched") { 319 | assertEquals(131, addresses2[0].id) 320 
| assertEquals("A capsule", addresses2[0].street1) 321 | assertEquals("in the yard", addresses2[0].street2) 322 | } 323 | } 324 | } 325 | } 326 | 327 | } 328 | }) 329 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | io.zeko 6 | zeko-data-mapper 7 | 1.6.8-SNAPSHOT 8 | jar 9 | 10 | ${project.groupId}:${project.artifactId} 11 | 12 | A lightweight, fast and simple data mapper library in Kotlin that 13 | helps to map the result of queries from your normalized dataset(from RDBMS, SQLite, CSV or any source) 14 | back into relational mapped Hash maps 15 | 16 | https://github.com/darkredz/Zeko-Data-Mapper 17 | 18 | 19 | 20 | 21 | Apache License, Version 2.0 22 | https://www.apache.org/licenses/LICENSE-2.0 23 | repo 24 | 25 | 26 | 27 | 28 | 29 | 30 | Leng Sheng Hong 31 | darkredz@gmail.com 32 | Github 33 | 34 | https://github.com/darkredz 35 | UTC+08:00 36 | 37 | 38 | 39 | 40 | 41 | 42 | scm:git:https://github.com/darkredz/Zeko-Data-Mapper.git 43 | scm:git:ssh://github.com/darkredz/Zeko-Data-Mapper.git 44 | https://github.com/darkredz/Zeko-Data-Mapper 45 | 1.6.2 46 | 47 | 48 | 49 | 50 | ossrh 51 | https://oss.sonatype.org/content/repositories/snapshots 52 | 53 | 54 | ossrh 55 | https://oss.sonatype.org/service/local/staging/deploy/maven2/ 56 | 57 | 58 | 59 | 60 | 61 | 62 | mvnrepository 63 | http://mvnrepository.com/artifact/ 64 | 65 | 66 | jcenter 67 | https://jcenter.bintray.com/ 68 | 69 | 70 | spek-dev 71 | https://dl.bintray.com/spekframework/spek-dev 72 | 73 | 74 | 75 | 76 | 77 | jcenter 78 | JCenter 79 | https://jcenter.bintray.com/ 80 | 81 | 82 | 83 | 84 | 1.6.20 85 | 4.1.1 86 | 2.0.9 87 | 5.2.0 88 | UTF-8 89 | github 90 | 3.6.0 91 | 2.19.1 92 | 3.0.1 93 | 3.0.1 94 | 2.10.4 95 | 1.6.7 96 | 2.5.3 97 | 1.9.5 98 | 0.7.7.201606060606 99 | 4.3.0 100 | 3.5.1 101 | 3.0.1 102 | 3.0.1 103 | 2.3 104 | 2.4 105 | 2.8 106 | 2.10 107 
| 2.6 108 | 2.18.1 109 | 2.5 110 | 1.6 111 | 2.8.2 112 | 1.9.5 113 | 114 | 115 | 116 | 117 | org.jetbrains.kotlin 118 | kotlin-stdlib 119 | ${kotlin.version} 120 | 121 | 122 | io.zeko 123 | zeko-sql-builder 124 | 1.2.6 125 | test 126 | 127 | 128 | org.jetbrains.kotlin 129 | kotlin-test 130 | 1.6.20 131 | test 132 | 133 | 134 | io.vertx 135 | vertx-core 136 | ${vertx.version} 137 | 138 | 139 | com.github.jasync-sql 140 | jasync-common 141 | 1.2.3 142 | provided 143 | 144 | 145 | joda-time 146 | joda-time 147 | 2.9.7 148 | 149 | 150 | io.vertx 151 | vertx-jdbc-client 152 | ${vertx.version} 153 | provided 154 | 155 | 156 | com.fasterxml.jackson.core 157 | jackson-databind 158 | 2.11.4 159 | provided 160 | 161 | 162 | org.jetbrains.kotlin 163 | kotlin-stdlib-jdk8 164 | ${kotlin.version} 165 | compile 166 | 167 | 168 | org.spekframework.spek2 169 | spek-dsl-jvm 170 | ${spek.version} 171 | test 172 | 173 | 174 | org.spekframework.spek2 175 | spek-runner-junit5 176 | ${spek.version} 177 | test 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | release-sign-artifacts 186 | 187 | 188 | performRelease 189 | true 190 | 191 | 192 | 193 | 194 | 195 | org.apache.maven.plugins 196 | maven-gpg-plugin 197 | ${version.maven-gpg-plugin} 198 | 199 | 200 | sign-artifacts 201 | verify 202 | 203 | sign 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | src/main/kotlin 215 | src/test/kotlin 216 | 217 | 218 | 219 | maven-surefire-plugin 220 | 2.22.0 221 | 222 | 223 | org.junit.platform 224 | junit-platform-surefire-provider 225 | 1.2.0 226 | 227 | 228 | org.junit.jupiter 229 | junit-jupiter-engine 230 | ${jupiter.version} 231 | 232 | 233 | 234 | 235 | **/*Spec.* 236 | 237 | 238 | 239 | 240 | org.jetbrains.kotlin 241 | kotlin-maven-plugin 242 | 1.6.20 243 | 244 | 245 | compile 246 | compile 247 | 248 | compile 249 | 250 | 251 | 252 | test-compile 253 | test-compile 254 | 255 | test-compile 256 | 257 | 258 | 259 | 260 | 11 261 | 262 | 263 | 264 | org.apache.maven.plugins 265 | 
maven-jar-plugin 266 | 2.6 267 | 268 | 269 | 270 | true 271 | 272 | 273 | 274 | 275 | 276 | 277 | org.apache.maven.plugins 278 | maven-compiler-plugin 279 | ${version.maven-compiler-plugin} 280 | 281 | UTF-8 282 | 11 283 | 11 284 | true 285 | 286 | 287 | 288 | org.apache.maven.plugins 289 | maven-resources-plugin 290 | ${version.maven-resources-plugin} 291 | 292 | UTF-8 293 | 294 | 295 | 296 | org.apache.maven.plugins 297 | maven-source-plugin 298 | ${version.maven-source-plugin} 299 | 300 | 301 | **/*.jar 302 | 303 | 304 | 305 | 306 | attach-sources 307 | 308 | jar 309 | 310 | 311 | 312 | 313 | 314 | org.apache.maven.plugins 315 | maven-javadoc-plugin 316 | ${version.maven-javadoc-plugin} 317 | 318 | UTF-8 319 | 320 | 321 | 322 | attach-javadoc 323 | 324 | jar 325 | 326 | 327 | 328 | 329 | 330 | org.jetbrains.dokka 331 | dokka-maven-plugin 332 | 1.6.10 333 | 334 | 335 | generate-doc 336 | pre-site 337 | 338 | dokka 339 | javadoc 340 | 341 | 342 | 343 | before-package 344 | prepare-package 345 | 346 | javadocJar 347 | 348 | 349 | 350 | 351 | 352 | ${project.basedir}/src/main/kotlin 353 | 354 | 355 | 356 | 357 | maven-deploy-plugin 358 | ${version.maven-deploy-plugin} 359 | 360 | 361 | default-deploy 362 | deploy 363 | 364 | deploy 365 | 366 | 367 | 368 | 369 | 370 | org.sonatype.plugins 371 | nexus-staging-maven-plugin 372 | ${version.nexus-staging-maven-plugin} 373 | true 374 | 375 | ossrh 376 | https://oss.sonatype.org/ 377 | true 378 | 379 | 380 | 381 | org.apache.maven.plugins 382 | maven-release-plugin 383 | ${version.maven-release-plugin} 384 | 385 | true 386 | false 387 | forked-path 388 | -Dgpg.passphrase=${gpg.passphrase} 389 | 390 | 391 | 392 | org.apache.maven.scm 393 | maven-scm-provider-gitexe 394 | ${version.maven-scm-provider-gitexe} 395 | 396 | 397 | 398 | 399 | 400 | 401 | 402 | -------------------------------------------------------------------------------- /src/main/kotlin/io/zeko/model/DataMapper.kt: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2017 Leng Sheng Hong 3 | * ------------------------------------------------------ 4 | * Licensed to the Apache Software Foundation (ASF) under one 5 | * or more contributor license agreements. See the NOTICE file 6 | * distributed with this work for additional information 7 | * regarding copyright ownership. The ASF licenses this file 8 | * to you under the Apache License, Version 2.0 (the 9 | * "License"); you may not use this file except in compliance 10 | * with the License. You may obtain a copy of the License at 11 | * 12 | * http://www.apache.org/licenses/LICENSE-2.0 13 | * 14 | * Unless required by applicable law or agreed to in writing, software 15 | * distributed under the License is distributed on an "AS IS" BASIS, 16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | * See the License for the specific language governing permissions and 18 | * limitations under the License. 19 | */ 20 | 21 | package io.zeko.model 22 | 23 | import java.util.stream.Collectors 24 | 25 | data class TableInfo(var key: String, var move_under: String? = null, var foreign_key: String? = null, var rename: String? = null, 26 | var many_to_one: Boolean = false, var one_to_many: Boolean = false, var one_to_one: Boolean = false, var many_to_many: Boolean = false, 27 | var multiple_link: Boolean = false, var remove: List? = null, var dataClassHandler: ((dataMap: Map) -> Any)? 
= null) 28 | 29 | open class DataMapper { 30 | companion object { 31 | fun create(): DataMapper { 32 | return DataMapper() 33 | } 34 | } 35 | 36 | fun mapRaw(allTableInfo: LinkedHashMap, arr: List, delimiter: String = "-", nested: Boolean = true, objectListWithID: Boolean = false): LinkedHashMap?> { 37 | var flattenResult = LinkedHashMap?>(); 38 | var tables = allTableInfo 39 | 40 | for ((tblAlias, tblInfo) in tables) { 41 | val tblPrimeKey = tblInfo?.key 42 | var tblRename = "" 43 | 44 | if (nested) { 45 | tblRename = tblAlias 46 | } 47 | else { 48 | if (tblInfo?.rename != null) { 49 | tblRename = tblInfo.rename as String 50 | } 51 | else { 52 | tblRename = tblAlias 53 | } 54 | } 55 | 56 | var tblRows = flattenResult[tblRename] 57 | var tblHaveValues = false 58 | if (tblRows == null) { 59 | tblRows = LinkedHashMap() 60 | } 61 | 62 | //map all row fields value to the appropriate table object based on the prefix. 63 | for (row0 in arr) { 64 | var row = convertToMap(row0) 65 | if (tblInfo == null || row == null) continue 66 | 67 | val rowPrimeKeyName = tblAlias + delimiter + tblPrimeKey 68 | val tblAliasItem = tables[tblAlias] 69 | if (tblAliasItem == null) continue 70 | tblAliasItem.multiple_link = (tblInfo.many_to_many || tblInfo.one_to_many) 71 | tables[tblAlias] = tblAliasItem 72 | 73 | val primeKeyVal = row[rowPrimeKeyName] 74 | 75 | //if table is not many to many, dun care about the prime key, as rows of same id is reused to link between two tables 76 | if (tblAliasItem.multiple_link == false) { 77 | //if this table row already exists => check with primary key. then skip this row check for this table type. 78 | if (primeKeyVal == null || tblRows[primeKeyVal] != null) continue 79 | } 80 | 81 | //map all related fields to the table attribute 82 | var obj: LinkedHashMap? 
= null 83 | var allValues = 0 84 | var noValues = 0 85 | 86 | for ((field, value) in row) { 87 | val fieldParts = field.split(delimiter) 88 | val prefix = fieldParts[0] 89 | if (prefix != tblAlias) continue 90 | allValues++ 91 | 92 | val attr = fieldParts[1] 93 | if (value == null) noValues++ 94 | 95 | //exclude field in remove list 96 | val removeList = tblAliasItem.remove 97 | if (!nested && removeList != null && removeList.indexOf(attr) > -1) continue 98 | 99 | if (obj == null) { 100 | obj = LinkedHashMap() 101 | } 102 | obj.put(attr, value) 103 | } 104 | 105 | if (obj != null && noValues != allValues) { 106 | if (!tblAliasItem.multiple_link) { 107 | tblRows.put(primeKeyVal.toString(), obj) 108 | } 109 | else { 110 | tblRows.put(tblRows.size.toString(), obj) 111 | } 112 | tblHaveValues = true 113 | } 114 | } 115 | 116 | if (tblRows != null) { 117 | if (tblHaveValues) { 118 | flattenResult.put(tblRename, tblRows) 119 | } else { 120 | flattenResult.put(tblRename, null) 121 | } 122 | } 123 | } 124 | 125 | 126 | if (!nested) { 127 | return flattenResult 128 | } 129 | 130 | if (nested) { 131 | val tablesRenamed = LinkedHashMap(); 132 | for ((key, va) in tables) { 133 | if (va?.rename != null) { 134 | //also need to rename the move_under field if got rename set 135 | if (tables[va.move_under]?.rename != null) { 136 | val va2 = va.copy() 137 | va2.move_under = tables[va.move_under]?.rename 138 | tables[key] = va2 139 | } 140 | tablesRenamed[va.rename as String] = va 141 | } 142 | } 143 | 144 | if (tablesRenamed.size > 0) { 145 | //rename result table key to the renamed version 146 | for ((key, va) in flattenResult) { 147 | if (tables.containsKey(key) && tables[key]?.rename != null) { 148 | val tname: String = tables[key]?.rename!! 
149 | flattenResult[tname] = va 150 | flattenResult.remove(key) 151 | } 152 | } 153 | 154 | tables = tablesRenamed 155 | } 156 | 157 | //make a list of nested objects, move objects under each other appropriately based on their relationships 158 | for ((tbl, tblInfo) in tables) { 159 | var moveToTbl: LinkedHashMap? = LinkedHashMap() 160 | 161 | if (tblInfo?.move_under != null) { 162 | if (flattenResult[tblInfo.move_under.toString()] == null) continue; 163 | moveToTbl = convertToMap(flattenResult[tblInfo.move_under.toString()]!!) 164 | } 165 | 166 | val moveToTbl2 = moveToTbl?.clone() as LinkedHashMap 167 | 168 | for ((key, rows0) in moveToTbl2) { 169 | // just move the original table result list/single, and park under the linked table object, no logic. remove them that are not there later 170 | val oriTblResult = flattenResult[tbl] 171 | val primeKeyName = tables[tbl]?.key 172 | val rows = convertToMap(rows0) 173 | if (rows == null) continue 174 | 175 | val foreignKey = rows[primeKeyName] 176 | 177 | if (tblInfo?.foreign_key != null) { 178 | val fk = tblInfo.foreign_key 179 | //loop self table and check if can link to the foreign table based on key => foreign key 180 | 181 | //for many (many to many, one to many), remove those with same primary key (grouped since rows from query has a lot of same duplicates) 182 | //many results are all stored, without group 183 | if (tblInfo.many_to_many || tblInfo.one_to_many) { 184 | //remove those that are not suppose to linked with the foreign table result attr. 
185 | val manyResults = LinkedHashMap?>() 186 | if (oriTblResult != null) { 187 | for ((linkKey, linkRow0) in oriTblResult) { 188 | 189 | var linkRow = convertToMap(linkRow0) 190 | 191 | if (tblInfo.dataClassHandler != null) { 192 | linkRow!!.put("_tbl", tbl) 193 | } 194 | 195 | if (linkRow == null) continue 196 | 197 | if (foreignKey != linkRow[fk]) { 198 | continue; 199 | } 200 | 201 | val linkRowPrimeKey = linkRow[primeKeyName]?.toString() 202 | if (manyResults[linkRowPrimeKey] != null) { 203 | continue 204 | } 205 | 206 | //exclude field in remove list 207 | if (tables[tbl]?.remove != null) { 208 | for (fieldToRemove in tables[tbl]?.remove!!) { 209 | linkRow.remove(fieldToRemove) 210 | } 211 | } 212 | 213 | if (linkRowPrimeKey != null) { 214 | manyResults.put(linkRowPrimeKey, linkRow) 215 | } 216 | } 217 | rows.put(tbl, manyResults) 218 | } 219 | else { 220 | rows.put(tbl, null) 221 | } 222 | } 223 | else if (tblInfo.many_to_one || tblInfo.one_to_one) { 224 | if (oriTblResult == null) continue 225 | 226 | val tblRowConvert = convertToMap(oriTblResult) 227 | val tblRowConvert2 = tblRowConvert?.clone() as LinkedHashMap 228 | rows.put(tbl, tblRowConvert2) 229 | 230 | //remove those that are not suppose to linked with the foreign table result attr. 231 | for ((linkRowKey, linkRow0) in tblRowConvert) { 232 | val linkRow = convertToMap(linkRow0) 233 | if (linkRow == null) continue 234 | 235 | if (tblInfo.dataClassHandler != null) { 236 | linkRow.put("_tbl", tbl) 237 | } 238 | 239 | if (foreignKey != linkRow[fk]) { 240 | (rows[tbl] as LinkedHashMap).remove(linkRowKey) 241 | continue 242 | } 243 | 244 | //exclude field in remove list 245 | if (tables[tbl]?.remove != null) { 246 | for (fieldToRemove in tables[tbl]?.remove!!) 
{ 247 | val linkRowClone = (linkRow.clone() as LinkedHashMap) 248 | linkRowClone.remove(fieldToRemove) 249 | (rows[tbl] as LinkedHashMap)[linkRowKey] = linkRowClone 250 | } 251 | } 252 | } 253 | } 254 | 255 | //if the array is empty then just set as null 256 | if (!rows.containsKey(tbl) || rows[tbl] == null) { 257 | // rows[tbl] = null 258 | } 259 | else { 260 | //if only one is set, the attribute should be just the object itself instead of an array of one object. 261 | if (tblInfo.one_to_many || tblInfo.one_to_one) { 262 | var firstItemKey = LinkedHashMap() 263 | for ((itmKey, item) in (rows[tbl] as LinkedHashMap)) { 264 | firstItemKey = (item as LinkedHashMap) 265 | 266 | if (tblInfo.dataClassHandler != null) { 267 | item.put("_tbl", tbl) 268 | } 269 | break 270 | } 271 | rows[tbl] = firstItemKey 272 | } 273 | } 274 | } 275 | } 276 | 277 | if (tblInfo?.move_under != null) { 278 | flattenResult[tblInfo.move_under.toString()] = moveToTbl 279 | } 280 | 281 | //exclude field in remove list, this is for those that are not moved under, since moved under are already duplicated nested in. 
282 | if (tblInfo!!.remove != null) { 283 | val tblToChk = flattenResult[tbl] 284 | if (tblToChk != null) { 285 | for ((key, rows) in tblToChk) { 286 | val rowClone = (rows as LinkedHashMap).clone() as LinkedHashMap 287 | 288 | for ((attr, va) in (rows as LinkedHashMap)) { 289 | if (tblInfo!!.remove!!.contains(attr)) { 290 | rowClone.remove(attr) 291 | tblToChk[key] = rowClone 292 | } 293 | } 294 | } 295 | } 296 | } 297 | } 298 | } 299 | 300 | return flattenResult 301 | } 302 | 303 | fun flatMap(item: LinkedHashMap): List { 304 | val result = item.entries.stream() 305 | .map({ x -> 306 | (x.value as LinkedHashMap).remove("_tbl") 307 | if (x.value is LinkedHashMap<*,*>) { 308 | checkAndFlatMap(x.value as LinkedHashMap) 309 | } else { 310 | x.value 311 | } 312 | }) 313 | .collect(Collectors.toList()) 314 | 315 | return result 316 | } 317 | 318 | open fun checkAndFlatMap(toAdd: LinkedHashMap): Any { 319 | val toAdd2: LinkedHashMap = toAdd.clone() as LinkedHashMap 320 | for ((key, itemNest) in toAdd) { 321 | if (itemNest is LinkedHashMap<*,*>) { 322 | if (itemNest.keys.isEmpty()) { 323 | toAdd2[key] = null 324 | } 325 | else if (itemNest.keys.first().toString().matches(Regex("^\\d+$"))) { 326 | toAdd2[key] = flatMap(itemNest as LinkedHashMap) 327 | } else { 328 | toAdd2[key] = checkAndFlatMap(itemNest as LinkedHashMap) 329 | } 330 | } 331 | } 332 | return toAdd2 333 | } 334 | 335 | open fun map(mapConf: MapperConfig, arr: List, delimiter: String = "-"): ArrayList> { 336 | return map(mapConf.toTableInfo(), arr) 337 | } 338 | 339 | open fun map(allTableInfo: LinkedHashMap, arr: List, delimiter: String = "-"): ArrayList> { 340 | if (arr.size == 0) { 341 | return arrayListOf() 342 | } 343 | val rs = mapRaw(allTableInfo, arr, delimiter, true, false) 344 | //get the root table result since everything now is nested into objects. 
345 | if (rs != null && rs.size > 0) { 346 | val rootTable = rs.values.first() as LinkedHashMap> 347 | val arrFinal = ArrayList>() 348 | for ((key, item) in rootTable) { 349 | var toAdd: Any = item 350 | toAdd = checkAndFlatMap(toAdd as LinkedHashMap) 351 | arrFinal.add(toAdd as LinkedHashMap) 352 | } 353 | return arrFinal 354 | } 355 | return arrayListOf() 356 | } 357 | 358 | fun flatMapStruct(item: LinkedHashMap, allTableInfo: LinkedHashMap): List { 359 | val result = item.entries.stream() 360 | .map({ x -> 361 | val row = (x.value as LinkedHashMap) 362 | 363 | if (row is LinkedHashMap<*,*>) { 364 | checkAndFlatMapStruct(row as LinkedHashMap, allTableInfo) 365 | } else { 366 | row 367 | } 368 | }) 369 | .collect(Collectors.toList()) 370 | 371 | return result 372 | } 373 | 374 | open fun checkAndFlatMapStruct(toAdd: Any, allTableInfo: LinkedHashMap): Any { 375 | val toAdd2 = 376 | if (toAdd is LinkedHashMap<*, *>) 377 | toAdd.clone() as LinkedHashMap 378 | else if (toAdd is List<*>) 379 | toAdd 380 | else 381 | toAdd 382 | 383 | if (toAdd is LinkedHashMap<*, *>) { 384 | for ((k, itemNest) in toAdd) { 385 | val key = k.toString() 386 | if (itemNest is LinkedHashMap<*, *>) { 387 | toAdd2 as LinkedHashMap 388 | if (itemNest.keys.isEmpty()) { 389 | toAdd2[key] = null 390 | } else if (itemNest.keys.first().toString().matches(Regex("^\\d+$"))) { 391 | toAdd2[key] = flatMapStruct(itemNest as LinkedHashMap, allTableInfo) 392 | (toAdd as LinkedHashMap).put(key, toAdd2[key] as List) 393 | } else { 394 | val iterator = (itemNest as LinkedHashMap).keys.iterator() 395 | while (iterator.hasNext()) { 396 | val key = iterator.next() 397 | val propValue = itemNest[key] 398 | if (propValue is LinkedHashMap<*, *> && propValue.isEmpty()) { 399 | iterator.remove() 400 | } 401 | } 402 | 403 | val dt = checkAndFlatMapStruct(itemNest, allTableInfo) 404 | toAdd as LinkedHashMap 405 | toAdd2[key] = dt 406 | toAdd[key] = dt 407 | } 408 | } else { 409 | if (key == "_tbl") { 410 | val handler 
= allTableInfo[toAdd.get("_tbl")]!!.dataClassHandler 411 | if (handler != null) { 412 | val data = handler(toAdd as LinkedHashMap) 413 | return data 414 | } 415 | return toAdd 416 | } 417 | } 418 | } 419 | } 420 | return toAdd2 421 | } 422 | 423 | 424 | open fun mapStruct(mapConf: MapperConfig, arr: List, delimiter: String = "-"): ArrayList { 425 | return mapStruct(mapConf.toTableInfo(), arr) 426 | } 427 | 428 | open fun mapStruct(allTableInfo: LinkedHashMap, arr: List, delimiter: String = "-"): ArrayList { 429 | if (arr.size == 0) { 430 | return arrayListOf() 431 | } 432 | val rs = mapRaw(allTableInfo, arr, delimiter, true, false) 433 | //get the root table result since everything now is nested into objects. 434 | if (rs != null && rs.size > 0) { 435 | val rootTable = rs.values.first() as LinkedHashMap> 436 | val arrFinal = ArrayList() 437 | 438 | var rootTblInfo: TableInfo? = null 439 | for ((tblAlias, tblInfo) in allTableInfo) { 440 | rootTblInfo = tblInfo 441 | break 442 | } 443 | for ((key, item) in rootTable) { 444 | var toAdd: Any = item 445 | toAdd = checkAndFlatMapStruct(toAdd as LinkedHashMap, allTableInfo) as Map 446 | 447 | val handler = rootTblInfo?.dataClassHandler 448 | if (handler != null) { 449 | arrFinal.add(handler(toAdd)) 450 | } 451 | } 452 | return arrFinal 453 | } 454 | return arrayListOf() 455 | } 456 | 457 | open fun convertToMap(oriData: Any?): LinkedHashMap? { 458 | var dataNew = LinkedHashMap() 459 | 460 | if (oriData is Map<*, *>) { 461 | dataNew = oriData as LinkedHashMap 462 | } 463 | return dataNew 464 | } 465 | } 466 | --------------------------------------------------------------------------------