├── .git-blame-ignore-revs
├── .github
│   └── workflows
│       ├── ci.yml
│       └── release.yaml
├── .gitignore
├── .scalafmt.conf
├── CHANGES.md
├── LICENSE
├── README.md
├── build.sbt
├── project
│   ├── build.properties
│   └── plugins.sbt
└── src
    ├── it
    │   ├── resources
    │   │   ├── logback-test.xml
    │   │   └── migration
    │   │       └── 1__test_tables.cql
    │   └── scala
    │       └── com
    │           └── ringcentral
    │               └── cassandra4io
    │                   ├── Cassandra4IOTests.scala
    │                   ├── CassandraSessionSuite.scala
    │                   ├── CassandraTestsSharedInstances.scala
    │                   └── cql
    │                       └── CqlSuite.scala
    └── main
        └── scala
            └── com
                └── ringcentral
                    └── cassandra4io
                        ├── CassandraSession.scala
                        ├── cql
                        │   ├── CassandraTypeMapper.scala
                        │   ├── FieldName.scala
                        │   ├── FromUdtValue.scala
                        │   ├── Reads.scala
                        │   ├── ToUdtValue.scala
                        │   ├── UnexpectedNullValue.scala
                        │   └── package.scala
                        ├── package.scala
                        └── utils
                            └── JavaConcurrentToCats.scala
/.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Scala Steward: Reformat with scalafmt 2.7.5 2 | 765d5232875486e54d37bbcbf18ec53b764a8495 3 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up JDK 15 | uses: actions/setup-java@v1 16 | with: 17 | java-version: 11 18 | - name: Run unit tests 19 | run: sbt +test 20 | it-tests: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@v2 24 | - name: Set up JDK 25 | uses: actions/setup-java@v1 26 | with: 27 | java-version: 11 28 | - uses: docker-practice/actions-setup-docker@master 29 | with: 30 | docker_channel: stable 31 | docker_version: 20.10 32 | - name: Run it tests 33 | run: sbt +it:test 34 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | push: 4 | branches: [main] 5 | tags: ["*"] 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-20.04 9 | steps: 10 | - uses: actions/checkout@v2.3.4 11 | with: 12 | fetch-depth: 0 13 | - uses: olafurpg/setup-scala@v10 14 | - run: sbt ci-release 15 | env: 16 | PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} 17 | PGP_SECRET: ${{ secrets.PGP_SECRET }} 18 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} 19 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .metals/ 2 | .bsp/ 3 | target/ 4 | .idea/ 5 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = "3.8.3" 2 | maxColumn = 120 3 | align = most 4 | continuationIndent.defnSite = 2 5 | assumeStandardLibraryStripMargin = true 6 | docstrings = JavaDoc 7 | lineEndings = preserve 8 | includeCurlyBraceInSelectChains = false 9 | danglingParentheses = true 10 | spaces { 11 | inImportCurlyBraces = true 12 | } 13 | optIn.annotationNewlines = true 14 | 15 | rewrite.rules = [SortImports, RedundantBraces] -------------------------------------------------------------------------------- /CHANGES.md:
-------------------------------------------------------------------------------- 1 | ### Version 0.1.6 2 | Migrate to cats-effect 3.x 3 | ### Version 0.1.2 4 | First public release. Has no changes compared to 0.1.0 and 0.1.1. -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cassandra 4 io 2 | 3 | ![CI](https://github.com/ringcentral/cassandra4io/workflows/CI/badge.svg?branch=main) 4 | ![Maven Central](https://img.shields.io/maven-central/v/com.ringcentral/cassandra4io_2.13) 5 | 6 | This is a lightweight cats-effect and fs2 IO wrapper for the latest Datastax 4.x driver. 7 | 8 | Why 4.x?
9 | 10 | 4.x was rewritten with an immutability-first design and an async-first API; it brings 11 | optimizations, fewer allocations, and metrics improvements, and is fully compatible with Cassandra 3.x. 12 | 13 | ## Goals 14 | - Be safe and type-safe 15 | - Be fast 16 | - use minimal allocations 17 | - minimize resource and abstraction overhead over the original Datastax driver, which is already good 18 | 19 | 20 | ## How to use 21 | Cassandra4io is currently available for Scala 2.13 and 2.12. 22 | 23 | ### Add a dependency to your project 24 | ```scala 25 | libraryDependencies += ("com.ringcentral" %% "cassandra4io" % "0.1.14") 26 | ``` 27 | 28 | ### Create a connection to Cassandra 29 | ```scala 30 | import com.ringcentral.cassandra4io.CassandraSession 31 | 32 | import com.datastax.oss.driver.api.core.CqlSession 33 | import cats.effect._ 34 | 35 | import java.net.InetSocketAddress 36 | 37 | val builder = CqlSession 38 | .builder() 39 | .addContactPoint(InetSocketAddress.createUnresolved("localhost", 9042)) 40 | .withLocalDatacenter("datacenter1") 41 | .withKeyspace("awesome") 42 | 43 | def makeSession[F[_]: Async]: Resource[F, CassandraSession[F]] = 44 | CassandraSession.connect(builder) 45 | ``` 46 | 47 | ### Write some requests 48 | 49 | The package `com.ringcentral.cassandra4io.cql` introduces a typed way to deal with CQL queries. 50 | 51 | ### Simple syntax 52 | 53 | ```scala 54 | import cats.effect.Async 55 | import cats.syntax.all._ 56 | import com.datastax.oss.driver.api.core.ConsistencyLevel 57 | import com.ringcentral.cassandra4io.CassandraSession 58 | import com.ringcentral.cassandra4io.cql._ 59 | 60 | case class Model(id: Int, data: String) 61 | 62 | trait Dao[F[_]] { 63 | def put(value: Model): F[Unit] 64 | def get(id: Int): F[Option[Model]] 65 | } 66 | 67 | class DaoImpl[F[_]: Async](session: CassandraSession[F]) extends Dao[F] { 68 | 69 | private def insertQuery(value: Model) = 70 | cql"insert into table (id, data) values (${value.id}, ${value.data})" 71 | .config(_.setConsistencyLevel(ConsistencyLevel.ALL)) 72 | 73 | private def selectQuery(id: Int) = 74 | cql"select id, data from table where id = $id".as[Model] 75 | 76 | override def put(value: Model) = insertQuery(value).execute(session).void 77 | override def get(id: Int) = selectQuery(id).select(session).head.compile.last 78 | } 79 | ``` 80 | 81 | This syntax reuses the driver's internal prepared statement cache.
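For single-row lookups you can also use `selectFirst`, which returns `F[Option[A]]` directly instead of going through an fs2 stream. A minimal sketch, reusing `Model` and the `session` from the example above:

```scala
import cats.effect.Async
import com.ringcentral.cassandra4io.CassandraSession
import com.ringcentral.cassandra4io.cql._

// Equivalent to selectQuery(id).select(session).head.compile.last above,
// but without the stream plumbing.
def getDirect[F[_]: Async](session: CassandraSession[F], id: Int): F[Option[Model]] =
  cql"select id, data from table where id = $id".as[Model].selectFirst(session)
```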
82 | 83 | ### Templated syntax 84 | 85 | ```scala 86 | import cats.effect._ 87 | import scala.concurrent.duration._ 88 | import cats.syntax.all._ 89 | import scala.jdk.DurationConverters._ 90 | import com.datastax.oss.driver.api.core.ConsistencyLevel 91 | import com.ringcentral.cassandra4io.CassandraSession 92 | import com.ringcentral.cassandra4io.cql._ 93 | 94 | case class Model(pk: Long, ck: String, data: String, metaData: String) 95 | case class Key(pk: Long, ck: String) 96 | case class Data(data: String, metaData: String) 97 | 98 | trait Dao[F[_]] { 99 | def put(value: Model): F[Unit] 100 | def update(key: Key, data: Data): F[Unit] 101 | def get(key: Key): F[Option[Model]] 102 | } 103 | 104 | object Dao { 105 | 106 | private val tableName = "table" 107 | private val insertQuery = 108 | cqlt"insert into ${Const(tableName)} (pk, ck, data, meta_data) values (${Put[Long]}, ${Put[String]}, ${Put[String]}, ${Put[String]})" 109 | .config(_.setTimeout(1.second.toJava)) 110 | private val insertQueryAlternative = 111 | cqlt"insert into ${Const(tableName)} (${Columns[Model]}) values (${Values[Model]})" 112 | private val updateQuery = cqlt"update ${Const(tableName)} set ${Assignment[Data]} where ${EqualsTo[Key]}" 113 | private val selectQuery = cqlt"select ${Columns[Model]} from ${Const(tableName)} where ${EqualsTo[Key]}".as[Model] 114 | 115 | def apply[F[_] : Async](session: CassandraSession[F]) = for { 116 | insert <- insertQuery.prepare(session) 117 | updatePrepared <- updateQuery.prepare(session) 118 | insertAlternative <- insertQueryAlternative.prepare(session) 119 | select <- selectQuery.prepare(session) 120 | } yield new Dao[F] { 121 | override def put(value: Model) = insert( 122 | value.pk, 123 | value.ck, 124 | value.data, 125 | value.metaData 126 | ).execute.void // insertAlternative(value).execute.void 127 | override def update(key: Key, data: Data): F[Unit] = updatePrepared(data, key).execute.void 128 | override def get(key: Key) = select(key).config(_.setExecutionProfileName("default")).select.head.compile.last 129 | } 130 | } 131 | ``` 132 | As you can see `${Columns[Model]}` expands to `pk, ck, data, meta_data`, `${Values[Model]}` to `?, ?, ?, ?`, `${Assignment[Data]}` to `data = ?, meta_data = ?` and `${EqualsTo[Key]}` expands to `pk = ? and ck = ?`. 133 | The latter three types also adjust the query's type so that the corresponding values can be bound. 134 | 135 | ### Handling optional fields (`null`) 136 | 137 | By default, cassandra4io encodes `Option` as a `null` value, which is fine for most cases. But in Cassandra there is a difference between a `null` value and an unset value. In the Java driver this difference is represented by `BoundStatement#setToNull` (the default behavior) and `BoundStatement#unset` (leaving the field unset). The main advantage of using `unset` instead of `setToNull` is that no tombstone is created for the field. 138 | 139 | To use `unset` instead of `setToNull` for an optional value in a `cql` interpolator, add `.usingUnset` to the optional value, as in the following example: 140 | ```scala 141 | import com.ringcentral.cassandra4io.cql._ 142 | 143 | cql"insert into entities(foo, bar, baz) values (${e.foo}, ${e.bar}, ${e.baz.usingUnset})" 144 | ```
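To make the difference concrete, here is a hedged sketch (the `entities` table and the `Entity` case class are hypothetical) that writes one optional column with the default `null` encoding and one with `usingUnset`:

```scala
import cats.effect.Async
import com.ringcentral.cassandra4io.CassandraSession
import com.ringcentral.cassandra4io.cql._

final case class Entity(foo: Int, bar: Option[String], baz: Option[String])

def insertEntity[F[_]: Async](session: CassandraSession[F], e: Entity): F[Boolean] =
  // e.bar = None is written as null (may create a tombstone);
  // e.baz = None is left unset (no tombstone is written).
  cql"insert into entities (foo, bar, baz) values (${e.foo}, ${e.bar}, ${e.baz.usingUnset})"
    .execute(session)
```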
145 | 146 | ## User Defined Type (UDT) support 147 | 148 | Cassandra4IO provides support for Cassandra's User Defined Type (UDT) values. 149 | For example, given the following Cassandra schema: 150 | 151 | ```cql 152 | create type basic_info( 153 | weight double, 154 | height text, 155 | datapoints frozen<set<int>> 156 | ); 157 | 158 | create table person_attributes( 159 | person_id int, 160 | info frozen<basic_info>, 161 | PRIMARY KEY (person_id) 162 | ); 163 | ``` 164 | 165 | **Note:** `frozen` means immutable 166 | 167 | Here is how to insert and select data from the `person_attributes` table: 168 | 169 | ```scala 170 | final case class BasicInfo(weight: Double, height: String, datapoints: Set[Int]) 171 | object BasicInfo { 172 | implicit val cqlReads: Reads[BasicInfo] = FromUdtValue.deriveReads[BasicInfo] 173 | implicit val cqlBinder: Binder[BasicInfo] = ToUdtValue.deriveBinder[BasicInfo] 174 | } 175 | 176 | final case class PersonAttribute(personId: Int, info: BasicInfo) 177 | ``` 178 | 179 | We provide a set of typeclasses (`FromUdtValue` and `ToUdtValue`) under the hood that automatically convert your Scala 180 | types into types that Cassandra can understand, without you having to manually convert your data types into the Datastax Java 181 | driver's `UdtValue`s. 182 | 183 | ```scala 184 | class UDTUsageExample[F[_]: Async](session: CassandraSession[F]) { 185 | val data = PersonAttribute(1, BasicInfo(180.0, "tall", Set(1, 2, 3, 4, 5))) 186 | val insert: F[Boolean] = 187 | cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 188 | .execute(session) 189 | 190 | val retrieve: fs2.Stream[F, PersonAttribute] = 191 | cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 192 | .as[PersonAttribute] 193 | .select(session) 194 | } 195 | ``` 196 | 197 | ### More control over the transformation process of `UdtValue`s 198 | 199 | If you want additional control over how you map data types to and from Cassandra rather than using `FromUdtValue` 200 | & `ToUdtValue`, we expose the Datastax Java driver API for full control. Here is an example using `BasicInfo`: 201 | 202 | ```scala 203 | object BasicInfo { import com.datastax.oss.driver.api.core.data.UdtValue; import scala.jdk.CollectionConverters._ 204 | implicit val cqlReads: Reads[BasicInfo] = Reads[UdtValue].map { udtValue => 205 | BasicInfo( 206 | weight = udtValue.getDouble("weight"), 207 | height = udtValue.getString("height"), 208 | datapoints = udtValue 209 | .getSet[java.lang.Integer]("datapoints", classOf[java.lang.Integer]) 210 | .asScala 211 | .toSet 212 | .map { int: java.lang.Integer => Int.unbox(int) } 213 | ) 214 | } 215 | 216 | implicit val cqlBinder: Binder[BasicInfo] = Binder[UdtValue].contramapUDT { (info, constructor) => 217 | constructor 218 | .newValue() 219 | .setDouble("weight", info.weight) 220 | .setString("height", info.height) 221 | .setSet("datapoints", info.datapoints.map(Int.box).asJava, classOf[java.lang.Integer]) 222 | } 223 | } 224 | ``` 225 | 226 | Please note that we recommend using `FromUdtValue` and `ToUdtValue` to automatically derive this hand-written (and error-prone) 227 | code. 228 | 229 | ## Interpolating on CQL parameters 230 | 231 | Cassandra4IO allows you to interpolate (i.e. using string interpolation) on values that are not valid CQL parameters using 232 | `++` or `concat` to build out your CQL query. For example, you can interpolate on the keyspace and table name using 233 | the `cqlConst` interpolator like so: 234 | 235 | ```scala 236 | val session: CassandraSession[IO] = ??? 237 | val keyspaceName = "cassandra4io" 238 | val tableName = "person_attributes" 239 | val keyspace = cqlConst"$keyspaceName." 240 | val table = cqlConst"$tableName" 241 | 242 | def insert(data: PersonAttribute) = 243 | (cql"INSERT INTO " ++ keyspace ++ table ++ cql" (person_id, info) VALUES (${data.personId}, ${data.info})") 244 | .execute(session) 245 | ``` 246 | 247 | This allows you (the author of the application) to feed in parameters like the table name and keyspace through 248 | configuration. Please be aware that you should not take user input and feed it into `cqlConst`, as 249 | this would pose an injection risk. 250 | 251 | ## References 252 | - [Datastax Java driver](https://docs.datastax.com/en/developer/java-driver/4.9) 253 | 254 | ## License 255 | Cassandra4io is released under the [Apache License 2.0](https://opensource.org/licenses/Apache-2.0).
256 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | name := "cassandra4io" 2 | 3 | inThisBuild( 4 | List( 5 | organization := "com.ringcentral", 6 | organizationName := "ringcentral", 7 | licenses := List("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")), 8 | scalaVersion := crossScalaVersions.value.head, 9 | crossScalaVersions := Seq("2.13.15", "2.12.20"), 10 | licenses := Seq(("Apache-2.0", url("https://opensource.org/licenses/Apache-2.0"))), 11 | homepage := Some(url("https://github.com/ringcentral/cassandra4io")), 12 | developers := List( 13 | Developer(id = "narma", name = "Sergey Rublev", email = "alzo@alzo.space", url = url("https://narma.github.io")), 14 | Developer( 15 | id = "alexuf", 16 | name = "Alexey Yuferov", 17 | email = "aleksey.yuferov@icloud.com", 18 | url = url("https://github.com/alexuf") 19 | ) 20 | ) 21 | ) 22 | ) 23 | 24 | lazy val root = (project in file(".")) 25 | .configs(IntegrationTest) 26 | .settings( 27 | Defaults.itSettings, 28 | IntegrationTest / fork := true, 29 | libraryDependencies ++= Seq( 30 | "org.typelevel" %% "cats-effect" % "3.5.7", 31 | "co.fs2" %% "fs2-core" % "3.9.2", 32 | "com.datastax.oss" % "java-driver-core" % "4.17.0", 33 | "com.chuusai" %% "shapeless" % "2.3.10" 34 | ) ++ Seq( 35 | "com.disneystreaming" %% "weaver-cats" % "0.8.3" % "it,test", 36 | "org.testcontainers" % "testcontainers" % "1.20.4" % "it", 37 | "com.dimafeng" %% "testcontainers-scala-cassandra" % "0.41.4" % "it", 38 | "ch.qos.logback" % "logback-classic" % "1.5.12" % "it,test" 39 | ) ++ (scalaBinaryVersion.value match { 40 | case v if v.startsWith("2.13") => 41 | Seq.empty 42 | 43 | case v if v.startsWith("2.12") => 44 | Seq("org.scala-lang.modules" %% "scala-collection-compat" % "2.11.0") 45 | 46 | case other => 47 | sys.error(s"Unsupported scala version: $other") 48 | }) 49 | ) 50 | 51 | Compile / compile / scalacOptions ++= Seq( 52 | "-encoding", 53 | "utf-8", 54 | "-feature", 55 | "-unchecked", 56 | "-deprecation" 57 | ) ++ 58 | (scalaBinaryVersion.value match { 59 | case v if v.startsWith("2.13") => 60 | List( 61 | "-Xlint:strict-unsealed-patmat", 62 | "-Xlint:-serial", 63 | // "-Ywarn-unused", 64 | "-Ymacro-annotations", 65 | "-Yrangepos", 66 | "-Werror", 67 | "-explaintypes", 68 | "-language:higherKinds", 69 | "-language:implicitConversions", 70 | "-Xfatal-warnings", 71 | "-Wconf:any:error" 72 | ) 73 | case v if v.startsWith("2.12") => 74 | List( 75 | "-language:higherKinds", 76 | // "-Ywarn-unused", 77 | "-Yrangepos", 78 | "-explaintypes", 79 | "-language:higherKinds", 80 | "-language:implicitConversions", 81 | "-Xfatal-warnings" 82 | ) 83 | case v if v.startsWith("0.") => 84 | Nil 85 | case other => sys.error(s"Unsupported scala version: $other") 86 | }) 87 | 88 | testFrameworks := Seq(new TestFramework("weaver.framework.CatsEffect")) 89 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.9.6 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.9.0") 2 | -------------------------------------------------------------------------------- /src/it/resources/logback-test.xml: 
-------------------------------------------------------------------------------- 1 | <configuration> 2 | <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> 3 | <encoder> 4 | <pattern> 5 | %green(%d{HH:mm:ss.SSS}) %highlight(%-5level) %logger{30} - %msg%n 6 | </pattern> 7 | </encoder> 8 | </appender> 9 | 10 | <root level="INFO"> 11 | <appender-ref ref="STDOUT"/> 12 | </root> 13 | 14 | </configuration> 15 | 16 | -------------------------------------------------------------------------------- /src/it/resources/migration/1__test_tables.cql: -------------------------------------------------------------------------------- 1 | create table test_data( 2 | id bigint, 3 | data text, 4 | count int, 5 | dataset frozen<set<int>>, 6 | PRIMARY KEY (id) 7 | ); 8 | 9 | insert into test_data (id, data, count, dataset) values (0, null, null, null); 10 | insert into test_data (id, data, count, dataset) values (1, 'one', 10, {}); 11 | insert into test_data (id, data, count, dataset) values (2, 'two', 20, {201}); 12 | insert into test_data (id, data) values (3, 'three'); 13 | 14 | create table test_data_multiple_keys( 15 | id1 bigint, 16 | id2 int, 17 | data text, 18 | PRIMARY KEY (id1, id2) 19 | ); 20 | 21 | insert into test_data_multiple_keys (id1, id2, data) values (1, 2, 'one-two'); 22 | 23 | create type basic_info( 24 | weight double, 25 | height text, 26 | datapoints frozen<set<int>> 27 | ); 28 | 29 | create table person_attributes( 30 | person_id int, 31 | info frozen<basic_info>, 32 | PRIMARY KEY (person_id) 33 | ); 34 | 35 | create table test_collection( 36 | id int, 37 | maptest frozen<map<text, uuid>>, 38 | settest frozen<set<int>>, 39 | listtest frozen<list<date>>, 40 | PRIMARY KEY (id) 41 | ); 42 | 43 | CREATE TYPE example_type( 44 | x bigint, 45 | y bigint, 46 | date date, 47 | time time 48 | ); 49 | 50 | CREATE TYPE example_nested_type( 51 | a int, 52 | b text, 53 | c frozen<example_type> 54 | ); 55 | 56 | CREATE TYPE example_collection_nested_udt_type( 57 | a int, 58 | b frozen<map<int, frozen<set<frozen<set<frozen<set<frozen<set<example_nested_type>>>>>>>>>> 59 | ); 60 | 61 | CREATE TYPE example_nested_primitive_type( 62 | a int, 63 | b frozen<map<int, frozen<set<frozen<set<frozen<set<frozen<set<int>>>>>>>>>> 64 | ); 65 | 66 | CREATE TABLE heavily_nested_udt_table( 67 | id int, 68 | data example_collection_nested_udt_type, 69 | PRIMARY KEY (id) 70 | ); 71 | 72 | CREATE TABLE heavily_nested_prim_table( 73 | id int, 74 | data example_nested_primitive_type, 75 | PRIMARY KEY (id) 76 | ); 77 | 78 | create table test_data_interpolated( 79 | key bigint, 80 | projection_key text, 81 | projection_data text, 82 | offset bigint, 83 | timestamp bigint, 84 | PRIMARY KEY (key, projection_key) 85 | ); 86 | -------------------------------------------------------------------------------- /src/it/scala/com/ringcentral/cassandra4io/Cassandra4IOTests.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io 2 | 3 | import com.ringcentral.cassandra4io.cql.CqlSuite 4 | import weaver.IOSuite 5 | 6 | object Cassandra4IOTests extends IOSuite with CassandraTestsSharedInstances with CassandraSessionSuite with CqlSuite 7 | -------------------------------------------------------------------------------- /src/it/scala/com/ringcentral/cassandra4io/CassandraSessionSuite.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io 2 | 3 | import java.net.InetSocketAddress 4 | 5 | import cats.effect._ 6 | import com.datastax.oss.driver.api.core.cql.SimpleStatement 7 | import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException 8 | import com.datastax.oss.driver.api.core.CqlSession 9 | import weaver._ 10 | 11 | trait CassandraSessionSuite { self: IOSuite with CassandraTestsSharedInstances => 12 | 13 | implicit def toStatement(s: String): SimpleStatement =
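/* note: implicit conversion lifting raw CQL strings into SimpleStatement, so the tests below can pass plain strings where a Statement is expected */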
SimpleStatement.newInstance(s) 14 | 15 | test("CassandraSession.connect be referentially transparent") { _ => 16 | val testSession = CassandraSession.connect[IO]( 17 | CqlSession 18 | .builder() 19 | .addContactPoint(InetSocketAddress.createUnresolved(container.host, container.mappedPort(9042))) 20 | .withLocalDatacenter("datacenter1") 21 | ) 22 | val st = SimpleStatement.newInstance(s"select cluster_name from system.local") 23 | for { 24 | r1 <- testSession.use(session => session.selectFirst(st).map(_.map(_.getString(0)))) 25 | r2 <- testSession.use(session => session.selectFirst(st).map(_.map(_.getString(0)))) 26 | } yield expect(r1 == r2) 27 | } 28 | 29 | test("prepare should return PreparedStatement") { session => 30 | for { 31 | st <- session.prepare(s"select data FROM $keyspace.test_data WHERE id = :id") 32 | } yield expect(st.getQuery == s"select data FROM $keyspace.test_data WHERE id = :id") 33 | } 34 | 35 | test("prepare should return error on invalid request") { session => 36 | for { 37 | result <- session.prepare(s"select column404 FROM $keyspace.test_data WHERE id = :id").attempt 38 | error = getError(result) 39 | } yield expect(error.getMessage == "Undefined column name column404") && 40 | expect(error.isInstanceOf[InvalidQueryException]) 41 | } 42 | 43 | test("select should return prepared data") { session => 44 | for { 45 | results <- session 46 | .select(s"select data FROM $keyspace.test_data WHERE id IN (1,2,3)") 47 | .map(_.getString(0)) 48 | .compile 49 | .toList 50 | } yield expect(results == List("one", "two", "three")) 51 | } 52 | 53 | test("select should be pure stream") { session => 54 | val selectStream = session 55 | .select(s"select data FROM $keyspace.test_data WHERE id IN (1,2,3)") 56 | .map(_.getString(0)) 57 | .compile 58 | .toList 59 | for { 60 | _ <- selectStream 61 | results <- selectStream 62 | } yield expect(results == List("one", "two", "three")) 63 | } 64 | 65 | test("selectOne should return None on empty result") { session => 66 | for { 67 | result <- session 68 | .selectFirst(s"select data FROM $keyspace.test_data WHERE id = 404") 69 | .map(_.map(_.getString(0))) 70 | } yield expect(result.isEmpty) 71 | } 72 | 73 | test("selectOne should return Some for one") { session => 74 | for { 75 | result <- session 76 | .selectFirst(s"select data FROM $keyspace.test_data WHERE id = 1") 77 | .map(_.map(_.getString(0))) 78 | } yield expect(result.contains("one")) 79 | } 80 | 81 | test("selectOne should return Some(null) for null") { session => 82 | for { 83 | result <- session 84 | .selectFirst(s"select data FROM $keyspace.test_data WHERE id = 0") 85 | .map(_.map(_.getString(0))) 86 | } yield expect(result.contains(null)) 87 | } 88 | 89 | test("select will emit in chunks sized equal to statement pageSize") { session => 90 | val st = SimpleStatement.newInstance(s"select data from $keyspace.test_data").setPageSize(2) 91 | for { 92 | result <- session.select(st).map(_.getString(0)).chunks.compile.toList 93 | } yield expect(result.nonEmpty && result.map(_.size).forall(_ == 2)) 94 | } 95 | 96 | } 97 | -------------------------------------------------------------------------------- /src/it/scala/com/ringcentral/cassandra4io/CassandraTestsSharedInstances.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io 2 | 3 | import cats.effect.{ IO, Resource } 4 | import cats.implicits.catsSyntaxApplicative 5 | import cats.syntax.applicative._ 6 | import cats.syntax.foldable._ 7 | import 
com.datastax.oss.driver.api.core.cql.SimpleStatement 8 | import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder } 9 | import com.dimafeng.testcontainers.CassandraContainer 10 | import com.ringcentral.cassandra4io.utils.JavaConcurrentToCats.fromJavaAsync 11 | import org.testcontainers.utility.DockerImageName 12 | import weaver.IOSuite 13 | 14 | import java.net.InetSocketAddress 15 | import java.time.Duration 16 | import scala.io.BufferedSource 17 | import org.slf4j.LoggerFactory 18 | 19 | trait CassandraTestsSharedInstances { self: IOSuite => 20 | 21 | val logger = LoggerFactory.getLogger(self.getClass) 22 | 23 | val keyspace = "cassandra4io" 24 | val container = CassandraContainer(DockerImageName.parse("cassandra:3.11.11")) 25 | 26 | def migrateSession(session: CassandraSession[IO]): IO[Unit] = { 27 | val migrationSource = IO.blocking(scala.io.Source.fromResource("migration/1__test_tables.cql")) 28 | for { 29 | _ <- session.execute(s"use $keyspace") 30 | source <- migrationSource 31 | migrations = splitToMigrations(source) 32 | _ <- IO(logger.info("start cassandra migration for tests")) 33 | _ <- migrations.toList.traverse_ { migration => 34 | val st = SimpleStatement.newInstance(migration).setTimeout(Duration.ofSeconds(4)) 35 | session.execute(st).onError { error => 36 | IO { 37 | logger.error(s"Error in execution migration $migration", error) 38 | } 39 | } 40 | } 41 | _ <- IO(logger.info("cassandra migration done")) 42 | } yield () 43 | } 44 | 45 | def ensureKeyspaceExists(builder: CqlSessionBuilder): IO[Unit] = 46 | for { 47 | session <- fromJavaAsync(builder.withKeyspace(Option.empty[String].orNull).buildAsync()) 48 | _ <- 49 | fromJavaAsync( 50 | session.executeAsync( 51 | s"CREATE KEYSPACE IF NOT EXISTS $keyspace WITH replication = {'class':'SimpleStrategy', 'replication_factor':1};" 52 | ) 53 | ).unlessA(session.getMetadata.getKeyspace(keyspace).isPresent) 54 | _ <- fromJavaAsync(session.closeAsync()) 55 | } yield () 56 | 57 | override type Res = CassandraSession[IO] 58 | override def sharedResource: Resource[IO, Res] = 59 | Resource 60 | .make(IO.blocking { 61 | container.start() 62 | })(_ => IO.blocking(container.stop())) 63 | .flatMap { _ => 64 | val builder = CqlSession 65 | .builder() 66 | .addContactPoint(InetSocketAddress.createUnresolved(container.host, container.mappedPort(9042))) 67 | .withLocalDatacenter("datacenter1") 68 | .withKeyspace(keyspace) 69 | Resource.liftK(ensureKeyspaceExists(builder)).flatMap(_ => CassandraSession.connect[IO](builder)) 70 | } 71 | .evalTap(migrateSession) 72 | 73 | private def splitToMigrations(source: BufferedSource): Seq[String] = { 74 | val s1 = source 75 | .getLines() 76 | .toList 77 | .filterNot { line => 78 | val l = line.stripLeading() 79 | l.startsWith("//") || l.startsWith("--") 80 | } 81 | .mkString("") 82 | s1.split(';').toList.map(_.strip()) 83 | } 84 | 85 | def getError[T](either: Either[Throwable, T]): Throwable = 86 | either.swap.getOrElse(new RuntimeException("Either is right defined")) 87 | } 88 | -------------------------------------------------------------------------------- /src/it/scala/com/ringcentral/cassandra4io/cql/CqlSuite.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import cats.effect.IO 4 | import cats.syntax.parallel._ 5 | import com.datastax.oss.driver.api.core.ConsistencyLevel 6 | import com.ringcentral.cassandra4io.CassandraTestsSharedInstances 7 | import fs2.Stream 8 | import weaver._ 9 | 10 | 
import java.time.{ Duration, LocalDate, LocalTime } 11 | import java.util.UUID 12 | import java.util.concurrent.atomic.AtomicInteger 13 | 14 | trait CqlSuite { 15 | self: IOSuite with CassandraTestsSharedInstances => 16 | 17 | case class Data(id: Long, data: String) 18 | 19 | case class OptData(id: Long, data: Option[String]) 20 | 21 | case class BasicInfo(weight: Double, height: String, datapoints: Set[Int]) 22 | 23 | object BasicInfo { 24 | implicit val cqlReads: Reads[BasicInfo] = FromUdtValue.deriveReads[BasicInfo] 25 | implicit val cqlBinder: Binder[BasicInfo] = ToUdtValue.deriveBinder[BasicInfo] 26 | } 27 | 28 | case class PersonAttribute(personId: Int, info: BasicInfo) 29 | 30 | object PersonAttribute { 31 | val idxCounter = new AtomicInteger(0) 32 | } 33 | 34 | case class PersonAttributeOpt(personId: Int, info: Option[BasicInfo]) 35 | 36 | case class OptBasicInfo(weight: Option[Double], height: Option[String], datapoints: Option[Set[Int]]) 37 | 38 | object OptBasicInfo { 39 | implicit val cqlReads: Reads[OptBasicInfo] = FromUdtValue.deriveReads[OptBasicInfo] 40 | implicit val cqlBinder: Binder[OptBasicInfo] = ToUdtValue.deriveBinder[OptBasicInfo] 41 | } 42 | 43 | case class PersonAttributeUdtOpt(personId: Int, info: OptBasicInfo) 44 | 45 | case class CollectionTestRow( 46 | id: Int, 47 | maptest: Map[String, UUID], 48 | settest: Set[Int], 49 | listtest: Option[List[LocalDate]] 50 | ) 51 | 52 | case class ExampleType(x: Long, y: Long, date: LocalDate, time: Option[LocalTime]) 53 | 54 | case class ExampleNestedType(a: Int, b: String, c: Option[ExampleType]) 55 | 56 | case class ExampleCollectionNestedUdtType(a: Int, b: Map[Int, Set[Set[Set[Set[ExampleNestedType]]]]]) 57 | 58 | object ExampleCollectionNestedUdtType { 59 | implicit val binderExampleCollectionNestedUdtType: Binder[ExampleCollectionNestedUdtType] = 60 | ToUdtValue.deriveBinder[ExampleCollectionNestedUdtType] 61 | 62 | implicit val readsExampleCollectionNestedUdtType: Reads[ExampleCollectionNestedUdtType] = 63 | FromUdtValue.deriveReads[ExampleCollectionNestedUdtType] 64 | } 65 | 66 | case class ExampleNestedPrimitiveType(a: Int, b: Map[Int, Set[Set[Set[Set[Int]]]]]) 67 | 68 | object ExampleNestedPrimitiveType { 69 | implicit val binderExampleNestedPrimitiveType: Binder[ExampleNestedPrimitiveType] = 70 | ToUdtValue.deriveBinder[ExampleNestedPrimitiveType] 71 | 72 | implicit val readsExampleNestedPrimitiveType: Reads[ExampleNestedPrimitiveType] = 73 | FromUdtValue.deriveReads[ExampleNestedPrimitiveType] 74 | } 75 | 76 | case class TableContainingExampleCollectionNestedUdtType(id: Int, data: ExampleCollectionNestedUdtType) 77 | 78 | case class TableContainingExampleNestedPrimitiveType(id: Int, data: ExampleNestedPrimitiveType) 79 | 80 | test("interpolated select template should return data from migration") { session => 81 | for { 82 | prepared <- cqlt"select data FROM cassandra4io.test_data WHERE id in ${Put[List[Long]]}" 83 | .as[String] 84 | .config(_.setTimeout(Duration.ofSeconds(1))) 85 | .prepare(session) 86 | query = prepared(List[Long](1, 2, 3)) 87 | results <- query.select.compile.toList 88 | } yield expect(results == Seq("one", "two", "three")) 89 | } 90 | 91 | test("interpolated select template should return data from migration if table name is a constant parameter") { 92 | session => 93 | val tableName = "cassandra4io.test_data" 94 | for { 95 | prepared <- cqlt"select data FROM ${Const(tableName)} WHERE id in ${Put[List[Long]]}" 96 | .as[String] 97 | .config(_.setTimeout(Duration.ofSeconds(1))) 98 | 
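/* prepare compiles the template once against the session; applying the prepared value below binds the actual arguments */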
.prepare(session) 99 | query = prepared(List[Long](1, 2, 3)) 100 | results <- query.select.compile.toList 101 | } yield expect(results == Seq("one", "two", "three")) 102 | } 103 | 104 | test("interpolated select template should return tuples from migration") { session => 105 | for { 106 | prepared <- cqlt"select id, data, dataset FROM cassandra4io.test_data WHERE id in ${Put[List[Long]]}" 107 | .as[(Long, String, Option[Set[Int]])] 108 | .prepare(session) 109 | query = prepared(List[Long](1, 2, 3)) 110 | results <- query.select.compile.toList 111 | } yield expect(results == Seq((1, "one", Some(Set.empty)), (2, "two", Some(Set(201))), (3, "three", None))) 112 | } 113 | 114 | test("interpolated select template should return tuples from migration with multiple binding") { session => 115 | for { 116 | query <- 117 | cqlt"select data FROM cassandra4io.test_data_multiple_keys WHERE id1 = ${Put[Long]} and id2 = ${Put[Int]}" 118 | .as[String] 119 | .prepare(session) 120 | results <- query(1L, 2).config(_.setExecutionProfileName("default")).select.compile.toList 121 | } yield expect(results == Seq("one-two")) 122 | } 123 | 124 | test("interpolated select template should return tuples from migration with multiple binding and margin stripped") { 125 | session => 126 | for { 127 | query <- cqlt"""select data FROM cassandra4io.test_data_multiple_keys 128 | |WHERE id1 = ${Put[Long]} and id2 = ${Put[Int]}""".stripMargin.as[String].prepare(session) 129 | results <- query(1L, 2).config(_.setExecutionProfileName("default")).select.compile.toList 130 | } yield expect(results == Seq("one-two")) 131 | } 132 | 133 | test("interpolated select template should return data case class from migration") { session => 134 | for { 135 | prepared <- 136 | cqlt"select id, data FROM cassandra4io.test_data WHERE id in ${Put[List[Long]]}".as[Data].prepare(session) 137 | query = prepared(List[Long](1, 2, 3)) 138 | results <- query.select.compile.toList 139 | } yield expect(results == Seq(Data(1, "one"), Data(2, "two"), Data(3, "three"))) 140 | } 141 | 142 | test("interpolated select template should be reusable") { session => 143 | for { 144 | query <- cqlt"select data FROM cassandra4io.test_data WHERE id = ${Put[Long]}".as[String].prepare(session) 145 | result <- Stream.emits(Seq(1L, 2L, 3L)).flatMap(i => query(i).select).compile.toList 146 | } yield expect(result == Seq("one", "two", "three")) 147 | } 148 | 149 | test("interpolated select should return data from migration") { session => 150 | def getDataByIds(ids: List[Long]) = 151 | cql"select data FROM cassandra4io.test_data WHERE id in $ids" 152 | .as[String] 153 | .config(_.setConsistencyLevel(ConsistencyLevel.ALL)) 154 | 155 | for { 156 | results <- getDataByIds(List(1, 2, 3)).select(session).compile.toList 157 | } yield expect(results == Seq("one", "two", "three")) 158 | } 159 | 160 | test("interpolated select should return tuples from migration") { session => 161 | def getAllByIds(ids: List[Long]) = 162 | cql"select id, data FROM cassandra4io.test_data WHERE id in $ids".as[(Long, String)] 163 | 164 | for { 165 | results <- getAllByIds(List(1, 2, 3)).config(_.setQueryTimestamp(0L)).select(session).compile.toList 166 | } yield expect(results == Seq((1, "one"), (2, "two"), (3, "three"))) 167 | } 168 | 169 | test("interpolated select should return tuples from migration with multiple binding") { session => 170 | def getAllByIds(id1: Long, id2: Int) = 171 | cql"select data FROM cassandra4io.test_data_multiple_keys WHERE id1 = $id1 and id2 = $id2".as[String] 172 | 173 | for { 
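/* binds both components of the compound primary key (id1 is the partition key, id2 the clustering key in the migration schema) */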
174 | results <- getAllByIds(1, 2).select(session).compile.toList 175 | } yield expect(results == Seq("one-two")) 176 | } 177 | 178 | test("interpolated select should return tuples from migration with multiple binding and margin stripped") { session => 179 | def getAllByIds(id1: Long, id2: Int) = 180 | cql"""select data FROM cassandra4io.test_data_multiple_keys 181 | |WHERE id1 = $id1 and id2 = $id2""".stripMargin.as[String] 182 | 183 | for { 184 | results <- getAllByIds(1, 2).select(session).compile.toList 185 | } yield expect(results == Seq("one-two")) 186 | } 187 | 188 | test("interpolated select should return data case class from migration") { session => 189 | def getIds(ids: List[Long]) = 190 | cql"select id, data FROM cassandra4io.test_data WHERE id in $ids".as[Data] 191 | 192 | for { 193 | results <- getIds(List(1, 2, 3)).select(session).compile.toList 194 | } yield expect(results == Seq(Data(1, "one"), Data(2, "two"), Data(3, "three"))) 195 | } 196 | 197 | test( 198 | "interpolated inserts and selects should work with derived KeyEquals, Columns and Values" 199 | ) { session => 200 | case class Table(key: Long, projectionKey: String, projectionData: String, offset: Long, timestamp: Long) 201 | case class Key(key: Long, projectionKey: String) 202 | 203 | val insert = cqlt"INSERT INTO ${Const("test_data_interpolated")}(${Columns[Table]}) VALUES (${Values[Table]})" 204 | val select = 205 | cqlt"SELECT ${Columns[Table]} FROM ${Const("test_data_interpolated")} WHERE ${EqualsTo[Key]}" 206 | .as[Table] 207 | 208 | val data1 = Table(1, "projection-1", "data-1", 1, 1732547921580L) 209 | val data2 = Table(1, "projection-2", "data-1", 2, 1732547921586L) 210 | val key = Key(1, "projection-1") 211 | 212 | for { 213 | preparedInsert <- insert.prepare(session) 214 | preparedSelect <- select.prepare(session) 215 | _ <- preparedInsert(data1).execute 216 | _ <- preparedInsert(data2).execute 217 | result <- preparedSelect(key).select.compile.toList 218 | } yield expect(result == List(data1)) 219 | } 220 | 221 | test( 222 | "interpolated updates and selects should work with derived KeyEquals and Assignment" 223 | ) { session => 224 | case class Data(projectionData: String, offset: Long, timestamp: Long) 225 | case class Key(key: Long, projectionKey: String) 226 | 227 | val update = cqlt"UPDATE ${Const("test_data_interpolated")} SET ${Assignment[Data]} WHERE ${EqualsTo[Key]}" 228 | val select = 229 | cqlt"SELECT ${Columns[Data]} FROM ${Const("test_data_interpolated")} WHERE ${EqualsTo[Key]}" 230 | .as[Data] 231 | 232 | val data1 = Data("data-1", 1, 1732547921580L) 233 | val data2 = Data("data-1", 2, 1732547921586L) 234 | val key1 = Key(2, "projection-1") 235 | val key2 = Key(2, "projection-2") 236 | 237 | for { 238 | preparedUpdate <- update.prepare(session) 239 | preparedSelect <- select.prepare(session) 240 | _ <- preparedUpdate(data1, key1).execute 241 | _ <- preparedUpdate(data2, key2).execute 242 | result <- preparedSelect(key1).select.compile.toList 243 | } yield expect(result == List(data1)) 244 | } 245 | 246 | test( 247 | "interpolated inserts and selects should produce UDTs and return data case classes when nested case classes are used" 248 | ) { session => 249 | val data = 250 | PersonAttribute(PersonAttribute.idxCounter.incrementAndGet(), BasicInfo(180.0, "tall", Set(1, 2, 3, 4, 5))) 251 | val insert = 252 | cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 253 | .execute(session) 254 | 255 | val retrieve = cql"SELECT person_id, info 
FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 256 | .as[PersonAttribute] 257 | .select(session) 258 | .compile 259 | .toList 260 | 261 | for { 262 | _ <- insert 263 | result <- retrieve 264 | } yield expect(result.length == 1 && result.head == data) 265 | } 266 | 267 | test("interpolated inserts and selects should handle cassandra collections") { session => 268 | val dataRow1 = CollectionTestRow(1, Map("2" -> UUID.randomUUID()), Set(1, 2, 3), Option(List(LocalDate.now()))) 269 | val dataRow2 = CollectionTestRow(2, Map("3" -> UUID.randomUUID()), Set(4, 5, 6), None) 270 | 271 | def insert(data: CollectionTestRow): IO[Boolean] = 272 | cql"INSERT INTO cassandra4io.test_collection (id, maptest, settest, listtest) VALUES (${data.id}, ${data.maptest}, ${data.settest}, ${data.listtest})" 273 | .execute(session) 274 | 275 | def retrieve(id: Int, ids: Int*): IO[List[CollectionTestRow]] = { 276 | val allIds = id :: ids.toList 277 | cql"SELECT id, maptest, settest, listtest FROM cassandra4io.test_collection WHERE id IN $allIds" 278 | .as[CollectionTestRow] 279 | .select(session) 280 | .compile 281 | .toList 282 | } 283 | 284 | for { 285 | _ <- List(dataRow1, dataRow2).parTraverse(insert) 286 | res1 <- retrieve(dataRow1.id) 287 | res2 <- retrieve(dataRow2.id) 288 | } yield expect(res1.length == 1 && res1.head == dataRow1) and expect(res2.length == 1 && res2.head == dataRow2) 289 | } 290 | 291 | test("interpolated inserts and selects should handle nested UDTs in heavily nested collections") { session => 292 | val row = TableContainingExampleCollectionNestedUdtType( 293 | id = 1, 294 | data = ExampleCollectionNestedUdtType( 295 | a = 2, 296 | b = Map( 297 | 1 -> Set( 298 | Set( 299 | Set( 300 | Set( 301 | ExampleNestedType( 302 | a = 3, 303 | b = "4", 304 | c = Option(ExampleType(x = 5L, y = 6L, date = LocalDate.now(), time = Option(LocalTime.now()))) 305 | ) 306 | ) 307 | ) 308 | ) 309 | ), 310 | 2 -> Set( 311 | Set( 312 | Set( 313 | Set( 314 | ExampleNestedType( 315 | a = 10, 316 | b = "100", 317 | c = Option(ExampleType(x = 105L, y = 106L, date = LocalDate.now(), time = None)) 318 | ) 319 | ) 320 | ) 321 | ) 322 | ), 323 | 3 -> Set( 324 | Set( 325 | Set( 326 | Set( 327 | ExampleNestedType( 328 | a = 24, 329 | b = "101", 330 | c = None 331 | ) 332 | ) 333 | ) 334 | ) 335 | ) 336 | ) 337 | ) 338 | ) 339 | val insert = 340 | cql"INSERT INTO cassandra4io.heavily_nested_udt_table (id, data) VALUES (${row.id}, ${row.data})".execute(session) 341 | 342 | val retrieve = cql"SELECT id, data FROM cassandra4io.heavily_nested_udt_table WHERE id = ${row.id}" 343 | .as[TableContainingExampleCollectionNestedUdtType] 344 | .select(session) 345 | .compile 346 | .toList 347 | 348 | for { 349 | _ <- insert 350 | actual <- retrieve 351 | } yield expect(actual.length == 1 && actual.head == row) 352 | } 353 | 354 | test("interpolated inserts and selects should handle UDTs and primitives in heavily nested collections") { session => 355 | val row = TableContainingExampleNestedPrimitiveType( 356 | id = 1, 357 | data = ExampleNestedPrimitiveType( 358 | a = 1, 359 | b = Map( 360 | 1 -> Set(Set(Set(Set(2, 3), Set(4, 5)))), 361 | 2 -> Set(Set(Set(Set(7, 8)))) 362 | ) 363 | ) 364 | ) 365 | val insert = 366 | cql"INSERT INTO cassandra4io.heavily_nested_prim_table (id, data) VALUES (${row.id}, ${row.data})".execute( 367 | session 368 | ) 369 | 370 | val retrieve = cql"SELECT id, data FROM cassandra4io.heavily_nested_prim_table WHERE id = ${row.id}" 371 | .as[TableContainingExampleNestedPrimitiveType] 372 | 
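/* decoding goes through the derived Reads instance, rebuilding the heavily nested Scala collections */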
.select(session) 373 | .compile 374 | .toList 375 | 376 | for { 377 | _ <- insert 378 | actual <- retrieve 379 | } yield expect(actual.length == 1 && actual.head == row) 380 | } 381 | 382 | test("interpolated select should bind constants") { session => 383 | val query = cql"select data FROM cassandra4io.test_data WHERE id = ${1L}".as[String] 384 | for { 385 | result <- query.select(session).compile.toList 386 | } yield expect(result == Seq("one")) 387 | } 388 | 389 | test("cqlConst allows you to interpolate on what is usually not possible with cql strings") { session => 390 | val data = 391 | PersonAttribute(PersonAttribute.idxCounter.incrementAndGet(), BasicInfo(180.0, "tall", Set(1, 2, 3, 4, 5))) 392 | val keyspaceName = "cassandra4io" 393 | val tableName = "person_attributes" 394 | val selectFrom = cql"SELECT person_id, info FROM " 395 | val keyspace = cqlConst"$keyspaceName." 396 | val table = cqlConst"$tableName" 397 | 398 | def where(personId: Int) = 399 | cql" WHERE person_id = $personId" 400 | 401 | def insert(data: PersonAttribute) = 402 | (cql"INSERT INTO " ++ keyspace ++ table ++ cql" (person_id, info) VALUES (${data.personId}, ${data.info})") 403 | .execute(session) 404 | 405 | for { 406 | _ <- insert(data) 407 | result <- (selectFrom ++ keyspace ++ table ++ where(data.personId)).as[PersonAttribute].selectFirst(session) 408 | } yield expect(result.isDefined && result.get == data) 409 | } 410 | 411 | // handle NULL values 412 | test("decoding from null should return None for Option[String]") { session => 413 | for { 414 | result <- cql"select data FROM cassandra4io.test_data WHERE id = 0".as[Option[String]].selectFirst(session) 415 | } yield expect(result.isDefined && result.get.isEmpty) 416 | } 417 | 418 | test("decoding from null should raise error for String(non-primitive)") { session => 419 | for { 420 | result <- 421 | cql"select data FROM cassandra4io.test_data WHERE id = 0".as[String].selectFirst(session).attempt 422 | } yield expect(result.isLeft) && expect( 423 | getError(result).isInstanceOf[UnexpectedNullValue] 424 | ) 425 | } 426 | 427 | test("decoding from null should raise error for Int(primitive)") { session => 428 | for { 429 | result <- 430 | cql"select count FROM cassandra4io.test_data WHERE id = 0".as[String].selectFirst(session).attempt 431 | } yield expect(result.isLeft) && expect( 432 | getError(result).isInstanceOf[UnexpectedNullValue] 433 | ) 434 | } 435 | 436 | test("decoding from null should raise error for Set(collection)") { session => 437 | for { 438 | result <- 439 | cql"select dataset FROM cassandra4io.test_data WHERE id = 0".as[Set[Int]].selectFirst(session).attempt 440 | } yield expect(result.isLeft) && expect( 441 | getError(result).isInstanceOf[UnexpectedNullValue] 442 | ) 443 | } 444 | 445 | test("decoding from null should return None for Option[String] field in case class") { session => 446 | for { 447 | row <- cql"select id, data FROM cassandra4io.test_data WHERE id = 0".as[OptData].selectFirst(session) 448 | } yield expect(row.isDefined && row.get.data.isEmpty) 449 | } 450 | 451 | test("decoding from null should return None for optional case class first parameter") { session => 452 | case class OptDataReverse(data: Option[String], id: Long) 453 | for { 454 | row <- cql"select data, id FROM cassandra4io.test_data WHERE id = 0".as[OptDataReverse].selectFirst(session) 455 | } yield expect(row.isDefined && row.get.data.isEmpty) 456 | } 457 | 458 | test("decoding from null should raise error String field in case class") { session => 459 | for { 
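/* Data#data is a non-optional String, so decoding the null column must fail with UnexpectedNullValue */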
460 | result <- cql"select id, data FROM cassandra4io.test_data WHERE id = 0".as[Data].selectFirst(session).attempt 461 | } yield expect(result.isLeft) && expect(getError(result).isInstanceOf[UnexpectedNullValue]) 462 | } 463 | 464 | test("nullable field should be correctly encoded in inserts") { session => 465 | val id = 5L 466 | val data: Option[String] = None 467 | for { 468 | result <- cql"insert into cassandra4io.test_data (id, data) values ($id, $data)".execute(session).attempt 469 | } yield expect(result.isRight) && expect(result.contains(true)) 470 | } 471 | 472 | test("nullable fields should be correctly set with 'usingUnset'") { session => 473 | case class TestData(id: Long, data: Option[String], count: Option[Int]) 474 | val id = 111L 475 | val data1 = TestData(id, Some("test"), Some(15)) 476 | val data2 = TestData(id, None, None) 477 | 478 | // This test looks a bit awkward: an unset column is simply not overwritten, and reading cannot 479 | // tell an unset field apart from a null one, so the second select still sees the count written by the first insert. 480 | for { 481 | insertResult1 <- 482 | cql"insert into cassandra4io.test_data (id, data, count) values (${data1.id}, ${data1.data}, ${data1.count})" 483 | .execute(session) 484 | .attempt 485 | selectResult1 <- 486 | cql"select id, data, count from cassandra4io.test_data where id = $id".as[TestData].selectFirst(session) 487 | insertResult2 <- 488 | cql"insert into cassandra4io.test_data (id, data, count) values (${data2.id}, ${data2.data}, ${data2.count.usingUnset})" 489 | .execute(session) 490 | .attempt 491 | selectResult2 <- 492 | cql"select id, data, count from cassandra4io.test_data where id = $id".as[TestData].selectFirst(session) 493 | } yield expect(insertResult1.contains(true)) && 494 | expect(insertResult2.contains(true)) && 495 | expect(selectResult1.contains(data1)) && 496 | expect(selectResult2.contains(data2.copy(count = data1.count))) 497 | } 498 | 499 | // handle NULL values for udt columns 500 | 501 | test("decoding from null at udt column should return None for Option type") { session => 502 | val data = PersonAttributeOpt(PersonAttribute.idxCounter.incrementAndGet(), None) 503 | 504 | for { 505 | _ <- cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 506 | .execute(session) 507 | result <- cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 508 | .as[PersonAttributeOpt] 509 | .select(session) 510 | .compile 511 | .toList 512 | } yield expect(result.length == 1 && result.head == data) 513 | } 514 | 515 | test("decoding from null at udt column should raise error for non-Option type") { session => 516 | val data = PersonAttributeOpt(PersonAttribute.idxCounter.incrementAndGet(), None) 517 | 518 | for { 519 | _ <- cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 520 | .execute(session) 521 | result <- cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 522 | .as[PersonAttribute] 523 | .selectFirst(session) 524 | .attempt 525 | } yield expect(result.isLeft) && expect(getError(result).isInstanceOf[UnexpectedNullValue]) 526 | } 527 | 528 | // handle NULL inside udt 529 | 530 | test("decoding from null at udt field should return None for Option type") { session => 531 | val data = PersonAttributeUdtOpt( 532 | PersonAttribute.idxCounter.incrementAndGet(), 533 | OptBasicInfo(None, None, None) 534 | ) 535 | 536 | for { 537 | _ <- cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES
(${data.personId}, ${data.info})" 538 | .execute(session) 539 | result <- cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 540 | .as[PersonAttributeUdtOpt] 541 | .selectFirst(session) 542 | } yield expect(result.contains(data)) 543 | } 544 | 545 | test("decoding from null at udt field should raise error for String(non-primitive)") { session => 546 | val data = 547 | PersonAttributeUdtOpt( 548 | PersonAttribute.idxCounter.incrementAndGet(), 549 | OptBasicInfo(Some(160.0), None, Some(Set(1))) 550 | ) 551 | for { 552 | _ <- 553 | cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 554 | .execute(session) 555 | result <- 556 | cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 557 | .as[PersonAttribute] 558 | .selectFirst(session) 559 | .attempt 560 | } yield expect(result.isLeft) && expect(getError(result).isInstanceOf[UnexpectedNullValue]) 561 | } 562 | 563 | test("decoding from null at udt field should raise error for Double(primitive)") { session => 564 | val data = 565 | PersonAttributeUdtOpt( 566 | PersonAttribute.idxCounter.incrementAndGet(), 567 | OptBasicInfo(None, Some("tall"), Some(Set(1))) 568 | ) 569 | for { 570 | _ <- 571 | cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 572 | .execute(session) 573 | result <- 574 | cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 575 | .as[PersonAttribute] 576 | .selectFirst(session) 577 | .attempt 578 | } yield expect(result.isLeft) && expect(getError(result).isInstanceOf[UnexpectedNullValue]) 579 | } 580 | 581 | test("decoding from null at udt field should raise error for Set(collection)") { session => 582 | val data = 583 | PersonAttributeUdtOpt( 584 | PersonAttribute.idxCounter.incrementAndGet(), 585 | OptBasicInfo(Some(180.0), Some("tall"), None) 586 | ) 587 | for { 588 | _ <- 589 | cql"INSERT INTO cassandra4io.person_attributes (person_id, info) VALUES (${data.personId}, ${data.info})" 590 | .execute(session) 591 | result <- 592 | cql"SELECT person_id, info FROM cassandra4io.person_attributes WHERE person_id = ${data.personId}" 593 | .as[PersonAttribute] 594 | .selectFirst(session) 595 | .attempt 596 | } yield expect(result.isLeft) && expect(getError(result).isInstanceOf[UnexpectedNullValue]) 597 | } 598 | 599 | } 600 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/CassandraSession.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io 2 | 3 | import cats.effect.{ Async, Resource } 4 | import cats.syntax.functor._ 5 | import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder } 6 | import com.datastax.oss.driver.api.core.cql._ 7 | import com.datastax.oss.driver.api.core.metrics.Metrics 8 | import com.ringcentral.cassandra4io.utils.JavaConcurrentToCats.fromJavaAsync 9 | import fs2.{ Chunk, Pull, Stream } 10 | 11 | import scala.jdk.CollectionConverters._ 12 | 13 | trait CassandraSession[F[_]] { 14 | def prepare(stmt: String): F[PreparedStatement] 15 | def execute(stmt: Statement[_]): F[AsyncResultSet] 16 | def execute(query: String): F[AsyncResultSet] 17 | def select(stmt: Statement[_]): Stream[F, Row] 18 | 19 | // short-cuts 20 | def selectFirst(stmt: Statement[_]): F[Option[Row]] 21 | 22 | // metrics 23 | def metrics: 
Option[Metrics] 24 | } 25 | 26 | object CassandraSession { 27 | 28 | private class Live[F[_]: Async]( 29 | underlying: CqlSession 30 | ) extends CassandraSession[F] { 31 | 32 | // getMetrics returns java.util.Optional, which CollectionConverters cannot convert 33 | def metrics: Option[Metrics] = scala.jdk.javaapi.OptionConverters.toScala(underlying.getMetrics) 34 | 35 | override def prepare(stmt: String): F[PreparedStatement] = 36 | fromJavaAsync(underlying.prepareAsync(stmt)) 37 | 38 | override def execute(stmt: Statement[_]): F[AsyncResultSet] = 39 | fromJavaAsync(underlying.executeAsync(stmt)) 40 | 41 | override def select(stmt: Statement[_]): Stream[F, Row] = { 42 | def go(current: F[AsyncResultSet]): Pull[F, Row, Unit] = 43 | Pull 44 | .eval(current) 45 | .flatMap { rs => 46 | val chunk = Chunk.from(rs.currentPage().asScala) 47 | 48 | if (rs.hasMorePages) 49 | Pull.output(chunk) >> go(fromJavaAsync(rs.fetchNextPage())) 50 | else Pull.output(chunk) >> Pull.done 51 | } 52 | go(execute(stmt)).stream 53 | } 54 | 55 | override def execute(query: String): F[AsyncResultSet] = 56 | fromJavaAsync(underlying.executeAsync(query)) 57 | 58 | // short-cuts 59 | def selectFirst(stmt: Statement[_]): F[Option[Row]] = 60 | execute(stmt).map(rs => Option(rs.one())) 61 | } 62 | 63 | /** 64 | * Create CassandraSession from prepared CqlSessionBuilder 65 | * 66 | * @param builder prepared CqlSessionBuilder 67 | * @tparam F rabbit hole 68 | * @return Resource with CassandraSession, use it wisely 69 | */ 70 | def connect[F[_]: Async]( 71 | builder: CqlSessionBuilder 72 | ): Resource[F, CassandraSession[F]] = 73 | Resource 74 | .make[F, CqlSession](fromJavaAsync(builder.buildAsync()))(session => fromJavaAsync(session.closeAsync()).void) 75 | .map(cqlSession => new Live[F](cqlSession)) 76 | 77 | /** 78 | * Create CassandraSession from an existing CqlSession. 79 | * Note: the creator of the CqlSession is responsible for managing its lifecycle; this constructor is meant for interop with an existing codebase 80 | * 81 | * @param session is an existing CqlSession 82 | * @tparam F - effect type that requires the Async capability 83 | * @return CassandraSession 84 | */ 85 | def existing[F[_]: Async](session: CqlSession): CassandraSession[F] = 86 | new Live[F](session) 87 | } 88 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/CassandraTypeMapper.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import com.datastax.oss.driver.api.core.`type`.{ DataType, UserDefinedType } 4 | import com.datastax.oss.driver.api.core.data.UdtValue 5 | import com.datastax.oss.driver.internal.core.`type`.{ DefaultListType, DefaultMapType, DefaultSetType } 6 | import shapeless.Lazy 7 | 8 | import java.nio.ByteBuffer 9 | import java.time.LocalDate 10 | import java.util.Optional 11 | import scala.jdk.CollectionConverters._ 12 | 13 | /** 14 | * A compile-time safe alternative to reflection for primitive Cassandra types. 15 | * This typeclass holds onto the Cassandra type associated with each Scala type for the underlying Datastax API 16 | * and handles boxing where needed 17 | * 18 | * @tparam Scala is the Scala type that is being mapped to the Datastax type 19 | */ 20 | trait CassandraTypeMapper[Scala] { 21 | type Cassandra 22 | def classType: Class[Cassandra] 23 | def toCassandra(in: Scala, dataType: DataType): Cassandra 24 | def fromCassandra(in: Cassandra, dataType: DataType): Scala 25 | def allowNullable: Boolean = false 26 | } 27 | object CassandraTypeMapper { 28 |
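// A minimal usage sketch of this typeclass (illustration only, not part of the sources):
// summon an instance and round-trip a value through its Cassandra representation.
// `DataTypes.TEXT` comes from the Datastax driver; the values are made up.
//   import com.datastax.oss.driver.api.core.`type`.DataTypes
//   val mapper         = CassandraTypeMapper[String]                  // summons strCassandraTypeMapper below
//   val stored         = mapper.toCassandra("tall", DataTypes.TEXT)   // stored: mapper.Cassandra (String here)
//   val loaded: String = mapper.fromCassandra(stored, DataTypes.TEXT) // round-trips back to the Scala type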
type WithCassandra[Sc, Cas] = CassandraTypeMapper[Sc] { type Cassandra = Cas } 29 | 30 | def apply[A](implicit ev: CassandraTypeMapper[A]): CassandraTypeMapper[A] = ev 31 | 32 | implicit val strCassandraTypeMapper: CassandraTypeMapper.WithCassandra[String, String] = 33 | new CassandraTypeMapper[String] { 34 | type Cassandra = String 35 | def classType: Class[Cassandra] = classOf[String] 36 | def toCassandra(in: String, dataType: DataType): Cassandra = in 37 | def fromCassandra(in: Cassandra, dataType: DataType): String = in 38 | } 39 | 40 | implicit val doubleCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Double, java.lang.Double] = 41 | new CassandraTypeMapper[Double] { 42 | type Cassandra = java.lang.Double 43 | def classType: Class[Cassandra] = classOf[java.lang.Double] 44 | def toCassandra(in: Double, dataType: DataType): Cassandra = Double.box(in) 45 | def fromCassandra(in: Cassandra, dataType: DataType): Double = in 46 | } 47 | 48 | implicit val intCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Int, java.lang.Integer] = 49 | new CassandraTypeMapper[Int] { 50 | type Cassandra = java.lang.Integer 51 | def classType: Class[Cassandra] = classOf[java.lang.Integer] 52 | def toCassandra(in: Int, dataType: DataType): Cassandra = scala.Int.box(in) 53 | def fromCassandra(in: Cassandra, dataType: DataType): Int = in 54 | } 55 | 56 | implicit val longCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Long, java.lang.Long] = 57 | new CassandraTypeMapper[Long] { 58 | type Cassandra = java.lang.Long 59 | def classType: Class[Cassandra] = classOf[java.lang.Long] 60 | def toCassandra(in: Long, dataType: DataType): Cassandra = Long.box(in) 61 | def fromCassandra(in: Cassandra, dataType: DataType): Long = in 62 | } 63 | 64 | implicit val byteBufferCassandraTypeMapper: CassandraTypeMapper.WithCassandra[ByteBuffer, ByteBuffer] = 65 | new CassandraTypeMapper[ByteBuffer] { 66 | type Cassandra = java.nio.ByteBuffer 67 | def classType: Class[Cassandra] = classOf[java.nio.ByteBuffer] 68 | def toCassandra(in: ByteBuffer, dataType: DataType): Cassandra = in 69 | def fromCassandra(in: Cassandra, dataType: DataType): ByteBuffer = in 70 | } 71 | 72 | implicit val localDateCassandraTypeMapper: CassandraTypeMapper.WithCassandra[LocalDate, LocalDate] = 73 | new CassandraTypeMapper[java.time.LocalDate] { 74 | type Cassandra = java.time.LocalDate 75 | def classType: Class[Cassandra] = classOf[java.time.LocalDate] 76 | def toCassandra(in: java.time.LocalDate, dataType: DataType): Cassandra = in 77 | def fromCassandra(in: Cassandra, dataType: DataType): java.time.LocalDate = in 78 | } 79 | 80 | implicit val localTimeCassandraTypeMapper 81 | : CassandraTypeMapper.WithCassandra[java.time.LocalTime, java.time.LocalTime] = 82 | new CassandraTypeMapper[java.time.LocalTime] { 83 | type Cassandra = java.time.LocalTime 84 | def classType: Class[Cassandra] = classOf[java.time.LocalTime] 85 | def toCassandra(in: java.time.LocalTime, dataType: DataType): Cassandra = in 86 | def fromCassandra(in: Cassandra, dataType: DataType): java.time.LocalTime = in 87 | } 88 | 89 | implicit val instantCassandraTypeMapper: CassandraTypeMapper.WithCassandra[java.time.Instant, java.time.Instant] = 90 | new CassandraTypeMapper[java.time.Instant] { 91 | type Cassandra = java.time.Instant 92 | def classType: Class[Cassandra] = classOf[java.time.Instant] 93 | def toCassandra(in: java.time.Instant, dataType: DataType): Cassandra = in 94 | def fromCassandra(in: Cassandra, dataType: DataType): java.time.Instant = in 95 | } 96 | 97 | 
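// Note: no Float instance is defined in this file. If one were needed, a hedged sketch
// following the same boxing pattern as doubleCassandraTypeMapper could look like this
// (hypothetical, untested):
//   implicit val floatCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Float, java.lang.Float] =
//     new CassandraTypeMapper[Float] {
//       type Cassandra = java.lang.Float
//       def classType: Class[Cassandra] = classOf[java.lang.Float]
//       def toCassandra(in: Float, dataType: DataType): Cassandra = Float.box(in)
//       def fromCassandra(in: Cassandra, dataType: DataType): Float = in
//     }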
implicit val boolCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Boolean, java.lang.Boolean] = 98 | new CassandraTypeMapper[Boolean] { 99 | type Cassandra = java.lang.Boolean 100 | def classType: Class[Cassandra] = classOf[java.lang.Boolean] 101 | def toCassandra(in: Boolean, dataType: DataType): Cassandra = Boolean.box(in) 102 | def fromCassandra(in: Cassandra, dataType: DataType): Boolean = in 103 | } 104 | 105 | implicit val uuidCassandraTypeMapper: CassandraTypeMapper.WithCassandra[java.util.UUID, java.util.UUID] = 106 | new CassandraTypeMapper[java.util.UUID] { 107 | type Cassandra = java.util.UUID 108 | def classType: Class[Cassandra] = classOf[java.util.UUID] 109 | def toCassandra(in: java.util.UUID, dataType: DataType): Cassandra = in 110 | def fromCassandra(in: Cassandra, dataType: DataType): java.util.UUID = in 111 | } 112 | 113 | implicit val shortCassandraTypeMapper: CassandraTypeMapper.WithCassandra[Short, java.lang.Short] = 114 | new CassandraTypeMapper[Short] { 115 | type Cassandra = java.lang.Short 116 | def classType: Class[Cassandra] = classOf[java.lang.Short] 117 | def toCassandra(in: Short, dataType: DataType): Cassandra = scala.Short.box(in) 118 | def fromCassandra(in: Cassandra, dataType: DataType): Short = in.shortValue() 119 | } 120 | 121 | implicit val bigDecimalCassandraTypeMapper 122 | : CassandraTypeMapper.WithCassandra[scala.BigDecimal, java.math.BigDecimal] = 123 | new CassandraTypeMapper[scala.BigDecimal] { 124 | type Cassandra = java.math.BigDecimal 125 | def classType: Class[Cassandra] = classOf[java.math.BigDecimal] 126 | def toCassandra(in: scala.BigDecimal, dataType: DataType): Cassandra = in.bigDecimal 127 | def fromCassandra(in: Cassandra, dataType: DataType): scala.BigDecimal = in 128 | } 129 | 130 | implicit val bigIntCassandraTypeMapper: CassandraTypeMapper.WithCassandra[scala.BigInt, java.math.BigInteger] = 131 | new CassandraTypeMapper[scala.BigInt] { 132 | type Cassandra = java.math.BigInteger 133 | def classType: Class[Cassandra] = classOf[java.math.BigInteger] 134 | def toCassandra(in: scala.BigInt, dataType: DataType): Cassandra = in.bigInteger 135 | def fromCassandra(in: Cassandra, dataType: DataType): scala.BigInt = in 136 | } 137 | 138 | /** 139 | * We require proof that A has a ToUdtValue[A] in order to turn any A into a UdtValue and proof that A has a 140 | * FromUdtValue[A] in order to turn a UdtValue into an A 141 | * 142 | * This is an example of mutual induction: CassandraTypeMapper relies on ToUdtValue and FromUdtValue, and ToUdtValue 143 | * and FromUdtValue both rely on CassandraTypeMapper 144 | * 145 | * @param evToUdt 146 | * @param evFromUdt 147 | * @tparam A 148 | * @return 149 | */ 150 | implicit def udtCassandraTypeMapper[A](implicit 151 | evToUdt: Lazy[ToUdtValue.Object[A]], 152 | evFromUdt: Lazy[FromUdtValue.Object[A]] 153 | ): CassandraTypeMapper.WithCassandra[A, UdtValue] = 154 | new CassandraTypeMapper[A] { 155 | override type Cassandra = UdtValue 156 | 157 | override def classType: Class[Cassandra] = classOf[UdtValue] 158 | 159 | override def toCassandra(in: A, dataType: DataType): Cassandra = { 160 | val schema = dataType.asInstanceOf[UserDefinedType] 161 | evToUdt.value.convert(FieldName.Unused, in, schema.newValue()) 162 | } 163 | 164 | override def fromCassandra(in: Cassandra, dataType: DataType): A = 165 | evFromUdt.value.convert(FieldName.Unused, in) 166 | } 167 | 168 | implicit def setCassandraTypeMapper[A](implicit 169 | ev: CassandraTypeMapper[A] 170 | ): CassandraTypeMapper.WithCassandra[Set[A],
java.util.Set[ev.Cassandra]] = 171 | new CassandraTypeMapper[Set[A]] { 172 | override type Cassandra = java.util.Set[ev.Cassandra] 173 | 174 | override def classType: Class[java.util.Set[ev.Cassandra]] = classOf[Cassandra] 175 | 176 | override def toCassandra(in: Set[A], dataType: DataType): Cassandra = { 177 | val elementOfSetDataType = dataType.asInstanceOf[DefaultSetType].getElementType 178 | in.map(ev.toCassandra(_, elementOfSetDataType)).asJava 179 | } 180 | 181 | override def fromCassandra(in: Cassandra, dataType: DataType): Set[A] = { 182 | val elementOfSetDataType = dataType.asInstanceOf[DefaultSetType].getElementType 183 | in.asScala.map(ev.fromCassandra(_, elementOfSetDataType)).toSet 184 | } 185 | } 186 | 187 | implicit def listCassandraTypeMapper[A](implicit 188 | ev: CassandraTypeMapper[A] 189 | ): CassandraTypeMapper.WithCassandra[List[A], java.util.List[ev.Cassandra]] = 190 | new CassandraTypeMapper[List[A]] { 191 | override type Cassandra = java.util.List[ev.Cassandra] 192 | 193 | override def classType: Class[java.util.List[ev.Cassandra]] = classOf[Cassandra] 194 | 195 | override def toCassandra(in: List[A], dataType: DataType): Cassandra = { 196 | val elementOfSetDataType = dataType.asInstanceOf[DefaultListType].getElementType 197 | in.map(ev.toCassandra(_, elementOfSetDataType)).asJava 198 | } 199 | 200 | override def fromCassandra(in: Cassandra, dataType: DataType): List[A] = { 201 | val elementOfListDataType = dataType.asInstanceOf[DefaultListType].getElementType 202 | in.asScala.map(ev.fromCassandra(_, elementOfListDataType)).toList 203 | } 204 | } 205 | 206 | implicit def mapCassandraTypeMapper[K, V](implicit 207 | kEv: CassandraTypeMapper[K], 208 | vEv: CassandraTypeMapper[V] 209 | ): CassandraTypeMapper.WithCassandra[Map[K, V], java.util.Map[kEv.Cassandra, vEv.Cassandra]] = 210 | new CassandraTypeMapper[Map[K, V]] { 211 | override type Cassandra = java.util.Map[kEv.Cassandra, vEv.Cassandra] 212 | 213 | override def classType: Class[java.util.Map[kEv.Cassandra, vEv.Cassandra]] = classOf[Cassandra] 214 | 215 | override def toCassandra(in: Map[K, V], dataType: DataType): Cassandra = { 216 | val mapDataType = dataType.asInstanceOf[DefaultMapType] 217 | val keyDataType = mapDataType.getKeyType 218 | val valueDataType = mapDataType.getValueType 219 | in.map { case (k, v) => 220 | (kEv.toCassandra(k, keyDataType), vEv.toCassandra(v, valueDataType)) 221 | }.asJava 222 | } 223 | 224 | override def fromCassandra(in: Cassandra, dataType: DataType): Map[K, V] = { 225 | val mapDataType = dataType.asInstanceOf[DefaultMapType] 226 | val keyDataType = mapDataType.getKeyType 227 | val valueDataType = mapDataType.getValueType 228 | in.asScala.map { case (kC, vC) => 229 | (kEv.fromCassandra(kC, keyDataType), vEv.fromCassandra(vC, valueDataType)) 230 | }.toMap 231 | } 232 | } 233 | 234 | implicit def optionCassandraTypeMapper[A, Cass](implicit 235 | ev: CassandraTypeMapper.WithCassandra[A, Cass] 236 | ): CassandraTypeMapper.WithCassandra[Option[A], Cass] = 237 | new CassandraTypeMapper[Option[A]] { 238 | override def allowNullable: Boolean = true 239 | override type Cassandra = Cass 240 | 241 | override def classType: Class[Cassandra] = ev.classType 242 | 243 | // NOTE: This is safe to do as the underlying Datastax driver allows you to use null values to represent the absence of data 244 | override def toCassandra(in: Option[A], dataType: DataType): Cassandra = 245 | in.map(ev.toCassandra(_, dataType)) match { 246 | case Some(value) => value 247 | case None => 
null.asInstanceOf[Cassandra] 248 | } 249 | 250 | override def fromCassandra(in: Cassandra, dataType: DataType): Option[A] = 251 | Option(in).map(ev.fromCassandra(_, dataType)) 252 | } 253 | } 254 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/FieldName.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | /** 4 | * This type is used by FromUdtValue and ToUdtValue to decide whether to utilize schema data 5 | * when reading and writing data from the Datastax DataType 6 | */ 7 | private[cql] sealed trait FieldName 8 | object FieldName { 9 | case object Unused extends FieldName 10 | final case class Labelled(value: String) extends FieldName 11 | } 12 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/FromUdtValue.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import com.datastax.oss.driver.api.core.cql.Row 4 | import com.datastax.oss.driver.api.core.data.UdtValue 5 | import com.ringcentral.cassandra4io.cql.FromUdtValue.{ make, makeWithFieldName } 6 | 7 | /** 8 | * A typeclass that is used to turn a UdtValue into a Scala datatype. Typeclass instances for FromUdtValue 9 | * are (inductively) derived from CassandraTypeMapper 10 | * 11 | * @tparam Scala is the Scala datatype that you intend to read out of a Cassandra UdtValue 12 | */ 13 | trait FromUdtValue[Scala] { self => 14 | def convert(fieldName: FieldName, cassandra: UdtValue): Scala 15 | 16 | def map[Scala2](f: Scala => Scala2): FromUdtValue[Scala2] = (fieldName: FieldName, cassandra: UdtValue) => 17 | f(self.convert(fieldName, cassandra)) 18 | } 19 | object FromUdtValue extends LowerPriorityFromUdtValue with LowestPriorityFromUdtValue { 20 | trait Object[A] extends FromUdtValue[A] 21 | 22 | def deriveReads[A](implicit ev: FromUdtValue.Object[A]): Reads[A] = (row: Row, index: Int) => { 23 | val udtValue = row.getUdtValue(index) 24 | try ev.convert(FieldName.Unused, udtValue) 25 | catch { 26 | case UnexpectedNullValueInUdt.NullValueInUdt(udtValue, fieldName) => 27 | throw new UnexpectedNullValueInUdt(row, index, udtValue, fieldName) 28 | } 29 | } 30 | 31 | // only allowed to summon fully built out FromUdtValue instances which are built by Shapeless machinery 32 | def apply[A](implicit ev: FromUdtValue.Object[A]): FromUdtValue.Object[A] = ev 33 | 34 | def make[A](mk: UdtValue => A): FromUdtValue[A] = 35 | (fieldName: FieldName, constructor: UdtValue) => 36 | fieldName match { 37 | case FieldName.Unused => 38 | mk(constructor) 39 | 40 | case FieldName.Labelled(value) => 41 | throw new RuntimeException( 42 | s"FromUdtValue failure: Expected an unused fieldName for ${constructor.getType.describe(true)} but got $value" 43 | ) 44 | } 45 | 46 | def makeWithFieldName[A](mk: (String, UdtValue) => A): FromUdtValue[A] = 47 | (fieldName: FieldName, constructor: UdtValue) => 48 | fieldName match { 49 | case FieldName.Unused => 50 | throw new RuntimeException( 51 | s"FromUdtValue failure: Expected a labelled fieldName for ${constructor.getType.describe(true)} but got unused" 52 | ) 53 | 54 | case FieldName.Labelled(fieldName) => 55 | mk(fieldName, constructor) 56 | } 57 | } 58 | 59 | trait LowerPriorityFromUdtValue { 60 | 61 | /** 62 | * FromUdtValue relies on the CassandraTypeMapper to convert Scala datatypes into 
datatypes compatible with the 63 | * Datastax Java driver (bi-directionally) in order to produce instances of FromUdtValue[A]. CassandraTypeMapper will 64 | * also inductively derive instances if you have nested data-types (collections within collections or collections 65 | * within UdtValues within collections or any combinations of these types) inside your UdtValue 66 | * 67 | * @param ev is evidence that there exists a CassandraTypeMapper for your Scala datatype A 68 | * @tparam A is the Scala datatype that must be read out of Cassandra 69 | * @return 70 | */ 71 | implicit def deriveFromCassandraTypeMapper[A](implicit 72 | ev: CassandraTypeMapper[A] 73 | ): FromUdtValue[A] = 74 | makeWithFieldName[A] { (fieldName, udtValue) => 75 | if (udtValue.isNull(fieldName)) { 76 | if (ev.allowNullable) 77 | None.asInstanceOf[A] 78 | else throw UnexpectedNullValueInUdt.NullValueInUdt(udtValue, fieldName) 79 | } else 80 | ev.fromCassandra(udtValue.get(fieldName, ev.classType), udtValue.getType(fieldName)) 81 | } 82 | } 83 | 84 | trait LowestPriorityFromUdtValue { 85 | import shapeless._ 86 | import shapeless.labelled._ 87 | 88 | implicit def hListFromUdtValue[K <: Symbol, H, T <: HList](implicit 89 | witness: Witness.Aux[K], 90 | hUdtValueReads: Lazy[FromUdtValue[H]], 91 | tUdtValueReads: FromUdtValue[T] 92 | ): FromUdtValue[FieldType[K, H] :: T] = make { (constructor: UdtValue) => 93 | val fieldName = FieldName.Labelled(witness.value.name) 94 | val head = hUdtValueReads.value.convert(fieldName, constructor) 95 | 96 | val fieldTypeKH: FieldType[K, H] = field[witness.T](head) 97 | val tail: T = tUdtValueReads.convert(FieldName.Unused, constructor) 98 | 99 | fieldTypeKH :: tail 100 | } 101 | 102 | implicit val hNilFromUdtValue: FromUdtValue[HNil] = 103 | make((_: UdtValue) => HNil) 104 | 105 | implicit def genericFromUdtValue[A, R](implicit 106 | gen: LabelledGeneric.Aux[A, R], 107 | enc: Lazy[FromUdtValue[R]], 108 | evidenceANotOption: A <:!< Option[_] 109 | ): FromUdtValue.Object[A] = { (fieldName: FieldName, udtValue: UdtValue) => 110 | fieldName match { 111 | case FieldName.Unused => gen.from(enc.value.convert(fieldName, udtValue)) 112 | case FieldName.Labelled(fieldName) => gen.from(nestedCaseClass(fieldName, enc.value, udtValue)) 113 | } 114 | } 115 | 116 | /** 117 | * Handles the UserDefinedType schema book-keeping before utilizing the Shapeless machinery to inductively derive 118 | * reading from a UdtValue within a UdtValue 119 | * @param fieldName is the field name of the nested UdtValue within a given UdtValue 120 | * @param reader is the mechanism to read a UdtValue into a Scala type A 121 | * @param top is the top level UdtValue that is used to retrieve the data for the nested UdtValue that resides within it 122 | * @tparam A is the Scala type A that you want to read from a UdtValue 123 | * @return 124 | */ 125 | def nestedCaseClass[A](fieldName: String, reader: FromUdtValue[A], top: UdtValue): A = { 126 | val nestedUdtValue = top.getUdtValue(fieldName) 127 | reader.convert(FieldName.Unused, nestedUdtValue) 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/Reads.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import com.datastax.oss.driver.api.core.cql.Row 4 | import com.datastax.oss.driver.api.core.data.UdtValue 5 | import com.datastax.oss.driver.internal.core.`type`.{ DefaultListType, 
DefaultMapType, DefaultSetType } 6 | import shapeless.{ ::, Generic, HList, HNil } 7 | 8 | import java.nio.ByteBuffer 9 | import java.time.{ Instant, LocalDate } 10 | import java.util.UUID 11 | import scala.jdk.CollectionConverters._ 12 | 13 | trait Reads[T] { self => 14 | def readNullable(row: Row, index: Int): T 15 | 16 | def read(row: Row, index: Int): T = 17 | if (row.isNull(index)) { 18 | throw new UnexpectedNullValueInColumn(row, index) 19 | } else { 20 | readNullable(row, index) 21 | } 22 | 23 | def nextIndex(index: Int): Int = index + 1 24 | 25 | def map[U](f: T => U): Reads[U] = (row: Row, index: Int) => f(self.read(row, index)) 26 | } 27 | 28 | object Reads extends ReadsLowerPriority with ReadsLowestPriority { 29 | def apply[T](implicit r: Reads[T]): Reads[T] = r 30 | 31 | implicit val rowReads: Reads[Row] = new Reads[Row] { 32 | override def readNullable(row: Row, index: Int): Row = row 33 | override def nextIndex(index: Int): Int = index 34 | } 35 | 36 | implicit val stringReads: Reads[String] = (row: Row, index: Int) => row.getString(index) 37 | implicit val doubleReads: Reads[Double] = (row: Row, index: Int) => row.getDouble(index) 38 | implicit val floatReads: Reads[Float] = (row: Row, index: Int) => row.getFloat(index) 39 | implicit val intReads: Reads[Int] = (row: Row, index: Int) => row.getInt(index) 40 | implicit val longReads: Reads[Long] = (row: Row, index: Int) => row.getLong(index) 41 | implicit val byteBufferReads: Reads[ByteBuffer] = (row: Row, index: Int) => row.getByteBuffer(index) 42 | implicit val localDateReads: Reads[LocalDate] = (row: Row, index: Int) => row.getLocalDate(index) 43 | implicit val instantReads: Reads[Instant] = (row: Row, index: Int) => row.getInstant(index) 44 | implicit val booleanReads: Reads[Boolean] = (row: Row, index: Int) => row.getBoolean(index) 45 | implicit val uuidReads: Reads[UUID] = (row: Row, index: Int) => row.getUuid(index) 46 | implicit val bigIntReads: Reads[BigInt] = (row: Row, index: Int) => row.getBigInteger(index) 47 | implicit val bigDecimalReads: Reads[BigDecimal] = (row: Row, index: Int) => row.getBigDecimal(index) 48 | implicit val shortReads: Reads[Short] = (row: Row, index: Int) => row.getShort(index) 49 | implicit val udtReads: Reads[UdtValue] = (row: Row, index: Int) => row.getUdtValue(index) 50 | 51 | implicit def optionReads[T: Reads]: Reads[Option[T]] = new Reads[Option[T]] { 52 | override def readNullable(row: Row, index: Int): Option[T] = 53 | if (row.isNull(index)) None 54 | else { 55 | Some(Reads[T].readNullable(row, index)) 56 | } 57 | 58 | override def read(row: Row, index: Int): Option[T] = readNullable(row, index) 59 | } 60 | 61 | } 62 | 63 | /** 64 | * Note: We define instances for collections rather than A where A has evidence of a CassandraTypeMapper instance to 65 | * prevent an implicit resolution clash with the case class parser 66 | */ 67 | trait ReadsLowerPriority { 68 | implicit def deriveSetFromCassandraTypeMapper[A](implicit ev: CassandraTypeMapper[A]): Reads[Set[A]] = { 69 | (row: Row, index: Int) => 70 | val datatype = row.getType(index).asInstanceOf[DefaultSetType].getElementType 71 | val cassandraSet = row.getSet(index, ev.classType) 72 | cassandraSet.asScala.map(cas => ev.fromCassandra(cas, datatype)).toSet 73 | } 74 | 75 | implicit def deriveListFromCassandraTypeMapper[A](implicit ev: CassandraTypeMapper[A]): Reads[List[A]] = { 76 | (row: Row, index: Int) => 77 | val datatype = row.getType(index).asInstanceOf[DefaultListType].getElementType 78 | val cassandraSet = row.getList(index, 
ev.classType) 79 | cassandraSet.asScala.map(cas => ev.fromCassandra(cas, datatype)).toList 80 | } 81 | 82 | implicit def deriveMapFromCassandraTypeMapper[K, V](implicit 83 | evK: CassandraTypeMapper[K], 84 | evV: CassandraTypeMapper[V] 85 | ): Reads[Map[K, V]] = { (row: Row, index: Int) => 86 | val top = row.getType(index).asInstanceOf[DefaultMapType] 87 | val keyType = top.getKeyType 88 | val valueType = top.getValueType 89 | val cassandraMap = row.getMap(index, evK.classType, evV.classType) 90 | cassandraMap.asScala.map { case (k, v) => 91 | (evK.fromCassandra(k, keyType), evV.fromCassandra(v, valueType)) 92 | }.toMap 93 | } 94 | } 95 | 96 | trait ReadsLowestPriority { 97 | implicit val hNilParser: Reads[HNil] = new Reads[HNil] { 98 | override def readNullable(row: Row, index: Int): HNil = HNil 99 | override def read(row: Row, index: Int): HNil = HNil 100 | override def nextIndex(index: Int): Int = index 101 | } 102 | 103 | implicit def hConsParser[H: Reads, T <: HList: Reads]: Reads[H :: T] = new Reads[H :: T] { 104 | 105 | override def readNullable(row: Row, index: Int): H :: T = { 106 | val h = Reads[H].readNullable(row, index) 107 | val nextIndex = Reads[H].nextIndex(index) 108 | val t = Reads[T].readNullable(row, nextIndex) 109 | h :: t 110 | } 111 | 112 | override def read(row: Row, index: Int): H :: T = { 113 | val h = Reads[H].read(row, index) 114 | val nextIndex = Reads[H].nextIndex(index) 115 | val t = Reads[T].read(row, nextIndex) 116 | h :: t 117 | } 118 | 119 | override def nextIndex(index: Int): Int = Reads[T].nextIndex(index) 120 | } 121 | 122 | implicit def caseClassParser[A, R <: HList](implicit 123 | gen: Generic[A] { type Repr = R }, 124 | reprParser: Reads[R] 125 | ): Reads[A] = 126 | new Reads[A] { 127 | override def readNullable(row: Row, index: Int): A = { 128 | val rep = reprParser.read(row, index) 129 | gen.from(rep) 130 | } 131 | override def read(row: Row, index: Int): A = readNullable(row, index) 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/ToUdtValue.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import com.datastax.oss.driver.api.core.`type`.UserDefinedType 4 | import com.datastax.oss.driver.api.core.cql.BoundStatement 5 | import com.datastax.oss.driver.api.core.data.UdtValue 6 | import com.ringcentral.cassandra4io.cql.ToUdtValue.{ make, makeWithFieldName } 7 | 8 | /** 9 | * A typeclass that is used to turn a Scala datatype into a UdtValue. 
Typeclass instances for ToUdtValue 10 | * are (inductively) derived from CassandraTypeMapper 11 | * 12 | * @tparam Scala is the Scala datatype that you intend to convert into a Cassandra UdtValue 13 | */ 14 | trait ToUdtValue[-Scala] { self => 15 | def convert(fieldName: FieldName, input: Scala, constructor: UdtValue): UdtValue 16 | 17 | def contramap[AnotherScala](f: AnotherScala => Scala): ToUdtValue[AnotherScala] = 18 | (fieldName: FieldName, input: AnotherScala, constructor: UdtValue) => self.convert(fieldName, f(input), constructor) 19 | } 20 | object ToUdtValue extends LowerPriorityToUdtValue with LowestPriorityToUdtValue { 21 | // represents a ToUdtValue whose UdtValue is fully set up 22 | // this can only be generated by the Shapeless machinery 23 | trait Object[-A] extends ToUdtValue[A] 24 | 25 | def deriveBinder[A](implicit ev: ToUdtValue.Object[A]): Binder[A] = { 26 | (statement: BoundStatement, index: Int, value: A) => 27 | val constructor = statement.getType(index).asInstanceOf[UserDefinedType] 28 | val udtValue = ev.convert(FieldName.Unused, value, constructor.newValue()) 29 | (statement.setUdtValue(index, udtValue), index + 1) 30 | } 31 | 32 | // only allowed to summon fully built out ToUdtValue instances which are built by Shapeless machinery 33 | def apply[A](implicit ev: ToUdtValue.Object[A]): ToUdtValue.Object[A] = ev 34 | 35 | def make[A](mk: (A, UdtValue) => UdtValue): ToUdtValue[A] = 36 | (fieldName: FieldName, input: A, constructor: UdtValue) => 37 | fieldName match { 38 | case FieldName.Unused => 39 | mk(input, constructor) 40 | 41 | case FieldName.Labelled(value) => 42 | throw new RuntimeException(s"ToUdtValue failure: Expected an unused fieldName for $input but got $value") 43 | } 44 | 45 | def makeWithFieldName[A](mk: (String, A, UdtValue) => UdtValue): ToUdtValue[A] = 46 | (fieldName: FieldName, input: A, constructor: UdtValue) => 47 | fieldName match { 48 | case FieldName.Unused => 49 | throw new RuntimeException(s"ToUdtValue failure: Expected a labelled fieldName for $input but got unused") 50 | 51 | case FieldName.Labelled(fieldName) => 52 | mk(fieldName, input, constructor) 53 | } 54 | } 55 | trait LowerPriorityToUdtValue { 56 | 57 | /** 58 | * ToUdtValue relies on the CassandraTypeMapper to convert Scala datatypes into datatypes compatible with the 59 | * Datastax Java driver (bi-directionally) in order to produce instances of ToUdtValue[A].
CassandraTypeMapper will 60 | * also inductively derive instances if you have nested data-types (collections within collections or collections 61 | * within UdtValues within collections or any combinations of these types) inside your UdtValue 62 | * 63 | * @param ev is evidence that we can materialize a CassandraTypeMapper for A 64 | * @tparam A is the Scala datatype that we would like to convert into the Datastax Java type 65 | * @return 66 | */ 67 | implicit def deriveFromCassandraTypeMapper[A](implicit 68 | ev: CassandraTypeMapper[A] 69 | ): ToUdtValue[A] = 70 | makeWithFieldName[A] { (fieldName, input, constructor) => 71 | constructor.set[ev.Cassandra](fieldName, ev.toCassandra(input, constructor.getType(fieldName)), ev.classType) 72 | } 73 | } 74 | 75 | trait LowestPriorityToUdtValue { 76 | import shapeless._ 77 | import shapeless.labelled._ 78 | implicit def hListToUdtValue[K <: Symbol, H, T <: HList](implicit 79 | witness: Witness.Aux[K], 80 | hToUdtValue: Lazy[ToUdtValue[H]], 81 | tToUdtValue: ToUdtValue[T] 82 | ): ToUdtValue[FieldType[K, H] :: T] = make { (in: FieldType[K, H] :: T, constructor: UdtValue) => 83 | val headValue = in.head 84 | val fieldName = FieldName.Labelled(witness.value.name) 85 | val nextConstructor = hToUdtValue.value.convert(fieldName, headValue, constructor) 86 | 87 | tToUdtValue.convert(FieldName.Unused, in.tail, nextConstructor) 88 | } 89 | 90 | implicit val hNilToUdtValue: ToUdtValue[HNil] = 91 | make((_: HNil, constructor: UdtValue) => constructor) 92 | 93 | implicit def genericToUdtValue[A, R](implicit 94 | gen: LabelledGeneric.Aux[A, R], 95 | ev: Lazy[ToUdtValue[R]], 96 | evidenceANotOption: A <:!< Option[_] 97 | ): ToUdtValue.Object[A] = { (fieldName: FieldName, in: A, constructor: UdtValue) => 98 | fieldName match { 99 | case FieldName.Unused => ev.value.convert(fieldName, gen.to(in), constructor) 100 | case FieldName.Labelled(fieldName) => nestedCaseClass(fieldName, gen.to(in), ev.value, constructor) 101 | } 102 | } 103 | 104 | def nestedCaseClass[A](fieldName: String, in: A, ev: ToUdtValue[A], top: UdtValue): UdtValue = { 105 | val constructor = top.getType(fieldName).asInstanceOf[UserDefinedType].newValue() 106 | val serialized = ev.convert(FieldName.Unused, in, constructor) 107 | top.setUdtValue(fieldName, serialized) 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/UnexpectedNullValue.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io.cql 2 | 3 | import com.datastax.oss.driver.api.core.cql.Row 4 | import com.datastax.oss.driver.api.core.data.UdtValue 5 | 6 | sealed trait UnexpectedNullValue extends Throwable 7 | 8 | class UnexpectedNullValueInColumn(row: Row, index: Int) extends RuntimeException() with UnexpectedNullValue { 9 | override def getMessage: String = { 10 | val cl = row.getColumnDefinitions.get(index) 11 | val table = cl.getTable.toString 12 | val column = cl.getName.toString 13 | val keyspace = cl.getKeyspace.toString 14 | val tpe = cl.getType.asCql(true, true) 15 | 16 | s"Read NULL value from $keyspace.$table column $column expected $tpe. 
Row ${row.getFormattedContents}" 17 | } 18 | } 19 | 20 | class UnexpectedNullValueInUdt(row: Row, index: Int, udt: UdtValue, fieldName: String) 21 | extends RuntimeException() 22 | with UnexpectedNullValue { 23 | override def getMessage: String = { 24 | val cl = row.getColumnDefinitions.get(index) 25 | val table = cl.getTable.toString 26 | val column = cl.getName.toString 27 | val keyspace = cl.getKeyspace.toString 28 | val tpe = cl.getType.asCql(true, true) 29 | 30 | val udtTpe = udt.getType(fieldName) 31 | 32 | s"Read NULL value from $keyspace.$table inside UDT column $column with type $tpe. NULL value in $fieldName, expected type $udtTpe. Row ${row.getFormattedContents}" 33 | } 34 | 35 | } 36 | 37 | object UnexpectedNullValueInUdt { 38 | 39 | private[cql] case class NullValueInUdt(udtValue: UdtValue, fieldName: String) extends Throwable("", null, true, false) 40 | 41 | } 42 | -------------------------------------------------------------------------------- /src/main/scala/com/ringcentral/cassandra4io/cql/package.scala: -------------------------------------------------------------------------------- 1 | package com.ringcentral.cassandra4io 2 | 3 | import cats.data.OptionT 4 | import cats.syntax.flatMap._ 5 | import cats.syntax.functor._ 6 | import cats.{ Functor, Monad } 7 | import com.datastax.oss.driver.api.core.`type`.UserDefinedType 8 | import com.datastax.oss.driver.api.core.cql._ 9 | import com.datastax.oss.driver.api.core.data.UdtValue 10 | import fs2.Stream 11 | import shapeless._ 12 | import shapeless.labelled.FieldType 13 | import shapeless.ops.hlist.Prepend 14 | 15 | import java.nio.ByteBuffer 16 | import java.time.{ Instant, LocalDate } 17 | import java.util.UUID 18 | import scala.annotation.{ implicitNotFound, tailrec } 19 | 20 | package object cql { 21 | 22 | case class QueryTemplate[V <: HList: Binder, R: Reads] private[cql] ( 23 | query: String, 24 | config: BoundStatement => BoundStatement 25 | ) { 26 | def +(that: String): QueryTemplate[V, R] = QueryTemplate[V, R](this.query + that, config) 27 | 28 | def ++[W <: HList, Out <: HList](that: QueryTemplate[W, R])(implicit 29 | prepend: Prepend.Aux[V, W, Out], 30 | binderForW: Binder[W], 31 | binderForOut: Binder[Out] 32 | ): QueryTemplate[Out, R] = concat(that) 33 | 34 | def concat[W <: HList, Out <: HList](that: QueryTemplate[W, R])(implicit 35 | prepend: Prepend.Aux[V, W, Out], 36 | binderForW: Binder[W], 37 | binderForOut: Binder[Out] 38 | ): QueryTemplate[Out, R] = QueryTemplate[Out, R]( 39 | this.query + that.query, 40 | statement => (this.config andThen that.config)(statement) 41 | ) 42 | 43 | def as[R1: Reads]: QueryTemplate[V, R1] = QueryTemplate[V, R1](query, config) 44 | 45 | def prepare[F[_]: Functor](session: CassandraSession[F]): F[PreparedQuery[F, V, R]] = 46 | session.prepare(query).map(new PreparedQuery(session, _, config)) 47 | 48 | def config(config: BoundStatement => BoundStatement): QueryTemplate[V, R] = 49 | QueryTemplate[V, R](this.query, this.config andThen config) 50 | 51 | def stripMargin: QueryTemplate[V, R] = QueryTemplate[V, R](this.query.stripMargin, this.config) 52 | } 53 | 54 | type SimpleQuery[Output] = ParameterizedQuery[HNil, Output] 55 | 56 | case class ParameterizedQuery[V <: HList: Binder, R: Reads] private (template: QueryTemplate[V, R], values: V) { 57 | def +(that: String): ParameterizedQuery[V, R] = ParameterizedQuery[V, R](this.template + that, this.values) 58 | 59 | def ++[W <: HList, Out <: HList](that: ParameterizedQuery[W, R])(implicit 60 | prepend: Prepend.Aux[V, W, Out], 61 | 
binderForW: Binder[W], 62 | binderForOut: Binder[Out] 63 | ): ParameterizedQuery[Out, R] = concat(that) 64 | 65 | def concat[W <: HList, Out <: HList](that: ParameterizedQuery[W, R])(implicit 66 | prepend: Prepend.Aux[V, W, Out], 67 | binderForW: Binder[W], 68 | binderForOut: Binder[Out] 69 | ): ParameterizedQuery[Out, R] = 70 | ParameterizedQuery[Out, R](this.template ++ that.template, prepend(this.values, that.values)) 71 | 72 | def as[R1: Reads]: ParameterizedQuery[V, R1] = ParameterizedQuery[V, R1](template.as[R1], values) 73 | 74 | def select[F[_]: Functor](session: CassandraSession[F]): Stream[F, R] = 75 | Stream.force(template.prepare(session).map(_.applyProduct(values).select)) 76 | 77 | def selectFirst[F[_]: Monad](session: CassandraSession[F]): F[Option[R]] = 78 | template.prepare(session).flatMap(_.applyProduct(values).selectFirst) 79 | 80 | def execute[F[_]: Monad](session: CassandraSession[F]): F[Boolean] = 81 | template.prepare(session).map(_.applyProduct(values)).flatMap(_.execute) 82 | 83 | def config(config: BoundStatement => BoundStatement): ParameterizedQuery[V, R] = 84 | ParameterizedQuery[V, R](template.config(config), values) 85 | 86 | def stripMargin: ParameterizedQuery[V, R] = ParameterizedQuery[V, R](this.template.stripMargin, values) 87 | } 88 | 89 | class PreparedQuery[F[_]: Functor, V <: HList: Binder, R: Reads] private[cql] ( 90 | session: CassandraSession[F], 91 | statement: PreparedStatement, 92 | config: BoundStatement => BoundStatement 93 | ) extends ProductArgs { 94 | def applyProduct(values: V) = new Query[F, R](session, Binder[V].bind(config(statement.bind()), 0, values)._1) 95 | } 96 | 97 | class Query[F[_]: Functor, R: Reads] private[cql] ( 98 | session: CassandraSession[F], 99 | private[cql] val statement: BoundStatement 100 | ) { 101 | def config(statement: BoundStatement => BoundStatement) = new Query[F, R](session, statement(this.statement)) 102 | def select: Stream[F, R] = session.select(statement).map(Reads[R].read(_, 0)) 103 | def selectFirst: F[Option[R]] = OptionT(session.selectFirst(statement)).map(Reads[R].read(_, 0)).value 104 | def execute: F[Boolean] = session.execute(statement).map(_.wasApplied) 105 | } 106 | 107 | class Batch[F[_]: Functor](batchStatementBuilder: BatchStatementBuilder) { 108 | def add(queries: Seq[Query[F, _]]) = new Batch[F](batchStatementBuilder.addStatements(queries.map(_.statement): _*)) 109 | def execute(session: CassandraSession[F]): F[Boolean] = 110 | session.execute(batchStatementBuilder.build()).map(_.wasApplied) 111 | def config(config: BatchStatementBuilder => BatchStatementBuilder): Batch[F] = 112 | new Batch[F](config(batchStatementBuilder)) 113 | } 114 | 115 | object Batch { 116 | def logged[F[_]: Functor] = new Batch[F](new BatchStatementBuilder(BatchType.LOGGED)) 117 | def unlogged[F[_]: Functor] = new Batch[F](new BatchStatementBuilder(BatchType.UNLOGGED)) 118 | } 119 | 120 | class CqlTemplateStringInterpolator(ctx: StringContext) extends ProductArgs { 121 | import CqlTemplateStringInterpolator._ 122 | def applyProduct[P <: HList, V <: HList](params: P)(implicit 123 | bb: BindableBuilder.Aux[P, V] 124 | ): QueryTemplate[V, Row] = { 125 | implicit val binder: Binder[V] = bb.binder 126 | QueryTemplate[V, Row]( 127 | ctx.parts 128 | .foldLeft[(HList, StringBuilder)]((params, new StringBuilder())) { 129 | case ((Const(const) :: tail, builder), part) => (tail, builder.appendAll(part).appendAll(const)) 130 | case (((restriction: EqualsTo[_]) :: tail, builder), part) => 131 | (tail, 
builder.appendAll(part).appendAll(restriction.keys.map(key => s"${key} = ?").mkString(" AND "))) 132 | case (((assignment: Assignment[_]) :: tail, builder), part) => 133 | (tail, builder.appendAll(part).appendAll(assignment.keys.map(key => s"${key} = ?").mkString(", "))) 134 | case (((columns: Columns[_]) :: tail, builder), part) => 135 | (tail, builder.appendAll(part).appendAll(columns.keys.mkString(", "))) 136 | case (((values: Values[_]) :: tail, builder), part) => 137 | (tail, builder.appendAll(part).appendAll(List.fill(values.size)("?").mkString(", "))) 138 | case ((_ :: tail, builder), part) => (tail, builder.appendAll(part).appendAll("?")) 139 | case ((HNil, builder), part) => (HNil, builder.appendAll(part)) 140 | } 141 | ._2 142 | .toString(), 143 | identity 144 | ) 145 | } 146 | } 147 | 148 | object CqlTemplateStringInterpolator { 149 | 150 | trait BindableBuilder[P] { 151 | type Repr <: HList 152 | def binder: Binder[Repr] 153 | } 154 | 155 | private object BindableBuilder { 156 | type Aux[P, Repr0] = BindableBuilder[P] { type Repr = Repr0 } 157 | def apply[P](implicit builder: BindableBuilder[P]): BindableBuilder.Aux[P, builder.Repr] = builder 158 | implicit def hNilBindableBuilder: BindableBuilder.Aux[HNil, HNil] = new BindableBuilder[HNil] { 159 | override type Repr = HNil 160 | override def binder: Binder[HNil] = Binder[HNil] 161 | } 162 | implicit def hConsBindableBuilder[T: Binder, PT <: HList, RT <: HList](implicit 163 | f: BindableBuilder.Aux[PT, RT] 164 | ): BindableBuilder.Aux[Put[T] :: PT, T :: RT] = new BindableBuilder[Put[T] :: PT] { 165 | override type Repr = T :: RT 166 | override def binder: Binder[T :: RT] = { 167 | implicit val tBinder: Binder[RT] = f.binder 168 | Binder[T :: RT] 169 | } 170 | } 171 | implicit def hConsBindableConstBuilder[PT <: HList, RT <: HList](implicit 172 | f: BindableBuilder.Aux[PT, RT] 173 | ): BindableBuilder.Aux[Const :: PT, RT] = 174 | new BindableBuilder[Const :: PT] { 175 | override type Repr = RT 176 | override def binder: Binder[RT] = f.binder 177 | } 178 | 179 | implicit def hConsBindableColumnsBuilder[T, PT <: HList, RT <: HList](implicit 180 | f: BindableBuilder.Aux[PT, RT] 181 | ): BindableBuilder.Aux[Columns[T] :: PT, RT] = 182 | new BindableBuilder[Columns[T] :: PT] { 183 | override type Repr = RT 184 | override def binder: Binder[RT] = f.binder 185 | } 186 | 187 | implicit def hConsBindableValuesBuilder[T: ColumnsValues, PT <: HList, RT <: HList](implicit 188 | f: BindableBuilder.Aux[PT, RT] 189 | ): BindableBuilder.Aux[Values[T] :: PT, T :: RT] = new BindableBuilder[Values[T] :: PT] { 190 | override type Repr = T :: RT 191 | override def binder: Binder[T :: RT] = { 192 | implicit val hBinder: Binder[T] = Values[T].binder 193 | implicit val tBinder: Binder[RT] = f.binder 194 | Binder[T :: RT] 195 | } 196 | } 197 | 198 | implicit def hConsBindableEqualsToBuilder[T: ColumnsValues, PT <: HList, RT <: HList](implicit 199 | f: BindableBuilder.Aux[PT, RT] 200 | ): BindableBuilder.Aux[EqualsTo[T] :: PT, T :: RT] = new BindableBuilder[EqualsTo[T] :: PT] { 201 | override type Repr = T :: RT 202 | override def binder: Binder[T :: RT] = { 203 | implicit val hBinder: Binder[T] = Values[T].binder 204 | implicit val tBinder: Binder[RT] = f.binder 205 | Binder[T :: RT] 206 | } 207 | } 208 | 209 | implicit def hConsBindableAssignmentsBuilder[T: ColumnsValues, PT <: HList, RT <: HList](implicit 210 | f: BindableBuilder.Aux[PT, RT] 211 | ): BindableBuilder.Aux[Assignment[T] :: PT, T :: RT] = new BindableBuilder[Assignment[T] :: PT] { 212 | 
override type Repr = T :: RT 213 | override def binder: Binder[T :: RT] = { 214 | implicit val hBinder: Binder[T] = Values[T].binder 215 | implicit val tBinder: Binder[RT] = f.binder 216 | Binder[T :: RT] 217 | } 218 | } 219 | } 220 | } 221 | 222 | /** 223 | * BoundValue is used to capture the value inside the cql interpolated string along with evidence of its Binder so that 224 | * a ParameterizedQuery can be built and the values can be bound to the BoundStatement internally 225 | */ 226 | final case class BoundValue[A](value: A, ev: Binder[A]) 227 | object BoundValue { 228 | // This implicit conversion automatically captures the value and evidence of the Binder in a cql interpolated string 229 | implicit def aToBoundValue[A](a: A)(implicit ev: Binder[A]): BoundValue[A] = 230 | BoundValue(a, ev) 231 | } 232 | 233 | class CqlStringInterpolator(ctx: StringContext) { 234 | @tailrec 235 | private def replaceValuesWithQuestionMark( 236 | strings: Iterator[String], 237 | expressions: Iterator[BoundValue[_]], 238 | acc: String 239 | ): String = 240 | if (strings.hasNext && expressions.hasNext) { 241 | val str = strings.next() 242 | val _ = expressions.next() 243 | replaceValuesWithQuestionMark( 244 | strings = strings, 245 | expressions = expressions, 246 | acc = acc + s"$str?" 247 | ) 248 | } else if (strings.hasNext && !expressions.hasNext) { 249 | val str = strings.next() 250 | replaceValuesWithQuestionMark( 251 | strings = strings, 252 | expressions = expressions, 253 | acc + str 254 | ) 255 | } else acc 256 | 257 | def apply(values: BoundValue[_]*): SimpleQuery[Row] = { 258 | val queryWithQuestionMark = replaceValuesWithQuestionMark(ctx.parts.iterator, values.iterator, "") 259 | val assignValuesToStatement: BoundStatement => BoundStatement = { in: BoundStatement => 260 | val (configuredBoundStatement, _) = 261 | values.foldLeft((in, 0)) { case ((current, index), bv: BoundValue[a]) => 262 | val binder: Binder[a] = bv.ev 263 | val value: a = bv.value 264 | binder.bind(current, index, value) 265 | } 266 | configuredBoundStatement 267 | } 268 | ParameterizedQuery(QueryTemplate[HNil, Row](queryWithQuestionMark, assignValuesToStatement), HNil) 269 | } 270 | } 271 | 272 | /** 273 | * Provides a way to lift arbitrary strings into CQL so you can parameterize on values that are not valid CQL parameters 274 | * Please note that this is not escaped so do not use this with user-supplied input for your application (only use 275 | * cqlConst for input that you as the application author control) 276 | */ 277 | class CqlConstInterpolator(ctx: StringContext) { 278 | def apply(args: Any*): ParameterizedQuery[HNil, Row] = 279 | ParameterizedQuery(QueryTemplate(ctx.s(args: _*), identity), HNil) 280 | } 281 | 282 | implicit class CqlStringContext(val ctx: StringContext) extends AnyVal { 283 | def cqlt = new CqlTemplateStringInterpolator(ctx) 284 | def cql = new CqlStringInterpolator(ctx) 285 | def cqlConst = new CqlConstInterpolator(ctx) 286 | } 287 | 288 | implicit class UnsetOptionValueOps[A](val self: Option[A]) extends AnyVal { 289 | def usingUnset(implicit aBinder: Binder[A]): BoundValue[Option[A]] = 290 | BoundValue(self, Binder.optionUsingUnsetBinder[A]) 291 | } 292 | 293 | @implicitNotFound("""Cannot find or construct a Binder instance for type: 294 | 295 | ${T} 296 | 297 | Construct it if needed, please refer to Binder source code for guidance 298 | """) 299 | trait Binder[T] { self => 300 | def bind(statement: BoundStatement, index: Int, value: T): (BoundStatement, Int) 301 | 302 | def contramap[U](f: 
U => T): Binder[U] = new Binder[U] { 303 | override def bind(statement: BoundStatement, index: Int, value: U): (BoundStatement, Int) = 304 | self.bind(statement, index, f(value)) 305 | } 306 | } 307 | 308 | trait Put[T] 309 | object Put { 310 | def apply[T: Binder]: Put[T] = new Put[T] {} 311 | } 312 | 313 | case class Const(fragment: String) 314 | trait Columns[T] { 315 | def keys: List[String] 316 | } 317 | object Columns { 318 | def apply[T: ColumnsValues]: Columns[T] = new Columns[T] { 319 | override def keys: List[String] = ColumnsValues[T].keys 320 | } 321 | } 322 | trait Values[T] { 323 | def size: Int 324 | def binder: Binder[T] 325 | } 326 | object Values { 327 | def apply[T: ColumnsValues]: Values[T] = new Values[T] { 328 | override def size: Int = ColumnsValues[T].size 329 | override def binder: Binder[T] = ColumnsValues[T].binder 330 | } 331 | } 332 | trait EqualsTo[T] extends Columns[T] with Values[T] 333 | object EqualsTo { 334 | def apply[T: ColumnsValues]: EqualsTo[T] = new EqualsTo[T] { 335 | override def keys: List[String] = ColumnsValues[T].keys 336 | override def size: Int = ColumnsValues[T].size 337 | override def binder: Binder[T] = ColumnsValues[T].binder 338 | } 339 | } 340 | 341 | trait Assignment[T] extends Columns[T] with Values[T] 342 | object Assignment { 343 | def apply[T: ColumnsValues]: Assignment[T] = new Assignment[T] { 344 | override def keys: List[String] = ColumnsValues[T].keys 345 | override def size: Int = ColumnsValues[T].size 346 | override def binder: Binder[T] = ColumnsValues[T].binder 347 | } 348 | } 349 | 350 | trait ColumnsValues[T] extends Columns[T] with Values[T] 351 | private object ColumnsValues { 352 | def apply[T](implicit ev: ColumnsValues[T]): ColumnsValues[T] = ev 353 | 354 | implicit val hNilColumnsValues: ColumnsValues[HNil] = new ColumnsValues[HNil] { 355 | override def keys: List[String] = List.empty 356 | override def size: Int = 0 357 | override def binder: Binder[HNil] = Binder.hNilBinder 358 | } 359 | 360 | private def camel2snake(text: String) = 361 | text.tail.foldLeft(text.headOption.fold("")(_.toLower.toString)) { 362 | case (acc, c) if c.isUpper => acc + "_" + c.toLower 363 | case (acc, c) => acc + c 364 | } 365 | 366 | implicit def hListColumnsValues[K, V, T <: HList](implicit 367 | witness: Witness.Aux[K], 368 | tColumnsValues: ColumnsValues[T], 369 | vBinder: Binder[V] 370 | ): ColumnsValues[FieldType[K, V] :: T] = 371 | new ColumnsValues[FieldType[K, V] :: T] { 372 | override def keys: List[String] = { 373 | val key = witness.value match { 374 | case Symbol(key) => camel2snake(key) 375 | case _ => witness.value.toString 376 | } 377 | key :: tColumnsValues.keys 378 | } 379 | override def size: Int = tColumnsValues.size + 1 380 | override def binder: Binder[FieldType[K, V] :: T] = { 381 | implicit val hBinder: Binder[FieldType[K, V]] = new Binder[FieldType[K, V]] { 382 | override def bind(statement: BoundStatement, index: Int, value: FieldType[K, V]): (BoundStatement, Int) = 383 | vBinder.bind(statement, index, value) 384 | } 385 | implicit val tBinder: Binder[T] = tColumnsValues.binder 386 | Binder[FieldType[K, V] :: T] 387 | } 388 | } 389 | implicit def genColumnValues[T, TRepr](implicit 390 | gen: LabelledGeneric.Aux[T, TRepr], 391 | columnsValues: ColumnsValues[TRepr] 392 | ): ColumnsValues[T] = new ColumnsValues[T] { 393 | override def keys: List[String] = columnsValues.keys 394 | override def size: Int = columnsValues.size 395 | override def binder: Binder[T] = new Binder[T] { 396 | override def bind(statement: 
397 |           columnsValues.binder.bind(statement, index, gen.to(value))
398 |       }
399 |     }
400 |   }
401 | 
402 |   object Binder extends BinderLowerPriority with BinderLowestPriority {
403 | 
404 |     def apply[T](implicit binder: Binder[T]): Binder[T] = binder
405 | 
406 |     implicit val stringBinder: Binder[String] = new Binder[String] {
407 |       override def bind(statement: BoundStatement, index: Int, value: String): (BoundStatement, Int) =
408 |         (statement.setString(index, value), index + 1)
409 |     }
410 | 
411 |     implicit val doubleBinder: Binder[Double] = new Binder[Double] {
412 |       override def bind(statement: BoundStatement, index: Int, value: Double): (BoundStatement, Int) =
413 |         (statement.setDouble(index, value), index + 1)
414 |     }
415 | 
416 |     implicit val floatBinder: Binder[Float] = new Binder[Float] {
417 |       override def bind(statement: BoundStatement, index: Int, value: Float): (BoundStatement, Int) =
418 |         (statement.setFloat(index, value), index + 1)
419 |     }
420 | 
421 |     implicit val intBinder: Binder[Int] = new Binder[Int] {
422 |       override def bind(statement: BoundStatement, index: Int, value: Int): (BoundStatement, Int) =
423 |         (statement.setInt(index, value), index + 1)
424 |     }
425 | 
426 |     implicit val longBinder: Binder[Long] = new Binder[Long] {
427 |       override def bind(statement: BoundStatement, index: Int, value: Long): (BoundStatement, Int) =
428 |         (statement.setLong(index, value), index + 1)
429 |     }
430 | 
431 |     implicit val byteBufferBinder: Binder[ByteBuffer] = new Binder[ByteBuffer] {
432 |       override def bind(statement: BoundStatement, index: Int, value: ByteBuffer): (BoundStatement, Int) =
433 |         (statement.setByteBuffer(index, value), index + 1)
434 |     }
435 | 
436 |     implicit val localDateBinder: Binder[LocalDate] = new Binder[LocalDate] {
437 |       override def bind(statement: BoundStatement, index: Int, value: LocalDate): (BoundStatement, Int) =
438 |         (statement.setLocalDate(index, value), index + 1)
439 |     }
440 | 
441 |     implicit val instantBinder: Binder[Instant] = new Binder[Instant] {
442 |       override def bind(statement: BoundStatement, index: Int, value: Instant): (BoundStatement, Int) =
443 |         (statement.setInstant(index, value), index + 1)
444 |     }
445 | 
446 |     implicit val booleanBinder: Binder[Boolean] = new Binder[Boolean] {
447 |       override def bind(statement: BoundStatement, index: Int, value: Boolean): (BoundStatement, Int) =
448 |         (statement.setBoolean(index, value), index + 1)
449 |     }
450 | 
451 |     implicit val uuidBinder: Binder[UUID] = new Binder[UUID] {
452 |       override def bind(statement: BoundStatement, index: Int, value: UUID): (BoundStatement, Int) =
453 |         (statement.setUuid(index, value), index + 1)
454 |     }
455 | 
456 |     implicit val bigIntBinder: Binder[BigInt] = new Binder[BigInt] {
457 |       override def bind(statement: BoundStatement, index: Int, value: BigInt): (BoundStatement, Int) =
458 |         (statement.setBigInteger(index, value.bigInteger), index + 1)
459 |     }
460 | 
461 |     implicit val bigDecimalBinder: Binder[BigDecimal] = new Binder[BigDecimal] {
462 |       override def bind(statement: BoundStatement, index: Int, value: BigDecimal): (BoundStatement, Int) =
463 |         (statement.setBigDecimal(index, value.bigDecimal), index + 1)
464 |     }
465 | 
466 |     implicit val shortBinder: Binder[Short] = new Binder[Short] {
467 |       override def bind(statement: BoundStatement, index: Int, value: Short): (BoundStatement, Int) =
468 |         (statement.setShort(index, value), index + 1)
469 |     }
470 | 
471 |     implicit val userDefinedTypeValueBinder: Binder[UdtValue] =
472 |       (statement: BoundStatement, index: Int, value: UdtValue) => (statement.setUdtValue(index, value), index + 1)
473 | 
474 |     private def commonOptionBinder[T: Binder](
475 |       bindNone: (BoundStatement, Int) => BoundStatement
476 |     ): Binder[Option[T]] = new Binder[Option[T]] {
477 |       override def bind(statement: BoundStatement, index: Int, value: Option[T]): (BoundStatement, Int) = value match {
478 |         case Some(x) => Binder[T].bind(statement, index, x)
479 |         case None    => (bindNone(statement, index), index + 1)
480 |       }
481 |     }
482 | 
483 |     implicit def optionBinder[T: Binder]: Binder[Option[T]] = commonOptionBinder[T] { (statement, index) =>
484 |       statement.setToNull(index)
485 |     }
486 | 
487 |     def optionUsingUnsetBinder[T: Binder]: Binder[Option[T]] = commonOptionBinder[T] { (statement, index) =>
488 |       statement.unset(index)
489 |     }
490 | 
491 |     implicit def widenBinder[T: Binder, X <: T](implicit wd: Widen.Aux[X, T]): Binder[X] = new Binder[X] {
492 |       override def bind(statement: BoundStatement, index: Int, value: X): (BoundStatement, Int) =
493 |         Binder[T].bind(statement, index, wd.apply(value))
494 |     }
495 | 
496 |     implicit class UdtValueBinderOps(udtBinder: Binder[UdtValue]) {
497 | 
498 |       /**
499 |        * This is necessary for UDT values, as you are not allowed to safely create a UDT value; instead, you use the
500 |        * prepared statement's variable definitions to retrieve a UserDefinedType that can be used as a constructor
501 |        * for a UdtValue.
502 |        *
503 |        * @param f is a function that accepts the input value A along with a constructor that you use to build the
504 |        *          UdtValue that gets sent to Cassandra
505 |        * @tparam A is the Scala datatype that is converted into a UdtValue
506 |        * @return a Binder[A] that writes the value to the statement as a UdtValue
507 |        */
508 |       def contramapUDT[A](f: (A, UserDefinedType) => UdtValue): Binder[A] = new Binder[A] {
509 |         override def bind(statement: BoundStatement, index: Int, value: A): (BoundStatement, Int) = {
510 |           val udtValue = f(
511 |             value,
512 |             statement.getPreparedStatement.getVariableDefinitions.get(index).getType.asInstanceOf[UserDefinedType]
513 |           )
514 |           udtBinder.bind(statement, index, udtValue)
515 |         }
516 |       }
517 |     }
518 |   }
519 | 
520 |   trait BinderLowerPriority {
521 | 
522 |     /**
523 |      * This typeclass instance is used to (inductively) derive datatypes that can have arbitrary amounts of nesting.
524 |      * @param ev is evidence that a typeclass instance of CassandraTypeMapper exists for A
525 |      * @tparam A is the Scala datatype that needs to be written to Cassandra
526 |      * @return a Binder[A] backed by the CassandraTypeMapper instance
527 |      */
528 |     implicit def deriveBinderFromCassandraTypeMapper[A](implicit ev: CassandraTypeMapper[A]): Binder[A] =
529 |       (statement: BoundStatement, index: Int, value: A) => {
530 |         val datatype  = statement.getType(index)
531 |         val cassandra = ev.toCassandra(value, datatype)
532 |         (statement.set(index, cassandra, ev.classType), index + 1)
533 |       }
534 |   }
535 | 
536 |   trait BinderLowestPriority {
537 |     implicit val hNilBinder: Binder[HNil] = new Binder[HNil] {
538 |       override def bind(statement: BoundStatement, index: Int, value: HNil): (BoundStatement, Int) = (statement, index)
539 |     }
540 |     implicit def hConsBinder[H: Binder, T <: HList: Binder]: Binder[H :: T] = new Binder[H :: T] {
541 |       override def bind(statement: BoundStatement, index: Int, value: H :: T): (BoundStatement, Int) = {
542 |         val (applied, nextIndex) = Binder[H].bind(statement, index, value.head)
543 |         Binder[T].bind(applied, nextIndex, value.tail)
544 |       }
545 |     }
546 |   }
547 | }
548 | 
--------------------------------------------------------------------------------
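For illustration, a minimal usage sketch of the interpolators and Binder machinery above. This is not part of the repository: the `User` case class, the `users` table, and the `UserDao` wrapper are hypothetical, and the `as`, `select`, and `execute` combinators are assumed from the `ParameterizedQuery` API defined earlier in this file.

    import cats.effect.IO
    import com.ringcentral.cassandra4io.CassandraSession
    import com.ringcentral.cassandra4io.cql._
    import java.util.UUID

    final case class User(id: UUID, name: String, nickname: Option[String])

    class UserDao(session: CassandraSession[IO]) {
      // Each interpolated value is captured as a BoundValue through its implicit
      // Binder, and its position in the query text is replaced with a `?`.
      def find(id: UUID): fs2.Stream[IO, User] =
        cql"SELECT id, name, nickname FROM users WHERE id = $id".as[User].select(session)

      // usingUnset binds None as an unset value (no tombstone) instead of null.
      def updateNickname(id: UUID, nickname: Option[String]): IO[Unit] =
        cql"UPDATE users SET nickname = ${nickname.usingUnset} WHERE id = $id"
          .execute(session)
          .void

      // cqlConst splices a raw, unescaped fragment; use it only for values the
      // application author controls, such as a configured keyspace name.
      def findAll(keyspace: String): fs2.Stream[IO, User] =
        cqlConst"SELECT id, name, nickname FROM $keyspace.users".as[User].select(session)
    }

Because aToBoundValue is an implicit conversion, a plain `$id` works wherever a Binder[UUID] is in scope; a value with no Binder instance fails at compile time with the @implicitNotFound message shown above.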
/src/main/scala/com/ringcentral/cassandra4io/package.scala:
--------------------------------------------------------------------------------
1 | package com.ringcentral
2 | 
3 | import java.util.Optional
4 | 
5 | package object cassandra4io {
6 |   // for cross-building between Scala 2.13 and 2.12
7 |   final implicit class RichOptional[A](private val o: Optional[A]) extends AnyVal {
8 |     def asScala: Option[A] = if (o.isPresent) Some(o.get()) else None
9 |   }
10 | }
11 | 
--------------------------------------------------------------------------------
/src/main/scala/com/ringcentral/cassandra4io/utils/JavaConcurrentToCats.scala:
--------------------------------------------------------------------------------
1 | package com.ringcentral.cassandra4io.utils
2 | 
3 | import cats.effect.{ Async, Sync }
4 | import cats.syntax.functor._
5 | 
6 | import java.util.concurrent.CompletionStage
7 | 
8 | object JavaConcurrentToCats {
9 | 
10 |   def fromJavaAsync[F[_]: Async, T](cs: => CompletionStage[T]): F[T] =
11 |     Async[F].fromCompletableFuture(Sync[F].delay(cs).map(_.toCompletableFuture))
12 | }
13 | 
--------------------------------------------------------------------------------
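Finally, a small sketch (again, not part of the repository) of how these two helpers are typically used with the DataStax driver: `fromJavaAsync` lifts a CompletionStage result into any Async effect, and `asScala` bridges java.util.Optional on both Scala 2.12 and 2.13. The `executeRaw` and `keyspaceOf` names are hypothetical; `executeAsync` and `getKeyspace` are standard driver 4.x methods.

    import cats.effect.IO
    import com.datastax.oss.driver.api.core.CqlSession
    import com.datastax.oss.driver.api.core.cql.AsyncResultSet
    import com.ringcentral.cassandra4io._ // brings RichOptional into scope
    import com.ringcentral.cassandra4io.utils.JavaConcurrentToCats.fromJavaAsync

    // executeAsync returns a CompletionStage[AsyncResultSet]; fromJavaAsync
    // suspends its creation and converts the result into IO.
    def executeRaw(session: CqlSession, query: String): IO[AsyncResultSet] =
      fromJavaAsync[IO, AsyncResultSet](session.executeAsync(query))

    // getKeyspace returns Optional[CqlIdentifier]; asScala turns it into an Option.
    def keyspaceOf(session: CqlSession): Option[String] =
      session.getKeyspace.asScala.map(_.asCql(true))

Note that `fromJavaAsync` takes its argument by name, so the CompletionStage is created inside `Sync[F].delay` and the driver call is not started until the effect runs.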