├── .gitignore ├── .scalafmt.conf ├── .travis.yml ├── AUTHORS.md ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── NOTICE.md ├── README.md ├── build.sbt ├── core └── src │ ├── main │ └── scala │ │ ├── api │ │ ├── ClusterAPI.scala │ │ ├── QueryModule.scala │ │ ├── ResultSetAPI.scala │ │ ├── SessionAPI.scala │ │ ├── StatementAPI.scala │ │ └── package.scala │ │ ├── codecs │ │ └── package.scala │ │ ├── config │ │ ├── Decoders.scala │ │ ├── model.scala │ │ └── reads │ │ │ └── datastax.scala │ │ ├── handlers │ │ └── handlers.scala │ │ ├── implicits.scala │ │ ├── query │ │ ├── FieldLister.scala │ │ ├── StatementGenerator.scala │ │ ├── interpolator │ │ │ ├── CQLInterpolator.scala │ │ │ ├── MacroInterpolator.scala │ │ │ ├── RuntimeCQLInterpolator.scala │ │ │ └── package.scala │ │ ├── mapper │ │ │ ├── ByteBufferMapper.scala │ │ │ ├── ByteBufferToField.scala │ │ │ └── FieldToByteBuffer.scala │ │ ├── model.scala │ │ └── query.scala │ │ └── schema │ │ ├── package.scala │ │ ├── provider │ │ ├── MetadataSchemaProvider.scala │ │ ├── TroySchemaProvider.scala │ │ ├── metadata │ │ │ └── SchemaConversions.scala │ │ └── package.scala │ │ └── validator │ │ ├── TroySchemaValidator.scala │ │ └── package.scala │ └── test │ └── scala │ ├── ListenableFuture2AsyncMSpec.scala │ ├── TestData.scala │ ├── TestUtils.scala │ ├── api │ ├── ClusterAPISpec.scala │ ├── PackageAPISpec.scala │ ├── ResultSetAPISpec.scala │ ├── SessionAPISpec.scala │ └── StatementAPISpec.scala │ ├── codecs │ └── CodecsSpec.scala │ ├── com │ └── datastax │ │ └── driver │ │ └── core │ │ └── CoreClasses.scala │ ├── config │ ├── ClusterConfig.scala │ ├── ClusterDecoderSpec.scala │ ├── ConfigArbitraries.scala │ ├── ConfigStatementOpsSpec.scala │ ├── InstanceClasses.scala │ ├── TestDecoderUtils.scala │ └── reads │ │ └── DatastaxReadsSpec.scala │ ├── handlers │ ├── ClusterAPIHandlerSpec.scala │ ├── ResultSetAPIHandlerSpec.scala │ ├── SessionAPIHandlerSpec.scala │ └── StatementAPIHandlerSpec.scala │ ├── query │ ├── 
FieldListerSpec.scala │ ├── QueryArbitraries.scala │ ├── StatementGeneratorSpec.scala │ ├── interpolator │ │ ├── CQLInterpolatorSpec.scala │ │ ├── InterpolatorImplicitSpec.scala │ │ └── RuntimeCQLInterpolatorSpec.scala │ └── mapper │ │ ├── ByteBufferMapperSpec.scala │ │ └── ByteBufferToFieldSpec.scala │ └── schema │ ├── MetadataArbitraries.scala │ ├── SchemaErrorSpec.scala │ ├── provider │ ├── MetadataSchemaProviderSpec.scala │ ├── TroySchemaProviderSpec.scala │ └── metadata │ │ └── SchemaConversionsSpec.scala │ └── validator │ ├── SchemaValidatorSpec.scala │ └── TroySchemaValidatorSpec.scala ├── docs └── src │ └── main │ └── tut │ └── README.md ├── macros-tests └── src │ ├── it │ └── scala │ │ └── interpolator │ │ └── MetadataInterpolatorTest.scala │ ├── main │ ├── resources │ │ ├── cluster.conf │ │ └── schema.sql │ └── scala │ │ └── interpolator │ │ ├── MyMetadataInterpolator.scala │ │ └── MySchemaInterpolator.scala │ └── test │ └── scala │ └── interpolator │ └── MacroInterpolatorTest.scala ├── project ├── build.properties └── plugins.sbt ├── pubring.gpg ├── secring.gpg.enc └── version.sbt /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | project/project/ 3 | project/target/ 4 | target/ 5 | deploy_key 6 | .DS_Store 7 | output/* 8 | /secring.gpg -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | style = defaultWithAlign 2 | maxColumn = 100 3 | 4 | continuationIndent.callSite = 2 5 | 6 | newlines { 7 | sometimesBeforeColonInMethodReturnType = false 8 | } 9 | 10 | align { 11 | arrowEnumeratorGenerator = false 12 | ifWhileOpenParen = false 13 | openParenCallSite = false 14 | openParenDefnSite = false 15 | } 16 | 17 | docstrings = JavaDoc 18 | 19 | rewrite { 20 | rules = [SortImports, RedundantBraces] 21 | redundantBraces.maxLines = 1 22 | } 
-------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: scala 2 | scala: 3 | - 2.11.11 4 | - 2.12.3 5 | jdk: 6 | - oraclejdk8 7 | before_cache: 8 | - du -h -d 1 $HOME/.ivy2/ 9 | - du -h -d 2 $HOME/.sbt/ 10 | - du -h -d 4 $HOME/.coursier/ 11 | - find $HOME/.sbt -name "*.lock" -type f -delete 12 | - find $HOME/.ivy2/cache -name "ivydata-*.properties" -type f -delete 13 | cache: 14 | directories: 15 | - $HOME/.sbt/cache 16 | - $HOME/.sbt/0.13 17 | - $HOME/.sbt/boot/ 18 | - $HOME/.sbt/boot/scala* 19 | - $HOME/.sbt/launchers 20 | - $HOME/.ivy2/cache 21 | - $HOME/.ivy2 22 | before_install: 23 | - if [ "$TRAVIS_BRANCH" = "master" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then 24 | openssl aes-256-cbc -K $encrypted_3a2eb420b303_key -iv $encrypted_3a2eb420b303_iv -in secring.gpg.enc -out secring.gpg -d; 25 | fi 26 | - export PATH=${PATH}:./vendor/bundle 27 | script: 28 | - sbt ++$TRAVIS_SCALA_VERSION orgScriptCI 29 | - sbt ++$TRAVIS_SCALA_VERSION embeddedCassandraStart it:test 30 | after_success: 31 | - bash <(curl -s https://codecov.io/bash) 32 | - sbt ++$TRAVIS_SCALA_VERSION orgAfterCISuccess 33 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Authors 2 | 3 | ## Maintainers 4 | 5 | The maintainers of the project are: 6 | 7 | * 47 Degrees (twitter: @47deg) <[47degfreestyle](https://github.com/47degfreestyle)> 8 | 9 | ## Contributors 10 | 11 | These are the people that have contributed to the freestyle-cassandra project: 12 | 13 | * Adrián Ramírez Fornell <[AdrianRaFo](https://github.com/AdrianRaFo)> 14 | * Ana Mª Marquez <[anamariamv](https://github.com/anamariamv)> 15 | * Fede Fernández <[fedefernandez](https://github.com/fedefernandez)> 16 | * Francisco Pérez Paradas <[FPerezP](https://github.com/FPerezP)> 17 | 
* Gerard Madorell <[GMadorell](https://github.com/GMadorell)> 18 | * Raúl Raja Martínez <[raulraja](https://github.com/raulraja)> -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 01/08/2018 - Version 0.1.1 4 | 5 | Release changes: 6 | 7 | * Compile Cassandra docs in frees-cassandra repo ([#132](https://github.com/frees-io/freestyle-cassandra/pull/132)) 8 | * Adds the replace blocks for the library version ([#135](https://github.com/frees-io/freestyle-cassandra/pull/135)) 9 | * Upgrade to cats 1.0.1 ([#136](https://github.com/frees-io/freestyle-cassandra/pull/136)) 10 | 11 | 12 | ## 12/21/2017 - Version 0.1.0 13 | 14 | Release changes: 15 | 16 | * Upgrades Troy to latest stable version ([#123](https://github.com/frees-io/freestyle-cassandra/pull/123)) 17 | * Updates the README.md file ([#129](https://github.com/frees-io/freestyle-cassandra/pull/129)) 18 | * Move freestyle to freestyle free ([#131](https://github.com/frees-io/freestyle-cassandra/pull/131)) 19 | 20 | 21 | ## 11/10/2017 - Version 0.0.4 22 | 23 | Release changes: 24 | 25 | * Uuse the async traits for implicits from freestyle ([#121](https://github.com/frees-io/freestyle-cassandra/pull/121)) 26 | * Restores asResultSet[M[_]] definition ([#120](https://github.com/frees-io/freestyle-cassandra/pull/120)) 27 | * Release 0.0.4 ([#122](https://github.com/frees-io/freestyle-cassandra/pull/122)) 28 | 29 | 30 | ## 11/08/2017 - Version 0.0.3 31 | 32 | Release changes: 33 | 34 | * ResultSet API implementations ([#116](https://github.com/frees-io/freestyle-cassandra/pull/116)) 35 | * Refactorizes handlers and provides implicits ([#117](https://github.com/frees-io/freestyle-cassandra/pull/117)) 36 | * Adds the operations for fetching model from a query ([#118](https://github.com/frees-io/freestyle-cassandra/pull/118)) 37 | 38 | 39 | ## 11/06/2017 - Version 0.0.2 40 
| 41 | Release changes: 42 | 43 | * Update CHANGELOG.md ([#87](https://github.com/frees-io/freestyle-cassandra/pull/87)) 44 | * Macro for expanding interpolator ([#89](https://github.com/frees-io/freestyle-cassandra/pull/89)) 45 | * Uses sbt-embedded-cassandra and introduce the Integration Tests ([#90](https://github.com/frees-io/freestyle-cassandra/pull/90)) 46 | * Removes the ScalaJS badge from README ([#97](https://github.com/frees-io/freestyle-cassandra/pull/97)) 47 | * Allows setting the consistency level at query level ([#96](https://github.com/frees-io/freestyle-cassandra/pull/96)) 48 | * Adds the code for creating case classes from rows ([#103](https://github.com/frees-io/freestyle-cassandra/pull/103)) 49 | * Add printer arbitrary for scalacheck testing ([#108](https://github.com/frees-io/freestyle-cassandra/pull/108)) 50 | * Allows the validation of schema definition statements ([#109](https://github.com/frees-io/freestyle-cassandra/pull/109)) 51 | * Adds the printer to the FieldLister ([#110](https://github.com/frees-io/freestyle-cassandra/pull/110)) 52 | * Use the printer in the FieldMapper ([#111](https://github.com/frees-io/freestyle-cassandra/pull/111)) 53 | * Release v0.0.2 ([#112](https://github.com/frees-io/freestyle-cassandra/pull/112)) 54 | 55 | 56 | ## 10/09/2017 - Version 0.0.1 57 | 58 | Release changes: 59 | 60 | * Removes macro implementation ([#13](https://github.com/frees-io/freestyle-cassandra/pull/13)) 61 | * Adds a statement generator ([#14](https://github.com/frees-io/freestyle-cassandra/pull/14)) 62 | * Keyspace schema parser ([#15](https://github.com/frees-io/freestyle-cassandra/pull/15)) 63 | * Adds the table model ([#25](https://github.com/frees-io/freestyle-cassandra/pull/25)) 64 | * Adds the cluster configuration for Typesafe Config ([#30](https://github.com/frees-io/freestyle-cassandra/pull/30)) 65 | * Adds statement config model and tests ([#31](https://github.com/frees-io/freestyle-cassandra/pull/31)) 66 | * Upgrades 
sbt-freestyle ([#34](https://github.com/frees-io/freestyle-cassandra/pull/34)) 67 | * Cluster information ([#35](https://github.com/frees-io/freestyle-cassandra/pull/35)) 68 | * Decouples the typesafe config library ([#36](https://github.com/frees-io/freestyle-cassandra/pull/36)) 69 | * Cluster Config Decoder tests ([#37](https://github.com/frees-io/freestyle-cassandra/pull/37)) 70 | * FieldMapper ([#39](https://github.com/frees-io/freestyle-cassandra/pull/39)) 71 | * Schema validator ([#50](https://github.com/frees-io/freestyle-cassandra/pull/50)) 72 | * Remote scheme provider ([#53](https://github.com/frees-io/freestyle-cassandra/pull/53)) 73 | * Improves test coverage ([#55](https://github.com/frees-io/freestyle-cassandra/pull/55)) 74 | * Allows the creation of distinct names in arbitraries ([#56](https://github.com/frees-io/freestyle-cassandra/pull/56)) 75 | * Abstract schema provider and validator functions over M[_] ([#54](https://github.com/frees-io/freestyle-cassandra/pull/54)) 76 | * Fixes distinct user type name generation ([#64](https://github.com/frees-io/freestyle-cassandra/pull/64)) 77 | * Renames to free cassandra ([#66](https://github.com/frees-io/freestyle-cassandra/pull/66)) 78 | * Adds the basic interpolator ([#69](https://github.com/frees-io/freestyle-cassandra/pull/69)) 79 | * Adds support for params in the interpolator ([#73](https://github.com/frees-io/freestyle-cassandra/pull/73)) 80 | * Abstract ByteBufferCodec over M[_] ([#74](https://github.com/frees-io/freestyle-cassandra/pull/74)) 81 | * Metadata schema provider from cluster configuration ([#75](https://github.com/frees-io/freestyle-cassandra/pull/75)) 82 | * Create StatementAPI ([#76](https://github.com/frees-io/freestyle-cassandra/pull/76)) 83 | * Adds new statement operations ([#77](https://github.com/frees-io/freestyle-cassandra/pull/77)) 84 | * Fixes the project name for sbt-org-policies ([#78](https://github.com/frees-io/freestyle-cassandra/pull/78)) 85 | * Implement asResultSet 
method for interpolator ([#80](https://github.com/frees-io/freestyle-cassandra/pull/80)) 86 | * Execute query without prepare ([#83](https://github.com/frees-io/freestyle-cassandra/pull/83)) 87 | * Releases RC1 ([#84](https://github.com/frees-io/freestyle-cassandra/pull/84)) 88 | * Returns a FreeS[M, ResultSet] from the interpolator ([#86](https://github.com/frees-io/freestyle-cassandra/pull/86)) -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Discussion around Freestyle happens in the [Gitter channel](https://gitter.im/47deg/freestyle) as well as on 4 | [GitHub issues](https://github.com/47deg/freestyle/issues) and [pull requests](https://github.com/47deg/freestyle/pulls). 5 | 6 | Feel free to open an issue if you notice a bug, have an idea for a feature, or have a question about 7 | the code. Pull requests are also welcome. 8 | 9 | People are expected to follow the [Typelevel Code of Conduct](http://typelevel.org/conduct.html) when discussing Freestyle on the Github page, Gitter channel, or other venues. 10 | 11 | If you are being harassed, please contact one of [us](AUTHORS.md#maintainers) immediately so that we can support you. In case you cannot get in touch with us please write an email to [47 Degrees](mailto:hello@47deg.com). 12 | 13 | ## How can I help? 14 | 15 | Freestyle follows a standard [fork and pull](https://help.github.com/articles/using-pull-requests/) model for contributions via GitHub pull requests. 16 | 17 | The process is simple: 18 | 19 | 1. Find something you want to work on 20 | 2. Let us know you are working on it via the Gitter channel or GitHub issues/pull requests 21 | 3. Implement your contribution 22 | 4. Write tests 23 | 5. Update the documentation 24 | 6. 
Submit pull request 25 | 26 | You will be automatically included in the [AUTHORS.md](AUTHORS.md#contributors) file as contributor in the next release. 27 | If you encounter any confusion or frustration during the contribution process, please create a GitHub issue and we'll do our best to improve the process. -------------------------------------------------------------------------------- /NOTICE.md: -------------------------------------------------------------------------------- 1 | Freestyle 2 | Copyright (c) 2017-2018 47 Degrees. All rights reserved. 3 | 4 | Licensed under Apache License. See [LICENSE](LICENSE) for terms. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | [comment]: # (Start Badges) 3 | 4 | [![Build Status](https://travis-ci.org/frees-io/freestyle-cassandra.svg?branch=master)](https://travis-ci.org/frees-io/freestyle-cassandra) [![codecov.io](http://codecov.io/github/frees-io/freestyle-cassandra/coverage.svg?branch=master)](http://codecov.io/github/frees-io/freestyle-cassandra?branch=master) [![Maven Central](https://img.shields.io/badge/maven%20central-0.1.1-green.svg)](https://oss.sonatype.org/#nexus-search;gav~io.frees~freestyle-cassandra*) [![Latest version](https://img.shields.io/badge/freestyle--cassandra-0.1.1-green.svg)](https://index.scala-lang.org/frees-io/freestyle-cassandra) [![License](https://img.shields.io/badge/license-Apache%202-blue.svg)](https://raw.githubusercontent.com/frees-io/freestyle-cassandra/master/LICENSE) [![Join the chat at https://gitter.im/47deg/freestyle](https://badges.gitter.im/47deg/freestyle.svg)](https://gitter.im/47deg/freestyle?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![GitHub Issues](https://img.shields.io/github/issues/frees-io/freestyle-cassandra.svg)](https://github.com/frees-io/freestyle-cassandra/issues) 5 | 6 | [comment]: # (End Badges) 
7 | # Freestyle Cassandra 8 | 9 | [Cassandra] atop **Freestyle** is **`frees-cassandra`**. 10 | Freestyle Cassandra is a Scala purely functional driver for Cassandra, based on the Datastax Java Driver. 11 | 12 | ## What’s frees-cassandra 13 | 14 | [frees-cassandra] is a library to interact with Cassandra built atop Free and using the Datastax 15 | Cassandra Driver for connecting to a Cassandra instance. It follows the [Freestyle] philosophy, 16 | being macro-powered. 17 | 18 | ## Installation 19 | 20 | Add the following resolver and library dependency to your project's build file. 21 | 22 | For Scala `2.11.x` and `2.12.x`: 23 | 24 | [comment]: # (Start Replace) 25 | ```scala 26 | Resolver.bintrayRepo("tabdulradi", "maven") 27 | libraryDependencies += "io.frees" %% "frees-cassandra-core" % "0.1.1" 28 | ``` 29 | 30 | [comment]: # (End Replace) 31 | 32 | ## Documentation 33 | 34 | Access the documentation [here](http://frees.io/docs/cassandra) 35 | 36 | [comment]: # (Start Copyright) 37 | # Copyright 38 | 39 | Freestyle is designed and developed by 47 Degrees 40 | 41 | Copyright (C) 2017-2018 47 Degrees. 
42 | 43 | [comment]: # (End Copyright) 44 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | import sbtorgpolicies.templates.badges._ 2 | import sbtorgpolicies.runnable.syntax._ 3 | 4 | pgpPassphrase := Some(getEnvVar("PGP_PASSPHRASE").getOrElse("").toCharArray) 5 | pgpPublicRing := file(s"$gpgFolder/pubring.gpg") 6 | pgpSecretRing := file(s"$gpgFolder/secring.gpg") 7 | 8 | lazy val freesV = "0.5.1" 9 | 10 | lazy val commonDependencies: Seq[ModuleID] = Seq( 11 | %%("frees-async",freesV), 12 | %%("frees-async-guava",freesV), 13 | %%("shapeless"), 14 | %%("classy-core"), 15 | %%("classy-config-typesafe"), 16 | %("cassandra-driver-core"), 17 | %("cassandra-driver-mapping"), 18 | %("cassandra-driver-extras"), 19 | "io.github.cassandra-scala" %% "troy-schema" % "0.5.0", 20 | "com.propensive" %% "contextual" % "1.0.1" 21 | ) 22 | 23 | lazy val testDependencies: Seq[ModuleID] = 24 | Seq(%%("scalatest"), %%("scalamockScalatest"), %%("scalacheck"), %%("scheckShapeless")) 25 | .map(_ % "it,test") 26 | 27 | lazy val orgSettings = Seq( 28 | orgBadgeListSetting := List( 29 | TravisBadge.apply, 30 | CodecovBadge.apply, 31 | MavenCentralBadge.apply, 32 | ScalaLangBadge.apply, 33 | LicenseBadge.apply, 34 | { info => GitterBadge.apply(info.copy(owner = "47deg", repo = "freestyle")) }, 35 | GitHubIssuesBadge.apply 36 | ), 37 | embeddedCassandraCQLFileSetting := Option(file("macros-tests/src/main/resources/schema.sql")) 38 | ) 39 | orgAfterCISuccessTaskListSetting := List( 40 | depUpdateDependencyIssues.asRunnableItem, 41 | orgPublishReleaseTask.asRunnableItem(allModules = true, aggregated = false, crossScalaVersions = true), 42 | orgUpdateDocFiles.asRunnableItem 43 | ) 44 | 45 | lazy val root = project 46 | .in(file(".")) 47 | .settings(name := "freestyle-cassandra") 48 | .settings(noPublishSettings) 49 | .enablePlugins(EmbeddedCassandraPlugin) 50 | 
.settings(orgSettings) 51 | .dependsOn(core, `macros-tests`, docs) 52 | .aggregate(core, `macros-tests`, docs) 53 | 54 | lazy val core = project 55 | .in(file("core")) 56 | .settings(moduleName := "frees-cassandra-core") 57 | .settings(scalaMetaSettings) 58 | .configs(IntegrationTest) 59 | .settings(Defaults.itSettings) 60 | .settings(resolvers += Resolver.bintrayRepo("tabdulradi", "maven")) 61 | .settings(libraryDependencies ++= commonDependencies) 62 | .settings(libraryDependencies ++= testDependencies) 63 | 64 | lazy val `macros-tests` = project 65 | .in(file("macros-tests")) 66 | .settings(moduleName := "frees-cassandra-macros-tests") 67 | .settings(scalaMetaSettings) 68 | .configs(IntegrationTest) 69 | .settings(Defaults.itSettings) 70 | .settings(libraryDependencies ++= testDependencies) 71 | .dependsOn(core) 72 | 73 | lazy val docs = project 74 | .in(file("docs")) 75 | .dependsOn(core) 76 | .aggregate(core) 77 | .settings(name := "frees-cassandra-docs") 78 | .settings(noPublishSettings: _*) 79 | .settings( 80 | addCompilerPlugin(%%("scalameta-paradise") cross CrossVersion.full), 81 | libraryDependencies += %%("scalameta", "1.8.0"), 82 | scalacOptions += "-Xplugin-require:macroparadise", 83 | scalacOptions in Tut ~= (_ filterNot Set("-Ywarn-unused-import", "-Xlint").contains), 84 | // Pointing to https://github.com/frees-io/freestyle/tree/master/docs/src/main/tut/docs/rpc 85 | tutTargetDirectory := baseDirectory.value.getParentFile.getParentFile / "docs" / "src" 86 | / "main" / "tut" / "docs" / "cassandra" 87 | ) 88 | .enablePlugins(TutPlugin) -------------------------------------------------------------------------------- /core/src/main/scala/api/ClusterAPI.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import com.datastax.driver.core.{Configuration, Metadata, Metrics, Session} 21 | import freestyle.free.free 22 | 23 | @free 24 | trait ClusterAPI { 25 | 26 | def connect: FS[Session] 27 | 28 | def connectKeyspace(keyspace: String): FS[Session] 29 | 30 | def close: FS[Unit] 31 | 32 | def configuration: FS[Configuration] 33 | 34 | def metadata: FS[Metadata] 35 | 36 | def metrics: FS[Metrics] 37 | 38 | } 39 | -------------------------------------------------------------------------------- /core/src/main/scala/api/QueryModule.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import freestyle.free._ 21 | 22 | @module 23 | trait QueryModule { 24 | 25 | val sessionAPI: SessionAPI 26 | val statementAPI: StatementAPI 27 | val resultSetAPI: ResultSetAPI 28 | 29 | } 30 | -------------------------------------------------------------------------------- /core/src/main/scala/api/ResultSetAPI.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import com.datastax.driver.core.ResultSet 21 | import freestyle.free._ 22 | import freestyle.cassandra.query.mapper.FromReader 23 | 24 | @free 25 | trait ResultSetAPI { 26 | 27 | def read[A](resultSet: ResultSet)(implicit FR: FromReader[A]): FS[A] 28 | 29 | def readOption[A](resultSet: ResultSet)(implicit FR: FromReader[A]): FS[Option[A]] 30 | 31 | def readList[A](resultSet: ResultSet)(implicit FR: FromReader[A]): FS[List[A]] 32 | 33 | } 34 | -------------------------------------------------------------------------------- /core/src/main/scala/api/SessionAPI.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import com.datastax.driver.core._ 21 | import freestyle.free._ 22 | import freestyle.cassandra.query.model.SerializableValueBy 23 | 24 | @free 25 | trait SessionAPI { 26 | 27 | def init: FS[Session] 28 | 29 | def close: FS[Unit] 30 | 31 | def prepare(query: String): FS[PreparedStatement] 32 | 33 | def prepareStatement(statement: RegularStatement): FS[PreparedStatement] 34 | 35 | def execute(query: String): FS[ResultSet] 36 | 37 | def executeWithValues(query: String, values: Any*): FS[ResultSet] 38 | 39 | def executeWithMap(query: String, values: Map[String, AnyRef]): FS[ResultSet] 40 | 41 | def executeStatement(statement: Statement): FS[ResultSet] 42 | 43 | def executeWithByteBuffer( 44 | query: String, 45 | values: List[SerializableValueBy[Int]], 46 | consistencyLevel: Option[ConsistencyLevel] = None): FS[ResultSet] 47 | 48 | } 49 | -------------------------------------------------------------------------------- /core/src/main/scala/api/StatementAPI.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import com.datastax.driver.core._ 23 | import freestyle.free._ 24 | import freestyle.cassandra.codecs.ByteBufferCodec 25 | import freestyle.cassandra.query.model.SerializableValueBy 26 | 27 | @free 28 | trait StatementAPI { 29 | 30 | def bind(preparedStatement: PreparedStatement): FS[BoundStatement] 31 | 32 | def setByteBufferByIndex( 33 | boundStatement: BoundStatement, 34 | index: Int, 35 | bytes: ByteBuffer): FS[BoundStatement] 36 | 37 | def setByteBufferByName( 38 | boundStatement: BoundStatement, 39 | name: String, 40 | bytes: ByteBuffer): FS[BoundStatement] 41 | 42 | def setValueByIndex[T]( 43 | boundStatement: BoundStatement, 44 | index: Int, 45 | value: T, 46 | codec: ByteBufferCodec[T]): FS[BoundStatement] 47 | 48 | def setValueByName[T]( 49 | boundStatement: BoundStatement, 50 | name: String, 51 | value: T, 52 | codec: ByteBufferCodec[T]): FS[BoundStatement] 53 | 54 | def setByteBufferListByIndex( 55 | preparedStatement: PreparedStatement, 56 | values: List[SerializableValueBy[Int]]): FS[BoundStatement] 57 | 58 | def setByteBufferListByName( 59 | preparedStatement: PreparedStatement, 60 | values: List[SerializableValueBy[String]]): FS[BoundStatement] 61 | } 62 | -------------------------------------------------------------------------------- /core/src/main/scala/api/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, 
LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import cats.data.Kleisli 20 | import cats.{~>, MonadError} 21 | import com.datastax.driver.core.{Cluster, ResultSet, Session} 22 | 23 | import scala.reflect.ClassTag 24 | 25 | package object api { 26 | 27 | type SessionAPIOps[F[_], A] = Kleisli[F, Session, A] 28 | 29 | type ClusterAPIOps[F[_], A] = Kleisli[F, Cluster, A] 30 | 31 | def apiInterpreter[F[_], A](a: A): (Kleisli[F, A, ?] ~> F) = new (Kleisli[F, A, ?] ~> F) { 32 | override def apply[B](fa: Kleisli[F, A, B]): F[B] = fa(a) 33 | } 34 | 35 | def kleisli[M[_], A, B]( 36 | f: A => M[B])(implicit ME: MonadError[M, Throwable], TAG: ClassTag[A]): Kleisli[M, A, B] = 37 | Kleisli { (a: A) => 38 | Option(a) 39 | .map(f) 40 | .getOrElse(ME.raiseError( 41 | new IllegalArgumentException(s"Instance of class ${TAG.runtimeClass.getName} is null"))) 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /core/src/main/scala/codecs/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import java.nio.ByteBuffer 20 | 21 | import cats.MonadError 22 | import com.datastax.driver.core.{DataType, ProtocolVersion, TypeCodec} 23 | import com.datastax.driver.core.exceptions.InvalidTypeException 24 | 25 | package object codecs { 26 | 27 | trait ByteBufferCodec[T] { 28 | def deserialize[M[_]](bytes: ByteBuffer)(implicit E: MonadError[M, Throwable]): M[T] 29 | def serialize[M[_]](value: T)(implicit E: MonadError[M, Throwable]): M[ByteBuffer] 30 | } 31 | 32 | abstract class PrimitiveByteBufferCodec[T](dataType: DataType, byteSize: Int, defaultValue: T) 33 | extends ByteBufferCodec[T] { 34 | 35 | override def deserialize[M[_]](bytes: ByteBuffer)(implicit E: MonadError[M, Throwable]): M[T] = 36 | Option(bytes) map { b => 37 | E.flatMap(E.catchNonFatal(b.remaining())) { 38 | case 0 => E.pure(defaultValue) 39 | case `byteSize` => E.catchNonFatal(getValue(b)) 40 | case _ => 41 | E.raiseError[T]( 42 | new InvalidTypeException( 43 | s"Invalid value, expecting $byteSize but got ${b.remaining}")) 44 | } 45 | } getOrElse E.pure(defaultValue) 46 | 47 | override def serialize[M[_]](value: T)(implicit E: MonadError[M, Throwable]): M[ByteBuffer] = 48 | E.catchNonFatal(setValue(ByteBuffer.allocate(byteSize), value)) 49 | 50 | protected def getValue(byteBuffer: ByteBuffer): T 51 | 52 | private[this] def setValue(byteBuffer: ByteBuffer, value: T): ByteBuffer = value match { 53 | case v: Boolean if v => ByteBuffer.wrap(Array[Byte](1)) 54 | case _: Boolean => 
ByteBuffer.wrap(Array[Byte](0)) 55 | case v: Byte => byteBuffer.put(0, v) 56 | case v: Double => byteBuffer.putDouble(0, v) 57 | case v: Float => byteBuffer.putFloat(0, v) 58 | case v: Int => byteBuffer.putInt(0, v) 59 | case v: Long => byteBuffer.putLong(0, v) 60 | case v: Short => byteBuffer.putShort(0, v) 61 | } 62 | } 63 | 64 | object PrimitiveByteBufferCodec { 65 | def apply[T](dataType: DataType, byteSize: Int, defaultValue: T)( 66 | f: (ByteBuffer) => T): PrimitiveByteBufferCodec[T] = 67 | new PrimitiveByteBufferCodec(dataType, byteSize, defaultValue) { 68 | override protected def getValue(byteBuffer: ByteBuffer): T = f(byteBuffer) 69 | } 70 | } 71 | 72 | implicit val booleanCodec: ByteBufferCodec[Boolean] = 73 | PrimitiveByteBufferCodec[Boolean](DataType.smallint(), byteSize = 1, defaultValue = false) { 74 | byteBuffer => 75 | byteBuffer.get(byteBuffer.position()) == 1 76 | } 77 | 78 | implicit val byteCodec: ByteBufferCodec[Byte] = 79 | PrimitiveByteBufferCodec[Byte](DataType.smallint(), byteSize = 1, defaultValue = 0) { 80 | byteBuffer => 81 | byteBuffer.get(byteBuffer.position()) 82 | } 83 | 84 | implicit val doubleCodec: ByteBufferCodec[Double] = 85 | PrimitiveByteBufferCodec[Double](DataType.cdouble(), byteSize = 8, defaultValue = 0) { 86 | byteBuffer => 87 | byteBuffer.getDouble(byteBuffer.position()) 88 | } 89 | 90 | implicit val floatCodec: ByteBufferCodec[Float] = 91 | PrimitiveByteBufferCodec[Float](DataType.cfloat(), byteSize = 4, defaultValue = 0) { 92 | byteBuffer => 93 | byteBuffer.getFloat(byteBuffer.position()) 94 | } 95 | 96 | implicit val intCodec: ByteBufferCodec[Int] = 97 | PrimitiveByteBufferCodec[Int](DataType.cint, byteSize = 4, defaultValue = 0) { byteBuffer => 98 | byteBuffer.getInt(byteBuffer.position()) 99 | } 100 | 101 | implicit val longCodec: ByteBufferCodec[Long] = 102 | PrimitiveByteBufferCodec[Long](DataType.bigint(), byteSize = 8, defaultValue = 0) { 103 | byteBuffer => 104 | byteBuffer.getLong(byteBuffer.position()) 105 | 
} 106 | 107 | implicit val shortCodec: ByteBufferCodec[Short] = 108 | PrimitiveByteBufferCodec[Short](DataType.smallint(), byteSize = 2, defaultValue = 0) { 109 | byteBuffer => 110 | byteBuffer.getShort(byteBuffer.position()) 111 | } 112 | 113 | implicit def byteBufferCodec[T]( 114 | implicit tc: TypeCodec[T], 115 | pv: ProtocolVersion): ByteBufferCodec[T] = new ByteBufferCodec[T] { 116 | override def deserialize[M[_]](bytes: ByteBuffer)(implicit E: MonadError[M, Throwable]): M[T] = 117 | E.catchNonFatal(tc.deserialize(bytes, pv)) 118 | 119 | override def serialize[M[_]](value: T)(implicit E: MonadError[M, Throwable]): M[ByteBuffer] = 120 | E.catchNonFatal(tc.serialize(value, pv)) 121 | } 122 | 123 | } 124 | -------------------------------------------------------------------------------- /core/src/main/scala/config/model.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
 */

package freestyle.cassandra
package config

import java.net.{InetAddress, InetSocketAddress}
import java.nio.ByteBuffer

import com.datastax.driver.core.policies.RetryPolicy
import com.datastax.driver.core._

/** Configuration model for building Datastax clusters and statements from config files. */
object model {

  // Cluster contact points: either plain addresses or address+port pairs.
  sealed trait ContactPoints extends Product with Serializable

  case class ContactPointList(list: List[InetAddress]) extends ContactPoints

  case class ContactPointWithPortList(list: List[InetSocketAddress]) extends ContactPoints

  // Plain-text auth credentials.
  case class Credentials(username: String, password: String)

  // Pooling options keyed by host distance (mirrors the driver's PoolingOptions setters).
  case class ConnectionsPerHost(distance: HostDistance, core: Int, max: Int)

  case class CoreConnectionsPerHost(distance: HostDistance, newCoreConnections: Int)

  case class MaxConnectionsPerHost(distance: HostDistance, newMaxConnections: Int)

  case class MaxRequestsPerConnection(distance: HostDistance, newMaxRequests: Int)

  case class NewConnectionThreshold(distance: HostDistance, newValue: Int)

  /**
   * Optional per-statement settings; each `Some` field is applied to a
   * `Statement` by `implicits.ConfigStatementOps.applyConf`, `None` fields are skipped.
   */
  case class ConfigStatement(
      tracingEnabled: Option[Boolean] = None,
      consistencyLevel: Option[ConsistencyLevel] = None,
      serialConsistencyLevel: Option[ConsistencyLevel] = None,
      defaultTimestamp: Option[Long] = None,
      fetchSize: Option[Int] = None,
      idempotent: Option[Boolean] = None,
      outgoingPayload: Option[Map[String, ByteBuffer]] = None,
      pagingState: Option[ConfigPagingState] = None,
      readTimeoutMillis: Option[Int] = None,
      retryPolicy: Option[RetryPolicy] = None)

  // Paging state either as raw bytes (unsafe, no validation) or as a driver PagingState.
  sealed trait ConfigPagingState extends Product with Serializable

  case class RawPagingState(pagingState: Array[Byte]) extends ConfigPagingState

  case class CodecPagingState(pagingState: PagingState, codecRegistry: Option[CodecRegistry])
      extends ConfigPagingState

  object implicits {

    final class ConfigStatementOps(cs: ConfigStatement) {

      /**
       * Applies every configured (`Some`) option of `cs` onto `st`, mutating it
       * in place via the driver's setters, and returns the same statement.
       */
      def applyConf(st: Statement): Statement = {

        import scala.collection.JavaConverters._

        cs.tracingEnabled foreach {
          case true  => st.enableTracing()
          case false => st.disableTracing()
        }
        cs.consistencyLevel foreach st.setConsistencyLevel
        cs.serialConsistencyLevel foreach st.setSerialConsistencyLevel
        cs.defaultTimestamp foreach st.setDefaultTimestamp
        cs.fetchSize foreach st.setFetchSize
        cs.idempotent foreach st.setIdempotent
        cs.outgoingPayload foreach (m => st.setOutgoingPayload(m.asJava))
        cs.pagingState foreach {
          case CodecPagingState(ps, Some(cr)) => st.setPagingState(ps, cr)
          case CodecPagingState(ps, None)     => st.setPagingState(ps)
          case RawPagingState(array)          => st.setPagingStateUnsafe(array)
        }
        cs.readTimeoutMillis foreach st.setReadTimeoutMillis
        cs.retryPolicy foreach st.setRetryPolicy
        st
      }

    }

    implicit def configStatementOps(cs: ConfigStatement): ConfigStatementOps =
      new ConfigStatementOps(cs)

  }

}
// -------------------------------------------------------------------------------- core/src/main/scala/handlers/handlers.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package freestyle.cassandra 18 | package handlers 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import cats.instances.list._ 23 | import cats.syntax.flatMap._ 24 | import cats.syntax.foldable._ 25 | import cats.syntax.traverse._ 26 | import cats.{~>, FlatMap, MonadError} 27 | import com.datastax.driver.core._ 28 | import com.google.common.util.concurrent.ListenableFuture 29 | import freestyle.cassandra.api._ 30 | import freestyle.cassandra.codecs.ByteBufferCodec 31 | import freestyle.cassandra.query.mapper.{DatastaxRowReader, FromReader} 32 | import freestyle.cassandra.query.model.SerializableValueBy 33 | 34 | import scala.collection.JavaConverters._ 35 | 36 | class SessionAPIHandler[M[_]: FlatMap]( 37 | implicit H1: ListenableFuture ~> M, 38 | H2: ListenableFuture[Void] => ListenableFuture[Unit], 39 | ME: MonadError[M, Throwable]) 40 | extends SessionAPI.Handler[SessionAPIOps[M, ?]] { 41 | 42 | def init: SessionAPIOps[M, Session] = kleisli(s => H1(s.initAsync())) 43 | 44 | def close: SessionAPIOps[M, Unit] = kleisli(s => H1(s.closeAsync())) 45 | 46 | def prepare(query: String): SessionAPIOps[M, PreparedStatement] = 47 | kleisli(s => H1(s.prepareAsync(query))) 48 | 49 | def prepareStatement(statement: RegularStatement): SessionAPIOps[M, PreparedStatement] = 50 | kleisli(s => H1(s.prepareAsync(statement))) 51 | 52 | def execute(query: String): SessionAPIOps[M, ResultSet] = 53 | kleisli(s => H1(s.executeAsync(query))) 54 | 55 | def executeWithValues(query: String, values: Any*): SessionAPIOps[M, ResultSet] = 56 | kleisli(s => H1(s.executeAsync(query, values))) 57 | 58 | def executeWithMap(query: String, values: Map[String, AnyRef]): SessionAPIOps[M, ResultSet] = 59 | kleisli(s => H1(s.executeAsync(query, values.asJava))) 60 | 61 | def executeStatement(statement: Statement): SessionAPIOps[M, ResultSet] = 62 | kleisli(s => H1(s.executeAsync(statement))) 63 | 64 | def executeWithByteBuffer( 65 | query: String, 66 | values: 
List[SerializableValueBy[Int]], 67 | consistencyLevel: Option[ConsistencyLevel] = None): SessionAPIOps[M, ResultSet] = 68 | kleisli { session => 69 | values.traverse(_.serializableValue.serialize[M]).flatMap { values => 70 | val st = ByteBufferSimpleStatement(query, values.toArray) 71 | consistencyLevel.foreach(st.setConsistencyLevel) 72 | H1(session.executeAsync(st)) 73 | } 74 | } 75 | 76 | case class ByteBufferSimpleStatement(query: String, values: Array[ByteBuffer]) 77 | extends SimpleStatement(query, values) { 78 | override def getValues( 79 | protocolVersion: ProtocolVersion, 80 | codecRegistry: CodecRegistry): Array[ByteBuffer] = values 81 | } 82 | 83 | } 84 | 85 | class ClusterAPIHandler[M[_]]( 86 | implicit H1: ListenableFuture ~> M, 87 | H2: ListenableFuture[Void] => ListenableFuture[Unit], 88 | ME: MonadError[M, Throwable]) 89 | extends ClusterAPI.Handler[ClusterAPIOps[M, ?]] { 90 | 91 | def connect: ClusterAPIOps[M, Session] = kleisli(c => H1(c.connectAsync())) 92 | 93 | def connectKeyspace(keyspace: String): ClusterAPIOps[M, Session] = 94 | kleisli(c => H1(c.connectAsync(keyspace))) 95 | 96 | def close: ClusterAPIOps[M, Unit] = kleisli(c => H1(c.closeAsync())) 97 | 98 | def configuration: ClusterAPIOps[M, Configuration] = 99 | kleisli(c => ME.catchNonFatal(c.getConfiguration)) 100 | 101 | def metadata: ClusterAPIOps[M, Metadata] = 102 | kleisli(c => ME.catchNonFatal(c.getMetadata)) 103 | 104 | def metrics: ClusterAPIOps[M, Metrics] = 105 | kleisli(c => ME.catchNonFatal(c.getMetrics)) 106 | 107 | } 108 | 109 | class StatementAPIHandler[M[_]](implicit ME: MonadError[M, Throwable]) 110 | extends StatementAPI.Handler[M] { 111 | 112 | def bind(preparedStatement: PreparedStatement): M[BoundStatement] = 113 | ME.catchNonFatal(preparedStatement.bind()) 114 | 115 | def setByteBufferByIndex( 116 | boundStatement: BoundStatement, 117 | index: Int, 118 | bytes: ByteBuffer): M[BoundStatement] = 119 | ME.catchNonFatal(boundStatement.setBytesUnsafe(index, bytes)) 120 
| 121 | def setByteBufferByName( 122 | boundStatement: BoundStatement, 123 | name: String, 124 | bytes: ByteBuffer): M[BoundStatement] = 125 | ME.catchNonFatal(boundStatement.setBytesUnsafe(name, bytes)) 126 | 127 | def setValueByIndex[T]( 128 | boundStatement: BoundStatement, 129 | index: Int, 130 | value: T, 131 | codec: ByteBufferCodec[T]): M[BoundStatement] = 132 | ME.flatMap(codec.serialize[M](value))(setByteBufferByIndex(boundStatement, index, _)) 133 | 134 | def setValueByName[T]( 135 | boundStatement: BoundStatement, 136 | name: String, 137 | value: T, 138 | codec: ByteBufferCodec[T]): M[BoundStatement] = 139 | ME.flatMap(codec.serialize[M](value))(setByteBufferByName(boundStatement, name, _)) 140 | 141 | def setByteBufferListByIndex( 142 | statement: PreparedStatement, 143 | values: List[SerializableValueBy[Int]]): M[BoundStatement] = 144 | setByteBufferList(statement, values, setByteBufferByIndex) 145 | 146 | def setByteBufferListByName( 147 | statement: PreparedStatement, 148 | values: List[SerializableValueBy[String]]): M[BoundStatement] = 149 | setByteBufferList(statement, values, setByteBufferByName) 150 | 151 | private[this] def setByteBufferList[T]( 152 | statement: PreparedStatement, 153 | values: List[SerializableValueBy[T]], 154 | setValue: (BoundStatement, T, ByteBuffer) => M[BoundStatement]): M[BoundStatement] = 155 | ME.flatMap(bind(statement)) { boundSt => 156 | values.foldM(boundSt) { (b, v) => 157 | ME.flatMap(v.serializableValue.serialize[M])(setValue(b, v.position, _)) 158 | } 159 | } 160 | 161 | } 162 | 163 | class ResultSetAPIHandler[M[_]](implicit ME: MonadError[M, Throwable]) 164 | extends ResultSetAPI.Handler[M] { 165 | 166 | def read[A](resultSet: ResultSet, FR: FromReader[A]): M[A] = 167 | ME.flatMap(ME.catchNonFatal(resultSet.one())) { 168 | Option(_) 169 | .map(readRow(_, FR)) 170 | .getOrElse(ME.raiseError(new IllegalStateException("Row is empty"))) 171 | } 172 | 173 | def readOption[A](resultSet: ResultSet, FR: FromReader[A]): 
M[Option[A]] = 174 | ME.flatMap(ME.catchNonFatal(resultSet.one())) { 175 | Option(_) 176 | .map(row => ME.map(readRow(row, FR))(Option(_))) 177 | .getOrElse(ME.pure(None)) 178 | } 179 | 180 | def readList[A](resultSet: ResultSet, FR: FromReader[A]): M[List[A]] = { 181 | import scala.collection.JavaConverters._ 182 | ME.flatMap(ME.catchNonFatal(resultSet.iterator().asScala.toList))(_.traverse(readRow(_, FR))) 183 | } 184 | 185 | private[this] def readRow[A](row: Row, fromReader: FromReader[A]): M[A] = 186 | fromReader(DatastaxRowReader(row)) 187 | } 188 | -------------------------------------------------------------------------------- /core/src/main/scala/implicits.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
 */

package freestyle.cassandra

import cats.{~>, MonadError}
import com.datastax.driver.core.{Cluster, Session}
import freestyle.async.AsyncContext
import freestyle.free.async.{Implicits => AsyncImplicits}
import freestyle.free.asyncGuava.AsyncGuavaImplicits
import freestyle.cassandra.api._
import freestyle.cassandra.handlers._

import scala.concurrent.ExecutionContext

/**
 * Wires the API handlers into implicit freestyle interpreters. Mixes in the
 * async and Guava implicits so `ListenableFuture ~> M` is available to the
 * cluster/session handlers.
 */
trait CassandraImplicits extends AsyncImplicits with AsyncGuavaImplicits {

  // Interprets ClusterAPI ops by running the handler's Kleisli against the implicit Cluster.
  implicit def clusterAPIInterpreter[M[_]](
      implicit cluster: Cluster,
      AC: AsyncContext[M],
      E: ExecutionContext,
      ME: MonadError[M, Throwable]): ClusterAPI.Op ~> M =
    new ClusterAPIHandler[M] andThen apiInterpreter[M, Cluster](cluster)

  // Interprets SessionAPI ops by running the handler's Kleisli against the implicit Session.
  implicit def sessionAPIInterpreter[M[_]](
      implicit session: Session,
      AC: AsyncContext[M],
      E: ExecutionContext,
      ME: MonadError[M, Throwable]): SessionAPI.Op ~> M =
    new SessionAPIHandler[M] andThen apiInterpreter[M, Session](session)

  implicit def statementAPIHandler[M[_]](
      implicit ME: MonadError[M, Throwable]): StatementAPIHandler[M] =
    new StatementAPIHandler[M]

  implicit def resultSetAPIHandler[M[_]](
      implicit ME: MonadError[M, Throwable]): ResultSetAPIHandler[M] =
    new ResultSetAPIHandler[M]
}

object implicits extends CassandraImplicits
// -------------------------------------------------------------------------------- core/src/main/scala/query/FieldLister.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query

import shapeless._
import shapeless.labelled.FieldType

/**
 * Type class yielding the column-name list of a case class `A`, derived
 * generically from its labelled representation.
 */
trait FieldLister[A] {
  val list: List[String]
}

trait FieldListerPrimitive {
  // Emits the head field's label (formatted by the implicit Printer) and recurses on the tail.
  implicit def primitiveFieldLister[K <: Symbol, H, T <: HList](
      implicit witness: Witness.Aux[K],
      printer: Printer,
      tLister: FieldLister[T]): FieldLister[FieldType[K, H] :: T] =
    FieldLister[FieldType[K, H] :: T](printer.print(witness.value.name) :: tLister.list)
}

trait FieldListerGeneric extends FieldListerPrimitive {

  // Derives a lister for any case class via its LabelledGeneric representation.
  // `Lazy` breaks implicit-resolution cycles for recursive structures.
  implicit def genericLister[A, R](
      implicit gen: LabelledGeneric.Aux[A, R],
      lister: Lazy[FieldLister[R]]): FieldLister[A] = FieldLister[A](lister.value.list)

  implicit val hnilLister: FieldLister[HNil] = FieldLister[HNil](Nil)

}

object FieldLister extends FieldListerGeneric {
  def apply[A](l: List[String]): FieldLister[A] = new FieldLister[A] {
    override val list: List[String] = l
  }
}

/**
 * Alternative derivation that recurses INTO nested case-class fields,
 * flattening their column names instead of printing the field label.
 */
object FieldListerExpanded extends FieldListerGeneric {

  implicit def hconsLister[K, H, T <: HList](
      implicit hLister: Lazy[FieldLister[H]],
      tLister: FieldLister[T]): FieldLister[FieldType[K, H] :: T] =
    FieldLister[FieldType[K, H] :: T](hLister.value.list ++ tLister.list)

}
// -------------------------------------------------------------------------------- core/src/main/scala/query/StatementGenerator.scala:
// --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query

/** Generates basic CQL statements (SELECT / INSERT) for a case class `A`. */
trait StatementGenerator[A] {
  def select(table: String): String
  def insert(table: String): String
}

object StatementGenerator {

  /** Summoner. */
  def apply[A](implicit ev: StatementGenerator[A]): StatementGenerator[A] = ev

  /** Derives a generator from the field names provided by `FieldLister[A]`. */
  implicit def genericGenerator[A](implicit fieldLister: FieldLister[A]): StatementGenerator[A] =
    new StatementGenerator[A] {

      override def select(table: String): String = {
        val columnList = fieldLister.list.mkString(",")
        s"SELECT $columnList FROM $table"
      }

      override def insert(table: String): String = {
        val columns      = fieldLister.list
        val columnList   = columns.mkString(",")
        // one "?" placeholder per column
        val placeholders = columns.map(_ => "?").mkString(",")
        s"INSERT INTO $table ($columnList) VALUES ($placeholders)"
      }
    }
}
// -------------------------------------------------------------------------------- core/src/main/scala/query/interpolator/CQLInterpolator.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query.interpolator

import cats.data.Validated.{Invalid, Valid}
import contextual.Interpolator
import freestyle.cassandra.query.model.{SerializableValue, SerializableValueBy}
import freestyle.cassandra.schema.{DDL, DML, Statements}
import freestyle.cassandra.schema.validator.SchemaValidator
import troy.cql.ast.{CqlParser, DataDefinition, DataManipulation}

import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}

/**
 * `contextual` interpolator for the `cql"..."` string context. At compile time
 * (`contextualize`) it reconstructs the statement with `?` in place of every
 * interpolated hole, parses it with Troy, and validates it against the given
 * `SchemaValidator`, aborting compilation with the validation/parse errors on
 * failure. At runtime (`evaluate`) it rebuilds the statement plus the
 * positional serialized values.
 */
class CQLInterpolator(V: SchemaValidator[Try]) extends Interpolator {

  override type ContextType = CQLContext
  override type Input = SerializableValue

  override def contextualize(interpolation: StaticInterpolation): Seq[ContextType] = {

    // Rebuild the raw CQL text, substituting "?" for each hole.
    val cql = interpolation.parts.foldLeft("") {
      case (prev, _ @Literal(_, string)) => prev + string
      case (prev, _ @Hole(_, _))         => prev + "?"
      case (prev, _)                     => prev
    }

    parseStatement(cql).flatMap(V.validateStatement(_)(cats.instances.try_.catsStdInstancesForTry)) match {
      case Success(Valid(_)) =>
        // One CQLLiteral context per part; all holes share the same context.
        Seq.fill(interpolation.parts.size)(CQLLiteral)
      case Success(Invalid(list)) =>
        interpolation.abort(Literal(0, cql), 0, list.map(_.getMessage).toList.mkString(","))
      case Failure(e) => interpolation.abort(Literal(0, cql), 0, e.getMessage)
    }

  }

  /** Runtime assembly: the final CQL string plus each substituted value tagged with its index. */
  def evaluate(interpolation: RuntimeInterpolation): (String, List[SerializableValueBy[Int]]) =
    interpolation.parts.foldLeft(("", List.empty[SerializableValueBy[Int]])) {
      case ((cql, values), Literal(_, string)) =>
        (cql + string, values)
      case ((cql, values), Substitution(index, value)) =>
        (cql + "?", values :+ SerializableValueBy(index, value))
    }

  // Tries to parse the text first as a DML statement, then as DDL; if both fail,
  // both parser messages are surfaced in a single ParseError.
  private[this] def parseStatement(cql: String): Try[Statements] = {

    def parseStatementWith[T](
        parseFunction: (String) => CqlParser.ParseResult[T],
        mapResult: T => Statements): Try[Statements] =
      parseFunction(cql) match {
        case CqlParser.Success(result, _) => Success(mapResult(result))
        case CqlParser.Failure(msg, _)    => Failure(new IllegalArgumentException(msg))
        case CqlParser.Error(msg, _)      => Failure(new IllegalArgumentException(msg))
      }

    def parseDataManipulationStatement(cql: String): Try[Statements] =
      parseStatementWith[DataManipulation](
        parseFunction = CqlParser.parseDML,
        mapResult = dm => DML(dm))

    def parseDataDefinitionStatement(cql: String): Try[Statements] =
      parseStatementWith[Seq[DataDefinition]](
        parseFunction = CqlParser.parseSchema,
        mapResult = seq => DDL(seq))

    parseDataManipulationStatement(cql).recoverWith {
      case NonFatal(e1) =>
        parseDataDefinitionStatement(cql).recoverWith {
          case NonFatal(e2) =>
            Failure(ParseError(e1.getMessage :: e2.getMessage :: Nil))
        }
    }
  }
}
// -------------------------------------------------------------------------------- core/src/main/scala/query/interpolator/MacroInterpolator.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query.interpolator

import scala.collection.immutable.Seq
import scala.meta._

/**
 * scala.meta (macro-annotation) entry points that generate, for an annotated
 * trait, a companion object providing a schema-validated `cql` interpolator.
 * The schema comes either from a CQL schema file (`SchemaFileInterpolator`) or
 * from live cluster metadata (`SchemaMetadataInterpolator`).
 */
object MacroInterpolator {

  // Which schema source backs the generated validator.
  sealed trait ValidatorType
  case object SchemaFile extends ValidatorType
  case object Metadata extends ValidatorType

  import MetaMacroInterpolator._

  /** Annotation: validate `cql` statements against the schema file at `schemaPath`. */
  class SchemaFileInterpolator(schemaPath: String) extends scala.annotation.StaticAnnotation {

    inline def apply(defn: Any): Any =
      meta(generateMacro(this, defn, getClass.getSimpleName, SchemaFile))
  }

  /** Annotation: validate `cql` statements against cluster metadata configured at `configPath`. */
  class SchemaMetadataInterpolator(configPath: String) extends scala.annotation.StaticAnnotation {

    inline def apply(defn: Any): Any =
      meta(generateMacro(this, defn, getClass.getSimpleName, Metadata))
  }

  object MetaMacroInterpolator {

    /**
     * Extracts the single non-empty String argument from the annotation
     * application and, when the annotee is a trait, emits the trait plus a
     * generated companion; otherwise aborts macro expansion.
     */
    def generateMacro(stat: Stat, defn: Any, annotation: String, validatorType: ValidatorType): Term.Block = {

      val arg = stat match {
        case q"new $_(${Lit(argument: String)})" if argument.nonEmpty =>
          argument
        case _ =>
          abort(s"@$annotation annotation requires a valid path")
      }

      defn match {
        case t: Defn.Trait =>
          Term.Block(Seq(t, companion(t.name, arg, validatorType)))
        case _ =>
          abort(s"@$annotation must annotate a trait.")
      }
    }

    // Generated code that resolves `path` as a classpath resource of the companion,
    // failing with IllegalArgumentException when absent.
    private[this] def inputStreamBlock(typeName: Type.Name, path: String): Term.Block =
      q"""
      import _root_.scala.util._
      val myPath = ${Lit.String(path)}
      Try(Option(${Term.Name(typeName.value)}.getClass.getResourceAsStream(myPath))).flatMap {
        case Some(is) => Success(is)
        case None => Failure(new IllegalArgumentException("Resource path " + myPath + " not found"))
      }
      """

    // Generated code building the SchemaValidator[Try] for the chosen schema source.
    private[this] def validatorBlock(typeName: Type.Name, path: String, validator: ValidatorType): Term.Block = {

      def schemaFileValidator =
        q"""
        import _root_.java.io.InputStream
        val tryMonadError: MonadError[Try, Throwable] = _root_.cats.instances.try_.catsStdInstancesForTry
        val isF: Try[InputStream] = ${inputStreamBlock(typeName, path)}
        val schemaProvider: SchemaDefinitionProvider[Try] = TroySchemaProvider[Try](isF)(tryMonadError)
        TroySchemaValidator.instance(tryMonadError, schemaProvider)
        """

      def metadataValidator =
        q"""
        import _root_.java.io.InputStream
        val tryMonadError: MonadError[Try, Throwable] = _root_.cats.instances.try_.catsStdInstancesForTry
        val isF: Try[InputStream] = ${inputStreamBlock(typeName, path)}
        val schemaProvider: SchemaDefinitionProvider[Try] = MetadataSchemaProvider.metadataSchemaProvider[Try](isF)(tryMonadError)
        TroySchemaValidator.instance(tryMonadError, schemaProvider)
        """

      validator match {
        case SchemaFile => schemaFileValidator
        case Metadata   => metadataValidator
      }
    }

    // Generated companion: schemaValidator, a CQLInterpolator over it, the implicit
    // embedding of ByteBufferCodec values, and the `cql` StringContext prefix.
    private[this] def companion(typeName: Type.Name, path: String, validator: ValidatorType) =
      q"""object ${Term.Name(typeName.value)} {

        import _root_.cats.MonadError
        import _root_.contextual.{Case, Prefix}
        import _root_.freestyle.cassandra.codecs.ByteBufferCodec
        import _root_.freestyle.cassandra.query.interpolator.{CQLInterpolator, CQLLiteral}
        import _root_.freestyle.cassandra.query.model.SerializableValue
        import _root_.freestyle.cassandra.schema.provider._
        import _root_.freestyle.cassandra.schema.validator._
        import _root_.java.nio.ByteBuffer
        import _root_.scala.util.Try

        val schemaValidator: SchemaValidator[Try] = ${validatorBlock(typeName, path, validator)}

        object cqlInterpolator extends CQLInterpolator(schemaValidator)

        implicit def embedArgsNamesInCql[T](implicit C: ByteBufferCodec[T]) = cqlInterpolator.embed[T](
          Case(CQLLiteral, CQLLiteral) { v =>
            new SerializableValue {
              override def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
                C.serialize(v)
            }
          }
        )

        final class CQLStringContext(sc: StringContext) {
          val cql = Prefix(cqlInterpolator, sc)
        }

        implicit def cqlStringContext(sc: StringContext): CQLStringContext =
          new CQLStringContext(sc)
      }
      """

  }

}
// -------------------------------------------------------------------------------- core/src/main/scala/query/interpolator/RuntimeCQLInterpolator.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query.interpolator

import java.nio.ByteBuffer

import cats.MonadError
import cats.data.Validated.Valid
import cats.data.ValidatedNel
import contextual.{Case, Prefix}
import freestyle.cassandra.codecs.ByteBufferCodec
import freestyle.cassandra.query.model.SerializableValue
import freestyle.cassandra.schema.validator.SchemaValidator
import freestyle.cassandra.schema.{SchemaError, Statements}

import scala.util.{Success, Try}

/**
 * `cql` interpolator that performs NO schema validation: every statement is
 * accepted (the validator always returns `Valid`). Use the macro-generated
 * interpolators when compile-time schema checking is wanted.
 */
object RuntimeCQLInterpolator {

  // Always-valid validator: statements are only parsed, never checked against a schema.
  private[this] val schemaValidator: SchemaValidator[Try] = new SchemaValidator[Try] {
    override def validateStatement(st: Statements)(
        implicit E: MonadError[Try, Throwable]): Try[ValidatedNel[SchemaError, Unit]] =
      Success(Valid((): Unit))
  }

  object cqlInterpolator extends CQLInterpolator(schemaValidator)

  // Embeds any value with a ByteBufferCodec as an interpolation argument,
  // deferring serialization until the statement is executed.
  implicit def embedArgsNamesInCql[T](implicit C: ByteBufferCodec[T]) = cqlInterpolator.embed[T](
    Case(CQLLiteral, CQLLiteral) { v =>
      new SerializableValue {
        override def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
          C.serialize(v)
      }
    }
  )

  final class CQLStringContext(sc: StringContext) {
    val cql = Prefix(cqlInterpolator, sc)
  }

  implicit def cqlStringContext(sc: StringContext): CQLStringContext =
    new CQLStringContext(sc)

}
// -------------------------------------------------------------------------------- core/src/main/scala/query/interpolator/package.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query

import cats.{~>, MonadError}
import com.datastax.driver.core.{ConsistencyLevel, ResultSet, Session}
import contextual.Context
import freestyle.free._
import freestyle.free.implicits._
import freestyle.async.AsyncContext
import freestyle.cassandra.api._
import freestyle.cassandra.query.mapper.FromReader
import freestyle.cassandra.query.model.SerializableValueBy

import scala.concurrent.ExecutionContext

package object interpolator {

  // The single contextual context used by the cql interpolator.
  sealed trait CQLContext extends Context
  case object CQLLiteral extends CQLContext

  /** Raised when a statement parses as neither DML nor DDL; carries both parser messages. */
  case class ParseError(msgList: List[String])
      extends RuntimeException(s"Parse error: ${msgList.mkString(",")}")

  /**
   * Execution syntax for the `(cql, values)` pair produced by the `cql`
   * interpolator: `as*` methods build free programs, `attempt*` methods
   * additionally interpret them into a target `M`.
   */
  final class InterpolatorOps(tuple: (String, List[SerializableValueBy[Int]])) {

    import freestyle.cassandra.implicits._

    /** Free program executing the statement with its serialized positional values. */
    def asResultSet[M[_]](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit API: SessionAPI[M]): FreeS[M, ResultSet] =
      API.executeWithByteBuffer(tuple._1, tuple._2, consistencyLevel)

    /** Like `asResultSet` but discards the result. */
    def asFree[M[_]](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit API: SessionAPI[M]): FreeS[M, Unit] =
      asResultSet[M](consistencyLevel).map(_ => (): Unit)

    /** Executes and reads exactly one row as `A`. */
    def as[A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        FR: FromReader[A]): FreeS[QueryModule.Op, A] =
      asResultSet[QueryModule.Op](consistencyLevel).flatMap(Q.resultSetAPI.read[A](_))

    /** Executes and reads at most one row as `A`. */
    def asOption[A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        FR: FromReader[A]): FreeS[QueryModule.Op, Option[A]] =
      asResultSet[QueryModule.Op](consistencyLevel).flatMap(Q.resultSetAPI.readOption[A](_))

    /** Executes and reads all rows as a `List[A]`. */
    def asList[A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        FR: FromReader[A]): FreeS[QueryModule.Op, List[A]] =
      asResultSet[QueryModule.Op](consistencyLevel).flatMap(Q.resultSetAPI.readList[A](_))

    // attempt* variants interpret the free program into M using the implicit
    // Session / AsyncContext machinery from freestyle.cassandra.implicits.

    def attemptResultSet[M[_]](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit API: SessionAPI[SessionAPI.Op],
        S: Session,
        AC: AsyncContext[M],
        E: ExecutionContext,
        ME: MonadError[M, Throwable]): M[ResultSet] =
      asResultSet[SessionAPI.Op](consistencyLevel).interpret[M]

    def attempt[M[_]](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit API: SessionAPI[SessionAPI.Op],
        S: Session,
        AC: AsyncContext[M],
        E: ExecutionContext,
        ME: MonadError[M, Throwable]): M[Unit] =
      asFree[SessionAPI.Op](consistencyLevel).interpret[M]

    def attemptAs[M[_], A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        S: Session,
        AC: AsyncContext[M],
        E: ExecutionContext,
        ME: MonadError[M, Throwable],
        FR: FromReader[A]): M[A] =
      as[A](consistencyLevel).interpret[M]

    def attemptAsOption[M[_], A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        S: Session,
        AC: AsyncContext[M],
        E: ExecutionContext,
        ME: MonadError[M, Throwable],
        FR: FromReader[A]): M[Option[A]] =
      asOption[A](consistencyLevel).interpret[M]

    def attemptAsList[M[_], A](consistencyLevel: Option[ConsistencyLevel] = None)(
        implicit Q: QueryModule[QueryModule.Op],
        S: Session,
        AC: AsyncContext[M],
        E: ExecutionContext,
        ME: MonadError[M, Throwable],
        FR: FromReader[A]): M[List[A]] =
      asList[A](consistencyLevel).interpret[M]

  }

  implicit def interpolatorOps(tuple: (String, List[SerializableValueBy[Int]])): InterpolatorOps =
    new InterpolatorOps(tuple)

}
// -------------------------------------------------------------------------------- core/src/main/scala/query/mapper/ByteBufferMapper.scala:
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package freestyle.cassandra 18 | package query.mapper 19 | 20 | trait ByteBufferMapper[A] { 21 | def map(a: A): List[FieldMapper] 22 | } 23 | 24 | object ByteBufferMapper { 25 | 26 | def apply[A](implicit ev: ByteBufferMapper[A]): ByteBufferMapper[A] = ev 27 | 28 | implicit def genericMapper[A](implicit fieldLister: FieldListMapper[A]): ByteBufferMapper[A] = 29 | new ByteBufferMapper[A] { 30 | override def map(a: A): List[FieldMapper] = fieldLister.map(a) 31 | } 32 | } -------------------------------------------------------------------------------- /core/src/main/scala/query/mapper/ByteBufferToField.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
 */

package freestyle.cassandra
package query.mapper

import java.nio.ByteBuffer

import cats.MonadError
import com.datastax.driver.core.Row
import freestyle.cassandra.codecs.ByteBufferCodec
import freestyle.cassandra.query._
import shapeless._
import shapeless.labelled.{FieldBuilder, FieldType}

/** Type class reading a value of `A` out of a [[ByteBufferReader]]. */
trait FromReader[A] {
  def apply[M[_]](reader: ByteBufferReader)(implicit ME: MonadError[M, Throwable]): M[A]
}

/** [[ByteBufferReader]] backed by a Datastax `Row`; fields are fetched by column name. */
case class DatastaxRowReader(row: Row) extends ByteBufferReader {
  override def read[M[_]](name: String)(implicit ME: MonadError[M, Throwable]): M[ByteBuffer] =
    ME.catchNonFatal(row.getBytesUnsafe(name))
}

/** Shapeless derivation of [[FromReader]] for case classes via `LabelledGeneric`. */
trait GenericFromReader {

  // Base case: an empty HList needs nothing from the reader.
  implicit val hnilFromReader: FromReader[HNil] = new FromReader[HNil] {
    override def apply[M[_]](reader: ByteBufferReader)(
        implicit ME: MonadError[M, Throwable]): M[HNil] = ME.pure(HNil)
  }

  // Inductive case: read the head field under its printed record label, decode it with the
  // field's codec, and recurse on the tail.
  implicit def hconsFromReader[K <: Symbol, V, L <: HList](
      implicit
      witness: Witness.Aux[K],
      codec: ByteBufferCodec[V],
      grT: FromReader[L],
      printer: Printer): FromReader[FieldType[K, V] :: L] =
    new FromReader[FieldType[K, V] :: L] {
      override def apply[M[_]](reader: ByteBufferReader)(
          implicit ME: MonadError[M, Throwable]): M[FieldType[K, V] :: L] = {
        // The printer maps the case-class field name to the column name (e.g. lower-casing).
        val newName = printer.print(witness.value.name)
        ME.flatMap(reader.read(newName)) { byteBuffer =>
          ME.map2(codec.deserialize(byteBuffer), grT(reader)) {
            case (result, l) => new FieldBuilder[K].apply(result) :: l
          }
        }
      }
    }

  // Entry point: derive a reader for a product type from the reader of its labelled repr.
  implicit def productFromReader[A, L <: HList](
      implicit
      gen: LabelledGeneric.Aux[A, L],
      grL: FromReader[L]): FromReader[A] =
    new FromReader[A] {
      override def apply[M[_]](reader: ByteBufferReader)(
          implicit ME: MonadError[M, Throwable]): M[A] = ME.map(grL(reader))(gen.from)
    }
}

object GenericFromReader extends GenericFromReader
-------------------------------------------------------------------------------- /core/src/main/scala/query/mapper/FieldToByteBuffer.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package query.mapper

import java.nio.ByteBuffer

import cats.MonadError
import freestyle.cassandra.codecs.ByteBufferCodec
import freestyle.cassandra.query.Printer
import shapeless._
import shapeless.labelled.FieldType

/** A named field together with a way to serialize its value to a `ByteBuffer`. */
abstract class FieldMapper(val name: String) {
  def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer]
}

/** Type class producing the list of [[FieldMapper]]s for a value of `A`. */
trait FieldListMapper[A] {
  def map(a: A): List[FieldMapper]
}

trait FieldMapperPrimitive {

  // Inductive case over the labelled HList: build a FieldMapper for the head field using
  // its codec and printed label, then recurse on the tail.
  implicit def primitiveFieldMapper[K <: Symbol, H, T <: HList](
      implicit witness: Witness.Aux[K],
      printer: Printer,
      codec: Lazy[ByteBufferCodec[H]],
      tMapper: FieldListMapper[T]): FieldListMapper[FieldType[K, H] :: T] = {
    val fieldName = printer.print(witness.value.name)
    FieldListMapper { hlist =>
      // Serialization is deferred: the codec runs only when `serialize` is invoked.
      val fieldMapper = new FieldMapper(fieldName) {
        override def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
          codec.value.serialize(hlist.head)
      }
      fieldMapper :: tMapper.map(hlist.tail)
    }
  }
}

trait FieldMapperGeneric extends FieldMapperPrimitive {

  // Derive a mapper for a product type from the mapper of its labelled representation.
  implicit def genericMapper[A, R](
      implicit gen: LabelledGeneric.Aux[A, R],
      mapper: Lazy[FieldListMapper[R]]): FieldListMapper[A] =
    FieldListMapper(value => mapper.value.map(gen.to(value)))

  // Base case: an empty HList has no fields.
  implicit val hnilMapper: FieldListMapper[HNil] = FieldListMapper[HNil](_ => Nil)

}

object FieldListMapper extends FieldMapperGeneric {
  def apply[A](f: (A) => List[FieldMapper]): FieldListMapper[A] = new FieldListMapper[A] {
    override def map(a: A): List[FieldMapper] = f(a)
  }
}

// Alternative derivation that flattens nested products into a single field list.
object FieldMapperExpanded extends FieldMapperGeneric {

  implicit def hconsMapper[K, H, T <: HList](
      implicit hMapper: Lazy[FieldListMapper[H]],
      tMapper: FieldListMapper[T]): FieldListMapper[FieldType[K, H] :: T] =
    FieldListMapper(hlist => hMapper.value.map(hlist.head) ++ tMapper.map(hlist.tail))
}
-------------------------------------------------------------------------------- /core/src/main/scala/query/model.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package freestyle.cassandra 18 | package query 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import cats.MonadError 23 | 24 | object model { 25 | 26 | trait SerializableValue { 27 | def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] 28 | } 29 | 30 | trait SerializableValueBy[T] { 31 | def position: T 32 | def serializableValue: SerializableValue 33 | } 34 | 35 | object SerializableValueBy { 36 | 37 | def apply(p: Int, s: SerializableValue): SerializableValueBy[Int] = 38 | new SerializableValueBy[Int] { 39 | override def position: Int = p 40 | override def serializableValue: SerializableValue = s 41 | } 42 | 43 | def apply(p: String, s: SerializableValue): SerializableValueBy[String] = 44 | new SerializableValueBy[String] { 45 | override def position: String = p 46 | override def serializableValue: SerializableValue = s 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/query/query.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import java.nio.ByteBuffer 20 | 21 | import cats.MonadError 22 | 23 | package object query { 24 | 25 | trait ByteBufferReader { 26 | def read[M[_]](name: String)(implicit ME: MonadError[M, Throwable]): M[ByteBuffer] 27 | } 28 | 29 | trait Printer { 30 | def print(name: String): String 31 | } 32 | 33 | object Printer { 34 | def apply(f: String => String): Printer = new Printer { 35 | override def print(name: String): String = f(name) 36 | } 37 | } 38 | 39 | val identityPrinter: Printer = Printer(identity) 40 | 41 | val lowerCasePrinter: Printer = Printer(_.toLowerCase) 42 | 43 | val upperCasePrinter: Printer = Printer(_.toUpperCase) 44 | 45 | } 46 | -------------------------------------------------------------------------------- /core/src/main/scala/schema/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import cats.MonadError 20 | import troy.cql.ast.{DataDefinition, DataManipulation} 21 | 22 | package object schema { 23 | 24 | sealed abstract class SchemaError(msg: String, maybeCause: Option[Throwable] = None) 25 | extends RuntimeException(msg) { 26 | maybeCause foreach initCause 27 | } 28 | 29 | case class SchemaDefinitionProviderError(msg: String, maybeCause: Option[Throwable] = None) 30 | extends SchemaError(msg, maybeCause) 31 | 32 | object SchemaDefinitionProviderError { 33 | def apply(e: Throwable): SchemaDefinitionProviderError = 34 | SchemaDefinitionProviderError(e.getMessage, Some(e)) 35 | } 36 | 37 | case class SchemaValidatorError(msg: String, maybeCause: Option[Throwable] = None) 38 | extends SchemaError(msg, maybeCause) 39 | 40 | type SchemaDefinition = Seq[DataDefinition] 41 | 42 | sealed trait Statements 43 | case class DDL(statements: Seq[DataDefinition]) extends Statements 44 | case class DML(statement: DataManipulation) extends Statements 45 | 46 | def catchNonFatalAsSchemaError[M[_], A](value: => A)(implicit E: MonadError[M, Throwable]): M[A] = 47 | E.handleErrorWith(E.catchNonFatal(value))(e => E.raiseError(SchemaDefinitionProviderError(e))) 48 | 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/schema/provider/MetadataSchemaProvider.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.provider 19 | 20 | import java.io.{InputStream, InputStreamReader} 21 | 22 | import cats.MonadError 23 | import cats.implicits._ 24 | import com.datastax.driver.core._ 25 | import freestyle.cassandra.config.Decoders 26 | import freestyle.cassandra.schema._ 27 | import freestyle.cassandra.schema.provider.metadata.SchemaConversions 28 | 29 | import scala.collection.JavaConverters._ 30 | import scala.language.postfixOps 31 | 32 | class MetadataSchemaProvider[M[_]](clusterProvider: M[Cluster]) 33 | extends SchemaDefinitionProvider[M] 34 | with SchemaConversions { 35 | 36 | def extractTables(keyspaceMetadata: KeyspaceMetadata): List[AbstractTableMetadata] = 37 | keyspaceMetadata.getTables.asScala.toList 38 | 39 | def extractIndexes(tableMetadataList: List[AbstractTableMetadata]): List[IndexMetadata] = 40 | tableMetadataList.flatMap { 41 | case (t: TableMetadata) => t.getIndexes.asScala.toList 42 | case _ => Nil 43 | } 44 | 45 | def extractUserTypes(keyspaceMetadata: KeyspaceMetadata): List[UserType] = 46 | keyspaceMetadata.getUserTypes.asScala.toList 47 | 48 | override def schemaDefinition(implicit E: MonadError[M, Throwable]): M[SchemaDefinition] = { 49 | 50 | def metadata(): M[Metadata] = { 51 | 52 | def connect(): M[Cluster] = E.flatMap(clusterProvider) { cluster => 53 | catchNonFatalAsSchemaError[M, Cluster] { 54 | cluster.connect() 55 | cluster 56 | } 57 | } 58 | 59 | E.flatMap(connect())(c => E.catchNonFatal(c.getMetadata)) 60 | } 61 | 62 | 
E.flatMap(metadata()) { metadata => 63 | val keyspaceList: List[KeyspaceMetadata] = metadata.getKeyspaces.asScala.toList 64 | val tableList: List[AbstractTableMetadata] = keyspaceList.flatMap(extractTables) 65 | val indexList: List[IndexMetadata] = extractIndexes(tableList) 66 | val userTypeList: List[UserType] = keyspaceList.flatMap(extractUserTypes) 67 | 68 | E.map4( 69 | keyspaceList.traverse(toCreateKeyspace[M]), 70 | tableList.traverse(toCreateTable[M]), 71 | indexList.traverse(toCreateIndex[M](_)), 72 | userTypeList.traverse(toUserType[M]) 73 | )(_ ++ _ ++ _ ++ _) 74 | } 75 | 76 | } 77 | } 78 | 79 | object MetadataSchemaProvider { 80 | 81 | implicit def metadataSchemaProvider[M[_]]( 82 | implicit cluster: Cluster, 83 | E: MonadError[M, Throwable]): SchemaDefinitionProvider[M] = 84 | new MetadataSchemaProvider[M](E.pure(cluster)) 85 | 86 | def clusterProvider[M[_]](config: InputStream)( 87 | implicit E: MonadError[M, Throwable]): M[Cluster] = { 88 | 89 | import classy.config._ 90 | import classy.{DecodeError, Decoder} 91 | import com.datastax.driver.core.Cluster 92 | import com.typesafe.config.{Config, ConfigFactory} 93 | 94 | def decodeConfig: M[Either[DecodeError, Cluster]] = 95 | catchNonFatalAsSchemaError[M, Either[DecodeError, Cluster]] { 96 | val decoders: Decoders[Config] = new Decoders[Config] 97 | val decoder: Decoder[Config, Cluster.Builder] = readConfig[Config]("cluster") andThen decoders.clusterBuilderDecoder 98 | decoder(ConfigFactory.parseReader(new InputStreamReader(config))).map(_.build()) 99 | } 100 | 101 | E.flatMap(decodeConfig) { 102 | case Right(cluster) => E.pure(cluster) 103 | case Left(error) => E.raiseError(new IllegalArgumentException(error.toPrettyString)) 104 | } 105 | } 106 | 107 | def metadataSchemaProvider[M[_]](isF: M[InputStream])( 108 | implicit E: MonadError[M, Throwable]): SchemaDefinitionProvider[M] = 109 | new MetadataSchemaProvider[M](E.flatMap(isF)(is => clusterProvider[M](is))) 110 | 111 | } 112 | 
-------------------------------------------------------------------------------- /core/src/main/scala/schema/provider/TroySchemaProvider.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package schema.provider

import java.io.InputStream

import cats.MonadError
import freestyle.cassandra.schema._
import troy.cql.ast.CqlParser

/**
 * [[SchemaDefinitionProvider]] that parses a CQL schema script with the Troy parser.
 * Parse failures are raised as [[SchemaDefinitionProviderError]] with position information.
 */
class TroySchemaProvider[M[_]](cqlF: => M[String]) extends SchemaDefinitionProvider[M] {

  override def schemaDefinition(implicit E: MonadError[M, Throwable]): M[SchemaDefinition] =
    E.flatMap(cqlF) { cql =>
      CqlParser.parseSchema(cql) match {
        case CqlParser.Success(res, _) => E.pure(res)
        case CqlParser.Failure(msg, next) =>
          E.raiseError(
            SchemaDefinitionProviderError(
              s"Parse Failure: $msg, line = ${next.pos.line}, column = ${next.pos.column}"))
        case CqlParser.Error(msg, _) => E.raiseError(SchemaDefinitionProviderError(msg))
      }
    }
}

object TroySchemaProvider {

  /** Provider for an in-memory CQL script. */
  def apply[M[_]](cql: String)(implicit E: MonadError[M, Throwable]): TroySchemaProvider[M] =
    new TroySchemaProvider(E.pure(cql))

  /**
   * Provider reading the CQL script from an `InputStream`. The stream is always closed,
   * even when reading fails (previously `close()` was skipped if `mkString` threw).
   */
  def apply[M[_]](isF: M[InputStream])(
      implicit E: MonadError[M, Throwable]): TroySchemaProvider[M] = {
    val cqlF: M[String] = E.flatMap(isF) { is =>
      catchNonFatalAsSchemaError {
        try scala.io.Source.fromInputStream(is).mkString
        finally is.close()
      }
    }
    new TroySchemaProvider[M](cqlF)
  }

}
-------------------------------------------------------------------------------- /core/src/main/scala/schema/provider/metadata/SchemaConversions.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package schema.provider.metadata

import cats.MonadError
import cats.instances.list._
import cats.syntax.traverse._
import com.datastax.driver.core.{
  AbstractTableMetadata,
  ColumnMetadata,
  IndexMetadata,
  KeyspaceMetadata,
  TupleType,
  UserType,
  DataType => DatastaxDataType
}
import freestyle.cassandra.schema._
import troy.cql.ast._
import troy.cql.ast.ddl.Keyspace.Replication
import troy.cql.ast.ddl.Table.PrimaryKey
import troy.cql.ast.ddl.{Field, Index, Table}

import scala.collection.JavaConverters._
import scala.language.postfixOps

/**
 * Conversions from Datastax driver metadata objects to the Troy CQL AST, used to rebuild a
 * schema definition from a live cluster. Failures are surfaced as SchemaErrors through `M`.
 */
trait SchemaConversions {

  /** Converts keyspace metadata to a `CREATE KEYSPACE` AST node. */
  def toCreateKeyspace[M[_]](keyspaceMetadata: KeyspaceMetadata)(
      implicit E: MonadError[M, Throwable]): M[CreateKeyspace] =
    catchNonFatalAsSchemaError {
      // A null keyspace name is turned into a failure inside the error channel.
      val name: String = Option(keyspaceMetadata.getName)
        .getOrElse(throw new IllegalArgumentException("Schema name is null"))
      // Replication options are sorted by key for a deterministic AST.
      val replication: Option[Replication] = Option(keyspaceMetadata.getReplication)
        .flatMap { m =>
          val seq = m.asScala.toSeq
          if (seq.isEmpty) None else Option(Replication(seq.sortBy(_._1)))
        }
      CreateKeyspace(
        ifNotExists = false,
        keyspaceName = KeyspaceName(name),
        properties = replication map (Seq(_)) getOrElse Seq.empty)
    }

  /** Converts table metadata (columns + primary key) to a `CREATE TABLE` AST node. */
  def toCreateTable[M[_]](metadata: AbstractTableMetadata)(
      implicit E: MonadError[M, Throwable]): M[CreateTable] =
    E.flatten {
      catchNonFatalAsSchemaError {
        val columnsM: M[List[Table.Column]] =
          metadata.getColumns.asScala.toList.traverse(toTableColumn(_)(E))
        val pKeyM: M[PrimaryKey] = toPrimaryKey(
          metadata.getPartitionKey.asScala.toList,
          metadata.getClusteringColumns.asScala.toList)

        E.map2(columnsM, pKeyM) { (columns, pKey) =>
          CreateTable(
            ifNotExists = false,
            tableName =
              TableName(Some(KeyspaceName(metadata.getKeyspace.getName)), metadata.getName),
            columns = columns,
            primaryKey = Some(pKey),
            options = Seq.empty
          )
        }
      }
    }

  /** Default extraction of the fully-qualified table name an index belongs to. */
  def readTable(metadata: IndexMetadata): TableName =
    TableName(Some(KeyspaceName(metadata.getTable.getKeyspace.getName)), metadata.getTable.getName)

  /** Converts index metadata to a `CREATE INDEX` AST node. */
  def toCreateIndex[M[_]](
      metadata: IndexMetadata,
      readTable: (IndexMetadata) => TableName = readTable)(
      implicit E: MonadError[M, Throwable]): M[CreateIndex] =
    catchNonFatalAsSchemaError {
      CreateIndex(
        isCustom = metadata.isCustomIndex,
        ifNotExists = false,
        indexName = Option(metadata.getName),
        tableName = readTable(metadata),
        identifier = Index.Identifier(metadata.getTarget),
        using =
          if (metadata.isCustomIndex)
            // The options are not visible in the IndexMetadata class
            Some(Index.Using(metadata.getIndexClassName, None))
          else None
      )
    }

  /** Converts a user-defined type to a `CREATE TYPE` AST node. */
  def toUserType[M[_]](userType: UserType)(implicit E: MonadError[M, Throwable]): M[CreateType] =
    E.flatten {
      catchNonFatalAsSchemaError {
        val fieldsM: M[List[Field]] =
          userType.getFieldNames.asScala.toList.traverse { fieldName =>
            toField(fieldName, userType.getFieldType(fieldName))
          }

        val typeName = TypeName(Some(KeyspaceName(userType.getKeyspace)), userType.getTypeName)

        E.map(fieldsM) { list =>
          CreateType(ifNotExists = false, typeName = typeName, fields = list)
        }
      }
    }

  // Converts one UDT field (name + driver type) to a Troy Field.
  private[this] def toField[M[_]](name: String, datastaxDataType: DatastaxDataType)(
      implicit E: MonadError[M, Throwable]): M[Field] =
    E.map(toDataType(datastaxDataType))(Field(name, _))

  // Converts one column; the primary-key flag is set later from the table's key metadata.
  private[this] def toTableColumn[M[_]](metadata: ColumnMetadata)(
      implicit E: MonadError[M, Throwable]): M[Table.Column] =
    E.map(toDataType(metadata.getType)) { dataType =>
      Table.Column(
        name = metadata.getName,
        dataType = dataType,
        isStatic = metadata.isStatic,
        isPrimaryKey = false)
    }

  // Converts any driver DataType (native, collection, tuple, custom or user-defined) to the
  // Troy DataType; unsupported types are raised as SchemaDefinitionProviderError.
  private[this] def toDataType[M[_]](dataType: DatastaxDataType)(
      implicit E: MonadError[M, Throwable]): M[DataType] = {

    import DatastaxDataType._

    def toDataTypeNative(dataType: DatastaxDataType): M[DataType.Native] =
      dataType.getName match {
        case Name.ASCII     => E.pure(DataType.Ascii)
        case Name.BIGINT    => E.pure(DataType.BigInt)
        case Name.BLOB      => E.pure(DataType.Blob)
        case Name.BOOLEAN   => E.pure(DataType.Boolean)
        case Name.COUNTER   => E.pure(DataType.Counter)
        case Name.DATE      => E.pure(DataType.Date)
        case Name.DECIMAL   => E.pure(DataType.Decimal)
        case Name.DOUBLE    => E.pure(DataType.Double)
        case Name.FLOAT     => E.pure(DataType.Float)
        case Name.INET      => E.pure(DataType.Inet)
        case Name.INT       => E.pure(DataType.Int)
        case Name.SMALLINT  => E.pure(DataType.Smallint)
        case Name.TEXT      => E.pure(DataType.Text)
        case Name.TIME      => E.pure(DataType.Time)
        case Name.TIMESTAMP => E.pure(DataType.Timestamp)
        case Name.TIMEUUID  => E.pure(DataType.Timeuuid)
        case Name.TINYINT   => E.pure(DataType.Tinyint)
        case Name.UUID      => E.pure(DataType.Uuid)
        case Name.VARCHAR   => E.pure(DataType.Varchar)
        case Name.VARINT    => E.pure(DataType.Varint)
        case _ =>
          E.raiseError(
            SchemaDefinitionProviderError(s"Native DataType ${dataType.getName} not supported"))
      }

    // LIST/SET need one native element type; MAP needs two. Anything else fails below.
    def toCollectionType(collectionType: CollectionType): M[DataType] = {

      val typeArgs: List[DatastaxDataType] = collectionType.getTypeArguments.asScala.toList

      val maybeCol: Option[M[DataType]] = collectionType.getName match {
        case Name.LIST =>
          typeArgs.headOption map { typeArg =>
            E.map(toDataTypeNative(typeArg))(DataType.List)
          }
        case Name.SET =>
          typeArgs.headOption map { typeArg =>
            E.map(toDataTypeNative(typeArg))(DataType.Set)
          }
        case Name.MAP =>
          // `typeArgs.tail` is only evaluated when headOption is defined, so it is safe.
          for {
            t1 <- typeArgs.headOption
            t2 <- typeArgs.tail.headOption
          } yield
            E.map2(toDataTypeNative(t1), toDataTypeNative(t2))((t1, t2) => DataType.Map(t1, t2))
        case _ => None
      }

      maybeCol getOrElse {
        E.raiseError(
          SchemaDefinitionProviderError(
            s"Error parsing collection DataType '${collectionType.asFunctionParameterString()}'"))
      }
    }

    def toCustomType(className: String): M[DataType] =
      E.pure(DataType.Custom(className))

    def toUserDefinedType(keyspace: String, typeName: String): M[DataType] =
      E.pure(DataType.UserDefined(KeyspaceName(keyspace), typeName))

    def toTupleType(tupleType: TupleType): M[DataType] =
      E.map(tupleType.getComponentTypes.asScala.toList.traverse(toDataTypeNative))(DataType.Tuple)

    // NOTE: UserType must be matched before it could fall into another branch — UserType is
    // a distinct driver subclass, and the ordering here dispatches each subtype explicitly.
    dataType match {
      case nativeType: NativeType =>
        E.widen[DataType.Native, DataType](toDataTypeNative(nativeType))
      case customType: CustomType         => toCustomType(customType.getCustomTypeClassName)
      case collectionType: CollectionType => toCollectionType(collectionType)
      case tupleType: TupleType           => toTupleType(tupleType)
      case userType: UserType             => toUserDefinedType(userType.getKeyspace, userType.getTypeName)
    }
  }

  // Partition keys followed by clustering columns, by name.
  private[this] def toPrimaryKey[M[_]](
      partitionKeys: List[ColumnMetadata],
      clusteringColumns: List[ColumnMetadata])(
      implicit E: MonadError[M, Throwable]): M[PrimaryKey] =
    E.pure(PrimaryKey(partitionKeys.map(_.getName), clusteringColumns.map(_.getName)))

}
-------------------------------------------------------------------------------- /core/src/main/scala/schema/provider/package.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package schema

import cats.MonadError

package object provider {

  /** Source of a [[SchemaDefinition]]: a CQL script, live cluster metadata, etc. */
  trait SchemaDefinitionProvider[M[_]] {
    def schemaDefinition(implicit E: MonadError[M, Throwable]): M[SchemaDefinition]
  }

}
-------------------------------------------------------------------------------- /core/src/main/scala/schema/validator/TroySchemaValidator.scala: --------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.validator 19 | 20 | import cats.MonadError 21 | import cats.data.{NonEmptyList, Validated, ValidatedNel} 22 | import freestyle.cassandra.schema._ 23 | import freestyle.cassandra.schema.provider.SchemaDefinitionProvider 24 | import troy.cql.ast.DataDefinition 25 | import troy.schema.{Message, Result, SchemaEngine, V} 26 | 27 | class TroySchemaValidator[M[_]]( 28 | implicit E: MonadError[M, Throwable], 29 | SDP: SchemaDefinitionProvider[M]) 30 | extends SchemaValidator[M] { 31 | 32 | override def validateStatement(st: Statements)( 33 | implicit E: MonadError[M, Throwable]): M[ValidatedNel[SchemaError, Unit]] = { 34 | 35 | def toSchemaValidatorError(message: Message): SchemaValidatorError = 36 | SchemaValidatorError(message.message) 37 | 38 | def toValidatedNel[T](result: Result[T]): ValidatedNel[SchemaError, T] = 39 | result match { 40 | case V.Success(res, _) => Validated.valid(res) 41 | case V.Error(es, _) => 42 | Validated.invalid( 43 | NonEmptyList 44 | .fromList(es.map(toSchemaValidatorError).toList) 45 | .getOrElse(NonEmptyList(SchemaValidatorError("Unknown error"), Nil))) 46 | } 47 | 48 | def validateStatement( 49 | schema: Seq[DataDefinition], 50 | st: Statements): M[ValidatedNel[SchemaError, Unit]] = { 51 | 52 | type ValidatedNelSchemaError[T] = ValidatedNel[SchemaError, T] 53 | 54 | def validateWithSchema(schemaEngine: SchemaEngine): Result[Unit] = 55 | (st match { 56 | case DML(sts) => schemaEngine(sts) 57 | case DDL(sts) => SchemaEngine(schema ++ sts) 58 | }).map(_ => (): Unit) 59 | 60 | catchNonFatalAsSchemaError { 61 | toValidatedNel { 62 | SchemaEngine(schema).flatMap(validateWithSchema) 63 | } 64 | } 65 | } 66 | 67 | E.flatMap(SDP.schemaDefinition)(validateStatement(_, st)) 68 | } 69 | 70 | } 71 | 72 | object TroySchemaValidator { 73 | 74 | implicit def instance[M[_]]( 75 | implicit E: MonadError[M, Throwable], 76 | SDP: SchemaDefinitionProvider[M]): SchemaValidator[M] = new 
TroySchemaValidator[M] 77 | } 78 | -------------------------------------------------------------------------------- /core/src/main/scala/schema/validator/package.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema 19 | 20 | import cats.MonadError 21 | import cats.data.ValidatedNel 22 | 23 | package object validator { 24 | 25 | trait SchemaValidator[M[_]] { 26 | 27 | def validateStatement(st: Statements)( 28 | implicit E: MonadError[M, Throwable]): M[ValidatedNel[SchemaError, Unit]] 29 | 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /core/src/test/scala/ListenableFuture2AsyncMSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import cats.~> 20 | import com.google.common.util.concurrent.ListenableFuture 21 | import freestyle.cassandra.TestUtils._ 22 | import freestyle.async.implicits._ 23 | import freestyle.cassandra.implicits._ 24 | import org.scalamock.scalatest.MockFactory 25 | import org.scalatest.{Matchers, OneInstancePerTest, WordSpec} 26 | 27 | import scala.concurrent.Future 28 | import scala.concurrent.ExecutionContext.Implicits.global 29 | 30 | class ListenableFuture2AsyncMSpec 31 | extends WordSpec 32 | with Matchers 33 | with OneInstancePerTest 34 | with MockFactory { 35 | 36 | val handler: ListenableFuture ~> Future = listenableFuture2Async[Future] 37 | 38 | "ListenableFuture2AsyncM" should { 39 | 40 | "return a successfully future when a successfully listenable future is passed" in { 41 | val value = "Hello World!" 42 | runF(handler(successfulFuture(value))) shouldEqual value 43 | } 44 | 45 | "return a failed future when a failed listenable future is passed" in { 46 | val value = "Hello World!" 47 | runFFailed(handler(failedFuture[String])) shouldBe exception 48 | } 49 | 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /core/src/test/scala/TestData.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import java.util.UUID 20 | 21 | import org.scalacheck.{Arbitrary, Gen} 22 | 23 | trait TestData { 24 | 25 | case class User(id: UUID, firstName: String, lastName: String, age: Int) 26 | 27 | val usAsciiStringGen: Gen[String] = 28 | Gen.containerOf[Array, Char](Gen.choose[Char](0, 127)).map(_.mkString) 29 | 30 | implicit val userArb: Arbitrary[User] = Arbitrary { 31 | for { 32 | id <- Gen.uuid 33 | firstName <- usAsciiStringGen 34 | lastName <- usAsciiStringGen 35 | age <- Gen.posNum[Int] 36 | } yield User(id, firstName, lastName, age) 37 | } 38 | 39 | } 40 | 41 | object TestData extends TestData -------------------------------------------------------------------------------- /core/src/test/scala/TestUtils.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | 19 | import java.util.concurrent.{Callable, Executors} 20 | 21 | import cats.data.Kleisli 22 | import com.google.common.util.concurrent.{ListenableFuture, ListeningExecutorService, MoreExecutors} 23 | import org.scalatest.Matchers 24 | 25 | import scala.concurrent.{Await, ExecutionContext, Future} 26 | import scala.concurrent.duration._ 27 | 28 | object TestUtils { 29 | 30 | val service: ListeningExecutorService = 31 | MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10)) 32 | 33 | def successfulFuture[T](value: T): ListenableFuture[T] = 34 | service.submit(new Callable[T] { 35 | override def call(): T = value 36 | }) 37 | 38 | val exception: Throwable = new RuntimeException("Test exception") 39 | 40 | def failedFuture[T]: ListenableFuture[T] = 41 | service.submit(new Callable[T] { 42 | override def call(): T = throw exception 43 | }) 44 | 45 | class Null[A] { var t: A = _ } 46 | 47 | object Null { 48 | def apply[A]: A = new Null[A].t 49 | } 50 | 51 | trait MatchersUtil extends Matchers { 52 | 53 | final class AnyOps[T](value: T) { 54 | 55 | def isEqualTo(other: T): Boolean = { 56 | value shouldBe other 57 | value == other 58 | } 59 | 60 | def isLikeTo(f: T => Boolean): Boolean = f(value) 61 | 62 | } 63 | 64 | implicit def anyOps[T](value: T): AnyOps[T] = new AnyOps[T](value) 65 | 66 | } 67 | 68 | type EitherM[T] = Either[Throwable, T] 69 | 70 | def runF[T](k: Future[T]): T = Await.result(k, 5.seconds) 71 | 72 | def runFFailed[T](k: Future[T]): Throwable = Await.result(k.failed, 5.seconds) 73 | 74 | def runK[A, B](k: Kleisli[Future, A, B], a: A): B = 75 | Await.result(k.run(a), 5.seconds) 76 | 77 | def runKFailed[A, B](k: Kleisli[Future, A, B], a: A): Throwable = 78 | Await.result(k.run(a).failed, 5.seconds) 79 | 80 | val reservedKeywords = List( 81 | "ADD", 82 | "AGGREGATE", 83 | "ALL", 84 
| "ALLOW", 85 | "ALTER", 86 | "AND", 87 | "ANY", 88 | "APPLY", 89 | "AS", 90 | "ASC", 91 | "ASCII", 92 | "AUTHORIZE", 93 | "BATCH", 94 | "BEGIN", 95 | "BIGINT", 96 | "BLOB", 97 | "BOOLEAN", 98 | "BY", 99 | "CLUSTERING", 100 | "COLUMNFAMILY", 101 | "COMPACT", 102 | "CONSISTENCY", 103 | "COUNT", 104 | "COUNTER", 105 | "CREATE", 106 | "CUSTOM", 107 | "DECIMAL", 108 | "DELETE", 109 | "DESC", 110 | "DISTINCT", 111 | "DOUBLE", 112 | "DROP", 113 | "EACH_QUORUM", 114 | "ENTRIES", 115 | "EXISTS", 116 | "FILTERING", 117 | "FLOAT", 118 | "FROM", 119 | "FROZEN", 120 | "FULL", 121 | "GRANT", 122 | "IF", 123 | "IN", 124 | "INDEX", 125 | "INET", 126 | "INFINITY", 127 | "INSERT", 128 | "INT", 129 | "INTO", 130 | "KEY", 131 | "KEYSPACE", 132 | "KEYSPACES", 133 | "LEVEL", 134 | "LIMIT", 135 | "LIST", 136 | "LOCAL_ONE", 137 | "LOCAL_QUORUM", 138 | "MAP", 139 | "MATERIALIZED", 140 | "MODIFY", 141 | "NAN", 142 | "NORECURSIVE", 143 | "NOSUPERUSER", 144 | "NOT", 145 | "OF", 146 | "ON", 147 | "ONE", 148 | "ORDER", 149 | "PARTITION", 150 | "PASSWORD", 151 | "PER", 152 | "PERMISSION", 153 | "PERMISSIONS", 154 | "PRIMARY", 155 | "QUORUM", 156 | "RENAME", 157 | "REVOKE", 158 | "SCHEMA", 159 | "SELECT", 160 | "SET", 161 | "STATIC", 162 | "STORAGE", 163 | "SUPERUSER", 164 | "TABLE", 165 | "TEXT", 166 | "TIME", 167 | "TIMESTAMP", 168 | "TIMEUUID", 169 | "THREE", 170 | "TO", 171 | "TOKEN", 172 | "TRUNCATE", 173 | "TTL", 174 | "TUPLE", 175 | "TWO", 176 | "TYPE", 177 | "UNLOGGED", 178 | "UPDATE", 179 | "USE", 180 | "USER", 181 | "USERS", 182 | "USING", 183 | "UUID", 184 | "VALUES", 185 | "VARCHAR", 186 | "VARINT", 187 | "VIEW", 188 | "WHERE", 189 | "WITH", 190 | "WRITETIME" 191 | ) 192 | 193 | } 194 | -------------------------------------------------------------------------------- /core/src/test/scala/api/ClusterAPISpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import cats.{~>, Id} 21 | import com.datastax.driver.core._ 22 | import freestyle.free._ 23 | import org.scalamock.scalatest.MockFactory 24 | import org.scalatest.{Matchers, OneInstancePerTest, WordSpec} 25 | 26 | class ClusterAPISpec extends WordSpec with Matchers with OneInstancePerTest with MockFactory { 27 | 28 | import TestUtils._ 29 | 30 | val sessionMock: Session = stub[Session] 31 | val configuration: Configuration = new Configuration.Builder().build() 32 | val unit: Unit = () 33 | val keyspace: String = "keyspace" 34 | val metadataTest: Metadata = MetadataTest() 35 | 36 | implicit val clusterAPIHandler: ClusterAPI.Op ~> Id = new (ClusterAPI.Op ~> Id) { 37 | override def apply[A](fa: ClusterAPI.Op[A]): Id[A] = fa match { 38 | case ClusterAPI.ConnectOp() => sessionMock 39 | case ClusterAPI.ConnectKeyspaceOp(_) => sessionMock 40 | case ClusterAPI.CloseOp() => unit 41 | case ClusterAPI.ConfigurationOp() => configuration 42 | case ClusterAPI.MetadataOp() => metadataTest 43 | case ClusterAPI.MetricsOp() => MetricsTest 44 | } 45 | } 46 | 47 | "ClusterAPI" should { 48 | 49 | "work as expect when calling OP" in { 50 | 51 | type ReturnResult = (Session, Session, Unit, Configuration, Metadata, Metrics) 52 | 53 | def program[F[_]](implicit api: ClusterAPI[F]): FreeS[F, ReturnResult] = { 54 | for { 55 | v1 <- 
api.connect 56 | v2 <- api.connectKeyspace(keyspace) 57 | v3 <- api.close 58 | v4 <- api.configuration 59 | v5 <- api.metadata 60 | v6 <- api.metrics 61 | } yield (v1, v2, v3, v4, v5, v6) 62 | } 63 | 64 | val result = program[ClusterAPI.Op].interpret[Id] 65 | result shouldBe ((sessionMock, sessionMock, unit, configuration, metadataTest, MetricsTest)) 66 | } 67 | 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /core/src/test/scala/api/PackageAPISpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import cats.instances.try_._ 21 | import freestyle.cassandra.TestUtils._ 22 | import org.scalacheck.Prop._ 23 | import org.scalatest.WordSpec 24 | import org.scalatest.prop.Checkers 25 | 26 | import scala.util.{Success, Try} 27 | 28 | class PackageAPISpec extends WordSpec with MatchersUtil with Checkers { 29 | 30 | def sample(s: String): Try[Int] = Success(s.length) 31 | 32 | "kleisli" should { 33 | 34 | "apply the Kleisli function for a valid dependency" in { 35 | 36 | check { 37 | forAll { s: String => 38 | kleisli(sample).run(s) isEqualTo Success(s.length) 39 | } 40 | } 41 | 42 | } 43 | 44 | "return an error for a null dependency" in { 45 | kleisli(sample).run(Null[String]).isFailure shouldBe true 46 | } 47 | 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /core/src/test/scala/api/ResultSetAPISpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import cats.{~>, Id} 21 | import com.datastax.driver.core._ 22 | import freestyle.free._ 23 | import freestyle.cassandra.query.mapper.GenericFromReader._ 24 | import org.scalamock.scalatest.MockFactory 25 | import org.scalatest.{Matchers, OneInstancePerTest, WordSpec} 26 | 27 | class ResultSetAPISpec extends WordSpec with Matchers with OneInstancePerTest with MockFactory { 28 | 29 | val sessionMock: Session = stub[Session] 30 | val unit: Unit = () 31 | val prepSt: PreparedStatement = stub[PreparedStatement] 32 | val resultSet: ResultSet = stub[ResultSet] 33 | 34 | case class Test() 35 | 36 | val test: Test = Test() 37 | 38 | implicit val resultSetAPIHandler: ResultSetAPI.Op ~> Id = new (ResultSetAPI.Op ~> Id) { 39 | override def apply[A](fa: ResultSetAPI.Op[A]): Id[A] = fa match { 40 | case ResultSetAPI.ReadOp(_, _) => test.asInstanceOf[A] 41 | case ResultSetAPI.ReadOptionOp(_, _) => Some(test) 42 | case ResultSetAPI.ReadListOp(_, _) => List(test) 43 | } 44 | } 45 | 46 | "ResultSetAPI" should { 47 | 48 | "work as expect when calling OP" in { 49 | 50 | type ReturnResult = (Test, Option[Test], List[Test]) 51 | 52 | def program[F[_]](implicit api: ResultSetAPI[F]): FreeS[F, ReturnResult] = { 53 | for { 54 | v1 <- api.read[Test](resultSet) 55 | v2 <- api.readOption[Test](resultSet) 56 | v3 <- api.readList[Test](resultSet) 57 | } yield (v1, v2, v3) 58 | } 59 | 60 | val result = program[ResultSetAPI.Op].interpret[Id] 61 | result shouldBe ((test, Some(test), List(test))) 62 | } 63 | 64 | } 65 | 66 | } 67 | -------------------------------------------------------------------------------- /core/src/test/scala/api/SessionAPISpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import cats.{~>, Id} 21 | import com.datastax.driver.core._ 22 | import freestyle.free._ 23 | import org.scalamock.scalatest.MockFactory 24 | import org.scalatest.{Matchers, OneInstancePerTest, WordSpec} 25 | 26 | class SessionAPISpec extends WordSpec with Matchers with OneInstancePerTest with MockFactory { 27 | 28 | import TestUtils._ 29 | 30 | val sessionMock: Session = stub[Session] 31 | val unit: Unit = () 32 | val prepSt: PreparedStatement = stub[PreparedStatement] 33 | val resultSet: ResultSet = stub[ResultSet] 34 | 35 | implicit val sessionAPIHandler: SessionAPI.Op ~> Id = new (SessionAPI.Op ~> Id) { 36 | override def apply[A](fa: SessionAPI.Op[A]): Id[A] = fa match { 37 | case SessionAPI.InitOp() => sessionMock 38 | case SessionAPI.CloseOp() => unit 39 | case SessionAPI.PrepareOp(_) => prepSt 40 | case SessionAPI.PrepareStatementOp(_) => prepSt 41 | case SessionAPI.ExecuteOp(_) => resultSet 42 | case SessionAPI.ExecuteWithValuesOp(_, _) => resultSet 43 | case SessionAPI.ExecuteWithMapOp(_, _) => resultSet 44 | case SessionAPI.ExecuteStatementOp(_) => resultSet 45 | case SessionAPI.ExecuteWithByteBufferOp(_, _, _) => resultSet 46 | } 47 | } 48 | 49 | "SessionAPI" should { 50 | 51 | "work as expect when calling OP" in { 52 | 53 | type ReturnResult = ( 54 | Session, 55 | Unit, 56 | PreparedStatement, 57 | 
PreparedStatement, 58 | ResultSet, 59 | ResultSet, 60 | ResultSet, 61 | ResultSet, 62 | ResultSet) 63 | 64 | def program[F[_]](implicit sessionAPI: SessionAPI[F]): FreeS[F, ReturnResult] = { 65 | for { 66 | v1 <- sessionAPI.init 67 | v2 <- sessionAPI.close 68 | v3 <- sessionAPI.prepare("") 69 | v4 <- sessionAPI.prepareStatement(Null[RegularStatement]) 70 | v5 <- sessionAPI.execute("") 71 | v6 <- sessionAPI.executeWithValues("", Null[Any]) 72 | v7 <- sessionAPI.executeWithMap("", Null[Map[String, AnyRef]]) 73 | v8 <- sessionAPI.executeStatement(Null[Statement]) 74 | v9 <- sessionAPI.executeWithByteBuffer("", Nil, None) 75 | } yield (v1, v2, v3, v4, v5, v6, v7, v8, v9) 76 | } 77 | 78 | val result = program[SessionAPI.Op].interpret[Id] 79 | result shouldBe ( 80 | ( 81 | sessionMock, 82 | unit, 83 | prepSt, 84 | prepSt, 85 | resultSet, 86 | resultSet, 87 | resultSet, 88 | resultSet, 89 | resultSet)) 90 | } 91 | 92 | } 93 | 94 | } 95 | -------------------------------------------------------------------------------- /core/src/test/scala/api/StatementAPISpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package api 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import cats.{~>, Id} 23 | import com.datastax.driver.core._ 24 | import freestyle.free._ 25 | import freestyle.cassandra.codecs._ 26 | import org.scalamock.scalatest.MockFactory 27 | import org.scalatest.{Matchers, OneInstancePerTest, WordSpec} 28 | 29 | class StatementAPISpec extends WordSpec with Matchers with OneInstancePerTest with MockFactory { 30 | 31 | val prepSt: PreparedStatement = stub[PreparedStatement] 32 | (prepSt.getVariables _).when().returns(ColumnDefinitionsTest) 33 | (prepSt.getPreparedId _).when().returns(PreparedIdTest) 34 | val boundSt1: BoundStatement = new BoundStatement(prepSt) 35 | val boundSt2: BoundStatement = new BoundStatement(prepSt) 36 | val boundSt3: BoundStatement = new BoundStatement(prepSt) 37 | val boundSt4: BoundStatement = new BoundStatement(prepSt) 38 | val boundSt5: BoundStatement = new BoundStatement(prepSt) 39 | val boundSt6: BoundStatement = new BoundStatement(prepSt) 40 | val boundSt7: BoundStatement = new BoundStatement(prepSt) 41 | val byteBuffer: ByteBuffer = ByteBuffer.wrap("Hello World!".getBytes) 42 | 43 | implicit val statementAPIHandler: StatementAPI.Op ~> Id = new (StatementAPI.Op ~> Id) { 44 | override def apply[A](fa: StatementAPI.Op[A]): Id[A] = fa match { 45 | case StatementAPI.BindOp(_) => boundSt1 46 | case StatementAPI.SetByteBufferByIndexOp(_, _, _) => boundSt2 47 | case StatementAPI.SetByteBufferByNameOp(_, _, _) => boundSt3 48 | case StatementAPI.SetValueByIndexOp(_, _, _, _) => boundSt4 49 | case StatementAPI.SetValueByNameOp(_, _, _, _) => boundSt5 50 | case StatementAPI.SetByteBufferListByIndexOp(_, _) => boundSt6 51 | case StatementAPI.SetByteBufferListByNameOp(_, _) => boundSt7 52 | } 53 | } 54 | 55 | "SessionAPI" should { 56 | 57 | "work as expect when calling OP" in { 58 | 59 | type ReturnResult = 60 | ( 61 | BoundStatement, 62 | BoundStatement, 63 | BoundStatement, 64 | BoundStatement, 
65 | BoundStatement, 66 | BoundStatement, 67 | BoundStatement) 68 | 69 | def program[F[_]](implicit API: StatementAPI[F]): FreeS[F, ReturnResult] = { 70 | for { 71 | v1 <- API.bind(prepSt) 72 | v2 <- API.setByteBufferByIndex(v1, 0, byteBuffer) 73 | v3 <- API.setByteBufferByName(v2, "", byteBuffer) 74 | v4 <- API.setValueByIndex[Double](v3, 0, 15.5, doubleCodec) 75 | v5 <- API.setValueByName[Double](v4, "", 15.5, doubleCodec) 76 | v6 <- API.setByteBufferListByIndex(prepSt, Nil) 77 | v7 <- API.setByteBufferListByName(prepSt, Nil) 78 | } yield (v1, v2, v3, v4, v5, v6, v7) 79 | } 80 | 81 | val result = program[StatementAPI.Op].interpret[Id] 82 | result shouldBe ((boundSt1, boundSt2, boundSt3, boundSt4, boundSt5, boundSt6, boundSt7)) 83 | } 84 | 85 | } 86 | 87 | } 88 | -------------------------------------------------------------------------------- /core/src/test/scala/codecs/CodecsSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package codecs 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import cats.instances.try_._ 23 | import cats.syntax.either._ 24 | import com.datastax.driver.core.exceptions.InvalidTypeException 25 | import com.datastax.driver.core.{DataType, ProtocolVersion, TypeCodec} 26 | import freestyle.cassandra.codecs 27 | import org.scalacheck.{Arbitrary, Gen} 28 | import org.scalacheck.Prop._ 29 | import org.scalamock.scalatest.MockFactory 30 | import org.scalatest.{Assertion, Matchers, WordSpec} 31 | import org.scalatest.prop.Checkers 32 | 33 | import scala.util.{Success, Try} 34 | 35 | class CodecsSpec extends WordSpec with Matchers with Checkers with MockFactory { 36 | 37 | import codecs._ 38 | import TestUtils._ 39 | 40 | def checkInverseCodec[T](codec: ByteBufferCodec[T])(implicit A: Arbitrary[T]): Assertion = 41 | check { 42 | forAll { v: T => 43 | codec.serialize(v) match { 44 | case Success(result) => codec.deserialize(result) == Success(v) 45 | case _ => false 46 | } 47 | } 48 | } 49 | 50 | def byteBufferGen[T](codec: ByteBufferCodec[T], defaultValue: T)( 51 | implicit A: Arbitrary[T]): Gen[(ByteBuffer, T)] = { 52 | 53 | val nullByteBuffer = Null[ByteBuffer] 54 | 55 | def codecGen: Gen[(ByteBuffer, T)] = 56 | for { 57 | value <- A.arbitrary 58 | bb = codec.serialize(value).get 59 | remaining <- Gen.chooseNum[Int](0, bb.limit()) 60 | _ = bb.position(bb.limit() - remaining) 61 | } yield (bb, value) 62 | 63 | Gen.oneOf(Gen.const((nullByteBuffer, defaultValue)), codecGen) 64 | } 65 | 66 | def checkDeserialize[T](codec: ByteBufferCodec[T], byteSize: Int, defaultValue: T)( 67 | implicit A: Arbitrary[T]): Assertion = { 68 | val prop = forAll(byteBufferGen(codec, defaultValue)) { 69 | case (bb, v) => 70 | val deserialized = codec.deserialize(bb) 71 | val remaining = Option(bb).map(_.remaining()).getOrElse(0) 72 | if (remaining == 0) { 73 | deserialized == Success(defaultValue) 74 | } else if (remaining == byteSize) { 75 | 
deserialized == Success(v) 76 | } else { 77 | deserialized.isFailure && deserialized.failed.get.isInstanceOf[InvalidTypeException] 78 | } 79 | } 80 | check(prop, minSuccessful(500)) 81 | } 82 | 83 | abstract class MyStringTypeCodec extends TypeCodec[String](DataType.varchar(), classOf[String]) 84 | 85 | "Boolean codec" should { 86 | 87 | val codec = codecs.booleanCodec 88 | val byteSize = 1 89 | val defaultValue = false 90 | 91 | "check that the serialize and deserialize are invertible" in { 92 | checkInverseCodec(codec) 93 | } 94 | 95 | "deserialize all possible values" in { 96 | checkDeserialize(codec, byteSize, defaultValue) 97 | } 98 | } 99 | 100 | "Byte codec" should { 101 | 102 | val codec = codecs.byteCodec 103 | val byteSize = 1 104 | val defaultValue = 0 105 | 106 | "check that the serialize and deserialize are invertible" in { 107 | checkInverseCodec(codec) 108 | } 109 | 110 | "deserialize all possible values" in { 111 | checkDeserialize(codec, byteSize, defaultValue.toByte) 112 | } 113 | } 114 | 115 | "Double codec" should { 116 | 117 | val codec = codecs.doubleCodec 118 | val byteSize = 8 119 | val defaultValue = 0d 120 | 121 | "check that the serialize and deserialize are invertible" in { 122 | checkInverseCodec(codec) 123 | } 124 | 125 | "deserialize all possible values" in { 126 | checkDeserialize(codec, byteSize, defaultValue) 127 | } 128 | } 129 | 130 | "Float codec" should { 131 | 132 | val codec = codecs.floatCodec 133 | val byteSize = 4 134 | val defaultValue = 0f 135 | 136 | "check that the serialize and deserialize are invertible" in { 137 | checkInverseCodec(codec) 138 | } 139 | 140 | "deserialize all possible values" in { 141 | checkDeserialize(codec, byteSize, defaultValue) 142 | } 143 | } 144 | 145 | "Int codec" should { 146 | 147 | val codec = codecs.intCodec 148 | val byteSize = 4 149 | val defaultValue = 0 150 | 151 | "check that the serialize and deserialize are invertible" in { 152 | checkInverseCodec(codec) 153 | } 154 | 155 | 
"deserialize all possible values" in { 156 | checkDeserialize(codec, byteSize, defaultValue) 157 | } 158 | } 159 | 160 | "Long codec" should { 161 | 162 | val codec = codecs.longCodec 163 | val byteSize = 8 164 | val defaultValue = 0l 165 | 166 | "check that the serialize and deserialize are invertible" in { 167 | checkInverseCodec(codec) 168 | } 169 | 170 | "deserialize all possible values" in { 171 | checkDeserialize(codec, byteSize, defaultValue) 172 | } 173 | } 174 | 175 | "Short codec" should { 176 | 177 | val codec = codecs.shortCodec 178 | val byteSize = 2 179 | val defaultValue = 0 180 | 181 | "check that the serialize and deserialize are invertible" in { 182 | checkInverseCodec(codec) 183 | } 184 | 185 | "deserialize all possible values" in { 186 | checkDeserialize(codec, byteSize, defaultValue.toShort) 187 | } 188 | } 189 | 190 | "Generic ByteBufferCodec" should { 191 | 192 | val bb = ByteBuffer.allocate(10) 193 | val pc = ProtocolVersion.V1 194 | 195 | "call to serialize and deserialize with the right parameters" in { 196 | 197 | check { 198 | forAll { value: String => 199 | val tcMock = mock[MyStringTypeCodec] 200 | val codec = codecs.byteBufferCodec(tcMock, pc) 201 | 202 | (tcMock.serialize _).expects(value, pc).returns(bb) 203 | (tcMock.deserialize _).expects(bb, pc).returns(value) 204 | codec.serialize(value) == Success(bb) && codec.deserialize(bb) == Success(value) 205 | } 206 | } 207 | } 208 | } 209 | 210 | } 211 | -------------------------------------------------------------------------------- /core/src/test/scala/config/ClusterDecoderSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package config 19 | 20 | import freestyle.cassandra.config.ClusterConfig.{ 21 | PoolingOptionsConfig, 22 | QueryOptionsConfig, 23 | SocketOptionsConfig 24 | } 25 | import org.scalacheck.Prop._ 26 | 27 | import scala.collection.JavaConverters._ 28 | 29 | class ClusterDecoderSpec extends TestDecoderUtils { 30 | 31 | import ConfigArbitraries._ 32 | import classy.config._ 33 | import com.typesafe.config.Config 34 | import com.typesafe.config.ConfigFactory 35 | import org.scalacheck.ScalacheckShapeless._ 36 | 37 | "PoolingOptions Decoder" should { 38 | 39 | "parse and set the right values in the PoolingOptions builder" in { 40 | check { 41 | forAll { config: PoolingOptionsConfig => 42 | val (builder, poc) = preparePoolingOptionsDecoder(config) 43 | 44 | val decoder = readConfig[Config]("poolingOptions") andThen builder.build 45 | val configString = s"poolingOptions = ${poc.print}" 46 | val rawConfig = ConfigFactory.parseString(configString) 47 | decoder(rawConfig).isRight 48 | } 49 | } 50 | } 51 | } 52 | 53 | "QueryOptions Decoder" should { 54 | 55 | "parse and set the right values in the QueryOptions builder" in { 56 | check { 57 | forAll { config: QueryOptionsConfig => 58 | val (builder, qob) = prepareQueryOptionsDecoder(config) 59 | 60 | val decoder = readConfig[Config]("queryOptions") andThen builder.build 61 | val configString = s"queryOptions = ${qob.print}" 62 | val rawConfig = ConfigFactory.parseString(configString) 63 | decoder(rawConfig).isRight 64 | } 65 | } 66 | } 67 | 
} 68 | 69 | "SocketOptions Decoder" should { 70 | 71 | "parse and set the right values in the SocketOptions builder" in { 72 | check { 73 | forAll { config: SocketOptionsConfig => 74 | val (builder, sob) = prepareSocketOptionsDecoder(config) 75 | 76 | val decoder = readConfig[Config]("socketOptions") andThen builder.build 77 | val configString = s"socketOptions = ${sob.print}" 78 | val rawConfig = ConfigFactory.parseString(configString) 79 | decoder(rawConfig).isRight 80 | } 81 | } 82 | } 83 | } 84 | 85 | "ClusterBuilder Decoder" should { 86 | 87 | "parse a valid configuration and set the right values in the Cluster.Builder" in { 88 | 89 | val decoder = readConfig[Config]("cluster") andThen decoders.clusterBuilderDecoder 90 | val configString = s"cluster = ${validClusterConfiguration.print}" 91 | val rawConfig = ConfigFactory.parseString(configString) 92 | val result = decoder(rawConfig) 93 | result.isRight shouldBe true 94 | val builder = result.right.get 95 | Option(builder.getClusterName) shouldBe validClusterConfiguration.name 96 | builder.getContactPoints.asScala.headOption.map(_.getHostString) shouldBe Some("127.0.0.1") 97 | } 98 | 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /core/src/test/scala/config/ConfigArbitraries.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package config 19 | 20 | import java.nio.ByteBuffer 21 | 22 | import com.datastax.driver.core.ProtocolOptions.Compression 23 | import config.model._ 24 | import com.datastax.driver.core._ 25 | import com.datastax.driver.core.policies.{ 26 | DefaultRetryPolicy, 27 | DowngradingConsistencyRetryPolicy, 28 | FallthroughRetryPolicy, 29 | RetryPolicy 30 | } 31 | import org.scalacheck.{Arbitrary, Gen} 32 | 33 | trait ConfigArbitraries { 34 | 35 | implicit val consistencyLevelArb: Arbitrary[ConsistencyLevel] = Arbitrary { 36 | Gen.oneOf( 37 | ConsistencyLevel.ALL, 38 | ConsistencyLevel.ANY, 39 | ConsistencyLevel.EACH_QUORUM, 40 | ConsistencyLevel.LOCAL_ONE, 41 | ConsistencyLevel.LOCAL_QUORUM, 42 | ConsistencyLevel.LOCAL_SERIAL, 43 | ConsistencyLevel.ONE, 44 | ConsistencyLevel.QUORUM, 45 | ConsistencyLevel.SERIAL, 46 | ConsistencyLevel.THREE, 47 | ConsistencyLevel.TWO 48 | ) 49 | } 50 | 51 | implicit val protocolVersionArbitrary: Arbitrary[ProtocolVersion] = Arbitrary { 52 | Gen.oneOf( 53 | ProtocolVersion.NEWEST_BETA, 54 | ProtocolVersion.NEWEST_SUPPORTED, 55 | ProtocolVersion.V1, 56 | ProtocolVersion.V2, 57 | ProtocolVersion.V3, 58 | ProtocolVersion.V4, 59 | ProtocolVersion.V5 60 | ) 61 | } 62 | 63 | implicit val compressionArbitrary: Arbitrary[Compression] = Arbitrary { 64 | Gen.oneOf(Compression.NONE, Compression.LZ4, Compression.SNAPPY) 65 | } 66 | 67 | implicit val hostDistanceArbitrary: Arbitrary[HostDistance] = Arbitrary { 68 | Gen.oneOf(HostDistance.IGNORED, HostDistance.LOCAL, HostDistance.REMOTE) 69 | } 70 | 71 | implicit val byteBufferArbitrary: Arbitrary[ByteBuffer] = Arbitrary { 72 | Gen.identifier map (s => ByteBuffer.wrap(s.getBytes)) 73 | } 74 | 75 | implicit val rawPagingStateArbitrary: Arbitrary[RawPagingState] = Arbitrary { 76 | Gen.identifier map (s => RawPagingState(s.getBytes)) 77 | } 78 | 
79 | implicit val codecPagingStateArbitrary: Arbitrary[CodecPagingState] = Arbitrary { 80 | 81 | // Valid PagingState String just to skip the validations 82 | val validPagingState: String = 83 | "0018001010ed3c639da1694885beaa7812eb9202db00f07ffffffd0090a0593939dbd419cd9f9aa16271a49e0004" 84 | 85 | Gen.option(CodecRegistry.DEFAULT_INSTANCE) map { codecRegistry => 86 | CodecPagingState(PagingState.fromString(validPagingState), codecRegistry) 87 | } 88 | } 89 | 90 | implicit val retryPolicyArbitrary: Arbitrary[RetryPolicy] = Arbitrary { 91 | Gen.oneOf( 92 | DefaultRetryPolicy.INSTANCE, 93 | DowngradingConsistencyRetryPolicy.INSTANCE, 94 | FallthroughRetryPolicy.INSTANCE 95 | ) 96 | } 97 | 98 | case class PathValue(path: String, value: Option[String]) { 99 | def print: String = value.map(s => s"""$path = "$s" """).getOrElse("") 100 | } 101 | 102 | case class OptionalValues2(v1: PathValue, v2: PathValue, config: String) 103 | 104 | case class OptionalValues3(v1: PathValue, v2: PathValue, v3: PathValue, config: String) 105 | 106 | def pathValueGen(pos: Int): Gen[PathValue] = 107 | Gen.option(Gen.identifier).map(v => PathValue(s"path$pos", v)) 108 | 109 | implicit val optionalValues2Arbitrary: Arbitrary[OptionalValues2] = Arbitrary { 110 | for { 111 | v1 <- pathValueGen(1) 112 | v2 <- pathValueGen(2) 113 | } yield 114 | OptionalValues2( 115 | v1, 116 | v2, 117 | s""" 118 | |config = { 119 | | ${v1.print} 120 | | ${v2.print} 121 | |}""".stripMargin 122 | ) 123 | } 124 | 125 | implicit val optionalValues3Arbitrary: Arbitrary[OptionalValues3] = Arbitrary { 126 | for { 127 | v1 <- pathValueGen(1) 128 | v2 <- pathValueGen(2) 129 | v3 <- pathValueGen(3) 130 | } yield 131 | OptionalValues3( 132 | v1, 133 | v2, 134 | v3, 135 | s""" 136 | |config = { 137 | | ${v1.print} 138 | | ${v2.print} 139 | | ${v3.print} 140 | |}""".stripMargin 141 | ) 142 | } 143 | 144 | } 145 | 146 | object ConfigArbitraries extends ConfigArbitraries 147 | 
--------------------------------------------------------------------------------
/core/src/test/scala/config/ConfigStatementOpsSpec.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package config

import com.datastax.driver.core.{CodecRegistry, PagingState, Statement, StatementTest}
import org.scalacheck.Prop._
import org.scalamock.scalatest.MockFactory
import org.scalatest.prop.Checkers
import org.scalatest.{Matchers, WordSpec}

/**
 * Verifies that `ConfigStatement.applyConf` forwards every *present* optional field to the
 * corresponding setter of the Datastax `Statement`, and nothing else: each generated
 * `ConfigStatement` drives an exact set of scalamock expectations, so an extra or missing
 * setter call fails the property.
 */
class ConfigStatementOpsSpec extends WordSpec with Matchers with Checkers with MockFactory {

  import ConfigArbitraries._
  import org.scalacheck.ScalacheckShapeless._
  import config.model._
  import config.model.implicits._
  import scala.collection.JavaConverters._

  "ConfigStatementOps" should {

    "apply all specified values to the statement" in {
      check {
        forAll { cs: ConfigStatement =>
          // Fresh mock per sample; every expectation below returns `st` itself,
          // mirroring the driver's fluent setter style.
          val st: Statement = mock[StatementTest]
          // tracingEnabled maps true -> enableTracing, false -> disableTracing.
          cs.tracingEnabled foreach {
            case true  => (st.enableTracing _).expects().returns(st)
            case false => (st.disableTracing _).expects().returns(st)
          }
          cs.consistencyLevel foreach (v => (st.setConsistencyLevel _).expects(v).returns(st))
          cs.serialConsistencyLevel foreach (v =>
            (st.setSerialConsistencyLevel _).expects(v).returns(st))
          cs.defaultTimestamp foreach (v => (st.setDefaultTimestamp _).expects(v).returns(st))
          cs.fetchSize foreach (v => (st.setFetchSize _).expects(v).returns(st))
          cs.idempotent foreach (v => (st.setIdempotent _).expects(v).returns(st))
          // Scala Map is converted to java.util.Map before reaching the driver API.
          cs.outgoingPayload foreach (v => (st.setOutgoingPayload _).expects(v.asJava).returns(st))
          // Each paging-state representation targets a different setter overload.
          cs.pagingState foreach {
            case CodecPagingState(ps, Some(cr)) =>
              (st.setPagingState(_: PagingState, _: CodecRegistry)).expects(ps, cr).returns(st)
            case CodecPagingState(ps, None) =>
              (st.setPagingState(_: PagingState)).expects(ps).returns(st)
            case RawPagingState(array) =>
              (st.setPagingStateUnsafe _).expects(array).returns(st)
          }
          cs.readTimeoutMillis foreach (v => (st.setReadTimeoutMillis _).expects(v).returns(st))
          cs.retryPolicy foreach (v => (st.setRetryPolicy _).expects(v).returns(st))
          // applyConf must thread the same statement instance through all setters.
          cs.applyConf(st) == st
        }
      }
    }
  }

}
--------------------------------------------------------------------------------
/core/src/test/scala/config/InstanceClasses.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package config

import java.net.InetSocketAddress
import java.util
import java.util.concurrent.Executor

import com.datastax.driver.core._
import com.datastax.driver.core.exceptions.DriverException
import com.datastax.driver.core.policies._
import io.netty.channel.socket.SocketChannel
import io.netty.handler.ssl.SslHandler

import TestUtils._

// Minimal no-op implementations of the Datastax driver extension points.
// Every method either does nothing or returns Null[T] (a typed null helper from
// TestUtils); these classes only need to be instantiable — presumably by name from
// configuration in the decoder specs (TODO confirm against the decoder tests).

// Executor that silently discards every submitted task.
class MyJavaExecutor extends Executor {
  override def execute(command: Runnable): Unit = {}
}

// AddressTranslator stub: lifecycle hooks are no-ops, translate returns a typed null.
class MyAddressTranslator extends AddressTranslator {
  override def init(cluster: Cluster): Unit = {}
  override def translate(address: InetSocketAddress): InetSocketAddress = Null[InetSocketAddress]
  override def close(): Unit = {}
}

// AuthProvider stub that never produces a real Authenticator.
class MyAuthProvider extends AuthProvider {
  override def newAuthenticator(host: InetSocketAddress, authenticator: String): Authenticator =
    Null[Authenticator]
}

// LoadBalancingPolicy stub: all host callbacks are no-ops, queries return typed nulls.
class MyLoadBalancingPolicy extends LoadBalancingPolicy {
  override def newQueryPlan(loggedKeyspace: String, statement: Statement): util.Iterator[Host] =
    Null[util.Iterator[Host]]
  override def init(cluster: Cluster, hosts: util.Collection[Host]): Unit = {}
  override def distance(host: Host): HostDistance = Null[HostDistance]
  override def onAdd(host: Host): Unit = {}
  override def onUp(host: Host): Unit = {}
  override def onDown(host: Host): Unit = {}
  override def onRemove(host: Host): Unit = {}
  override def close(): Unit = {}
}

// ReconnectionPolicy stub: never yields a usable reconnection schedule.
class MyReconnectionPolicy extends ReconnectionPolicy {
  override def init(cluster: Cluster): Unit = {}
  override def newSchedule(): ReconnectionPolicy.ReconnectionSchedule =
    Null[ReconnectionPolicy.ReconnectionSchedule]
  override def close(): Unit = {}
}

// RetryPolicy stub: every decision hook returns a typed null RetryDecision.
class MyRetryPolicy extends RetryPolicy {
  override def init(cluster: Cluster): Unit = {}
  override def onReadTimeout(
      statement: Statement,
      cl: ConsistencyLevel,
      requiredResponses: Int,
      receivedResponses: Int,
      dataRetrieved: Boolean,
      nbRetry: Int): RetryPolicy.RetryDecision = Null[RetryPolicy.RetryDecision]
  override def onWriteTimeout(
      statement: Statement,
      cl: ConsistencyLevel,
      writeType: WriteType,
      requiredAcks: Int,
      receivedAcks: Int,
      nbRetry: Int): RetryPolicy.RetryDecision = Null[RetryPolicy.RetryDecision]
  override def onUnavailable(
      statement: Statement,
      cl: ConsistencyLevel,
      requiredReplica: Int,
      aliveReplica: Int,
      nbRetry: Int): RetryPolicy.RetryDecision = Null[RetryPolicy.RetryDecision]
  override def onRequestError(
      statement: Statement,
      cl: ConsistencyLevel,
      e: DriverException,
      nbRetry: Int): RetryPolicy.RetryDecision = Null[RetryPolicy.RetryDecision]
  override def close(): Unit = {}
}

// SpeculativeExecutionPolicy stub: produces a typed null execution plan.
class MySpeculativeExecutionPolicy extends SpeculativeExecutionPolicy {
  override def init(cluster: Cluster): Unit = {}
  override def newPlan(
      loggedKeyspace: String,
      statement: Statement): SpeculativeExecutionPolicy.SpeculativeExecutionPlan =
    Null[SpeculativeExecutionPolicy.SpeculativeExecutionPlan]
  override def close(): Unit = {}
}

// SSLOptions stub: never builds a real netty SslHandler.
class MySSLOptions extends SSLOptions {
  override def newSSLHandler(channel: SocketChannel): SslHandler = Null[SslHandler]
}

// Inherits all default ThreadingOptions behavior unchanged.
class MyThreadingOptions extends ThreadingOptions

// TimestampGenerator stub: always returns timestamp 0.
class MyTimestampGenerator extends TimestampGenerator {
  override def next(): Long = 0L
}
--------------------------------------------------------------------------------
/core/src/test/scala/config/reads/DatastaxReadsSpec.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package config.reads 19 | 20 | import java.net.{InetAddress, InetSocketAddress} 21 | 22 | import classy.DecodeError.WrongType 23 | import com.typesafe.config.ConfigFactory 24 | import org.scalacheck.Prop._ 25 | import org.scalatest.prop.Checkers 26 | import org.scalatest.{Matchers, WordSpec} 27 | 28 | class DatastaxReadsSpec extends WordSpec with Matchers with Checkers { 29 | 30 | import classy.config._ 31 | import com.typesafe.config.Config 32 | import freestyle.cassandra.config.ConfigArbitraries._ 33 | 34 | val validDatastaxReads: DatastaxReads[Config] = new DatastaxReads[Config] {} 35 | 36 | val exception: RuntimeException = new RuntimeException("Test message") 37 | 38 | val invalidDatastaxReads: DatastaxReads[Config] = new DatastaxReads[Config] { 39 | override def inetAddress(address: String): InetAddress = throw exception 40 | 41 | override def inetSocketAddress(host: String, port: Int): InetSocketAddress = throw exception 42 | } 43 | 44 | "DatastaxReads inetAddressParser" should { 45 | 46 | val wrongType = WrongType("X.X.X.X", Option("")) 47 | 48 | "return a Right if the inet builder return a InetAddress" in { 49 | validDatastaxReads.inetAddressParser("10.10.10.10").isRight shouldBe true 50 | } 51 | 52 | "return a Left if the inet builder throws an exception" in { 53 | invalidDatastaxReads.inetAddressParser("") shouldBe 
Left(wrongType) 54 | } 55 | 56 | } 57 | 58 | "DatastaxReads inetSocketAddressParser" should { 59 | 60 | val wrongType = WrongType(":", Option("10.10.10.10")) 61 | 62 | "return a Right if the inet builder return a InetSocketAddress" in { 63 | validDatastaxReads.inetSocketAddressParser("10.10.10.10:8080").isRight shouldBe true 64 | } 65 | 66 | "return a Left if the string is not a valid InetSocketAddress" in { 67 | validDatastaxReads.inetSocketAddressParser("10.10.10.10") shouldBe Left(wrongType) 68 | } 69 | 70 | "return a Left if the inet builder throws an exception" in { 71 | invalidDatastaxReads.inetSocketAddressParser("10.10.10.10") shouldBe Left(wrongType) 72 | } 73 | 74 | } 75 | 76 | "DatastaxReads readOption2" should { 77 | 78 | import validDatastaxReads._ 79 | 80 | "work as expected" in { 81 | 82 | check { 83 | forAll { ov: OptionalValues2 => 84 | val decoder = readConfig[Config]("config") andThen readOption2[String, String, String]( 85 | ReadAndPath[String](ov.v1.path), 86 | ReadAndPath[String](ov.v2.path)) { 87 | case (s1, s2) => s1 + "-" + s2 88 | } 89 | 90 | val rawConfig = ConfigFactory.parseString(ov.config) 91 | val result = decoder(rawConfig) 92 | 93 | (ov.v1.value, ov.v2.value) match { 94 | case (Some(v1), Some(v2)) => result == Right(Some(s"$v1-$v2")) 95 | case (None, None) => result == Right(None) 96 | case _ => result.isLeft 97 | } 98 | } 99 | } 100 | 101 | } 102 | 103 | } 104 | 105 | "DatastaxReads readOption3" should { 106 | 107 | import validDatastaxReads._ 108 | 109 | "work as expected" in { 110 | 111 | check { 112 | forAll { ov: OptionalValues3 => 113 | val decoder = readConfig[Config]("config") andThen readOption3[ 114 | String, 115 | String, 116 | String, 117 | String]( 118 | ReadAndPath[String](ov.v1.path), 119 | ReadAndPath[String](ov.v2.path), 120 | ReadAndPath[String](ov.v3.path)) { 121 | case (s1, s2, s3) => s1 + "-" + s2 + "-" + s3 122 | } 123 | 124 | val rawConfig = ConfigFactory.parseString(ov.config) 125 | val result = 
decoder(rawConfig) 126 | 127 | (ov.v1.value, ov.v2.value, ov.v3.value) match { 128 | case (Some(v1), Some(v2), Some(v3)) => result == Right(Some(s"$v1-$v2-$v3")) 129 | case (None, None, None) => result == Right(None) 130 | case _ => result.isLeft 131 | } 132 | } 133 | } 134 | 135 | } 136 | 137 | } 138 | 139 | } 140 | -------------------------------------------------------------------------------- /core/src/test/scala/handlers/ClusterAPIHandlerSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
 */

package freestyle.cassandra
package handlers

import com.datastax.driver.core._
import com.google.common.util.concurrent.ListenableFuture
import org.scalamock.scalatest.MockFactory
import org.scalatest.{Matchers, OneInstancePerTest, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

/**
 * Checks that each ClusterAPIHandler operation delegates to the matching method on the
 * underlying Datastax `Cluster` (mocked with scalamock) and that exceptions thrown by the
 * synchronous getters propagate to the caller.
 */
class ClusterAPIHandlerSpec
    extends WordSpec
    with Matchers
    with OneInstancePerTest
    with MockFactory {

  val clusterMock: Cluster = mock[Cluster]
  val sessionMock: Session = mock[Session]
  val keyspace: String = "keyspace"
  val configuration: Configuration = new Configuration.Builder().build()
  val metadataTest: Metadata = MetadataTest()

  import cats.instances.future._
  import freestyle.async.implicits._
  import freestyle.cassandra.implicits._
  import TestUtils._
  // Handler under test, interpreted into scala.concurrent.Future.
  val handler: ClusterAPIHandler[Future] = new ClusterAPIHandler[Future]

  "ListenableFutureHandler" should {

    "call to connectAsync when calling connect() method" in {
      val result = successfulFuture(sessionMock)
      // The cast pins the zero-arg connectAsync overload for scalamock.
      (clusterMock.connectAsync _: () => ListenableFuture[Session]).expects().returns(result)
      runK(handler.connect, clusterMock) shouldBe sessionMock
    }

    "call to connectAsync when calling connectKeyspace(String) method" in {
      val result = successfulFuture(sessionMock)
      (clusterMock.connectAsync(_: String)).expects(keyspace).returns(result)
      runK(handler.connectKeyspace(keyspace), clusterMock) shouldBe sessionMock
    }

    "call to closeAsync when calling close() method" in {
      (clusterMock.closeAsync _).expects().returns(CloseFutureTest)
      runK(handler.close, clusterMock) shouldBe ((): Unit)
    }

    "call to getConfiguration when calling configuration method" in {
      (clusterMock.getConfiguration _)
        .expects()
        .returns(configuration)
      runK(handler.configuration, clusterMock) shouldBe configuration
    }

    "throw the exception when calling configuration method" in {
      (clusterMock.getConfiguration _)
        .expects()
        .throws(new RuntimeException(""))
      intercept[RuntimeException](runK(handler.configuration, clusterMock))
    }

    "call to getMetadata when calling metadata method" in {
      (clusterMock.getMetadata _)
        .expects()
        .returns(metadataTest)
      runK(handler.metadata, clusterMock) shouldBe metadataTest
    }

    "throw the exception when calling metadata method" in {
      (clusterMock.getMetadata _)
        .expects()
        .throws(new RuntimeException(""))
      intercept[RuntimeException](runK(handler.metadata, clusterMock))
    }

    "call to getMetrics when calling metrics method" in {
      (clusterMock.getMetrics _)
        .expects()
        .returns(MetricsTest)
      runK(handler.metrics, clusterMock) shouldBe MetricsTest
    }

    "throw the exception when calling metrics method" in {
      (clusterMock.getMetrics _)
        .expects()
        .throws(new RuntimeException(""))
      intercept[RuntimeException](runK(handler.metrics, clusterMock))
    }

  }

}
--------------------------------------------------------------------------------
/core/src/test/scala/handlers/ResultSetAPIHandlerSpec.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package handlers

import java.util.UUID

import com.datastax.driver.core._
import freestyle.cassandra.TestData._
import freestyle.cassandra.TestUtils._
import freestyle.cassandra.query.FieldLister._
import freestyle.cassandra.query.mapper.FieldListMapper._
import freestyle.cassandra.query._
import freestyle.cassandra.query.QueryArbitraries._
import freestyle.cassandra.query.mapper.FromReader
import freestyle.cassandra.query.mapper.GenericFromReader._
import org.scalacheck.Prop._
import org.scalatest.prop.Checkers
import org.scalatest.{OneInstancePerTest, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

/**
 * Property-based checks for ResultSetAPIHandler's read / readOption / readList over
 * generated ResultSets of `User` rows, plus failure propagation for an erroring ResultSet.
 */
class ResultSetAPIHandlerSpec
    extends WordSpec
    with MatchersUtil
    with Checkers
    with OneInstancePerTest {

  import cats.instances.future._

  // Handler under test, interpreted into scala.concurrent.Future.
  val handler: ResultSetAPIHandler[Future] = new ResultSetAPIHandler[Future]

  implicit val printer: Printer = identityPrinter

  // Codecs needed by the derived FromReader[User] below.
  implicit val protocolVersion: ProtocolVersion = ProtocolVersion.V3
  implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.ascii()
  implicit val uuidTypeCodec: TypeCodec[UUID] = TypeCodec.uuid()

  // Derived generically via GenericFromReader.
  val reader: FromReader[User] = implicitly[FromReader[User]]

  "ResultSetAPIHandler.read" should {

    "return the valid response for a valid ResultSet" in {
      check {
        forAll(rowAndDataGen[User]) {
          // An empty ResultSet must fail: read expects at least one row.
          case (resultSet, Nil) =>
            runFFailed(handler.read[User](resultSet, reader)) isLikeTo {
              _.isInstanceOf[IllegalStateException]
            }
          // Otherwise read returns the first row.
          case (resultSet, list) =>
            runF(handler.read[User](resultSet, reader)) isEqualTo list.head
        }
      }
    }

    "return a failed future for an invalid ResultSet" in {
      runFFailed(handler.read[User](ResultSetBuilder.error, reader)) shouldBe ResultSetBuilder.exception
    }
  }

  "ResultSetAPIHandler.readOption" should {

    "return the valid response for a valid ResultSet" in {
      check {
        forAll(rowAndDataGen[User]) {
          // Empty ResultSet maps to None rather than a failure.
          case (resultSet, Nil) =>
            runF(handler.readOption[User](resultSet, reader)).isEmpty
          case (resultSet, list) =>
            runF(handler.readOption[User](resultSet, reader)) == list.headOption
        }
      }
    }

    "return a failed future for an invalid ResultSet" in {
      runFFailed(handler.readOption[User](ResultSetBuilder.error, reader)) shouldBe ResultSetBuilder.exception
    }
  }

  "ResultSetAPIHandler.readList" should {

    "return the valid response for a valid ResultSet" in {
      check {
        forAll(rowAndDataGen[User]) {
          // readList returns every generated row, including the empty case.
          case (resultSet, list) =>
            runF(handler.readList[User](resultSet, reader)) == list
        }
      }
    }

    "return a failed future for an invalid ResultSet" in {
      runFFailed(handler.readList[User](ResultSetBuilder.error, reader)) shouldBe ResultSetBuilder.exception
    }
  }

}
--------------------------------------------------------------------------------
/core/src/test/scala/handlers/SessionAPIHandlerSpec.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freestyle.cassandra
package handlers

import com.datastax.driver.core._
import freestyle.cassandra.TestUtils.MatchersUtil
import freestyle.cassandra.config.ConfigArbitraries._
import freestyle.cassandra.query.QueryArbitraries._
import org.scalacheck.Gen
import org.scalamock.scalatest.MockFactory
import org.scalacheck.Prop._
import org.scalatest.prop.Checkers
import org.scalatest.{OneInstancePerTest, WordSpec}

import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

/**
 * Verifies that each SessionAPIHandler operation delegates to the matching async method of
 * the Datastax `Session` (mocked with scalamock), using generated queries/values where the
 * argument shape matters.
 */
class SessionAPIHandlerSpec
    extends WordSpec
    with MatchersUtil
    with Checkers
    with OneInstancePerTest
    with MockFactory {

  val sessionMock: Session = mock[Session]
  val regStMock: RegularStatement = stub[RegularStatement]
  val prepStMock: PreparedStatement = stub[PreparedStatement]
  val rsMock: ResultSet = stub[ResultSet]

  import cats.instances.future._
  import freestyle.async.implicits._
  import freestyle.cassandra.implicits._
  import TestUtils._
  // Handler under test, interpreted into scala.concurrent.Future.
  val handler: SessionAPIHandler[Future] = new SessionAPIHandler[Future]

  "SessionAPIHandler" should {

    "call to initAsync when calling init() method" in {
      val result = successfulFuture(sessionMock)
      (sessionMock.initAsync _).expects().returns(result)
      runK(handler.init, sessionMock) shouldBe sessionMock
    }

    "call to closeAsync when calling close() method" in {
      (sessionMock.closeAsync _).expects().returns(CloseFutureTest)
      runK(handler.close, sessionMock) shouldBe ((): Unit)
    }

    "call to prepareAsync(String) when calling prepare(String) method" in {
      check {
        forAll(selectQueryGen) { query =>
          // Fresh mock per generated query so expectation counts do not accumulate.
          val session = mock[Session]
          (session
            .prepareAsync(_: String))
            .expects(query)
            .returns(successfulFuture(prepStMock))
          runK(handler.prepare(query), session) isEqualTo prepStMock
        }
      }
    }

    "call to prepareAsync(RegularStatement) when calling prepare(RegularStatement) method" in {
      val result = successfulFuture(prepStMock)
      (sessionMock.prepareAsync(_: RegularStatement)).expects(regStMock).returns(result)
      runK(handler.prepareStatement(regStMock), sessionMock) shouldBe prepStMock
    }

    "call to executeAsync(String) when calling execute(String) method" in {
      check {
        forAll(selectQueryGen) { query =>
          val session = mock[Session]
          (session
            .executeAsync(_: String))
            .expects(query)
            .returns(ResultSetFutureTest(rsMock))
          runK(handler.execute(query), session) isEqualTo rsMock
        }
      }
    }

    "call to executeAsync(String, java.util.Map) when calling executeWithMap(String, Map) method" in {
      check {
        forAll(selectQueryGen, dataGen) {
          case (query, values) =>
            val session = mock[Session]

            // The handler converts the Scala Map to a java.util.Map; compare via asScala.
            (session
              .executeAsync(_: String, _: java.util.Map[String, AnyRef]))
              .expects {
                where((s, m) => s == query && m.asScala == values)
              }
              .returns(ResultSetFutureTest(rsMock))
            runK(handler.executeWithMap(query, values), session) isEqualTo rsMock
        }
      }
    }

    "call to executeAsync(Statement) when calling executeStatement(Statement) method" in {
      (sessionMock
        .executeAsync(_: Statement))
        .expects(regStMock)
        .returns(ResultSetFutureTest(rsMock))
      runK(handler.executeStatement(regStMock), sessionMock) shouldBe rsMock
    }

    "call to serializableValue and executeAsync(Statement) when calling executeWithByteBuffer method" in {

      check {
        forAll(
          Gen.option(consistencyLevelArb.arbitrary),
          serializableValueByIntListArb.arbitrary,
          selectQueryGen) {
          case (cl, values, query) =>
            val session = mock[Session]

            // The statement built by the handler must be a SimpleStatement carrying the
            // original query string, the serialized values (in order), and the optional
            // consistency level when one was provided.
            (session
              .executeAsync(_: Statement))
              .expects(where { (st: Statement) =>
                st.isInstanceOf[SimpleStatement] &&
                st.asInstanceOf[SimpleStatement].getQueryString() == query &&
                (st
                  .asInstanceOf[SimpleStatement]
                  .getValues(Null[ProtocolVersion], Null[CodecRegistry])
                  .toList == values.map(_._1)) &&
                cl.forall(_ == st.getConsistencyLevel)
              })
              .returns(ResultSetFutureTest(rsMock))
            runK(handler.executeWithByteBuffer(query, values.map(_._2), cl), session) isEqualTo rsMock
        }
      }
    }

  }

}
--------------------------------------------------------------------------------
/core/src/test/scala/handlers/StatementAPIHandlerSpec.scala:
--------------------------------------------------------------------------------
/*
 * Copyright 2017-2018 47 Degrees, LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
package freestyle.cassandra
package handlers

import java.nio.ByteBuffer

import cats.MonadError
import com.datastax.driver.core._
import freestyle.cassandra.TestUtils._
import freestyle.cassandra.codecs._
import freestyle.cassandra.query.model.{SerializableValue, SerializableValueBy}
import org.scalamock.scalatest.MockFactory
import org.scalatest.{Matchers, OneInstancePerTest, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}

/**
 * Verifies that [[StatementAPIHandler]] forwards every operation to the expected
 * Datastax driver call on the underlying `PreparedStatement`/`BoundStatement`.
 */
class StatementAPIHandlerSpec
    extends WordSpec
    with Matchers
    with OneInstancePerTest
    with MockFactory {

  val prepStMock: PreparedStatement = stub[PreparedStatement]
  (prepStMock.getVariables _).when().returns(ColumnDefinitionsTest)
  (prepStMock.getPreparedId _).when().returns(PreparedIdTest)
  (prepStMock.getConsistencyLevel _).when().returns(ConsistencyLevel.ALL)

  // Sentinel returned by both `setBytesUnsafe` overrides below: asserting on it
  // proves the handler reached the driver's set-bytes call.
  val boundedStMock: BoundStatement = new BoundStatement(prepStMock)

  // Statement handed out by `prepStMock.bind`; both `setBytesUnsafe` overloads
  // answer with the sentinel above instead of mutating state.
  val boundStMock: BoundStatement = new BoundStatement(prepStMock) {
    override def setBytesUnsafe(i: Int, v: ByteBuffer): BoundStatement = boundedStMock
    override def setBytesUnsafe(name: String, v: ByteBuffer): BoundStatement = boundedStMock
  }

  (prepStMock.bind _).when().returns(boundStMock)

  val byteBuffer: ByteBuffer = ByteBuffer.wrap("Hello World!".getBytes)
  // Value that always serializes to `byteBuffer`, regardless of the effect type.
  val serializableValue: SerializableValue = new SerializableValue {
    override def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
      E.pure(byteBuffer)
  }

  import cats.instances.future._
  import freestyle.cassandra.implicits._
  val handler: StatementAPIHandler[Future] = new StatementAPIHandler[Future]

  "StatementAPIHandler" should {

    "call to bind when calling bind(PreparedStatement) method" in {
      runF(handler.bind(prepStMock)) shouldBe boundStMock
      (prepStMock.bind _).verify()
    }

    "call to setBytesUnsafe when calling setByteBufferByIndex(BoundStatement, Int, ByteBuffer) method" in {
      runF(handler.setByteBufferByIndex(boundStMock, 10, byteBuffer)) shouldBe boundedStMock
    }

    "call to setBytesUnsafe when calling setByteBufferByName(BoundStatement, String, ByteBuffer) method" in {
      runF(handler.setByteBufferByName(boundStMock, "name", byteBuffer)) shouldBe boundedStMock
    }

    "call to setBytesUnsafe when calling setValueByIndex[T](BoundStatement, Int, T, ByteBufferCodec[T]) method" in {
      runF(handler.setValueByIndex(boundStMock, 10, 99.9, doubleCodec)) shouldBe boundedStMock
    }

    // Description fixed: the method is keyed by a String name, not an Int index.
    "call to setBytesUnsafe when calling setValueByName[T](BoundStatement, String, T, ByteBufferCodec[T]) method" in {
      runF(handler.setValueByName(boundStMock, "name", 99.9, doubleCodec)) shouldBe boundedStMock
    }

    "call to bind and setBytesUnsafe when calling setByteBufferListByIndex(PreparedStatement, List[SerializableValueByIndex]) method" in {
      runF(
        handler.setByteBufferListByIndex(
          prepStMock,
          List(SerializableValueBy(10, serializableValue)))) shouldBe boundedStMock
      (prepStMock.bind _).verify()
    }

    "call to bind and setBytesUnsafe when calling setByteBufferListByName(PreparedStatement, List[SerializableValueByName]) method" in {
      runF(
        handler.setByteBufferListByName(
          prepStMock,
          List(SerializableValueBy("name", serializableValue)))) shouldBe boundedStMock
      (prepStMock.bind _).verify()
    }

  }

}
package freestyle.cassandra
package query

import freestyle.cassandra.TestUtils.MatchersUtil
import org.scalacheck.Prop._
import org.scalatest.WordSpec
import org.scalatest.prop.Checkers

/**
 * Property checks for [[FieldLister]]: derived field names must match the case
 * class accessors, rendered through the implicit [[Printer]].
 */
class FieldListerSpec extends WordSpec with MatchersUtil with Checkers with QueryArbitraries {

  case class A(a1: Int, a2: String, a3: Boolean)

  case class B(b1: Long, b2: String)

  case class C(c1: String, c2: B)

  "FieldLister" should {

    "list the fields for a case class" in {
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          implicitly[FieldLister[A]].list isEqualTo List("a1", "a2", "a3").map(p.print)
        }
      }
    }

    "list the fields for a case class with another embedded case class" in {
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          // Default (non-expanded) derivation keeps the nested case class as one field.
          implicitly[FieldLister[C]].list isEqualTo List("c1", "c2").map(p.print)
        }
      }
    }

    "list the expanded fields for a case class with another embedded case class" in {
      import FieldListerExpanded._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          // Expanded derivation flattens the nested case class into its own fields.
          implicitly[FieldLister[C]].list isEqualTo List("c1", "b1", "b2").map(p.print)
        }
      }
    }

  }

}
package freestyle.cassandra
package query

import java.nio.ByteBuffer

import cats.MonadError
import com.datastax.driver.core._
import freestyle.cassandra.query.mapper.FieldListMapper
import freestyle.cassandra.query.model.{SerializableValue, SerializableValueBy}
import org.scalacheck.{Arbitrary, Gen}
import freestyle.cassandra.schema.MetadataArbitraries._
import troy.cql.ast.ddl.Table
import troy.cql.ast.{DataType => TroyDataType}

import scala.util.Try

/** ScalaCheck generators shared by the query-related specs. */
trait QueryArbitraries {

  implicit val arbitraryPrinter: Arbitrary[Printer] = Arbitrary {
    Gen.oneOf(List(identityPrinter, lowerCasePrinter, upperCasePrinter))
  }

  // One serialized-buffer generator per supported CQL value type (protocol V3).
  val byteBufferIntGen: Gen[ByteBuffer] =
    Gen.posNum[Int].map(n => TypeCodec.cint().serialize(n, ProtocolVersion.V3))

  val byteBufferBooleanGen: Gen[ByteBuffer] =
    implicitly[Arbitrary[Boolean]].arbitrary.map { n =>
      TypeCodec.cboolean().serialize(n, ProtocolVersion.V3)
    }

  val byteBufferFloatGen: Gen[ByteBuffer] =
    Gen.posNum[Float].map(n => TypeCodec.cfloat().serialize(n, ProtocolVersion.V3))

  val byteBufferUUIDGen: Gen[ByteBuffer] =
    Gen.uuid.map(n => TypeCodec.uuid().serialize(n, ProtocolVersion.V3))

  val byteBufferStringGen: Gen[ByteBuffer] =
    Gen.alphaStr.map(n => TypeCodec.varchar().serialize(n, ProtocolVersion.V3))

  val byteBufferGen: Gen[ByteBuffer] =
    Gen.oneOf(
      byteBufferIntGen,
      byteBufferBooleanGen,
      byteBufferFloatGen,
      byteBufferUUIDGen,
      byteBufferStringGen)

  /** Generates a syntactically valid SELECT statement over a generated table. */
  val selectQueryGen: Gen[String] = {

    // Column projection: empty selection means "*".
    val projection: (List[Table.Column]) => String = {
      case Nil  => "*"
      case cols => cols.map(_.name).mkString(",")
    }

    // A literal generator appropriate for each column data type.
    val literalFor: (TroyDataType) => Gen[String] = {
      case TroyDataType.Ascii    => Gen.alphaStr.map(s => s"'$s'")
      case TroyDataType.BigInt   => Gen.posNum[Long].map(_.toString)
      case TroyDataType.Boolean  => Gen.oneOf(true, false).map(_.toString)
      case TroyDataType.Decimal  => Gen.posNum[Float].map(_.toString)
      case TroyDataType.Double   => Gen.posNum[Float].map(_.toString)
      case TroyDataType.Float    => Gen.posNum[Float].map(_.toString)
      case TroyDataType.Int      => Gen.posNum[Int].map(_.toString)
      case TroyDataType.Smallint => Gen.posNum[Short].map(_.toString)
      case TroyDataType.Text     => Gen.alphaStr.map(s => s"'$s'")
      case TroyDataType.Tinyint  => Gen.posNum[Byte].map(_.toString)
      case TroyDataType.Uuid     => Gen.uuid.map(_.toString)
      case TroyDataType.Varchar  => Gen.alphaStr.map(s => s"'$s'")
      case _                     => Gen.const("null")
    }

    // WHERE clause with one `col = literal` condition per selected column.
    val whereClause: (List[Table.Column]) => Gen[String] = {
      case Nil => Gen.const("")
      case cols =>
        cols
          .foldLeft(Gen.const(List.empty[String])) { (accGen, col) =>
            accGen.flatMap { conditions =>
              literalFor(col.dataType).map(lit => conditions :+ s"${col.name} = $lit")
            }
          }
          .map(_.mkString("WHERE ", " AND ", ""))
    }

    for {
      generatedTable <- generatedTableArb(None).arbitrary
      selectColumns <- Gen
        .someOf(generatedTable.createTable.columns)
        .map(seq => projection(seq.toList))
      filteredColumns <- Gen
        .someOf(generatedTable.createTable.columns)
        .flatMap(seq => whereClause(seq.toList))
    } yield {
      s"""
         | SELECT $selectColumns
         | FROM ${generatedTable.createTable.tableName}
         | $filteredColumns
      """.stripMargin
    }
  }

  val dataGen: Gen[Map[String, String]] =
    Gen.mapOf(Gen.zip(Gen.alphaStr, Gen.alphaStr))

  implicit val serializableValueByIntListArb: Arbitrary[
    List[(ByteBuffer, SerializableValueBy[Int])]] =
    Arbitrary {
      Gen.listOf(byteBufferGen).map { buffers =>
        buffers.zipWithIndex.map {
          case (buffer, index) =>
            // Each value serializes to its source buffer for any effect type.
            val value = new SerializableValue {
              override def serialize[M[_]](implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
                E.pure(buffer)
            }
            (buffer, SerializableValueBy(index, value))
        }
      }
    }

  /** Generates a ResultSet backed by serialized rows, paired with the source values. */
  def rowAndDataGen[A](
      implicit arb: Arbitrary[A],
      fieldListMapper: FieldListMapper[A],
      fieldLister: FieldLister[A],
      printer: Printer): Gen[(ResultSet, List[A])] =
    Gen.listOf(arb.arbitrary).map { elements =>
      import cats.instances.try_._
      import cats.instances.list._
      import cats.syntax.traverse._
      val rows = elements.map { element =>
        val byteBufferList = fieldListMapper.map(element).traverse(_.serialize[Try]).get
        ListBackedRow(fieldLister.list, byteBufferList)
      }
      (ResultSetBuilder(fieldLister.list, rows), elements)
    }
}

object QueryArbitraries extends QueryArbitraries
package freestyle.cassandra
package query

import org.scalatest.WordSpec
import StatementGenerator._
import freestyle.cassandra.TestUtils.MatchersUtil
import org.scalacheck.Prop._
import org.scalatest.prop.Checkers

/**
 * Property checks for the INSERT and SELECT statements derived by
 * [[StatementGenerator]], under every [[Printer]].
 */
class StatementGeneratorSpec
    extends WordSpec
    with MatchersUtil
    with Checkers
    with QueryArbitraries {

  case class A(a1: Int, a2: String, a3: Boolean)

  case class B(b1: Long, b2: String)

  case class C(c1: String, c2: B)

  "insert" should {

    "generate a right statement for a regular case class" in {
      import FieldLister._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[A].insert("A") isEqualTo
            s"INSERT INTO A (${p.print("a1")},${p.print("a2")},${p.print("a3")}) VALUES (?,?,?)"
        }
      }
    }

    "generate a right statement for a case class with another embedded case class" in {
      import FieldLister._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[C].insert("C") isEqualTo
            s"INSERT INTO C (${p.print("c1")},${p.print("c2")}) VALUES (?,?)"
        }
      }
    }

    "generate a right expanded statement for a case class with another embedded case class" in {
      import FieldListerExpanded._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[C].insert("C") isEqualTo
            s"INSERT INTO C (${p.print("c1")},${p.print("b1")},${p.print("b2")}) VALUES (?,?,?)"
        }
      }
    }

  }

  "select" should {

    "generate a right statement for a regular case class" in {
      import FieldLister._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[A].select("A") isEqualTo
            s"SELECT ${p.print("a1")},${p.print("a2")},${p.print("a3")} FROM A"
        }
      }
    }

    "generate a right statement for a case class with another embedded case class" in {
      import FieldLister._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[C].select("C") isEqualTo
            s"SELECT ${p.print("c1")},${p.print("c2")} FROM C"
        }
      }
    }

    "generate a right expanded statement for a case class with another embedded case class" in {
      import FieldListerExpanded._
      check {
        forAll { printer: Printer =>
          implicit val p: Printer = printer
          StatementGenerator[C].select("C") isEqualTo
            s"SELECT ${p.print("c1")},${p.print("b1")},${p.print("b2")} FROM C"
        }
      }
    }

  }

}
package freestyle.cassandra
package query.interpolator

import cats.MonadError
import cats.data.Validated.Valid
import cats.data.ValidatedNel
import contextual.Interpolator
import freestyle.cassandra.schema.{SchemaError, Statements}
import freestyle.cassandra.schema.validator.SchemaValidator
import org.scalatest.{Matchers, WordSpec}

import scala.util.{Success, Try}

/** Smoke test: a [[CQLInterpolator]] built from any validator is a contextual `Interpolator`. */
class CQLInterpolatorSpec extends WordSpec with Matchers {

  "CQLInterpolator" should {

    "provide a cql interpolator" in {

      implicit val E: MonadError[Try, Throwable] = cats.instances.try_.catsStdInstancesForTry

      // A validator that unconditionally accepts every statement.
      val alwaysValid: SchemaValidator[Try] = new SchemaValidator[Try] {
        override def validateStatement(st: Statements)(
            implicit E: MonadError[Try, Throwable]): Try[ValidatedNel[SchemaError, Unit]] =
          Success(Valid((): Unit))
      }

      new CQLInterpolator(alwaysValid) shouldBe an[Interpolator]
    }

  }

}
package freestyle.cassandra
package query.interpolator

import cats.MonadError
import com.datastax.driver.core.{ProtocolVersion, TypeCodec}
import org.scalatest.{Matchers, WordSpec}

import scala.util.Try

/** Behaviour of the runtime-validated `cql` string interpolator. */
class RuntimeCQLInterpolatorSpec extends WordSpec with Matchers {

  "RuntimeCQLInterpolator interpolator" should {

    "return a success for a simple query" in {

      import RuntimeCQLInterpolator._

      val (query, params) = cql"SELECT * FROM users"
      query shouldBe "SELECT * FROM users"
      params shouldBe Nil
    }

    "return a success for a query with params" in {

      import RuntimeCQLInterpolator._
      implicit val E: MonadError[Try, Throwable] = cats.instances.try_.catsStdInstancesForTry

      implicit val protocolVersion: ProtocolVersion = ProtocolVersion.V4
      implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.ascii()
      import freestyle.cassandra.codecs._
      val stringCodec: ByteBufferCodec[String] = implicitly[ByteBufferCodec[String]]

      val id: Int = 1
      val name: String = "username"

      val (query, values) =
        cql"SELECT * FROM users WHERE id = $id AND name = $name"

      // Each interpolated argument becomes a positional `?` placeholder, in order.
      query shouldBe "SELECT * FROM users WHERE id = ? AND name = ?"
      values.size shouldBe 2
      values.head.position shouldBe 0
      values.head.serializableValue.serialize shouldBe intCodec.serialize(id)
      values(1).position shouldBe 1
      values(1).serializableValue.serialize shouldBe stringCodec.serialize(name)
    }

    "not compile for a wrong statement" in {

      import RuntimeCQLInterpolator._
      implicit val E: MonadError[Try, Throwable] = cats.instances.try_.catsStdInstancesForTry

      """cql"Wrong statement"""" shouldNot compile
    }

  }

}
package freestyle.cassandra
package query.mapper

import java.nio.ByteBuffer

import cats.MonadError
import com.datastax.driver.core.{ProtocolVersion, TypeCodec}
import freestyle.cassandra.query.{Printer, QueryArbitraries}
import org.scalacheck.Prop._
import org.scalatest.{Matchers, WordSpec}
import org.scalacheck.ScalacheckShapeless._
import org.scalatest.prop.Checkers

import scala.util.matching.Regex

/** Property checks for the field-by-field serialization done by [[ByteBufferMapper]]. */
class ByteBufferMapperSpec extends WordSpec with Matchers with Checkers with QueryArbitraries {

  case class A(a1: Int, a2: String, a3: Boolean)

  case class B(b1: Long, b2: String)

  case class C(c1: String, c2: B)

  implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.varchar()
  implicit val protocol: ProtocolVersion = ProtocolVersion.V3
  import codecs._

  val stringCodec: ByteBufferCodec[String] = implicitly[ByteBufferCodec[String]]

  "ByteBufferMapper.map" should {

    "map the fields for a regular case class" in {

      import FieldListMapper._
      import cats.instances.try_._

      check {
        forAll { (a: A, printer: Printer) =>
          implicit val p: Printer = printer
          val fields: List[FieldMapper] = ByteBufferMapper[A].map(a)
          fields.size == 3 &&
          fields.head.name == p.print("a1") &&
          fields.head.serialize == intCodec.serialize(a.a1) &&
          fields(1).name == p.print("a2") &&
          fields(1).serialize == stringCodec.serialize(a.a2) &&
          fields(2).name == p.print("a3") &&
          fields(2).serialize == booleanCodec.serialize(a.a3)
        }
      }

    }

    "map the fields for a case class with another embedded case class and his decoder" in {

      import FieldListMapper._

      // Custom codec for B that round-trips through the string form "<b1>;<b2>".
      implicit val bCodec: ByteBufferCodec[B] = new ByteBufferCodec[B] {

        val Regex: Regex = "(\\d+);(.+)".r

        override def deserialize[M[_]](bytes: ByteBuffer)(
            implicit E: MonadError[M, Throwable]): M[B] =
          E.flatMap(stringCodec.deserialize(bytes)) {
            case Regex(v1, v2) => E.pure(B(v1.toLong, v2))
            case _             => E.raiseError[B](new RuntimeException("Bad serialized value"))
          }

        override def serialize[M[_]](value: B)(
            implicit E: MonadError[M, Throwable]): M[ByteBuffer] =
          stringCodec.serialize(value.b1 + ";" + value.b2)
      }

      import cats.instances.try_._

      check {
        forAll { (c: C, printer: Printer) =>
          implicit val p: Printer = printer
          val fields: List[FieldMapper] = ByteBufferMapper[C].map(c)
          fields.size == 2 &&
          fields.head.name == p.print("c1") &&
          fields.head.serialize == stringCodec.serialize(c.c1) &&
          fields(1).name == p.print("c2") &&
          fields(1).serialize == bCodec.serialize(c.c2)
        }
      }

    }

    "map the fields for a case class with another embedded case class" in {

      import FieldMapperExpanded._
      import cats.instances.try_._

      check {
        forAll { (c: C, printer: Printer) =>
          implicit val p: Printer = printer
          // Expanded derivation flattens B into its own fields (b1, b2).
          val fields: List[FieldMapper] = ByteBufferMapper[C].map(c)
          fields.size == 3 &&
          fields.head.name == p.print("c1") &&
          fields.head.serialize == stringCodec.serialize(c.c1) &&
          fields(1).name == p.print("b1") &&
          fields(1).serialize == longCodec.serialize(c.c2.b1) &&
          fields(2).name == p.print("b2") &&
          fields(2).serialize == stringCodec.serialize(c.c2.b2)
        }
      }

    }

  }

}
package freestyle.cassandra
package query.mapper

import java.nio.ByteBuffer

import cats.MonadError
import com.datastax.driver.core.{ProtocolVersion, TypeCodec}
import freestyle.cassandra.query._
import org.scalacheck.Prop._
import org.scalatest.prop.Checkers
import org.scalatest.{Matchers, WordSpec}
import org.scalacheck.ScalacheckShapeless._

import scala.util.{Failure, Success, Try}

/** Checks the derivation of [[FromReader]] instances reading from a [[ByteBufferReader]]. */
class ByteBufferToFieldSpec extends WordSpec with Matchers with Checkers with QueryArbitraries {

  case class User(name: String, age: Int)

  implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.varchar()
  implicit val protocolVersion: ProtocolVersion = ProtocolVersion.V3

  import GenericFromReader._

  "fromReader" should {

    import cats.instances.try_._

    "return the right value when the reader return a success response" in {

      check {
        forAll { (user: User, printer: Printer) =>
          implicit val p: Printer = printer
          // Reader that answers both fields successfully.
          val reader = new ByteBufferReader() {
            override def read[M[_]](name: String)(
                implicit ME: MonadError[M, Throwable]): M[ByteBuffer] = {

              val printedName = p.print("name")
              val printedAge  = p.print("age")
              name match {
                case `printedName` => ME.pure(stringTypeCodec.serialize(user.name, protocolVersion))
                case `printedAge`  => ME.pure(TypeCodec.cint().serialize(user.age, protocolVersion))
              }
            }
          }

          implicitly[FromReader[User]].apply[Try](reader) == Success(user)
        }
      }

    }

    "return the failure when the reader fails returning the ByteBuffer for the 'name' field" in {

      check {
        forAll { (user: User, printer: Printer) =>
          implicit val p: Printer = printer
          val boom = new RuntimeException("Test Exception")
          // Reader that fails on 'name' and succeeds on 'age'.
          val reader = new ByteBufferReader() {
            override def read[M[_]](name: String)(
                implicit ME: MonadError[M, Throwable]): M[ByteBuffer] = {

              val printedName = p.print("name")
              val printedAge  = p.print("age")
              name match {
                case `printedName` => ME.raiseError(boom)
                case `printedAge`  => ME.pure(TypeCodec.cint().serialize(user.age, protocolVersion))
              }
            }
          }

          implicitly[FromReader[User]].apply[Try](reader) == Failure(boom)
        }
      }

    }

    "return the failure when the reader fails returning the ByteBuffer for the 'age' field" in {

      check {
        forAll { (user: User, printer: Printer) =>
          implicit val p: Printer = printer
          val boom = new RuntimeException("Test Exception")
          // Reader that succeeds on 'name' and fails on 'age'.
          val reader = new ByteBufferReader() {
            override def read[M[_]](name: String)(
                implicit ME: MonadError[M, Throwable]): M[ByteBuffer] = {

              val printedName = p.print("name")
              val printedAge  = p.print("age")
              name match {
                case `printedName` => ME.pure(stringTypeCodec.serialize(user.name, protocolVersion))
                case `printedAge`  => ME.raiseError(boom)
              }
            }
          }

          implicitly[FromReader[User]].apply[Try](reader) == Failure(boom)
        }
      }

    }

  }

}
package freestyle.cassandra
package schema

import org.scalatest.{Matchers, WordSpec}

/** Checks that SchemaError implementations chain their optional cause into the JVM exception. */
class SchemaErrorSpec extends WordSpec with Matchers {

  "SchemaError" should {

    "call to super.init when passing an exception" in {
      val cause: RuntimeException = new RuntimeException("Test Exception")
      val error: SchemaDefinitionProviderError =
        SchemaDefinitionProviderError("My message", Some(cause))

      error.getCause shouldBe cause
    }

  }

}
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.provider 19 | 20 | import java.io.{ByteArrayInputStream, InputStream} 21 | 22 | import com.datastax.driver.core._ 23 | import freestyle.cassandra.TestUtils._ 24 | import freestyle.cassandra.config.TestDecoderUtils 25 | import freestyle.cassandra.schema.SchemaDefinition 26 | import org.scalacheck.Prop._ 27 | import troy.cql.ast.TableName 28 | 29 | import scala.collection.JavaConverters._ 30 | import scala.concurrent.Future 31 | 32 | class MetadataSchemaProviderSpec extends TestDecoderUtils { 33 | 34 | import cats.instances.future._ 35 | import freestyle.cassandra.schema.MetadataArbitraries._ 36 | 37 | import scala.concurrent.ExecutionContext.Implicits.global 38 | 39 | "schemaDefinition" should { 40 | 41 | "return the right schema definition for valid values" in { 42 | check { 43 | forAll(schemaGen) { 44 | case (keyspace, tables, indexes, userTypes) => 45 | implicit val clusterMock: Cluster = mock[ClusterTest] 46 | val sessionMock: Session = mock[Session] 47 | val metadataMock: Metadata = mock[MetadataTest] 48 | (clusterMock.connect _: () => Session).expects().returns(sessionMock) 49 | (clusterMock.getMetadata _).expects().returns(metadataMock) 50 | (metadataMock.getKeyspaces _).expects().returns(List(keyspace.keyspaceMetadata).asJava) 51 | 52 | val indexedWithTableName = indexes.map { genIndex => 53 | val createIndex = 54 | genIndex.createIndex.copy(tableName = tables.head.createTable.tableName) 55 | genIndex.copy(createIndex = createIndex) 56 | } 57 | 58 | val 
clusterFuture: Future[Cluster] = Future.successful(clusterMock) 59 | 60 | val metadataSchemaProvider = new MetadataSchemaProvider[Future](clusterFuture) { 61 | 62 | override def readTable(metadata: IndexMetadata): TableName = 63 | tables.head.createTable.tableName 64 | 65 | override def extractTables( 66 | keyspaceMetadata: KeyspaceMetadata): List[AbstractTableMetadata] = 67 | tables.toList.map(_.tableMetadata) 68 | 69 | override def extractIndexes( 70 | tableMetadataList: List[AbstractTableMetadata]): List[IndexMetadata] = 71 | indexedWithTableName.map(_.indexMetadata) 72 | 73 | override def extractUserTypes(keyspaceMetadata: KeyspaceMetadata): List[UserType] = 74 | userTypes.map(_.userType) 75 | } 76 | 77 | val expected: SchemaDefinition = Seq(keyspace.createKeyspace) ++ 78 | tables.toList.map(_.createTable) ++ 79 | indexedWithTableName.map(_.createIndex) ++ 80 | userTypes.map(_.createType) 81 | 82 | runF(metadataSchemaProvider.schemaDefinition) isEqualTo expected 83 | } 84 | } 85 | } 86 | 87 | "return a left if there is an error fetching the metadata from cluster" in { 88 | implicit val clusterMock: Cluster = mock[ClusterTest] 89 | val sessionMock: Session = mock[Session] 90 | val exception: Throwable = new RuntimeException("Test exception") 91 | (clusterMock.connect _: () => Session).expects().returns(sessionMock) 92 | (clusterMock.getMetadata _).expects().throws(exception) 93 | 94 | runFFailed(MetadataSchemaProvider.metadataSchemaProvider[Future].schemaDefinition) shouldBe exception 95 | } 96 | 97 | "clusterProvider" should { 98 | 99 | "return an error if the cluster configuration is not valid" in { 100 | val is: InputStream = new ByteArrayInputStream("cluster = {}".getBytes) 101 | 102 | val clusterProvider = MetadataSchemaProvider.clusterProvider[Future](is) 103 | 104 | runFFailed(clusterProvider) shouldBe a[IllegalArgumentException] 105 | } 106 | 107 | "return the valid configuration" in { 108 | val is: InputStream = 109 | new ByteArrayInputStream(s"cluster 
= ${validClusterConfiguration.print}".getBytes) 110 | 111 | val cluster = runF(MetadataSchemaProvider.clusterProvider[Future](is)) 112 | 113 | Option(cluster.getClusterName) shouldBe validClusterConfiguration.name 114 | } 115 | 116 | } 117 | 118 | "create a metadataSchemaProvider from a Reader with the configuration" in { 119 | val is: InputStream = new ByteArrayInputStream("cluster = {}".getBytes) 120 | 121 | type SchemaProviderFuture = SchemaDefinitionProvider[Future] 122 | 123 | MetadataSchemaProvider.metadataSchemaProvider[Future](Future.successful(is)) shouldBe a[ 124 | SchemaProviderFuture] 125 | } 126 | } 127 | 128 | } 129 | -------------------------------------------------------------------------------- /core/src/test/scala/schema/provider/TroySchemaProviderSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.provider 19 | 20 | import cats.instances.either._ 21 | import freestyle.cassandra.TestUtils.{EitherM, MatchersUtil, Null} 22 | import java.io.{ByteArrayInputStream, InputStream} 23 | import org.scalacheck.Prop._ 24 | import org.scalatest.WordSpec 25 | import org.scalatest.prop.Checkers 26 | 27 | class TroySchemaProviderSpec extends WordSpec with MatchersUtil with Checkers { 28 | 29 | import freestyle.cassandra.schema.MetadataArbitraries._ 30 | 31 | "schemaDefinition" should { 32 | 33 | "return the keyspace definition for a valid keyspace cql" in { 34 | check { 35 | forAll { keyspace: GeneratedKeyspace => 36 | val is: InputStream = new ByteArrayInputStream(keyspace.cql.getBytes) 37 | val fromString = TroySchemaProvider[EitherM](keyspace.cql).schemaDefinition 38 | val fromInputStream = TroySchemaProvider[EitherM](Right(is)).schemaDefinition 39 | (fromString isEqualTo Right(Seq(keyspace.createKeyspace))) && 40 | (fromInputStream isEqualTo Right(Seq(keyspace.createKeyspace))) 41 | } 42 | } 43 | } 44 | 45 | "return the keyspace definition for a valid table cql" in { 46 | check { 47 | forAll { table: GeneratedTable => 48 | TroySchemaProvider[EitherM](table.cql).schemaDefinition isEqualTo Right( 49 | Seq(table.createTable)) 50 | } 51 | } 52 | } 53 | 54 | "return the keyspace definition for a valid keyspace and table cql" in { 55 | check { 56 | forAll { keyspaceAndTable: GeneratedKeyspaceAndTable => 57 | TroySchemaProvider[EitherM](keyspaceAndTable.cql).schemaDefinition isEqualTo Right( 58 | Seq( 59 | keyspaceAndTable.generatedKeyspace.createKeyspace, 60 | keyspaceAndTable.generatedTable.createTable)) 61 | } 62 | } 63 | } 64 | 65 | "return a left for an invalid cql" in { 66 | TroySchemaProvider[EitherM]("CREATE KEYSPACE WITH replication").schemaDefinition.isLeft shouldBe true 67 | } 68 | 69 | "return a left for an invalid inputstream" in { 70 | TroySchemaProvider[EitherM](Left(new RuntimeException("Test 
Error"))).schemaDefinition.isLeft shouldBe true 71 | } 72 | 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /core/src/test/scala/schema/provider/metadata/SchemaConversionsSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.provider.metadata 19 | 20 | import cats.instances.either._ 21 | import com.datastax.driver.core._ 22 | import freestyle.cassandra.TestUtils._ 23 | import org.scalacheck.Prop.forAll 24 | import org.scalamock.scalatest.MockFactory 25 | import org.scalatest.WordSpec 26 | import org.scalatest.prop.Checkers 27 | 28 | import scala.collection.JavaConverters._ 29 | 30 | class SchemaConversionsSpec extends WordSpec with MatchersUtil with MockFactory with Checkers { 31 | 32 | import freestyle.cassandra.schema.MetadataArbitraries._ 33 | 34 | object converter extends SchemaConversions 35 | 36 | "toCreateKeyspace" should { 37 | 38 | "return the right keyspace definition for a valid KeyspaceMetadata object" in { 39 | check { 40 | forAll { keyspace: GeneratedKeyspace => 41 | converter.toCreateKeyspace[EitherM](keyspace.keyspaceMetadata) isEqualTo Right( 42 | keyspace.createKeyspace) 43 | } 44 | } 45 | } 46 | 47 | "return Left if the name is null" in { 48 | val 
metadata: KeyspaceMetadata = 49 | KeyspaceMetadataTest(Null[String], Map.empty[String, String].asJava) 50 | converter.toCreateKeyspace(metadata).isLeft shouldBe true 51 | } 52 | 53 | } 54 | 55 | "toCreateTable" should { 56 | 57 | "return the right table definition for a valid TableMetadata object" in { 58 | check { 59 | forAll { table: GeneratedTable => 60 | converter.toCreateTable(table.tableMetadata) isEqualTo Right(table.createTable) 61 | } 62 | } 63 | } 64 | 65 | } 66 | 67 | "toCreateIndex" should { 68 | 69 | "return the right index definition for a valid IndexMetadata object" in { 70 | check { 71 | forAll { index: GeneratedIndex => 72 | converter.toCreateIndex(index.indexMetadata, _ => index.createIndex.tableName) isEqualTo Right( 73 | index.createIndex) 74 | } 75 | } 76 | } 77 | 78 | } 79 | 80 | "toUserType" should { 81 | 82 | "return the right user type definition for a valid user type object" in { 83 | check { 84 | forAll { userType: GeneratedUserType => 85 | converter.toUserType(userType.userType) isEqualTo Right(userType.createType) 86 | } 87 | } 88 | } 89 | 90 | "return Left if the field list is null" in { 91 | val userType: UserType = mock[UserTypeTestDefault] 92 | (userType.getFieldNames _).expects().returns(Null[java.util.Collection[String]]) 93 | converter.toUserType(userType).isLeft shouldBe true 94 | } 95 | 96 | } 97 | 98 | } 99 | -------------------------------------------------------------------------------- /core/src/test/scala/schema/validator/SchemaValidatorSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.validator 19 | 20 | import cats.MonadError 21 | import cats.data.{NonEmptyList, Validated, ValidatedNel} 22 | import cats.instances.either._ 23 | import freestyle.cassandra.TestUtils.EitherM 24 | import freestyle.cassandra.schema.provider.SchemaDefinitionProvider 25 | import freestyle.cassandra.schema._ 26 | import org.scalamock.scalatest.MockFactory 27 | import org.scalatest.{Matchers, WordSpec} 28 | import troy.cql.ast.{SelectStatement, TableName} 29 | import troy.cql.ast.dml.Select 30 | 31 | class SchemaValidatorSpec extends WordSpec with Matchers with MockFactory { 32 | 33 | val mockStatement: Statements = DML( 34 | SelectStatement( 35 | mod = None, 36 | selection = Select.SelectClause(Seq.empty), 37 | from = TableName(None, ""), 38 | where = None, 39 | orderBy = None, 40 | perPartitionLimit = None, 41 | limit = None, 42 | allowFiltering = false 43 | )) 44 | 45 | "apply method" should { 46 | 47 | "return Unit if the schema provider and the provided function works as expected" in { 48 | val sv: SchemaValidator[EitherM] = new SchemaValidator[EitherM] { 49 | override def validateStatement(st: Statements)( 50 | implicit E: MonadError[EitherM, Throwable]): Either[ 51 | Throwable, 52 | ValidatedNel[SchemaError, Unit]] = Right(Validated.valid((): Unit)) 53 | } 54 | 55 | sv.validateStatement(mockStatement) shouldBe Right(Validated.valid((): Unit)) 56 | } 57 | 58 | "return an error if the schema provider return an error" in { 59 | 60 | val exc = 
SchemaDefinitionProviderError("Test error") 61 | val sv: SchemaValidator[EitherM] = new SchemaValidator[EitherM] { 62 | override def validateStatement(st: Statements)( 63 | implicit E: MonadError[EitherM, Throwable]): Either[ 64 | Throwable, 65 | ValidatedNel[SchemaError, Unit]] = Left(exc) 66 | } 67 | 68 | sv.validateStatement(mockStatement) shouldBe Left(exc) 69 | } 70 | 71 | } 72 | 73 | } 74 | -------------------------------------------------------------------------------- /core/src/test/scala/schema/validator/TroySchemaValidatorSpec.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra 18 | package schema.validator 19 | 20 | import cats.MonadError 21 | import cats.instances.either._ 22 | import cats.data.Validated.Valid 23 | import freestyle.cassandra.TestUtils.{EitherM, MatchersUtil} 24 | import freestyle.cassandra.schema.{DDL, DML, SchemaDefinition} 25 | import freestyle.cassandra.schema.provider.SchemaDefinitionProvider 26 | import org.scalacheck.Prop.forAll 27 | import org.scalatest.WordSpec 28 | import org.scalatest.prop.Checkers 29 | 30 | class TroySchemaValidatorSpec extends WordSpec with MatchersUtil with Checkers { 31 | 32 | import freestyle.cassandra.schema.MetadataArbitraries._ 33 | import TroySchemaValidator._ 34 | 35 | "validateStatement" should { 36 | 37 | "work as expected for data manipulation statements" in { 38 | 39 | check { 40 | forAll { st: GeneratedStatement => 41 | implicit val sdp: SchemaDefinitionProvider[EitherM] = 42 | new SchemaDefinitionProvider[EitherM] { 43 | override def schemaDefinition( 44 | implicit E: MonadError[EitherM, Throwable]): EitherM[SchemaDefinition] = 45 | Right(Seq(st.keyspace, st.table)) 46 | } 47 | 48 | (instance[EitherM].validateStatement(DML(st.validStatement._2)) isEqualTo Right( 49 | Valid((): Unit))) && 50 | (instance[EitherM].validateStatement(DML(st.invalidStatement._2)) isLikeTo { either => 51 | either.isRight && either.right.get.isInvalid 52 | }) 53 | } 54 | } 55 | 56 | } 57 | 58 | "work as expected for data definition statements" in { 59 | 60 | check { 61 | forAll { st: GeneratedKeyspaceWithTables => 62 | implicit val sdp: SchemaDefinitionProvider[EitherM] = 63 | new SchemaDefinitionProvider[EitherM] { 64 | override def schemaDefinition( 65 | implicit E: MonadError[EitherM, Throwable]): EitherM[SchemaDefinition] = 66 | Right(Seq(st.keyspace)) 67 | } 68 | 69 | (instance[EitherM].validateStatement(DDL(st.validTables)) isEqualTo Right( 70 | Valid((): Unit))) && 71 | (instance[EitherM].validateStatement(DDL(st.invalidTables)) isLikeTo { either => 
72 | either.isRight && either.right.get.isInvalid 73 | }) 74 | } 75 | } 76 | 77 | } 78 | 79 | } 80 | 81 | } 82 | -------------------------------------------------------------------------------- /macros-tests/src/it/scala/interpolator/MetadataInterpolatorTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra.macros 18 | package interpolator 19 | 20 | import java.nio.ByteBuffer 21 | import java.util.UUID 22 | 23 | import com.datastax.driver.core.{ProtocolVersion, TypeCodec} 24 | import freestyle.cassandra.query.model.SerializableValueBy 25 | import org.scalatest._ 26 | 27 | import scala.util.{Success, Try} 28 | 29 | class MetadataInterpolatorTest extends WordSpec with Matchers { 30 | 31 | "MetadataInterpolator" should { 32 | 33 | "work as expected for a simple valid query" in { 34 | 35 | import MyMetadataInterpolator._ 36 | cql"SELECT * FROM test.users" shouldBe (("SELECT * FROM test.users", Nil)) 37 | } 38 | 39 | "work as expected for a valid query with params" in { 40 | 41 | import MyMetadataInterpolator._ 42 | implicit val protocolVersion: ProtocolVersion = ProtocolVersion.V4 43 | implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.ascii() 44 | implicit val uuidTypeCodec: TypeCodec[UUID] = TypeCodec.uuid() 45 | import freestyle.cassandra.codecs._ 
46 | 47 | val id = UUID.randomUUID() 48 | 49 | val expectedCQL: String = "SELECT id, name FROM test.users WHERE id = ?" 50 | val expectedValue: ByteBuffer = uuidTypeCodec.serialize(id, protocolVersion) 51 | 52 | val (cql: String, values: List[SerializableValueBy[Int]]) = 53 | cql"SELECT id, name FROM test.users WHERE id = $id" 54 | 55 | cql shouldBe expectedCQL 56 | values.size shouldBe 1 57 | values.head.position shouldBe 0 58 | values.head.serializableValue 59 | .serialize[Try](cats.instances.try_.catsStdInstancesForTry) shouldBe Success(expectedValue) 60 | } 61 | 62 | "not compile when passing an invalid schema path" in { 63 | 64 | import MyInvalidMetadataInterpolator._ 65 | """cql"SELECT * FROM unknownTable"""" shouldNot compile 66 | } 67 | 68 | "work as expected for a data definition statement" in { 69 | 70 | import MyMetadataInterpolator._ 71 | cql"CREATE TABLE test.users2 (id uuid, name text, PRIMARY KEY (id))" shouldBe ( 72 | ( 73 | "CREATE TABLE test.users2 (id uuid, name text, PRIMARY KEY (id))", 74 | Nil)) 75 | } 76 | 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /macros-tests/src/main/resources/cluster.conf: -------------------------------------------------------------------------------- 1 | cluster = { 2 | contactPoints = ["127.0.0.1"] 3 | name = "Test Cluster" 4 | port = 9042 5 | } -------------------------------------------------------------------------------- /macros-tests/src/main/resources/schema.sql: -------------------------------------------------------------------------------- 1 | 2 | CREATE KEYSPACE test WITH replication = {'class': 'SimpleStrategy', 'replication_factor' : '3'}; 3 | 4 | CREATE TABLE test.users ( 5 | id uuid, 6 | name text, 7 | PRIMARY KEY (id) 8 | ); -------------------------------------------------------------------------------- /macros-tests/src/main/scala/interpolator/MyMetadataInterpolator.scala: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra.macros 18 | package interpolator 19 | 20 | // $COVERAGE-OFF$Test classes 21 | import freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaMetadataInterpolator 22 | 23 | @SchemaMetadataInterpolator("/cluster.conf") 24 | trait MyMetadataInterpolator 25 | 26 | @SchemaMetadataInterpolator("/invalidPath.conf") 27 | trait MyInvalidMetadataInterpolator 28 | // $COVERAGE-ON$ -------------------------------------------------------------------------------- /macros-tests/src/main/scala/interpolator/MySchemaInterpolator.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package freestyle.cassandra.macros 18 | package interpolator 19 | 20 | // $COVERAGE-OFF$Test classes 21 | import freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaFileInterpolator 22 | 23 | @SchemaFileInterpolator("/schema.sql") 24 | trait MySchemaInterpolator 25 | 26 | @SchemaFileInterpolator("/invalidPath.sql") 27 | trait MyInvalidSchemaInterpolator 28 | // $COVERAGE-ON$ -------------------------------------------------------------------------------- /macros-tests/src/test/scala/interpolator/MacroInterpolatorTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017-2018 47 Degrees, LLC. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package freestyle.cassandra.macros 18 | package interpolator 19 | 20 | import java.nio.ByteBuffer 21 | import java.util.UUID 22 | 23 | import com.datastax.driver.core.{ProtocolVersion, TypeCodec} 24 | import freestyle.cassandra.query.model.SerializableValueBy 25 | import org.scalatest.{Matchers, WordSpec} 26 | 27 | import scala.util.{Success, Try} 28 | 29 | class MacroInterpolatorTest extends WordSpec with Matchers { 30 | 31 | "SchemaFileInterpolator" should { 32 | 33 | "work as expected for a simple valid query" in { 34 | 35 | import MySchemaInterpolator._ 36 | cql"SELECT * FROM test.users" shouldBe (("SELECT * FROM test.users", Nil)) 37 | } 38 | 39 | "work as expected for a valid query with params" in { 40 | 41 | import MySchemaInterpolator._ 42 | implicit val protocolVersion: ProtocolVersion = ProtocolVersion.V4 43 | implicit val stringTypeCodec: TypeCodec[String] = TypeCodec.ascii() 44 | implicit val uuidTypeCodec: TypeCodec[UUID] = TypeCodec.uuid() 45 | import freestyle.cassandra.codecs._ 46 | 47 | val id = UUID.randomUUID() 48 | 49 | val expectedCQL: String = "SELECT id, name FROM test.users WHERE id = ?" 
50 | val expectedValue: ByteBuffer = uuidTypeCodec.serialize(id, protocolVersion) 51 | 52 | val (cql: String, values: List[SerializableValueBy[Int]]) = 53 | cql"SELECT id, name FROM test.users WHERE id = $id" 54 | 55 | cql shouldBe expectedCQL 56 | values.size shouldBe 1 57 | values.head.position shouldBe 0 58 | values.head.serializableValue 59 | .serialize[Try](cats.instances.try_.catsStdInstancesForTry) shouldBe Success(expectedValue) 60 | } 61 | 62 | "not compile for an invalid query" in { 63 | 64 | import MySchemaInterpolator._ 65 | """cql"SELECT * FROM unknownTable"""" shouldNot compile 66 | } 67 | 68 | "not compile when passing an invalid schema path" in { 69 | 70 | import MyInvalidSchemaInterpolator._ 71 | """cql"SELECT * FROM unknownTable"""" shouldNot compile 72 | } 73 | 74 | "work as expected for a data definition statement" in { 75 | 76 | import MySchemaInterpolator._ 77 | cql"CREATE TABLE test.users2 (id uuid, name text, PRIMARY KEY (id))" shouldBe ( 78 | ( 79 | "CREATE TABLE test.users2 (id uuid, name text, PRIMARY KEY (id))", 80 | Nil)) 81 | } 82 | 83 | "not compile for a data definition statement using a unknown keyspace" in { 84 | 85 | import MySchemaInterpolator._ 86 | """cql"CREATE table unknownKeyspace.users2 (id uuid, name text, PRIMARY KEY (id))"""" shouldNot compile 87 | } 88 | 89 | "not compile for an invalid data definition statement" in { 90 | 91 | import MySchemaInterpolator._ 92 | """cql"CREATEtable test.users2 (id uuid, PRIMARY KEY (id))"""" shouldNot compile 93 | } 94 | 95 | "not compile when passing an empty string to the macro interpolator" in { 96 | """ 97 | |import freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaFileInterpolator 98 | |@SchemaFileInterpolator("") 99 | |trait MyInterpolator 100 | """.stripMargin shouldNot compile 101 | } 102 | 103 | "not compile when trying to apply the macro interpolator to a class" in { 104 | """ 105 | |import 
freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaFileInterpolator 106 | |@SchemaFileInterpolator("/schema.sql") 107 | |class MyInterpolator 108 | """.stripMargin shouldNot compile 109 | } 110 | 111 | } 112 | 113 | "SchemaMetadataInterpolator" should { 114 | 115 | "not compile when passing an empty string to the macro interpolator" in { 116 | """ 117 | |import freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaMetadataInterpolator 118 | |@SchemaMetadataInterpolator("") 119 | |trait MyInterpolator 120 | """.stripMargin shouldNot compile 121 | } 122 | 123 | "not compile when trying to apply the macro interpolator to a class" in { 124 | """ 125 | |import freestyle.cassandra.query.interpolator.MacroInterpolator.SchemaMetadataInterpolator 126 | |@SchemaMetadataInterpolator("/cluster.conf") 127 | |class MyInterpolator 128 | """.stripMargin shouldNot compile 129 | } 130 | 131 | } 132 | 133 | } 134 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.0.1 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | resolvers += Resolver.sonatypeRepo("releases") 2 | addSbtPlugin("io.frees" % "sbt-freestyle" % "0.3.14") 3 | addSbtPlugin("com.47deg" % "sbt-embedded-cassandra" % "0.0.6") 4 | -------------------------------------------------------------------------------- /pubring.gpg: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | Version: BCPG v1.51 3 | 4 | mQENBFgaZ4EBCADA/g+xxG8rADqhyXfZ7Uxx4VbOR0IPKSUSOlQyVX6aywUIHHaj 5 | C+DqJ6kR/vBpLbrfi5ZSAtXyp8LYn6e1HQaGlVhYpRLflvicOASfHW2+idVLT3X1 6 | uI26mr3VqUUb4BAMB5d0UCuVCmcgh27jf5gGxTFC0SjFPtjtiNTsZYgz5/XCOvmP 7 | 
PbIowTcloxtuiq1u/7FIL8p502AxHAse6Dmnc+f0HhGesBsCW6wSppNkL7aw1kQF 8 | LfrGuXGRtT2yYbLvyIpPQFsGA7ZVU8cXS4MJOF+hNpIHejzQVonm6r8P3CTuuBHc 9 | FvjL/t5yfkxFhrf84C0QmHNANI1z2kgObc/RABEBAAG0JTQ3ZGVnL2ZyZWVzdHls 10 | ZSA8ZGV2ZWxvcGVyQDQ3ZGVnLmNvbT6JARwEEwECAAYFAlgaZ4EACgkQaSNddcix 11 | bxbeTwf9HbmCgraCyMJvhV3YxYrPAnYtTVRZQRRMAG4RsFa95l8DsrolfHJOObvL 12 | z9zwb3WiRi2U+SXSpKUJQkZSJRFGhMxJvCzBD+YBt1I0XIm5a49CXqfi/gqbVJD7 13 | 3aqZVv8WSuEuSXT1eIhV4z/HefvmMU9VB2StTfCoGGzsQ+BgP9v7Q+GRwsUCAntO 14 | YnFE6r0EZrT+wvrApsgOdBLiGMvTm/FkOb6QHesDGo18YjgZzK1zs/r7dkY4TG+Y 15 | vE0mX3uZyxT9KmDtHLrzZ0jtOPWvnLklR/BujjzDVEWsyK/HBV+Y4Ef/7gaDpigm 16 | svPdhWroMXwc+Kyzn4gGIXk/9HYl7A== 17 | =U4e2 18 | -----END PGP PUBLIC KEY BLOCK----- 19 | -------------------------------------------------------------------------------- /secring.gpg.enc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/frees-io/freestyle-cassandra/b1d0faef3c02ecb3b1037d74b1d47b5db32a87d5/secring.gpg.enc -------------------------------------------------------------------------------- /version.sbt: -------------------------------------------------------------------------------- 1 | version in ThisBuild := "0.1.2-SNAPSHOT" --------------------------------------------------------------------------------