├── .github
│   └── ISSUE_TEMPLATE
│       ├── bug_report.md
│       └── feature_request.md
├── .gitignore
├── .scalariform.conf
├── .travis.yml
├── LICENSE
├── README.md
├── build.sbt
├── docs
│   ├── cass21
│   │   └── src
│   │       └── main
│   │           └── tut
│   │               └── cass21
│   │                   ├── caching-implicits.md
│   │                   ├── custom-encoders-decoders.md
│   │                   ├── date-codecs.md
│   │                   ├── row-extraction.md
│   │                   ├── scalasession
│   │                   │   ├── batch.md
│   │                   │   ├── consistency-level.md
│   │                   │   ├── create-table.md
│   │                   │   ├── delete.md
│   │                   │   ├── getting-started.md
│   │                   │   ├── insert.md
│   │                   │   ├── raw.md
│   │                   │   ├── select.md
│   │                   │   └── update.md
│   │                   └── type-mappings.md
│   ├── cass3
│   │   └── src
│   │       └── main
│   │           └── tut
│   │               └── cass3
│   │                   ├── caching-implicits.md
│   │                   ├── custom-encoders-decoders.md
│   │                   ├── date-codecs.md
│   │                   ├── row-extraction.md
│   │                   ├── scalasession
│   │                   │   ├── batch.md
│   │                   │   ├── consistency-level.md
│   │                   │   ├── create-table.md
│   │                   │   ├── delete.md
│   │                   │   ├── getting-started.md
│   │                   │   ├── insert.md
│   │                   │   ├── raw.md
│   │                   │   ├── select.md
│   │                   │   └── update.md
│   │                   └── type-mappings.md
│   └── root
│       ├── css
│       │   └── override.css
│       ├── data
│       │   └── menu.yaml
│       └── src
│           └── main
│               └── tut
│                   ├── cassandra-21.md
│                   ├── cassandra-30.md
│                   └── index.md
├── project
│   ├── Boilerplate.scala
│   ├── build.properties
│   └── plugins.sbt
├── scripts
│   ├── makeMicrosite.sh
│   └── sbt-test.sh
└── src
    ├── main
    │   ├── scala
    │   │   └── com
    │   │       └── weather
    │   │           └── scalacass
    │   │               ├── CCCassFormatDecoder.scala
    │   │               ├── CCCassFormatEncoder.scala
    │   │               ├── CassFormatDecoder.scala
    │   │               ├── CassFormatEncoder.scala
    │   │               ├── Exceptions.scala
    │   │               ├── LRUCache.scala
    │   │               ├── Nullable.scala
    │   │               ├── Recoverable.scala
    │   │               ├── ScalaSession.scala
    │   │               ├── TupleCassFormatDecoder.scala
    │   │               ├── TupleCassFormatEncoder.scala
    │   │               ├── package.scala
    │   │               ├── scsession
    │   │               │   ├── QueryBuildingBlock.scala
    │   │               │   └── SCStatement.scala
    │   │               └── syntax.scala
    │   ├── scala_cass21
    │   │   └── com
    │   │       └── weather
    │   │           └── scalacass
    │   │               ├── CassFormatDecoderVersionSpecific.scala
    │   │               ├── CassFormatEncoderVersionSpecific.scala
    │   │               ├── NotRecoverableVersionSpecific.scala
    │   │               ├── jdk8
    │   │               │   └── Implicits.scala
    │   │               ├── joda
    │   │               │   └── Implicits.scala
    │   │               └── scsession
    │   │                   └── SCStatementVersionSpecific.scala
    │   └── scala_cass3
    │       └── com
    │           └── weather
    │               └── scalacass
    │                   ├── CassFormatDecoderVersionSpecific.scala
    │                   ├── CassFormatEncoderVersionSpecific.scala
    │                   ├── NotRecoverableVersionSpecific.scala
    │                   ├── Time.scala
    │                   ├── jdk8
    │                   │   ├── Implicits.scala
    │                   │   └── package.scala
    │                   ├── joda
    │                   │   ├── Implicits.scala
    │                   │   └── package.scala
    │                   └── scsession
    │                       └── SCStatementVersionSpecific.scala
    └── test
        ├── resources
        │   ├── cu-cassandra-rndport-with-fix-22.yaml
        │   ├── cu-cassandra-rndport-with-fix-3.yaml
        │   └── logback.xml
        ├── scala
        │   └── com
        │       └── weather
        │           └── scalacass
        │               ├── CaseClassUnitTests.scala
        │               ├── ConsistencyLevelUnitTest.scala
        │               ├── CreateTableUnitTests.scala
        │               ├── ForProductTest.scala
        │               ├── PerfTest.scala
        │               ├── ScalaCassUnitTests.scala
        │               ├── UpdateBehaviorTests.scala
        │               ├── scsession
        │               │   ├── ActionUnitTests.scala
        │               │   ├── DeleteUnitTests.scala
        │               │   ├── InsertUnitTests.scala
        │               │   ├── SelectUnitTests.scala
        │               │   └── UpdateUnitTests.scala
        │               └── util
        │                   ├── CassandraClient.scala
        │                   ├── CassandraTester.scala
        │                   ├── CassandraUnitTester.scala
        │                   └── CassandraWithTableTester.scala
        ├── scala_cass21
        │   └── com
        │       └── weather
        │           └── scalacass
        │               ├── ScalaCassUnitTestsVersionSpecific.scala
        │               └── util
        │                   ├── CassandraClientVersionSpecific.scala
        │                   └── CassandraUnitInfo.scala
        └── scala_cass3
            └── com
                └── weather
                    └── scalacass
                        ├── ScalaCassUnitTestsVersionSpecific.scala
                        └── util
                            ├── CassandraClientVersionSpecific.scala
                            └── CassandraUnitInfo.scala
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Description and Reproducible steps for a bug
4 |
5 | ---
6 |
7 | **Describe the bug**
8 | A clear and concise description of what the bug is.
9 |
10 | **To Reproduce**
11 | Steps to reproduce the behavior:
12 | 1. Go to '...'
13 | 2. Run '...' code
14 | 3. See error
15 |
16 | **Expected behavior**
17 | A clear and concise description of what you expected to happen.
18 |
19 | **Code Snippets**
20 | If applicable, add a code snippet to help explain your problem.
21 |
22 | **Additional context**
23 | Add any other context about the problem here.
24 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 |
5 | ---
6 |
7 | **Is your feature request related to a problem? Please describe.**
8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9 |
10 | **Describe the solution you'd like**
11 | A clear and concise description of what you want to happen.
12 |
13 | **Describe alternatives you've considered**
14 | A clear and concise description of any alternative solutions or features you've considered.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | .idea
3 | .ensime
4 | .ensime_cache
5 | cassandra.logdir*
6 | docs/src/main/tut/cass3
7 | docs/src/main/tut/cass21
8 | cassandra-docs
9 | _site
10 | lib
11 | docs/root/src/main/tut/cass3/*
12 | docs/root/src/main/tut/cass21/*
13 | .java-version
14 |
--------------------------------------------------------------------------------
/.scalariform.conf:
--------------------------------------------------------------------------------
1 | autoformat=true
2 | withBaseDirectory=true
3 | #alignArguments=false
4 | #alignParameters=false
5 | alignSingleLineCaseStatements=true
6 | #alignSingleLineCaseStatements.maxArrowIndent=40
7 | allowParamGroupsOnNewlines=true
8 | #compactControlReadability=false
9 | #compactStringConcatenation=false
10 | danglingCloseParenthesis=Force
11 | #doubleIndentClassDeclaration=false
12 | doubleIndentConstructorArguments=true
13 | doubleIndentMethodDeclaration=true
14 | #firstArgumentOnNewline=Force
15 | #firstParameterOnNewline=Force
16 | #formatXml=true
17 | #indentLocalDefs=false
18 | #indentPackageBlocks=true
19 | #indentSpaces=2
20 | #indentWithTabs=false
21 | multilineScaladocCommentsStartOnFirstLine=true
22 | newlineAtEndOfFile=true
23 | placeScaladocAsterisksBeneathSecondAsterisk=true
24 | #preserveSpaceBeforeArguments=false
25 | #rewriteArrowSymbols=false
26 | singleCasePatternOnNewline=false
27 | #spaceBeforeColon=false
28 | spaceBeforeContextColon=true
29 | #spaceInsideBrackets=false
30 | #spaceInsideParentheses=false
31 | #spacesAroundMultiImports=true
32 | #spacesWithinPatternBinders=true
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # use containers
2 | sudo: false
3 |
4 | language: scala
5 | scala:
6 | - 2.12.10
7 | - 2.11.12
8 | - 2.10.7
9 |
10 | jdk:
11 | - openjdk8
12 |
13 | env:
14 | - CASSANDRA_DRIVER_VERSION=3.5.0
15 | - CASSANDRA_DRIVER_VERSION=2.1.10.3
16 |
17 | sbt_args: -Dcassandra-driver.version=$CASSANDRA_DRIVER_VERSION
18 |
19 | before_cache:
20 | # Tricks to avoid unnecessary cache updates
21 | - find $HOME/.ivy2 -name "ivydata-*.properties" -delete
22 | - find $HOME/.sbt -name "*.lock" -delete
23 |
24 | cache:
25 | directories:
26 | - $HOME/.ivy2/cache
27 | - $HOME/.sbt/boot/
28 |
29 | before_script:
30 | - mkdir lib
31 | - wget https://github.com/Ichoran/thyme/raw/master/Thyme.jar -O lib/Thyme.jar
32 |
33 | notifications:
34 | webhooks:
35 | urls:
36 | - https://webhooks.gitter.im/e/b37d78d8e7cb1d34adb3
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 thurstonsand
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # scala-cass
2 |
3 | [Build Status](https://travis-ci.org/thurstonsand/scala-cass)
4 | [Join the chat at Gitter](https://gitter.im/scala-cass/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
5 |
6 | [See full documentation here.](http://thurstonsand.github.io/scala-cass/)
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | import microsites._
2 |
3 | val cassandra3Version = "3.5.0"
4 | val cassandra2Version = "2.1.10.3"
5 | val cassandraVersion = sys.props.getOrElse("cassandra-driver.version", cassandra3Version) match {
6 | case v @ (`cassandra3Version` | `cassandra2Version`) => v
7 | case _ => throw new IllegalArgumentException(s"cassandra version must be one of $cassandra3Version, $cassandra2Version")
8 | }
9 |
10 | val baseVersion = "3.2.1"
11 |
12 | lazy val codeLinterSettings = {
13 | Seq(
14 | wartremoverWarnings in (Compile, compile) ++= Seq(
15 | Wart.AsInstanceOf, Wart.DefaultArguments, Wart.EitherProjectionPartial, Wart.Enumeration,
16 | Wart.Equals, Wart.ExplicitImplicitTypes, Wart.FinalCaseClass, Wart.FinalVal,
17 | Wart.IsInstanceOf, Wart.JavaConversions, Wart.JavaSerializable, Wart.LeakingSealed,
18 | Wart.Null, Wart.OptionPartial, Wart.Product, Wart.Recursion, Wart.Return,
19 | Wart.Serializable, Wart.TryPartial, Wart.Var, Wart.While),
20 | wartremoverWarnings in (Compile, console) := Seq.empty
21 | )
22 | }
23 |
24 | def addUnmanagedSourceDirsFrom(folder: String) = {
25 | def addSourceFilesTo(conf: Configuration) =
26 | unmanagedSourceDirectories in conf := {
27 | val sds = (unmanagedSourceDirectories in conf).value
28 | val sd = (sourceDirectory in conf).value
29 | sds :+ new java.io.File(sd, folder)
30 | }
31 |
32 | Seq(addSourceFilesTo(Compile), addSourceFilesTo(Test))
33 | }
34 |
35 | lazy val commonSettings = Seq(
36 | // upgrading to 2.13 is a real pain because stuff is getting deprecated, which is causing errors
37 | // dealing with it later
38 | scalaVersion := "2.12.10",
39 | crossScalaVersions := Seq("2.12.10", "2.11.12", "2.10.7"),
40 | scalacOptions ++= Seq(
41 | "-deprecation",
42 | "-encoding", "UTF-8",
43 | "-feature",
44 | "-language:existentials",
45 | "-language:higherKinds",
46 | "-language:implicitConversions",
47 | "-unchecked",
48 | "-Xfatal-warnings",
49 | "-Ywarn-numeric-widen",
50 | "-Ywarn-value-discard"
51 | ) ++ (CrossVersion.partialVersion(scalaVersion.value) match {
52 | case Some((2, 13)) => Seq("-explaintypes", "-language:experimental.macros", "-Xlint:adapted-args,constant,doc-detached,nullary-unit,inaccessible,nullary-override,infer-any,missing-interpolator,doc-detached,private-shadow,type-parameter-shadow,poly-implicit-overload,option-implicit,delayedinit-select,package-object-classes,stars-align", "-Ywarn-unused:patvars,privates,locals", "-Ymacro-annotations", "-Ywarn-extra-implicit", "-Ycache-plugin-class-loader:last-modified", "-Ycache-macro-class-loader:last-modified")
53 | case Some((2, 12)) => Seq("-Yno-adapted-args", "-Xlint:adapted-args,nullary-unit,inaccessible,nullary-override,infer-any,missing-interpolator,doc-detached,private-shadow,type-parameter-shadow,poly-implicit-overload,option-implicit,delayedinit-select,by-name-right-associative,package-object-classes,unsound-match,stars-align", "-Ywarn-unused:privates,locals", "-Xfuture")
54 | case Some((2, 11)) => Seq("-Yno-adapted-args", "-Xlint:adapted-args,nullary-unit,inaccessible,nullary-override,infer-any,missing-interpolator,doc-detached,private-shadow,type-parameter-shadow,poly-implicit-overload,option-implicit,delayedinit-select,by-name-right-associative,package-object-classes,unsound-match,stars-align", "-Ywarn-unused", "-Ywarn-unused-import", "-Xfuture")
55 | case Some((2, 10)) => Seq("-Yno-adapted-args", "-Xlint", "-Xfuture")
56 | case _ => throw new IllegalArgumentException(s"scala version not configured: ${scalaVersion.value}")
57 | }),
58 | (scalacOptions in Test) -= "-Xfatal-warnings",
59 | parallelExecution in Test := false,
60 | ) ++ codeLinterSettings
61 |
62 | lazy val macroSettings = Seq(
63 | libraryDependencies ++= Seq(
64 | "org.scalameta" %% "scalameta" % "3.7.4" % "provided",
65 | "org.scala-lang" % "scala-reflect" % scalaVersion.value,
66 | "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided",
67 | "com.datastax.cassandra" % "cassandra-driver-core" % cassandraVersion classifier "shaded"
68 | ) ++ (CrossVersion.partialVersion(scalaVersion.value) match {
69 | case Some((2, 10)) => Seq("org.scalamacros" %% "quasiquotes" % "2.1.1" cross CrossVersion.binary)
70 | case _ => Seq.empty
71 | })
72 | )
73 |
74 | lazy val applicationSettings = Seq(
75 | name := "ScalaCass",
76 | organization := "com.github.thurstonsand",
77 | description := "a wrapper for the Java Cassandra driver that uses case classes to simplify and codify creating cached statements in a type-safe manner",
78 | version := s"$baseVersion-$cassandraVersion",
79 | libraryDependencies ++= Seq(
80 | "com.google.code.findbugs" % "jsr305" % "3.0.1" % "provided", // Intellij does not like "compile-internal, test-internal", use "provided" instead
81 | "org.joda" % "joda-convert" % "1.8.1" % "provided", // Intellij does not like "compile-internal, test-internal", use "provided" instead
82 | "org.slf4j" % "slf4j-api" % "1.7.25" % "provided", // Intellij does not like "compile-internal, test-internal", use "provided" instead
83 | "joda-time" % "joda-time" % "2.9.4",
84 | "com.chuusai" %% "shapeless" % "2.3.3",
85 | "com.google.guava" % "guava" % "19.0",
86 | "com.datastax.cassandra" % "cassandra-driver-core" % cassandraVersion classifier "shaded" excludeAll ExclusionRule("com.google.guava", "guava"),
87 | "org.scalatest" %% "scalatest" % "3.0.8" % "test",
88 | ) ++ (if (cassandraVersion startsWith "2.1.") Seq(
89 | "org.cassandraunit" % "cassandra-unit" % "2.2.2.1" % "test"
90 | ) else Seq(
91 | "com.datastax.cassandra" % "cassandra-driver-extras" % cassandraVersion excludeAll (ExclusionRule("com.datastax.cassandra", "cassandra-driver-core"), ExclusionRule("com.google.guava", "guava")),
92 | "org.cassandraunit" % "cassandra-unit" % "3.3.0.2" % "test"
93 | )) ++ (CrossVersion.partialVersion(scalaVersion.value) match {
94 | case Some((2, 13)) => Seq.empty
95 | case _ => Seq(compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full))
96 | }),
97 | initialize := {
98 | CrossVersion.partialVersion(scalaVersion.value) match {
99 | case Some((2, 10)) => sys.props("scalac.patmat.analysisBudget") = "off"
100 | case _ => sys.props remove "scalac.patmat.analysisBudget"
101 | }
102 | }
103 | )
104 |
105 | lazy val noPublishSettings = Seq(
106 | publish := ((): Unit),
107 | publishLocal := ((): Unit),
108 | publishArtifact := false
109 | )
110 |
111 | lazy val publishSettings = Seq(
112 | homepage := Some(url("https://github.com/thurstonsand")),
113 | licenses := Seq("MIT" -> url("http://www.opensource.org/licenses/mit-license.php")),
114 | pomExtra :=
115 |   <scm>
116 |     <url>git@github.com/thurstonsand/scala-cass.git</url>
117 |     <connection>scm:git:git@github.com/thurstonsand/scala-cass.git</connection>
118 |   </scm>
119 |   <developers>
120 |     <developer>
121 |       <id>thurstonsand</id>
122 |       <name>Thurston Sandberg</name>
123 |       <url>https://github.com/thurstonsand</url>
124 |     </developer>
125 |   </developers>,
126 | publishMavenStyle := true,
127 | pomIncludeRepository := (_ => false),
128 | bintrayReleaseOnPublish in ThisBuild := false,
129 | bintrayPackageLabels := Seq("cassandra")
130 | )
131 |
132 | lazy val micrositeSettings = Seq(
133 | micrositeName := "scala-cass",
134 | micrositeAuthor := "Thurston Sandberg",
135 | micrositeDescription := "Java Cassandra Driver Bindings for Friendlier Scala",
136 | micrositeGithubOwner := "thurstonsand",
137 | micrositeGithubRepo := "scala-cass",
138 | micrositeBaseUrl := sys.props.getOrElse("microsite.baseurl", "scala-cass"),
139 | micrositeImgDirectory := baseDirectory.value / "imgs",
140 | micrositeCssDirectory := baseDirectory.value / "css",
141 | micrositeDataDirectory := baseDirectory.value / "data",
142 | micrositeExternalIncludesDirectory := baseDirectory.value / "includes",
143 | micrositeGitterChannelUrl := "scala-cass/Lobby",
144 | micrositeShareOnSocial := false,
145 | micrositeHighlightTheme := "docco",
146 | micrositeConfigYaml := ConfigYml(
147 | yamlCustomProperties = Map(
148 | "baseVersion" -> baseVersion,
149 | "cassandra2Version" -> cassandra2Version,
150 | "cassandra3Version" -> cassandra3Version
151 | )
152 | ),
153 | includeFilter in makeSite := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.js" | "*.swf" | "*.yml" | "*.md",
154 | ghpagesNoJekyll := false,
155 | fork in tut := true,
156 | git.remoteRepo := "git@github.com:thurstonsand/scala-cass.git"
157 | )
158 |
159 | // in case I need macros in the future
160 | //lazy val `scala-cass-macros` = project.in(file("macro"))
161 | // .settings(moduleName := "scala-cass-macros")
162 | // .settings(commonSettings: _*)
163 | // .settings(macroSettings: _*)
164 |
165 | lazy val `scala-cass` = project.in(file("."))
166 | .settings(moduleName := "scala-cass",
167 | sourceGenerators in Compile += (sourceManaged in Compile).map(Boilerplate.gen).taskValue)
168 | .settings(commonSettings: _*)
169 | .settings(applicationSettings: _*)
170 | .settings(publishSettings: _*)
171 | .settings(addUnmanagedSourceDirsFrom(if (cassandraVersion startsWith "2.1.") "scala_cass21" else "scala_cass3"))
172 | // .dependsOn(`scala-cass-macros`)
173 |
174 | lazy val `tut-cass3` = project.in(file("docs/cass3"))
175 | .enablePlugins(MicrositesPlugin)
176 | .settings(commonSettings: _*)
177 | .settings(applicationSettings: _*)
178 | .settings(micrositeSettings: _*)
179 | .settings(noPublishSettings: _*)
180 | .settings(addUnmanagedSourceDirsFrom("scala_cass3"): _*)
181 | .dependsOn(`scala-cass`)
182 |
183 | lazy val `tut-cass21` = project.in(file("docs/cass21"))
184 | .enablePlugins(MicrositesPlugin)
185 | .settings(commonSettings: _*)
186 | .settings(applicationSettings: _*)
187 | .settings(micrositeSettings: _*)
188 | .settings(noPublishSettings: _*)
189 | .settings(addUnmanagedSourceDirsFrom("scala_cass21"): _*)
190 | .dependsOn(`scala-cass`)
191 |
192 | lazy val docs = project.in(file("docs/root"))
193 | .enablePlugins(MicrositesPlugin)
194 | .settings(micrositeSettings: _*)
195 | .settings(noPublishSettings: _*)
196 |
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/caching-implicits.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Caching Implicits"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.{ScalaSession, CCCassFormatEncoder, CCCassFormatDecoder}
9 | import com.weather.scalacass.syntax._
10 | import com.datastax.driver.core.Row
11 |
12 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
13 | implicit val session: Session = cluster.connect()
14 |
15 | val sSession: ScalaSession = ScalaSession("mykeyspace")
16 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
17 |
18 | case class MyTable(s: String, i: Int, l: Long)
19 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
20 | createStatement.execute()
21 |
22 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, 5678L))
23 | insertStatement.execute()
24 | ```
25 | # Caching Implicits
26 |
27 | When deriving the encoders and decoders to your case classes, the library is built to be able to automatically resolve
28 | the implicit `CCCassFormatEncoder`/`CCCassFormatDecoder`. This will work fine, but if you are using these
29 | encoders/decoders often, it may be worth it to cache them so that they do not have to be built at every call site. The
30 | best and easiest way to do this is to derive these implicits in the companion object to your case classes, as follows:
31 |
32 | (quick note: the `ImplicitCaching` object is a workaround for the compilation of these docs; you do not need to wrap
33 | your case class/companion object definitions like this in your own code)
34 |
35 | ```tut
36 | object ImplicitCaching {
37 | case class Query(s: String)
38 | object Query {
39 | implicit val encoder: CCCassFormatEncoder[Query] = CCCassFormatEncoder.derive
40 | implicit val decoder: CCCassFormatDecoder[Query] = CCCassFormatDecoder.derive
41 | }
42 | }
43 | sSession.selectStar("mytable", ImplicitCaching.Query("a str")).execute()
44 | ```
45 |
46 | The `derive` function implicitly creates the encoder/decoder in the companion object, so at every call site
47 | this cached implicit is used instead of one being rebuilt by the library.
48 | ```tut:invisible
49 | sSession.dropKeyspace.execute()
50 | sSession.close()
51 | cluster.close()
52 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/custom-encoders-decoders.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Custom Encoders/Decoders"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.{Result, ScalaSession, CCCassFormatEncoder, CCCassFormatDecoder}
9 | import com.weather.scalacass.syntax._
10 | import com.datastax.driver.core.Row
11 | import com.weather.scalacass.scsession.SCStatement.RightBiasedEither
12 |
13 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
14 | implicit val session: Session = cluster.connect()
15 |
16 | val sSession: ScalaSession = ScalaSession("mykeyspace")
17 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
18 |
19 | case class MyTable(s: String, i: Int, l: Long)
20 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
21 | createStatement.execute()
22 |
23 | val createSpecialTable = sSession.rawStatement("CREATE TABLE specialtable (s varchar PRIMARY KEY, i int, special_long bigint)")
24 | createSpecialTable.execute()
25 |
26 | val insertStatement = sSession.insert("mytable", MyTable("a_unique_id", 1234, 5678L))
27 | insertStatement.execute()
28 | ```
29 | # Custom Encoders/Decoders
30 |
31 | If you need to apply a transformation during extraction, don't have a 1-to-1 mapping of case class field
32 | names to Cassandra column names, or want to use a type not included in the ScalaCass library, you can define a
33 | custom encoder and decoder for any type. As an example of how you might customize this, we will define a
34 | `UniqueId` class that only accepts ids of at most 15 characters.
35 |
36 | ```tut:silent
37 | abstract case class UniqueId(id: String)
38 | object UniqueId {
39 | def apply(untestedId: String): UniqueId =
40 | if (untestedId.length > 15) throw new IllegalArgumentException("id must be at most 15 characters long")
41 | else new UniqueId(untestedId) {}
42 | }
43 | ```
44 |
45 | You can provide a custom type in three ways:
46 |
47 | ## Using `forProduct$arity` for case classes
48 |
49 | When parsing an entire row into a case class, it may not be possible to encapsulate specialized logic
50 | using the basic encoders. In these cases, `forProduct$arity` gives you complete control over
51 | how values are extracted from/inserted into a row. It can also be used when the column names in a Cassandra row do not
52 | match the field names in the case class. Since it only applies to operations on an entire row, the functions are available
53 | on `CCCassFormatEncoder` and `CCCassFormatDecoder`. They are named
54 | `forProduct1`/`forProduct2`/.../`forProduct22`; choose the one that matches the number of values you wish
55 | to extract from/insert into a row.
56 |
57 | ```tut
58 | object Wrapper {
59 | case class SpecialInsert(s: String, i: Int, specialLong: Long)
60 | object SpecialInsert {
61 | implicit val ccDecoder: CCCassFormatDecoder[SpecialInsert] =
62 | CCCassFormatDecoder.forProduct3("s", "i", "special_long")((s: String, i: Int, specialLong: Long) => SpecialInsert(s, i, specialLong))
63 | implicit val ccEncoder: CCCassFormatEncoder[SpecialInsert] =
64 | CCCassFormatEncoder.forProduct3("s", "i", "special_long")((sc: SpecialInsert) => (sc.s, sc.i, sc.specialLong))
65 | }
66 | }
67 | import Wrapper._ // Wrapper is necessary for this interpreter, and should be excluded in your code
68 | ```
69 |
70 | And now the SpecialInsert is ready to be used:
71 |
72 | ```tut
73 | val specialInsertStatement = sSession.insert("specialtable", SpecialInsert("asdf", 1234, 5678L))
74 | specialInsertStatement.execute()
75 | ```
76 |
77 | Renaming is not the only use of `forProduct$arity`, nor is one strictly required for a case class; it can also apply arbitrary transformations, as sketched below.
78 |
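For instance, a sketch of a decoder that transforms values during extraction (the `Sanitized` case class is hypothetical):

```scala
// A sketch (hypothetical case class): forProduct3 can transform values while
// extracting: here the string is trimmed and the long clamped to be non-negative.
case class Sanitized(s: String, i: Int, specialLong: Long)
object Sanitized {
  implicit val decoder: CCCassFormatDecoder[Sanitized] =
    CCCassFormatDecoder.forProduct3("s", "i", "special_long")((s: String, i: Int, l: Long) =>
      Sanitized(s.trim, i, math.max(0L, l)))
}
```
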
79 | ## Map over an existing type
80 |
81 | This is the easier way to create a custom type, since it is based on an existing decoder/encoder. You first retrieve an
82 | existing encoder/decoder via the `CassFormatEncoder`/`CassFormatDecoder`'s `apply` method.
83 |
84 | ```tut
85 | import com.weather.scalacass.{CassFormatDecoder, CassFormatEncoder}
86 | implicit val uniqueIdDecoder: CassFormatDecoder[UniqueId] = CassFormatDecoder[String].map[UniqueId](UniqueId.apply)
87 | implicit val uniqueIdEncoder: CassFormatEncoder[UniqueId] = CassFormatEncoder[String].map[UniqueId](uniqueId => uniqueId.id)
88 | ```
89 |
90 | With these implicits in scope, you can now use the `UniqueId` type directly when encoding a Row.
91 |
92 | First, insert a row:
93 |
94 | ```tut
95 | case class Insertable(s: UniqueId, i: Int, l: Long)
96 | val insertStatement = sSession.insert("mytable", Insertable(UniqueId("a_unique_id"), 1234, 5678L))
97 | insertStatement.execute()
98 | ```
99 |
100 | Then, select that row:
101 |
102 | ```tut
103 | case class Query(s: UniqueId)
104 | val selectStatement = sSession.selectOneStar("mytable", Query(UniqueId("a_unique_id")))
105 | val res = selectStatement.execute()
106 | ```
107 |
108 | Then, extract using `UniqueId`:
109 |
110 | ```tut
111 | res.map(_.map(_.as[UniqueId]("s")))
112 | ```
113 |
114 | Of course, `UniqueId` might throw an exception, which may not be the behavior you want. For operations that might
115 | fail, you can instead use `flatMap`, which works with the `Result[T]` type, an alias for `Either[Throwable, T]`:
116 |
117 | ```tut:silent
118 | abstract case class SafeUniqueId(id: String)
119 | object SafeUniqueId {
120 | def apply(untestedId: String): Result[SafeUniqueId] =
121 | if (untestedId.length > 15) Left(new IllegalArgumentException("id must be at most 15 characters long"))
122 | else Right(new SafeUniqueId(untestedId) {})
123 | }
124 | ```
125 |
126 | And with this definition, let's redefine the encoder/decoder:
127 |
128 | ```tut
129 | implicit val safeUniqueIdDecoder: CassFormatDecoder[SafeUniqueId] = CassFormatDecoder[String].flatMap[SafeUniqueId](SafeUniqueId.apply)
130 | implicit val safeUniqueIdEncoder: CassFormatEncoder[SafeUniqueId] = CassFormatEncoder[String].map[SafeUniqueId](safeId => safeId.id)
131 | ```
132 | So, let's go through the same steps, this time inserting an id that is too long to extract:
133 |
134 | ```tut
135 | case class UnsafeInsertable(s: String, i: Int, l: Long)
136 | val unsafeInsertStatement = sSession.insert("mytable", UnsafeInsertable("this_id_is_definitely_too_long_to_be_safe", 1234, 5678L))
137 | unsafeInsertStatement.execute()
138 | ```
139 |
140 | And then select that row:
141 |
142 | ```tut
143 | case class UnsafeQuery(s: String)
144 | val unsafeSelectStatement = sSession.selectOneStar("mytable", UnsafeQuery("this_id_is_definitely_too_long_to_be_safe"))
145 | val unsafeRes = unsafeSelectStatement.execute()
146 | ```
147 |
148 | And finally, try to extract it:
149 |
150 | ```tut
151 | unsafeRes.map(_.map(_.attemptAs[SafeUniqueId]("s")))
152 | ```
153 |
154 | ## Create a new encoder/decoder from scratch
155 |
156 | You might write a new encoder/decoder from scratch if you've added a user-defined type to Cassandra itself and want
157 | to use the library to read from it. However, let's continue with the `UniqueId` example from above.
158 |
159 | ```tut:invisible
160 | // shadow these previous implicits so we can create new ones
161 | val safeUniqueIdDecoder = ""
162 | val safeUniqueIdEncoder = ""
163 | ```
164 |
165 | For a decoder:
166 | * `type From` is the Java type that is extracted from Cassandra directly, from which you will convert to a Scala type
167 | * `val typeToken` is the special class instance for that type
168 | * `TypeToken` is used over `classOf` because it can correctly encode type parameters to `Map`s, `List`s, and `Set`s
169 | * `CassFormatDecoder` provides 3 helper functions for these types: `CassFormatDecoder.mapOf`, `.listOf`, and `.setOf`
170 | * `def f2t` defines the transformation from the Java type to the Scala type
171 | * `def extract` defines the way to extract the Java type from the Cassandra `Row`
172 | * `def tupleExtract` is the same as `extract`, but for tuples
173 |
174 | ```tut
175 | import com.google.common.reflect.TypeToken, com.datastax.driver.core.{Row, TupleValue}
176 | implicit val safeUniqueIdDecoder: CassFormatDecoder[SafeUniqueId] = new CassFormatDecoder[SafeUniqueId] {
177 | type From = String
178 | val typeToken = TypeToken.of(classOf[String])
179 | def f2t(f: String): Result[SafeUniqueId] = SafeUniqueId(f)
180 | def extract(r: Row, name: String): From = r.getString(name)
181 | def tupleExtract(tup: TupleValue, pos: Int): From = tup.getString(pos)
182 | }
183 | ```
184 |
185 | For an encoder:
186 | * `type From` is the Scala type which you are encoding from
187 | * `val cassDataType` is the Cassandra type which you are converting to
188 | * `def encode` is the way that you encode that conversion, meaning the Scala -> Java conversion
189 |
190 | ```tut
191 | import com.datastax.driver.core.DataType
192 | implicit val safeUniqueIdEncoder: CassFormatEncoder[SafeUniqueId] = new CassFormatEncoder[SafeUniqueId] {
193 | type From = String
194 | val cassDataType: DataType = DataType.varchar()
195 | def encode(f: SafeUniqueId): Result[String] = Right(f.id)
196 | }
197 | ```
198 |
199 | And as before,
200 |
201 | ```tut:invisible
202 | case class UnsafeInsertable(s: String, i: Int, l: Long)
203 | val unsafeInsertStatement = sSession.insert("mytable", UnsafeInsertable("this_id_is_definitely_too_long_to_be_safe", 1234, 5678L))
204 | unsafeInsertStatement.execute()
205 | ```
206 | ```tut
207 | case class UnsafeQuery(s: String)
208 | val unsafeSelectStatement = sSession.selectOneStar("mytable", UnsafeQuery("this_id_is_definitely_too_long_to_be_safe"))
209 | val unsafeRes = unsafeSelectStatement.execute()
210 | unsafeRes.map(_.map(_.attemptAs[SafeUniqueId]("s")))
211 | ```
212 | ```tut:invisible
213 | sSession.dropKeyspace.execute()
214 | sSession.close()
215 | cluster.close()
216 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/date-codecs.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Date Codecs"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.Cluster
8 | import com.weather.scalacass.ScalaSession
9 | import com.weather.scalacass.joda.Implicits._
10 |
11 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
12 | val session: ScalaSession = ScalaSession("datecodeckeyspace")(cluster.connect())
13 | session.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 3}").execute()
14 |
15 | case class TS(str: String, mytimestamp: org.joda.time.Instant)
16 | case class Query(str: String)
17 | val table = "mytable"
18 | session.createTable[TS](table, 1, 0).execute()
19 | val ts = TS("a primary key", org.joda.time.Instant.now)
20 | session.insert(table, ts).execute()
21 | val r = session.selectOneStar(table, Query(ts.str)).execute().right.toOption.flatten.get
22 | ```
23 |
24 | # Date Codecs
25 |
26 | By default, Scala-Cass uses the Java driver's default timestamp mapping:
27 |
28 | | Cassandra Type | Scala Type |
29 | |:--------------:|:----------------------------------:|
30 | | timestamp | java.util.Date |
31 |
32 |
33 | You can use the Joda library or the JDK 8 date library as a replacement for this default. While the
34 | examples below show how to read data of joda/jdk8 types, the same imports cover writing these types to
35 | Cassandra, as sketched at the end.
36 |
37 | ### Joda Implicits
38 |
39 | All you need to do is import the implicits that add support for the joda `Instant`:
40 | `com.weather.scalacass.joda.Implicits._`
41 |
42 | ```tut
43 | import com.weather.scalacass.syntax._, com.weather.scalacass.joda.Implicits._
44 | r // some row from your table
45 | r.as[org.joda.time.Instant]("mytimestamp") // cassandra "timestamp"
46 | ```
47 |
48 | ### Jdk8 Implicits
49 |
50 | All you need to do is import the implicits that add support for the jdk8 `Instant`:
51 | `com.weather.scalacass.jdk8.Implicits._`
52 |
53 | ```tut
54 | import com.weather.scalacass.syntax._, com.weather.scalacass.jdk8.Implicits._
55 | r // some row from your table
56 | r.as[java.time.Instant]("mytimestamp") // cassandra "timestamp"
57 | ```
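
As noted above, writing works the same way; a minimal sketch (assuming the jdk8 implicits above are still in scope):

```scala
// A sketch: with the jdk8 implicits in scope, a java.time.Instant is written
// to a Cassandra "timestamp" column like any other supported type.
case class TSJdk8(str: String, mytimestamp: java.time.Instant)
session.insert(table, TSJdk8("another key", java.time.Instant.now)).execute()
```
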
58 | ```tut:invisible
59 | session.dropKeyspace.execute()
60 | session.close()
61 | cluster.close()
62 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/row-extraction.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Row Extraction"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 | import com.datastax.driver.core.Row
10 | import com.weather.scalacass.scsession.SCStatement.RightBiasedEither
11 |
12 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
13 | implicit val session: Session = cluster.connect()
14 |
15 | val sSession: ScalaSession = ScalaSession("mykeyspace")
16 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
17 |
18 | case class MyTable(s: String, i: Int, l: List[Long])
19 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
20 | createStatement.execute()
21 |
22 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, List(5678L)))
23 | insertStatement.execute()
24 | ```
25 | # Row Extraction
26 |
27 | Cassandra's `Row` holds the response from a statement. Using the raw driver, conversion into a useful Scala data type is
28 | cumbersome: you must both extract each value from the `Row` and convert it from its Java type. Scala-Cass handles all of
29 | that under the hood.
30 |
31 | As an example, start with a `Row` retrieved from a Cassandra table. Let's say this table has a definition of
32 |
33 | ```tut
34 | case class MyRow(s: String, i: Int, l: List[Long]) // s varchar, i int, l list<bigint>
35 | ```
36 | Then we select a row using a `ScalaSession` (see more of `select` [here](/cass21/scalasession/select.html))
37 |
38 | ```tut
39 | case class Select(s: String)
40 | val rowRes = sSession.selectOneStar("mytable", Select("a str")).execute()
41 | val row: Row = rowRes.toOption.flatten.get
42 | ```
43 |
44 | First, let's extract into `MyRow` using the regular driver
45 |
46 | ```tut
47 | import scala.collection.JavaConverters._
48 | val driverDerivedRow = MyRow(row.getString("s"), row.getInt("i"), row.getList("l", classOf[java.lang.Long]).asScala.toList.map(Long.unbox))
49 | ```
50 |
51 | Especially for the `List[Long]`, this is unnecessarily verbose, since we already have the necessary type information. In
52 | addition, we don't know whether any of these values came back as `null`, so in truth we would need null checks as well.
53 |
54 | We can hide all of this boilerplate using the ScalaCass library. First, we need to import the syntax from
55 | `com.weather.scalacass.syntax`. Then let's mirror the extraction from above
56 |
57 | ```tut
58 | import com.weather.scalacass.syntax._
59 | val scalaCassDerivedRow = MyRow(row.as[String]("s"), row.as[Int]("i"), row.as[List[Long]]("l"))
60 | ```
61 |
62 | All we need to specify is the type that we
63 | want[*](#type-mappings), and the library handles the rest.
64 | If one of these values came back null, an exception is thrown, since we do not want to introduce null values
65 | into our code.
66 |
67 | If you do need to handle null values, use the `Option` type to extract them, as this will return `None` instead of
68 | null:
69 |
70 | ```tut
71 | row.as[Option[String]]("s")
72 | ```
73 |
74 | There are two convenience functions around this, `getAs` and `getOrElse`, which retrieve an `Option` of the type;
75 | `getOrElse` additionally provides a default in case it gets back a `None`
76 |
77 | ```tut:invisible
78 | case class DeleteColumn(i: Int)
79 | sSession.delete[DeleteColumn]("mytable", Select("a str")).execute()
80 | ```
81 | ```tut
82 | row.getAs[Int]("i")
83 | row.getOrElse[Int]("i", -12345)
84 | ```
85 | ```tut:invisible
86 | insertStatement.execute()
87 | ```
88 |
89 | If you want to handle the exception yourself, you can simply attempt the extraction and get back either the
90 | success or the failure
91 |
92 | ```tut
93 | row.attemptAs[Int]("i")
94 | ```
95 |
96 | ## Case Classes
97 |
98 | We already have a conveniently defined `MyRow` with all of the type information we want, so the Scala-Cass library (with
99 | the help of the [Shapeless library](https://github.com/milessabin/shapeless)) can automatically use the case class
100 | directly for extraction
101 |
102 | ```tut
103 | row.as[MyRow]
104 | row.getAs[MyRow]
105 | row.getOrElse[MyRow](MyRow("default row", -12345, List(-5678L)))
106 | row.attemptAs[MyRow]
107 | ```
108 |
109 | Note that no arguments (aside from the type parameter) are passed when extracting to a case class because you are acting
110 | on the entire row.
111 |
112 | ##### For performance characteristics of these extractions, [see the performance page](/cass21/performance.html)
113 |
114 | ##### For a full list of type mappings between the Cassandra types and Scala types, [see the type mappings page](/cass21/type-mappings.html)
115 | ```tut:invisible
116 | sSession.dropKeyspace.execute()
117 | sSession.close()
118 | cluster.close()
119 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/batch.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Batch"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Batch Statements
21 |
22 | Inserts, updates, and deletes can be batched into a single statement sent to Cassandra and, using a batch type of
23 | `LOGGED`, will either all succeed or all fail. There are performance implications to using batch statements, which you
24 | can read about [here](https://docs.datastax.com/en/cql/3.1/cql/cql_using/useBatch.html).
25 |
26 | In the Scala-Cass library, you build a batch statement by passing it prepared statements, which
27 | can be accomplished in a number of ways:
28 |
29 | #### batchOf
30 |
31 | ```tut
32 | case class Query(s: String)
33 | case class NewValue(i: Int, l: Long)
34 | val updateStatement = sSession.update("mytable", NewValue(1234, 5678L), Query("some str"))
35 |
36 | case class Insertable(s: String, i: Int, l: Long)
37 | val insertStatement = sSession.insert("mytable", Insertable("some other str", 4321, 8765L))
38 |
39 | val deleteStatement = sSession.deleteRow("mytable", Query("a third str"))
40 |
41 | val batchOfStatement = sSession.batchOf(updateStatement, insertStatement, deleteStatement)
42 | batchOfStatement.execute()
43 | ```
44 |
45 | #### batch
46 |
47 | ```tut
48 | val statementsToBatch = List(updateStatement, insertStatement, deleteStatement)
49 | val batchStatement = sSession.batch(statementsToBatch)
50 | batchStatement.execute()
51 | ```
52 |
53 |
54 | #### + (build)
55 | ```tut
56 | val oneBatchStatement = sSession.batchOf(updateStatement)
57 | val twoBatchStatement = oneBatchStatement + insertStatement
58 | val threeBatchStatement = twoBatchStatement + deleteStatement
59 | threeBatchStatement.execute()
60 | ```
61 |
62 | #### ++ (append)
63 |
64 | ```tut
65 | val fromListBatchStatement = oneBatchStatement ++ List(insertStatement, deleteStatement)
66 | fromListBatchStatement.execute()
67 | ```
68 | ```tut
69 | val otherBatchStatement = sSession.batchOf(insertStatement, deleteStatement)
70 | val fromOtherBatchStatement = oneBatchStatement ++ otherBatchStatement
71 | fromOtherBatchStatement.execute()
72 | ```
73 |
74 | #### and (build multiple)
75 |
76 | ```tut
77 | val andBatchStatement = oneBatchStatement and (insertStatement, deleteStatement)
78 | andBatchStatement.execute()
79 | ```
80 |
81 | ## Batch Type
82 |
83 | You can additionally specify the batch type of the statement, but it defaults to `LOGGED`.
84 |
85 | ```tut
86 | import com.datastax.driver.core.BatchStatement
87 | val withTypeBatchStatement = batchStatement.withBatchType(BatchStatement.Type.LOGGED)
88 | withTypeBatchStatement.execute()
89 | ```
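
An unlogged batch is also possible; a sketch (an `UNLOGGED` batch skips Cassandra's batch log, giving up the all-or-nothing guarantee in exchange for lower overhead):

```scala
// A sketch: switch the batch type to UNLOGGED on an existing batch statement.
val unloggedBatchStatement = batchStatement.withBatchType(BatchStatement.Type.UNLOGGED)
unloggedBatchStatement.execute()
```
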
90 | ```tut:invisible
91 | sSession.dropKeyspace.execute()
92 | sSession.close()
93 | cluster.close()
94 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/consistency-level.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Consistency Level"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Consistency Level
21 |
22 | By default, statements will use the same consistency as set for the entire cluster (via the `QueryOptions`).
23 | But, Cassandra also allows for statement-specific consistency levels to be used for insert, select, update, delete, batch, and raw statements.
24 |
25 | They all act the same way, so while the examples below focus on insert, the same rules apply to all of the above.
26 |
27 | ```tut
28 | import com.datastax.driver.core.ConsistencyLevel
29 | case class Insertable(s: String, i: Int, l: Long) // same as the table
30 | val insertStatement = sSession.insert("mytable", Insertable("some str", 1234, 5678L))
31 | val consistencyInsert = insertStatement.consistency(ConsistencyLevel.ONE)
32 | consistencyInsert.execute()
33 | ```
34 |
35 | And remove it if necessary, in which case the statement will use the consistency level set for the cluster:
36 |
37 | ```tut
38 | val noConsistencyInsert = insertStatement.defaultConsistency
39 | noConsistencyInsert.execute()
40 | ```
41 |
42 | Finally, certain consistency levels are only allowed for reads, and others only for writes.
43 | See [the documentation](https://docs.datastax.com/en/cassandra/2.1/cassandra/dml/dml_config_consistency_c.html) for more details.
44 | In addition, batch statements can only use `SERIAL` or `LOCAL_SERIAL` level consistency.
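
For instance, a sketch of setting a batch's consistency, reusing the `consistency` builder from above:

```scala
// A sketch: batch statements accept the same consistency builder; per the
// restriction above, use SERIAL (or LOCAL_SERIAL).
val serialBatch = sSession.batchOf(insertStatement).consistency(ConsistencyLevel.SERIAL)
serialBatch.execute()
```
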
45 | ```tut:invisible
46 | sSession.dropKeyspace.execute()
47 | sSession.close()
48 | cluster.close()
49 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/create-table.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Creating a Table"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 | ```
16 | # Creating a Table
17 |
18 | Like creating a keyspace, this is likely only going to be useful to those who are writing tests. Table creation uses a
19 | case class for the table columns' names and type definitions (see [Type Mappings](/cass21/type-mappings.html) to compare
20 | Scala and Cassandra types).
21 |
22 | ## Characteristics
23 |
24 | * the `createTable` method takes 4 properties
25 | * name of table
26 | * number of partition keys
27 | * number of clustering keys
28 | * partition and clustering keys are chosen left-to-right, in that order. See the example below.
29 | * You can pass an optional parameter for the right hand side of the table definition (after the `WITH`) using the
30 | `` `with` `` builder
31 | * parameters wrapped in `Option` or `Nullable` take the underlying type parameter as its type for table creation
32 | * you must have at least 1 partition key
33 | * the number of partition keys + clustering keys must be less than the number of fields in the case class
34 | * any rules associated with cassandra semantics for data types must be followed (eg no counters in the primary key)
35 |
36 | ```tut
37 | case class MyTable(s: String, i: Int, l: Long)
38 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
39 | createStatement.execute()
40 | ```
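
To illustrate left-to-right key selection, a sketch with a hypothetical four-column table:

```scala
// A sketch: with 2 partition keys and 1 clustering key, the keys are taken
// left-to-right from the case class, yielding PRIMARY KEY ((s, i), l).
case class CompositeTable(s: String, i: Int, l: Long, d: Double)
val compositeCreateStatement = sSession.createTable[CompositeTable]("compositetable", 2, 1)
compositeCreateStatement.execute()
```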
41 |
42 | and you can use the `` `with` `` builder to specify anything after the `WITH`
43 |
44 | ```tut:invisible
45 | sSession.dropTable("mytable").execute()
46 | ```
47 |
48 | ```tut
49 | val createStatementWith = createStatement.`with`("compaction = { 'class' : 'SizeTieredCompactionStrategy', 'min_threshold' : 6 }")
50 | createStatementWith.execute()
51 | ```
52 |
53 | Finally, you can truncate and drop the table using the `truncateTable` and `dropTable` commands
54 |
55 | ```tut
56 | val truncateStatement = sSession.truncateTable("mytable")
57 | truncateStatement.execute()
58 | ```
59 |
60 | ```tut
61 | val dropStatement = sSession.dropTable("mytable")
62 | dropStatement.execute()
63 | ```
64 | ```tut:invisible
65 | sSession.dropKeyspace.execute()
66 | sSession.close()
67 | cluster.close()
68 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/delete.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Delete"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Delete
21 |
22 | Deletes are executed with a query and optional columns, both of which are represented via case classes.
23 |
24 | For deletes that will delete the entire row:
25 |
26 | ```tut
27 | case class Query(s: String)
28 | val deleteStatement = sSession.deleteRow("mytable", Query("some str"))
29 | deleteStatement.execute()
30 | ```
31 |
32 | For deletes that only delete certain columns of that row, specify the columns as a case class. However, you will not
33 | actually use an instance of the case class in the statement; just pass it in as a type parameter:
34 |
35 | ```tut
36 | case class ColumnsToRemove(i: Int)
37 | val deleteColumnsStatement = sSession.delete[ColumnsToRemove]("mytable", Query("some str"))
38 | deleteColumnsStatement.execute()
39 | ```
40 |
41 | ## If Statement
42 |
43 | You can use case classes to model `IF` conditions. For now, only equality is supported. This means that the values
44 | in the if statement are translated to an `=` comparison:
45 |
46 | ```tut
47 | case class If(l: Long)
48 | val deleteWithIf = deleteStatement.`if`(If(5678L))
49 | deleteWithIf.execute()
50 | ```
51 |
52 | You can just specify `IF EXISTS` as well:
53 |
54 | ```tut
55 | val deleteWithIfExists = deleteStatement.ifExists
56 | deleteWithIfExists.execute()
57 | ```
58 |
59 | You can remove any if clause:
60 |
61 | ```tut
62 | val deleteWithoutIf = deleteWithIf.noConditional
63 | deleteWithoutIf.execute()
64 | ```
65 |
66 | ## Timestamp
67 |
68 | You can specify a timestamp:
69 |
70 | ```tut
71 | val timestampDelete = deleteStatement.usingTimestamp(System.currentTimeMillis)
72 | timestampDelete.execute()
73 | ```
74 |
75 | or use shorthand for current time:
76 |
77 | ```tut
78 | val timestampNowDelete = deleteStatement.usingTimestampNow
79 | timestampNowDelete.execute()
80 | ```
81 |
82 | and finally, remove a timestamp from the statement:
83 |
84 | ```tut
85 | val noTimestampDelete = timestampDelete.noTimestamp
86 | noTimestampDelete.execute()
87 | ```
88 | ```tut:invisible
89 | sSession.dropKeyspace.execute()
90 | sSession.close()
91 | cluster.close()
92 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/getting-started.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Getting Started"
4 | section: "c21"
5 | ---
6 | # Getting Started with ScalaSession
7 |
8 | Using a `ScalaSession` follows the same general rules as creating the Java driver's `Session`.
9 |
10 | ```tut:silent
11 | import com.datastax.driver.core.Cluster
12 |
13 | val cluster = Cluster.builder.addContactPoint("localhost").build()
14 | ```
15 |
16 | ## Characteristics
17 |
18 | * `PreparedStatement` caching
19 | * acts on a single keyspace
20 | * can optionally create a keyspace on instantiation
21 | * can pick up Java `Session` implicitly
22 | * provides an `execute`, which blocks for the result, and `executeAsync`, which returns a `Future` of the result
23 |
24 | The `ScalaSession` itself is a class that you must keep around, much like you would a Cassandra Java `Session`. This is
25 | because the ScalaSession caches PreparedStatements from every executed command, so if you are calling the same command
26 | multiple times, it will use an existing PreparedStatement instead of generating a new statement every time.
27 |
28 | ```tut
29 | import com.datastax.driver.core.Session, com.weather.scalacass.ScalaSession
30 |
31 | implicit val session: Session = cluster.connect()
32 |
33 | val sSession: ScalaSession = ScalaSession("mykeyspace") // picks up session implicitly
34 | ```
35 |
36 | If the keyspace has not been created yet (for instance, during testing), you can create it using `createKeyspace`
37 | and passing in parameters included after the `WITH` statement:
38 |
39 | ```tut
40 | val createStatement = sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 3}")
41 | createStatement.execute()
42 | ```
43 |
44 | Additionally, you can specify `IF NOT EXISTS` using the `ifNotExists` builder
45 |
46 | ```tut
47 | val createStatementIfNotExists = createStatement.ifNotExists
48 | val result = createStatementIfNotExists.execute()
49 | ```
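
As noted in the characteristics above, every statement also provides `executeAsync`, which returns a `Future` of the result instead of blocking; a minimal sketch:

```scala
// A sketch: executeAsync returns a Future of the same result that execute
// would block for; bring an ExecutionContext into scope to act on it.
import scala.concurrent.ExecutionContext.Implicits.global

val asyncResult = createStatementIfNotExists.executeAsync()
asyncResult.foreach(result => println(s"keyspace ensured: $result"))
```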
50 |
51 | Finally, you can drop the keyspace if you are done using it, although this will render the `ScalaSession` unusable
52 |
53 | ```tut
54 | sSession.dropKeyspace.execute()
55 | ```
56 | ```tut:invisible
57 | sSession.close()
58 | cluster.close()
59 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/insert.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Insert"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Insert
21 |
22 | Use case classes to model the data to insert
23 |
24 | ```tut
25 | case class Insertable(s: String, i: Int, l: Long) // same as the table
26 | val insertStatement = sSession.insert("mytable", Insertable("some str", 1234, 5678L))
27 | insertStatement.execute()
28 | ```
29 |
30 | ## If Statement
31 |
32 | You can also insert only if the row does not already exist:
33 |
34 | ```tut
35 | val ifNotExistsInsert = insertStatement.ifNotExists
36 | ifNotExistsInsert.execute()
37 | ```
38 |
39 | Or remove the existence check:
40 |
41 | ```tut
42 | val noIfNotExistsInsert = ifNotExistsInsert.noConditional
43 | noIfNotExistsInsert.execute()
44 | ```
45 |
46 | ## TTL
47 |
48 | You can add a TTL:
49 |
50 | ```tut
51 | val ttlInsert = insertStatement.usingTTL(12345)
52 | ttlInsert.execute()
53 | ```
54 |
55 | And remove the TTL:
56 |
57 | ```tut
58 | val noTtlInsert = ttlInsert.noTTL
59 | noTtlInsert.execute()
60 | ```
61 |
62 | ## Timestamp
63 |
64 | You can specify a timestamp:
65 |
66 | ```tut
67 | val timestampInsert = insertStatement.usingTimestamp(System.currentTimeMillis)
68 | timestampInsert.execute()
69 | ```
70 |
71 | Or use shorthand for current time:
72 |
73 | ```tut
74 | val timestampNowInsert = insertStatement.usingTimestampNow
75 | timestampNowInsert.execute()
76 | ```
77 |
78 | And finally, remove a timestamp from the statement:
79 |
80 | ```tut
81 | val noTimestampInsert = timestampInsert.noTimestamp
82 | noTimestampInsert.execute()
83 | ```
84 | ```tut:invisible
85 | sSession.dropKeyspace.execute()
86 | sSession.close()
87 | cluster.close()
88 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/raw.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Raw"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Raw Statements
21 |
22 | In case the library cannot fulfill a specific need you have (it does not have 100% parity with the Java driver's
23 | features), or you otherwise need to build up your own queries as `String`s, you can write a raw statement equivalent to
24 | what you would pass to `session.execute` and still get convenient caching of the prepared statement. Note, however,
25 | that you must provide the exact types that the Java driver expects, meaning you need to manually box any `Int`s, `Long`s,
26 | etc., and convert any `Map`s, `List`s, etc. to their Java counterparts.
27 |
28 | There are two variants, depending on the kind of response you expect:
29 |
30 | ```tut
31 | val rawStatement = sSession.rawStatement("INSERT INTO mykeyspace.mytable (s, i, l) VALUES (?, ?, ?)", "a str", Int.box(1234), Long.box(5678L))
32 | rawStatement.execute()
33 | ```
34 | ```tut
35 | val rawSelect = sSession.rawSelect("SELECT COUNT(*) FROM mykeyspace.mytable WHERE s = ?", "a str")
36 | rawSelect.execute()
37 | ```
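
For the collection conversion mentioned in the note above, here is a minimal sketch (plain Scala rather than a compiled `tut` block, and assuming a hypothetical table `listtable` with columns `s varchar PRIMARY KEY, li list<varchar>`):

```scala
import scala.collection.JavaConverters._

// the driver expects a java.util.List, so convert the Scala List with asJava
val scalaList = List("a", "b", "c")
val rawListInsert = sSession.rawStatement(
  "INSERT INTO mykeyspace.listtable (s, li) VALUES (?, ?)",
  "a str",
  scalaList.asJava
)
rawListInsert.execute()
```
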
38 | ```tut:invisible
39 | sSession.dropKeyspace.execute()
40 | sSession.close()
41 | cluster.close()
42 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/select.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Select"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 |
20 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, 5678L))
21 | insertStatement.execute()
22 | ```
23 | # Select
24 |
25 | Selects can retrieve the entire row, or only specific columns of the row. The query and the column specifier, if needed,
26 | are both represented via case classes.
27 |
28 | In addition, you can choose to retrieve only a single row from Cassandra, represented by an `Option[Row]` response,
29 | where the `Option` will be `None` if no row matched the query.
30 |
31 | ## Select Whole Row
32 |
33 | For selects that will return the entire row:
34 |
35 | ```tut
36 | case class Query(s: String)
37 | ```
38 | ##### Retrieve all rows matching the query
39 | ```tut
40 | val selectStatement = sSession.selectStar("mytable", Query("a str"))
41 | selectStatement.execute()
42 | ```
43 | ##### Retrieve a single row matching the query
44 | ```tut
45 | val selectOneStatement = sSession.selectOneStar("mytable", Query("a str"))
46 | selectOneStatement.execute()
47 | ```
48 |
49 | ## Select Columns
50 |
51 | For selects that only retrieve certain columns of that row, specify the columns as a case class. However, you will not
52 | actually use an instance of the case class in the statement; just pass it in as a type parameter:
53 |
54 | ```tut
55 | case class ColumnsToRetrieve(s: String, l: Long)
56 | ```
57 | ```tut
58 | val selectColumnsStatement = sSession.select[ColumnsToRetrieve]("mytable", Query("a str"))
59 | selectColumnsStatement.execute()
60 |
61 | val selectColumnsOneStatement = sSession.selectOne[ColumnsToRetrieve]("mytable", Query("a str"))
62 | selectColumnsOneStatement.execute()
63 | ```
64 |
65 | ## Allow Filtering
66 |
67 | You can `ALLOW FILTERING` on the request (read more about ["allow filtering" here](https://www.datastax.com/dev/blog/allow-filtering-explained-2))
68 |
69 | ```tut
70 | val selectAllowFiltering = selectStatement.allowFiltering
71 | selectAllowFiltering.execute()
72 |
73 | val selectOneAllowFiltering = selectOneStatement.allowFiltering
74 | selectOneAllowFiltering.execute()
75 | ```
76 |
77 | You can remove the allow filtering option:
78 |
79 | ```tut
80 | val selectNoAllowFiltering = selectAllowFiltering.noAllowFiltering
81 | selectNoAllowFiltering.execute()
82 |
83 | val selectOneNoAllowFiltering = selectOneAllowFiltering.noAllowFiltering
84 | selectOneNoAllowFiltering.execute()
85 | ```
86 |
87 | ## Limit
88 |
89 | For queries that will return an iterator of responses (i.e., not `selectOne` statements), you can impose a limit on the
90 | number of responses:
91 |
92 | ```tut
93 | val selectLimit = selectStatement.limit(100)
94 | selectLimit.execute()
95 | ```
96 |
97 | Finally, you can disable the imposed limit:
98 |
99 | ```tut
100 | val selectNoLimit = selectLimit.noLimit
101 | selectNoLimit.execute()
102 | ```
103 |
104 | ## Reading from the `Row`s
105 |
106 | Scala-Cass provides a Scala-style method of extraction for `Row`, either into Scala values, or directly into case
107 | classes.
108 |
109 | * [Click here](/cass21/row-extraction.html) for a tutorial on how to extract values from `Row`
110 | * [Click here](/cass21/type-mappings.html) for a mapping of Cassandra types to Scala types
111 | ```tut:invisible
112 | sSession.dropKeyspace.execute()
113 | sSession.close()
114 | cluster.close()
115 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/scalasession/update.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Update"
4 | section: "c21"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long, li: List[String])
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Update
21 |
22 | Use case classes to model both the query and new value for updates:
23 |
24 | ```tut
25 | case class Query(s: String)
26 | case class NewValue(i: Int, l: Long)
27 | val updateStatement = sSession.update("mytable", NewValue(1234, 5678L), Query("some str"))
28 | updateStatement.execute()
29 | ```
30 |
31 | ## Add/Subtract
32 |
33 | There is a special class available to specify that you would like to either add elements to or subtract elements
34 | from a Cassandra collection: namely, `UpdateBehavior.Add` and `UpdateBehavior.Subtract`.
35 |
36 | ```tut
37 | import com.weather.scalacass.syntax._
38 | case class NewValueList(li: UpdateBehavior[List, String])
39 | val updateStatementAdd = sSession.update("mytable", NewValueList(UpdateBehavior.Add(List("a", "b", "c"))), Query("some str"))
40 | updateStatementAdd.execute()
41 | sSession.selectOneStar("mytable", Query("some str")).execute()
42 | ```
43 |
44 | ```tut
45 | val updateStatementSubtract = sSession.update("mytable", NewValueList(UpdateBehavior.Subtract(List("a", "b"))), Query("some str"))
46 | updateStatementSubtract.execute()
47 | sSession.selectOneStar("mytable", Query("some str")).execute()
48 | ```
49 |
50 | For parity, there is also `UpdateBehavior.Replace`, but simply using the collection type directly will act in the same way.
51 |
52 | Using `UpdateBehavior.Replace`:
53 |
54 | ```tut
55 | val updateStatementReplace = sSession.update("mytable", NewValueList(UpdateBehavior.Replace(List("d", "e", "f"))), Query("some str"))
56 | updateStatementReplace.execute()
57 | sSession.selectOneStar("mytable", Query("some str")).execute()
58 | ```
59 |
60 | Using regular `List`:
61 |
62 | ```tut
63 | case class NewValueListRegular(li: List[String])
64 | val updateStatementRegular = sSession.update("mytable", NewValueListRegular(List("g", "h", "i")), Query("some str"))
65 | updateStatementRegular.execute()
66 | sSession.selectOneStar("mytable", Query("some str")).execute()
67 | ```
68 |
69 | ## If Statement
70 |
71 | You can use case classes to model If statements. For now, only equality is possible, meaning values
72 | in the if statement are translated to an `=` comparison. If you need a different comparison operation,
73 | see [raw statements](/cass21/scalasession/raw.html):
74 |
75 | ```tut
76 | case class If(l: Long)
77 | val updateWithIf = updateStatement.`if`(If(5678L))
78 | updateWithIf.execute()
79 | ```
80 |
81 | You can also specify just `IF EXISTS`:
82 |
83 | ```tut
84 | val updateWithIfExists = updateStatement.ifExists
85 | updateWithIfExists.execute()
86 | ```
87 |
88 | You can remove any if clause:
89 |
90 | ```tut
91 | val updateWithoutIf = updateWithIf.noConditional
92 | updateWithoutIf.execute()
93 | ```
94 |
95 | ## TTL
96 |
97 | You can add a TTL:
98 |
99 | ```tut
100 | val ttlUpdate = updateStatement.usingTTL(12345)
101 | ttlUpdate.execute()
102 | ```
103 |
104 | And remove the TTL:
105 |
106 | ```tut
107 | val noTtlUpdate = ttlUpdate.noTTL
108 | noTtlUpdate.execute()
109 | ```
110 |
111 | ## Timestamp
112 |
113 | You can specify a timestamp:
114 |
115 | ```tut
116 | val timestampUpdate = updateStatement.usingTimestamp(System.currentTimeMillis)
117 | timestampUpdate.execute()
118 | ```
119 |
120 | Or use shorthand for current time:
121 |
122 | ```tut
123 | val timestampNowUpdate = updateStatement.usingTimestampNow
124 | timestampNowUpdate.execute()
125 | ```
126 |
127 | And finally, remove a timestamp from the statement:
128 |
129 | ```tut
130 | val noTimestampUpdate = timestampUpdate.noTimestamp
131 | noTimestampUpdate.execute()
132 | ```
133 | ```tut:invisible
134 | sSession.dropKeyspace.execute()
135 | sSession.close()
136 | cluster.close()
137 | ```
--------------------------------------------------------------------------------
/docs/cass21/src/main/tut/cass21/type-mappings.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Type Mappings"
4 | section: "c21"
5 | ---
6 |
7 | # Type Mappings
8 |
9 | ## Cassandra 2.1
10 |
11 | | Cassandra Type | Scala Type |
12 | |:--------------:|:--------------------:|
13 | | varchar | String |
14 | | uuid | java.util.UUID |
15 | | inet | java.net.InetAddress |
16 | | int | Int |
17 | | bigint | Long |
18 | | boolean | Boolean |
19 | | double | Double |
20 | | varint | BigInt |
21 | | decimal | BigDecimal |
22 | | float | Float |
23 | | blob | Array[Byte] |
24 | | list | List |
25 | | map | Map |
26 | | set | Set |
27 | | tuple | Tuple* |
28 | | **timestamp** | **java.util.Date** |
29 |
30 | * There are overrides for the joda library and jdk8 time library that take advantage of Cassandra's new codecs.
31 | [See date codecs](/cass21/date-codecs.html) for more
32 |
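As a quick illustration of these mappings (a sketch only; it assumes a hypothetical `row` of type `com.datastax.driver.core.Row` from a table with columns `s varchar`, `l bigint`, and `m map<varchar, int>`):

```scala
import com.weather.scalacass.syntax._

val s = row.as[String]("s")           // varchar -> String
val l = row.as[Long]("l")             // bigint  -> Long
val m = row.as[Map[String, Int]]("m") // map     -> Map
```
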
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/caching-implicits.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Caching Implicits"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.{ScalaSession, CCCassFormatEncoder, CCCassFormatDecoder}
9 | import com.weather.scalacass.syntax._
10 | import com.datastax.driver.core.Row
11 |
12 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
13 | implicit val session: Session = cluster.connect()
14 |
15 | val sSession: ScalaSession = ScalaSession("mykeyspace")
16 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
17 |
18 | case class MyTable(s: String, i: Int, l: Long)
19 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
20 | createStatement.execute()
21 |
22 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, 5678L))
23 | insertStatement.execute()
24 | ```
25 | # Caching Implicits
26 |
27 | When deriving the encoders and decoders for your case classes, the library is built to automatically resolve
28 | the implicit `CCCassFormatEncoder`/`CCCassFormatDecoder`. This works fine, but if you are using these
29 | encoders/decoders often, it may be worth caching them so that they do not have to be rebuilt at every call site. The
30 | best and easiest way to do this is to derive these implicits in the companion objects of your case classes, as follows:
31 |
32 | (quick note: the `ImplicitCaching` object is a workaround for the compilation of these docs; it is unnecessary to wrap
33 | your case class/companion object definitions in your own code)
34 |
35 | ```tut
36 | object ImplicitCaching {
37 | case class Query(s: String)
38 | object Query {
39 | implicit val encoder: CCCassFormatEncoder[Query] = CCCassFormatEncoder.derive
40 | implicit val decoder: CCCassFormatDecoder[Query] = CCCassFormatDecoder.derive
41 | }
42 | }
43 | sSession.selectStar("mytable", ImplicitCaching.Query("a str")).execute()
44 | ```
45 |
46 | The `derive` function creates the encoder/decoder in the companion object, and now at every call site
47 | this implicit is used instead of one that would be built from scratch by the library.
48 | ```tut:invisible
49 | sSession.dropKeyspace.execute()
50 | sSession.close()
51 | cluster.close()
52 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/custom-encoders-decoders.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Custom Encoders/Decoders"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.{Result, ScalaSession, CCCassFormatEncoder, CCCassFormatDecoder}
9 | import com.weather.scalacass.syntax._
10 | import com.datastax.driver.core.Row
11 | import com.weather.scalacass.scsession.SCStatement.RightBiasedEither
12 |
13 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
14 | implicit val session: Session = cluster.connect()
15 |
16 | val sSession: ScalaSession = ScalaSession("mykeyspace")
17 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
18 |
19 | case class MyTable(s: String, i: Int, l: Long)
20 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
21 | createStatement.execute()
22 |
23 | val createSpecialTable = sSession.rawStatement("CREATE TABLE specialtable (s varchar PRIMARY KEY, i int, special_long bigint)")
24 | createSpecialTable.execute()
25 |
26 | val insertStatement = sSession.insert("mytable", MyTable("a_unique_id", 1234, 5678L))
27 | insertStatement.execute()
28 | ```
29 | # Custom Encoders/Decoders
30 |
31 | In case you need to apply a transformation during the extraction process, don't have a 1-to-1 mapping of case class
32 | field names to Cassandra column names, or are trying to use a type not included in the ScalaCass library, you can define a
33 | custom encoder and decoder for any type. We will define a `UniqueId` class as an example of how you might customize
34 | it. Let's say this class will only accept ids no more than 15 characters long.
35 |
36 | ```tut:silent
37 | abstract case class UniqueId(id: String)
38 | object UniqueId {
39 | def apply(untestedId: String): UniqueId =
40 | if (untestedId.length > 15) throw new IllegalArgumentException("id must be no more than 15 characters long")
41 | else new UniqueId(untestedId) {}
42 | }
43 | ```
44 |
45 | You can provide a custom type in 3 ways:
46 |
47 | ## Map over an existing type
48 |
49 | This is the easiest way to create a custom type, since it is based on an existing decoder/encoder. You first retrieve an
50 | existing encoder/decoder via the `CassFormatEncoder`/`CassFormatDecoder`'s `apply` method.
51 |
52 | ```tut
53 | import com.weather.scalacass.{CassFormatDecoder, CassFormatEncoder}
54 | implicit val uniqueIdDecoder: CassFormatDecoder[UniqueId] = CassFormatDecoder[String].map[UniqueId](UniqueId.apply)
55 | implicit val uniqueIdEncoder: CassFormatEncoder[UniqueId] = CassFormatEncoder[String].map[UniqueId](uniqueId => uniqueId.id)
56 | ```
57 |
58 | With these implicits in scope, you can now use the `UniqueId` type directly when encoding to or extracting from a `Row`.
59 |
60 | First, insert a row:
61 |
62 | ```tut
63 | case class Insertable(s: UniqueId, i: Int, l: Long)
64 | val insertStatement = sSession.insert("mytable", Insertable(UniqueId("a_unique_id"), 1234, 5678L))
65 | insertStatement.execute()
66 | ```
67 |
68 | Then, select that row:
69 |
70 | ```tut
71 | case class Query(s: UniqueId)
72 | val selectStatement = sSession.selectOneStar("mytable", Query(UniqueId("a_unique_id")))
73 | val res = selectStatement.execute()
74 | ```
75 |
76 | Then, extract using `UniqueId`:
77 |
78 | ```tut
79 | res.map(_.map(_.as[UniqueId]("s")))
80 | ```
81 |
82 | Of course, `UniqueId` might throw an exception, which may not be the behavior you want. Instead, you can use
83 | `flatMap` for operations that might fail; it expects a `Result[T]`, which is just an alias for `Either[Throwable, T]`:
84 |
85 | ```tut:silent
86 | abstract case class SafeUniqueId(id: String)
87 | object SafeUniqueId {
88 | def apply(untestedId: String): Result[SafeUniqueId] =
89 | if (untestedId.length > 15) Left(new IllegalArgumentException("id must be no more than 15 characters long"))
90 | else Right(new SafeUniqueId(untestedId) {})
91 | }
92 | ```
93 |
94 | And with this definition, let's redefine the encoder/decoder:
95 |
96 | ```tut
97 | implicit val safeUniqueIdDecoder: CassFormatDecoder[SafeUniqueId] = CassFormatDecoder[String].flatMap[SafeUniqueId](SafeUniqueId.apply)
98 | implicit val safeUniqueIdEncoder: CassFormatEncoder[SafeUniqueId] = CassFormatEncoder[String].map[SafeUniqueId](safeId => safeId.id)
99 | ```
100 | Now let's go through the same steps, this time injecting an id that is too long to extract safely:
101 |
102 | ```tut
103 | case class UnsafeInsertable(s: String, i: Int, l: Long)
104 | val unsafeInsertStatement = sSession.insert("mytable", UnsafeInsertable("this_id_is_definitely_too_long_to_be_safe", 1234, 5678L))
105 | unsafeInsertStatement.execute()
106 | ```
107 |
108 | And then select that row:
109 |
110 | ```tut
111 | case class UnsafeQuery(s: String)
112 | val unsafeSelectStatement = sSession.selectOneStar("mytable", UnsafeQuery("this_id_is_definitely_too_long_to_be_safe"))
113 | val unsafeRes = unsafeSelectStatement.execute()
114 | ```
115 |
116 | And finally, try to extract it:
117 |
118 | ```tut
119 | unsafeRes.map(_.map(_.attemptAs[SafeUniqueId]("s")))
120 | ```
121 |
122 | ## Using `forProduct$arity` for case classes
123 |
124 | When parsing the entire row into a case class, sometimes it may not be possible to encapsulate specialized logic
125 | using the basic encoders. In these cases, `forProduct$arity` can be used as a way to have complete control over
126 | how values are extracted out of/inserted into a row. It can also be used when names in a Cassandra row do not match
127 | the names in the case class. Since this only applies to operations on an entire row, the functions are available
128 | on `CCCassFormatEncoder` and `CCCassFormatDecoder`. The functions are named
129 | `forProduct1`/`forProduct2`/.../`forProduct22`, and you choose the one that matches the number of fields you wish
130 | to extract from/insert into a row.
131 |
132 | ```tut
133 | object Wrapper {
134 | case class SpecialInsert(s: String, i: Int, specialLong: Long)
135 | object SpecialInsert {
136 | implicit val ccDecoder: CCCassFormatDecoder[SpecialInsert] =
137 | CCCassFormatDecoder.forProduct3("s", "i", "special_long")((s: String, i: Int, specialLong: Long) => SpecialInsert(s, i, specialLong))
138 | implicit val ccEncoder: CCCassFormatEncoder[SpecialInsert] =
139 | CCCassFormatEncoder.forProduct3("s", "i", "special_long")((sc: SpecialInsert) => (sc.s, sc.i, sc.specialLong))
140 | }
141 | }
142 | import Wrapper._ // Wrapper is necessary for this interpreter, and should be excluded in your code
143 | ```
144 |
145 | And now the SpecialInsert is ready to be used:
146 |
147 | ```tut
148 | val specialInsertStatement = sSession.insert("specialtable", SpecialInsert("asdf", 1234, 5678L))
149 | specialInsertStatement.execute()
150 | ```
151 |
152 | Renaming is not the only use of `forProduct$arity`, nor is defining one strictly required for a case class.
153 |
154 | ## Create a new encoder/decoder from scratch
155 |
156 | You might write a new encoder/decoder from scratch if you've added a user-defined type to Cassandra itself and want
157 | to use the library to read from it. For now, though, let's continue with the `UniqueId` example, as above.
158 |
159 | ```tut:invisible
160 | // shadow these previous implicits so we can create new ones
161 | val safeUniqueIdDecoder = ""
162 | val safeUniqueIdEncoder = ""
163 | ```
164 |
165 | For the decoder:
166 | * `type From` is the Java type that is extracted from Cassandra directly, from which you will convert to a Scala type
167 | * `val typeToken` is the special class instance for that type
168 | * `TypeToken` is used over `classOf` because it can correctly encode type parameters to `Map`s, `List`s, and `Set`s
169 | * `CassFormatDecoder` provides 3 helper functions for these types: `CassFormatDecoder.mapOf`, `.listOf`, and `.setOf`
170 | * `def f2t` defines the transformation from the Java type to the Scala type
171 | * `def extract` defines the way to extract the Java type from the Cassandra `Row`
172 | * `def tupleExtract` is the same as `extract`, but for tuples
173 |
174 | ```tut
175 | import com.google.common.reflect.TypeToken, com.datastax.driver.core.{Row, TupleValue}
176 | implicit val safeUniqueIdDecoder: CassFormatDecoder[SafeUniqueId] = new CassFormatDecoder[SafeUniqueId] {
177 | type From = String
178 | val typeToken = TypeToken.of(classOf[String])
179 | def f2t(f: String): Result[SafeUniqueId] = SafeUniqueId(f)
180 | def extract(r: Row, name: String): From = r.getString(name)
181 | def tupleExtract(tup: TupleValue, pos: Int): From = tup.getString(pos)
182 | }
183 | ```
184 |
185 | For the encoder:
186 | * `type From` is the Scala type which you are encoding from
187 | * `val cassDataType` is the Cassandra type which you are converting to
188 | * `def encode` is the way that you encode that conversion, meaning the Scala -> Java conversion
189 |
190 | ```tut
191 | import com.datastax.driver.core.DataType
192 | implicit val safeUniqueIdEncoder: CassFormatEncoder[SafeUniqueId] = new CassFormatEncoder[SafeUniqueId] {
193 | type From = String
194 | val cassDataType: DataType = DataType.varchar()
195 | def encode(f: SafeUniqueId): Result[String] = Right(f.id)
196 | }
197 | ```
198 |
199 | And, as before, the failed extraction is captured:
200 |
201 | ```tut:invisible
202 | case class UnsafeInsertable(s: String, i: Int, l: Long)
203 | val unsafeInsertStatement = sSession.insert("mytable", UnsafeInsertable("this_id_is_definitely_too_long_to_be_safe", 1234, 5678L))
204 | unsafeInsertStatement.execute()
205 | ```
206 | ```tut
207 | case class UnsafeQuery(s: String)
208 | val unsafeSelectStatement = sSession.selectOneStar("mytable", UnsafeQuery("this_id_is_definitely_too_long_to_be_safe"))
209 | val unsafeRes = unsafeSelectStatement.execute()
210 | unsafeRes.map(_.map(_.attemptAs[SafeUniqueId]("s")))
211 | ```
212 | ```tut:invisible
213 | sSession.dropKeyspace.execute()
214 | sSession.close()
215 | cluster.close()
216 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/date-codecs.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Date Codecs"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.Cluster
8 | import com.weather.scalacass.ScalaSession
9 | import com.weather.scalacass.joda.Implicits._
10 | import com.weather.scalacass.syntax._
11 |
12 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
13 | com.weather.scalacass.joda.register(cluster)
14 | val session: ScalaSession = ScalaSession("datecodeckeyspace")(cluster.connect())
15 | session.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 3}").execute()
16 |
17 | case class TS(str: String, mytimestamp: org.joda.time.Instant, mydate: org.joda.time.LocalDate, mytime: org.joda.time.LocalTime, mydt: org.joda.time.DateTime)
18 | case class Query(str: String)
19 | val table = "mytable"
20 | session.createTable[TS](table, 1, 0).execute()
21 | val ts = TS("a primary key", org.joda.time.Instant.now, org.joda.time.LocalDate.now, org.joda.time.LocalTime.now, org.joda.time.DateTime.now)
22 | session.insert(table, ts).execute()
23 | val r = session.selectOneStar(table, Query(ts.str)).execute().right.toOption.flatten.get
24 | ```
25 |
26 | # Date Codecs
27 |
28 | By default, Scala-Cass uses the date/time types that the Java driver provides by default. They are:
29 |
30 | | Cassandra Type | Scala Type |
31 | |:--------------:|:----------------------------------:|
32 | | timestamp | java.util.Date |
33 | | date | com.datastax.driver.core.LocalDate |
34 | | time | Time |
35 |
36 | where `time` is actually a `Long` wrapped in the `Time` case class to prevent collision with `bigint`.
37 |
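As a sketch of reading these defaults (plain Scala, not compiled here; it assumes a hypothetical `row` from a table with columns `myts timestamp`, `mydate date`, and `mytime time`, and that the `Time` wrapper lives at `com.weather.scalacass.Time`):

```scala
import com.weather.scalacass.syntax._
import com.weather.scalacass.Time

val ts = row.as[java.util.Date]("myts")                         // timestamp
val date = row.as[com.datastax.driver.core.LocalDate]("mydate") // date
val time = row.as[Time]("mytime")                               // time, wrapping the raw Long
```
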
38 | You have the option of using the Joda library or the Jdk8 date library as a replacement for these defaults. While the
39 | examples below showcase how to read data of joda/jdk8 types, the same process is required for writing these types to
40 | Cassandra.
41 |
42 | ### Joda Implicits
43 |
44 | * You will need to provide an implicit instance of your Cluster for `DateTime` because it uses `TupleType`, which
45 | is derived from the `Cluster`
46 | * first, register the override codecs with the cluster, using the provided `register` function,
47 | `com.weather.scalacass.joda.register`
48 | * then, import the implicits required to use the joda types, provided in `com.weather.scalacass.joda.Implicits._`
49 |
50 | ```tut
51 | cluster // your cluster, which must be implicit for DateTime
52 | com.weather.scalacass.joda.register(cluster)
53 | import com.weather.scalacass.joda.Implicits._
54 |
55 | r // some row from your cluster
56 | r.as[org.joda.time.Instant]("mytimestamp") // cassandra "timestamp"
57 | r.as[org.joda.time.LocalDate]("mydate") // cassandra "date"
58 | r.as[org.joda.time.LocalTime]("mytime") // cassandra "time"
59 | r.as[org.joda.time.DateTime]("mydt") // cassandra "tuple"
60 | ```
61 |
62 | [See here](https://datastax.github.io/java-driver/manual/custom_codecs/extras/#joda-time) for information about the
63 | format of `DateTime`
64 |
65 | ### Jdk8 Date Implicits
66 |
67 | * You will need to provide an implicit instance of your Cluster for `ZonedDateTime` because it uses `TupleType`, which
68 | is derived from the `Cluster`
69 | * first, register the override codecs with the cluster, using the provided `register` function,
70 | `com.weather.scalacass.jdk8.register`
71 | * then, import the implicits required to use the jdk8 types, provided in `com.weather.scalacass.jdk8.Implicits._`
72 |
73 | ```tut
74 | // under the hood ZonedDateTime uses a tuple, meaning the cluster must be implicit
75 | cluster // your cluster, which must be implicit for DateTime
76 | com.weather.scalacass.jdk8.register(cluster)
77 |
78 | import com.weather.scalacass.jdk8.Implicits._
79 |
80 | r // some row from your cluster
81 | r.as[java.time.Instant]("mytimestamp") // cassandra "timestamp"
82 | r.as[java.time.LocalDate]("mydate") // cassandra "date"
83 | r.as[java.time.LocalTime]("mytime") // cassandra "time"
84 | r.as[java.time.ZonedDateTime]("mydt") // cassandra "tuple"
85 | ```
86 |
87 | [See here](https://datastax.github.io/java-driver/manual/custom_codecs/extras/#jdk-8) for information about the format
88 | of `ZonedDateTime`
89 | ```tut:invisible
90 | session.dropKeyspace.execute()
91 | session.close()
92 | cluster.close()
93 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/row-extraction.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Row Extraction"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 | import com.datastax.driver.core.Row
10 |
11 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
12 | implicit val session: Session = cluster.connect()
13 |
14 | val sSession: ScalaSession = ScalaSession("mykeyspace")
15 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
16 |
17 | case class MyTable(s: String, i: Int, l: List[Long])
18 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
19 | createStatement.execute()
20 |
21 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, List(5678L)))
22 | insertStatement.execute()
23 | ```
24 | # Row Extraction
25 |
26 | Cassandra's `Row` holds the response from a statement. Using the driver alone, conversion into a useful Scala data type is
27 | cumbersome, both in extracting a value from the `Row` and in converting it from the Java type. Scala-Cass handles all of
28 | that transparently.
29 |
30 | As an example, start with a `Row` retrieved from a Cassandra table. Let's say this table has a definition of
31 |
32 | ```tut
33 | case class MyRow(s: String, i: Int, l: List[Long]) // s varchar, i int, l list<bigint>
34 | ```
35 | Then we select a row using a `ScalaSession` (for more on `select`, [see the relevant
36 | page](/cass3/scalasession/select.html))
37 |
38 | ```tut
39 | case class Select(s: String)
40 | val row: Row = sSession.selectOneStar("mytable", Select("a str")).execute().right.toOption.flatten.get
41 | ```
42 |
43 | First, let's extract into `MyRow` using the regular driver
44 |
45 | ```tut
46 | import scala.collection.JavaConverters._
47 | val driverDerivedRow = MyRow(row.getString("s"), row.getInt("i"), row.getList("l", classOf[java.lang.Long]).asScala.toList.map(Long.unbox))
48 | ```
49 |
50 | Especially for the `List[Long]`, this is unnecessarily verbose, since we already have the necessary type information. In
51 | addition, we don't know if any of these values came back as `null`, so in truth, we would need null checks as well.
52 |
53 | We can hide all of this boilerplate using the ScalaCass library. First, we need to import the syntax,
54 | `com.weather.scalacass.syntax`. Then let's mirror the extraction from above:
55 |
56 | ```tut
57 | import com.weather.scalacass.syntax._
58 | val scalaCassDerivedRow = MyRow(row.as[String]("s"), row.as[Int]("i"), row.as[List[Long]]("l"))
59 | ```
60 |
61 | All we need to specify is the type that we want[*](#type-mappings), and the library handles the rest.
62 | If one of these values came back null, the library will throw an exception,
63 | since we do not want to introduce null values
64 | into our code.
65 |
66 | If you do need to handle null values, use the `Option` type to extract them, as this will return `None` instead of
67 | null
68 |
69 | ```tut
70 | row.as[Option[String]]("s")
71 | ```
72 |
73 | There are 2 convenience functions around this, `getAs` and `getOrElse`, which retrieve an optional response of the type
74 | and, in the case of `getOrElse`, provide a default in the case it gets back a `None`
75 |
76 | ```tut:invisible
77 | case class DeleteColumn(i: Int)
78 | sSession.delete[DeleteColumn]("mytable", Select("a str")).execute()
79 | ```
80 | ```tut
81 | row.getAs[Int]("i")
82 | row.getOrElse[Int]("i", -12345)
83 | ```
84 | ```tut:invisible
85 | insertStatement.execute()
86 | ```
87 |
88 | If you want to handle the exception yourself, there is a way to simply attempt the extraction and return either the
89 | success or the failure
90 |
91 | ```tut
92 | row.attemptAs[Int]("i")
93 | ```
94 |
95 | ## Case Classes
96 |
97 | We already have a conveniently defined `MyRow` with all of the type information we want, so the Scala-Cass library (with
98 | the help of the [Shapeless library](https://github.com/milessabin/shapeless)) can automatically use the case class
99 | directly for extraction
100 |
101 | ```tut
102 | row.as[MyRow]
103 | row.getAs[MyRow]
104 | row.getOrElse[MyRow](MyRow("default row", -12345, List(-5678L)))
105 | row.attemptAs[MyRow]
106 | ```
107 |
108 | Note that no arguments (aside from the type parameter) are passed when extracting to a case class because you are acting
109 | on the entire row.
110 |
111 | ##### For performance characteristics of these extractions, [see the performance page](/cass3/performance.html)
112 |
113 | ##### For a full list of type mappings between the Cassandra types and Scala types, [see the type mappings page](/cass3/type-mappings.html)
114 | ```tut:invisible
115 | sSession.dropKeyspace.execute()
116 | sSession.close()
117 | cluster.close()
118 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/batch.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Batch"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Batch Statements
21 |
22 | Inserts, updates, and deletes can be batched into a single statement sent to Cassandra, and, using a batch type of
23 | `LOGGED`, will either all succeed or all fail. There are performance implications to using batch statements, which you
24 | can read about [here](https://docs.datastax.com/en/cql/3.1/cql/cql_using/useBatch.html).
25 |
26 | In the Scala-Cass library, you build a batch statement by passing individual statements to it, which
27 | can be accomplished in a number of ways:
28 |
29 | #### batchOf
30 |
31 | ```tut
32 | case class Query(s: String)
33 | case class NewValue(i: Int, l: Long)
34 | val updateStatement = sSession.update("mytable", NewValue(1234, 5678L), Query("some str"))
35 |
36 | case class Insertable(s: String, i: Int, l: Long)
37 | val insertStatement = sSession.insert("mytable", Insertable("some other str", 4321, 8765L))
38 |
39 | val deleteStatement = sSession.deleteRow("mytable", Query("a third str"))
40 |
41 | val batchOfStatement = sSession.batchOf(updateStatement, insertStatement, deleteStatement)
42 | batchOfStatement.execute()
43 | ```
44 |
45 | #### batch
46 |
47 | ```tut
48 | val statementsToBatch = List(updateStatement, insertStatement, deleteStatement)
49 | val batchStatement = sSession.batch(statementsToBatch)
50 | batchStatement.execute()
51 | ```
52 |
53 |
54 | #### + (build)
55 | ```tut
56 | val oneBatchStatement = sSession.batchOf(updateStatement)
57 | val twoBatchStatement = oneBatchStatement + insertStatement
58 | val threeBatchStatement = twoBatchStatement + deleteStatement
59 | threeBatchStatement.execute()
60 | ```
61 |
62 | #### ++ (append)
63 |
64 | ```tut
65 | val fromListBatchStatement = oneBatchStatement ++ List(insertStatement, deleteStatement)
66 | fromListBatchStatement.execute()
67 | ```
68 | ```tut
69 | val otherBatchStatement = sSession.batchOf(insertStatement, deleteStatement)
70 | val fromOtherBatchStatement = oneBatchStatement ++ otherBatchStatement
71 | fromOtherBatchStatement.execute()
72 | ```
73 |
74 | #### and (build multiple)
75 |
76 | ```tut
77 | val andBatchStatement = oneBatchStatement and (insertStatement, deleteStatement)
78 | andBatchStatement.execute()
79 | ```
80 |
81 | ## Batch Type
82 |
83 | You can additionally specify the batch type of the statement, but it defaults to `LOGGED`.
84 |
85 | ```tut
86 | import com.datastax.driver.core.BatchStatement
87 | val withTypeBatchStatement = batchStatement.withBatchType(BatchStatement.Type.LOGGED)
88 | withTypeBatchStatement.execute()
89 | ```
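
For contrast, a sketch (not compiled here) of a non-default batch type; `UNLOGGED` skips the batch log, trading the all-or-nothing guarantee for performance:

```scala
// sketch: UNLOGGED batches are not written to the batch log
val unloggedBatchStatement = batchStatement.withBatchType(BatchStatement.Type.UNLOGGED)
unloggedBatchStatement.execute()
```
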
90 | ```tut:invisible
91 | sSession.dropKeyspace.execute()
92 | sSession.close()
93 | cluster.close()
94 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/consistency-level.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Consistency Level"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Consistency Level
21 |
22 | By default, statements will use the same consistency level as set for the entire cluster (via the `QueryOptions`).
23 | However, Cassandra also allows statement-specific consistency levels to be used for insert, select, update, delete, batch, and raw statements.
24 |
25 | They all act the same way, so while the examples below focus on insert, the same rules apply to all of the above.
26 |
27 | ```tut
28 | import com.datastax.driver.core.ConsistencyLevel
29 | case class Insertable(s: String, i: Int, l: Long) // same as the table
30 | val insertStatement = sSession.insert("mytable", Insertable("some str", 1234, 5678L))
31 | val consistencyInsert = insertStatement.consistency(ConsistencyLevel.ONE)
32 | consistencyInsert.execute()
33 | ```
34 |
35 | And remove it if necessary, in which case the statement will be executed with the consistency level set for the cluster:
36 |
37 | ```tut
38 | val noConsistencyInsert = insertStatement.defaultConsistency
39 | noConsistencyInsert.execute()
40 | ```
41 |
42 | Finally, certain consistency levels are only allowed for read operations, and others only for writes.
43 | See [the documentation](https://docs.datastax.com/en/cassandra/3.0/cassandra/dml/dmlConfigConsistency.html) for more details.
44 | In addition, batch statements can only use `SERIAL` or `LOCAL_SERIAL` level consistency.
45 | ```tut:invisible
46 | sSession.dropKeyspace.execute()
47 | sSession.close()
48 | cluster.close()
49 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/create-table.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Creating a Table"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 | ```
16 | # Creating a Table
17 |
18 | Like creating a keyspace, this is likely only going to be useful to those who are writing tests. Table creation uses a
19 | case class for the table columns' names and type definitions (see [Type Mappings](/cass3/type-mappings.html) to compare
20 | Scala and Cassandra types).
21 |
22 | ## Characteristics
23 |
24 | * the `createTable` method takes a type parameter (the case class modeling the table) and 3 arguments
25 | * name of table
26 | * number of partition keys
27 | * number of clustering keys
28 | * partition and clustering keys are chosen left-to-right, in that order. See the example below.
29 | * You can pass an optional parameter for the right hand side of the table definition (after the `WITH`) using the
30 | `` `with` `` builder
31 | * parameters wrapped in `Option` or `Nullable` take the underlying type parameter as its type for table creation
32 | * you must have at least 1 partition key
33 | * the number of partition keys + clustering keys must be less than the number of fields in the case class
34 | * any rules associated with cassandra semantics for data types must be followed (eg no counters in the primary key)
35 |
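To illustrate the left-to-right rule, here is a sketch (not compiled here) using a hypothetical four-field case class with 2 partition keys and 1 clustering key:

```scala
// fields are chosen left-to-right: (a, b) form the partition key,
// c is the clustering key, and d remains a regular column,
// i.e. PRIMARY KEY ((a, b), c)
case class KeyedTable(a: String, b: Int, c: Long, d: String)
val keyedCreateStatement = sSession.createTable[KeyedTable]("keyedtable", 2, 1)
keyedCreateStatement.execute()
```
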
36 | ```tut
37 | case class MyTable(s: String, i: Int, l: Long)
38 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
39 | createStatement.execute()
40 | ```
41 |
42 | And you can use the `` `with` `` builder to specify anything after the `WITH`
43 |
44 | ```tut:invisible
45 | sSession.dropTable("mytable").execute()
46 | ```
47 |
48 | ```tut
49 | val createStatementWith = createStatement.`with`("compaction = { 'class' : 'SizeTieredCompactionStrategy', 'min_threshold' : 6 }")
50 | createStatementWith.execute()
51 | ```
52 |
53 | Finally, you can truncate and drop the table using the `truncateTable` and `dropTable` commands
54 |
55 | ```tut
56 | val truncateStatement = sSession.truncateTable("mytable")
57 | truncateStatement.execute()
58 | ```
59 |
60 | ```tut
61 | val dropStatement = sSession.dropTable("mytable")
62 | dropStatement.execute()
63 | ```
64 | ```tut:invisible
65 | sSession.dropKeyspace.execute()
66 | sSession.close()
67 | cluster.close()
68 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/delete.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Delete"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Delete
21 |
22 | Deletes are executed with a query and optional columns, both of which are represented via case classes.
23 |
24 | For deletes that will delete the entire row:
25 |
26 | ```tut
27 | case class Query(s: String)
28 | val deleteStatement = sSession.deleteRow("mytable", Query("some str"))
29 | deleteStatement.execute()
30 | ```
31 |
32 | For deletes that only delete certain columns of that row, specify the columns as a case class. However, you will not
33 | actually use an instance of the case class in the statement; just pass it in as a type parameter:
34 |
35 | ```tut
36 | case class ColumnsToRemove(i: Int)
37 | val deleteColumnsStatement = sSession.delete[ColumnsToRemove]("mytable", Query("some str"))
38 | deleteColumnsStatement.execute()
39 | ```
40 |
41 | ## If Statement
42 |
43 | You can use case classes to model If statements. For now, only equality is possible. This means that the values
44 | in the if statement are translated to an `=` comparison:
45 |
46 | ```tut
47 | case class If(l: Long)
48 | val deleteWithIf = deleteStatement.`if`(If(5678L))
49 | deleteWithIf.execute()
50 | ```
51 |
52 | You can also specify just `IF EXISTS`:
53 |
54 | ```tut
55 | val deleteWithIfExists = deleteStatement.ifExists
56 | deleteWithIfExists.execute()
57 | ```
58 |
59 | You can remove any if clause:
60 |
61 | ```tut
62 | val deleteWithoutIf = deleteWithIf.noConditional
63 | deleteWithoutIf.execute()
64 | ```
65 |
66 | ## Timestamp
67 |
68 | You can specify a timestamp:
69 |
70 | ```tut
71 | val timestampDelete = deleteStatement.usingTimestamp(System.currentTimeMillis)
72 | timestampDelete.execute()
73 | ```
74 |
75 | Or use shorthand for current time:
76 |
77 | ```tut
78 | val timestampNowDelete = deleteStatement.usingTimestampNow
79 | timestampNowDelete.execute()
80 | ```
81 |
82 | And finally, remove a timestamp from the statement:
83 |
84 | ```tut
85 | val noTimestampDelete = timestampDelete.noTimestamp
86 | noTimestampDelete.execute()
87 | ```
88 | ```tut:invisible
89 | sSession.dropKeyspace.execute()
90 | sSession.close()
91 | cluster.close()
92 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/getting-started.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Getting Started"
4 | section: "c3"
5 | ---
6 | # Getting Started with ScalaSession
7 |
8 | Using a `ScalaSession` follows the same general rules as creating the Java driver's `Session`. The major difference is
9 | that this library requires a cluster instance in implicit scope when working with Cassandra `tuple`s (for instance, the
10 | joda `DateTime` codec, which is stored as a `tuple`). This is because `tuple` types are defined based on the specific codecs associated with a cluster instance.
11 |
12 | This means that you need to make the cluster implicit if you are using Cassandra's `tuple` types
13 |
14 | ```tut:silent
15 | import com.datastax.driver.core.Cluster
16 |
17 | // implicit is only necessary if using tuple types
18 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
19 | ```
20 |
21 | ## Characteristics
22 |
23 | * `PreparedStatement` caching
24 | * acts on a single keyspace
25 | * can optionally create a keyspace on instantiation
26 | * can pick up Java `Session` implicitly
27 | * provides an `execute`, which blocks for the result, and `executeAsync`, which returns a `Future` of the result (see the sketch below)
28 |
29 | The `ScalaSession` itself is a class that you must keep around, much like you would a Cassandra Java `Session`. This is
30 | because the `ScalaSession` caches a `PreparedStatement` for every executed command, so if you are calling the same command
31 | multiple times, it will reuse the existing `PreparedStatement` instead of preparing a new statement every time.
32 |
33 | ```tut
34 | import com.datastax.driver.core.Session, com.weather.scalacass.ScalaSession
35 |
36 | implicit val session: Session = cluster.connect()
37 |
38 | val sSession: ScalaSession = ScalaSession("mykeyspace") // picks up session implicitly
39 | ```
40 |
41 | If the keyspace has not been created yet (for instance, during testing), you can create it using `createKeyspace`,
42 | passing in the parameters that follow the `WITH` clause:
43 |
44 | ```tut
45 | val createStatement = sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 3}")
46 | createStatement.execute()
47 | ```
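
Every statement also has an async variant, as noted in the characteristics above. A quick sketch (not compiled here), assuming the standard `scala.concurrent.Future`:

```scala
import scala.concurrent.ExecutionContext.Implicits.global

// executeAsync returns a Future of the same result the blocking execute gives back
val asyncResult = createStatement.executeAsync()
asyncResult.foreach(result => println(s"created: $result"))
```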
48 |
49 | Additionally, you can specify `IF NOT EXISTS` using the `ifNotExists` builder
50 |
51 | ```tut
52 | val createStatementIfNotExists = createStatement.ifNotExists
53 | val result = createStatementIfNotExists.execute()
54 | ```
55 |
56 | Finally, you can drop the keyspace if you are done using it, although this will render the `ScalaSession` unusable
57 |
58 | ```tut
59 | sSession.dropKeyspace.execute()
60 | ```
61 | ```tut:invisible
62 | sSession.close()
63 | cluster.close()
64 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/insert.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Insert"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Insert
21 |
22 | Use case classes to model the data to insert:
23 |
24 | ```tut
25 | case class Insertable(s: String, i: Int, l: Long) // same as the table
26 | val insertStatement = sSession.insert("mytable", Insertable("some str", 1234, 5678L))
27 | insertStatement.execute()
28 | ```
29 |
30 | ## If Statement
31 |
32 | You can also require that the row does not already exist before inserting:
33 |
34 | ```tut
35 | val ifNotExistsInsert = insertStatement.ifNotExists
36 | ifNotExistsInsert.execute()
37 | ```
38 |
39 | Or remove the existence check:
40 |
41 | ```tut
42 | val noIfNotExistsInsert = ifNotExistsInsert.noConditional
43 | noIfNotExistsInsert.execute()
44 | ```
45 |
46 | ## TTL
47 |
48 | You can add a TTL:
49 |
50 | ```tut
51 | val ttlInsert = insertStatement.usingTTL(12345)
52 | ttlInsert.execute()
53 | ```
54 |
55 | And remove the TTL:
56 |
57 | ```tut
58 | val noTtlInsert = ttlInsert.noTTL
59 | noTtlInsert.execute()
60 | ```
61 |
62 | ## Timestamp
63 |
64 | You can specify a timestamp:
65 |
66 | ```tut
67 | val timestampInsert = insertStatement.usingTimestamp(System.currentTimeMillis)
68 | timestampInsert.execute()
69 | ```
70 |
71 | Or use shorthand for current time:
72 |
73 | ```tut
74 | val timestampNowInsert = insertStatement.usingTimestampNow
75 | timestampNowInsert.execute()
76 | ```
77 |
78 | And finally, remove a timestamp from the statement:
79 |
80 | ```tut
81 | val noTimestampInsert = timestampInsert.noTimestamp
82 | noTimestampInsert.execute()
83 | ```
84 | ```tut:invisible
85 | sSession.dropKeyspace.execute()
86 | sSession.close()
87 | cluster.close()
88 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/raw.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Raw"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Raw Statements
21 |
22 | In case the library cannot fulfill a specific need you have (it does not have 100% parity with the Java driver's
23 | features), or you otherwise need to build up your own queries as `String`s, you can write a raw statement equivalent to
24 | what you would pass to `session.execute` and still get convenient caching of the prepared statement. Note, however,
25 | that you must provide the exact types that the Java driver expects, meaning you need to manually box any `Int`s, `Long`s,
26 | etc., and convert any `Map`s, `List`s, etc. to their Java counterparts.
27 |
28 | There are two variants, depending on the kind of response you expect:
29 |
30 | ```tut
31 | val rawStatement = sSession.rawStatement("INSERT INTO mykeyspace.mytable (s, i, l) VALUES (?, ?, ?)", "a str", Int.box(1234), Long.box(5678L))
32 | rawStatement.execute()
33 | ```
34 | ```tut
35 | val rawSelect = sSession.rawSelect("SELECT COUNT(*) FROM mykeyspace.mytable WHERE s = ?", "a str")
36 | rawSelect.execute()
37 | ```
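
For the collection conversion mentioned in the note above, here is a minimal sketch (plain Scala rather than a compiled `tut` block, and assuming a hypothetical table `listtable` with columns `s varchar PRIMARY KEY, li list<varchar>`):

```scala
import scala.collection.JavaConverters._

// the driver expects a java.util.List, so convert the Scala List with asJava
val scalaList = List("a", "b", "c")
val rawListInsert = sSession.rawStatement(
  "INSERT INTO mykeyspace.listtable (s, li) VALUES (?, ?)",
  "a str",
  scalaList.asJava
)
rawListInsert.execute()
```
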
38 | ```tut:invisible
39 | sSession.dropKeyspace.execute()
40 | sSession.close()
41 | cluster.close()
42 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/select.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Select"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long)
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 |
20 | val insertStatement = sSession.insert("mytable", MyTable("a str", 1234, 5678L))
21 | insertStatement.execute()
22 | ```
23 | # Select
24 |
25 | Selects can retrieve the entire row, or only specific columns of the row. The query and the column specifier, if needed,
26 | are both represented via case classes.
27 |
28 | In addition, you can choose to retrieve only a single row from Cassandra, represented by an `Option[Row]` response,
29 | where the `Option` will be `None` if no row matched the query.
30 |
31 | ## Select Whole Row
32 |
33 | For selects that will return the entire row:
34 |
35 | ```tut
36 | case class Query(s: String)
37 | ```
38 | ##### Retrieve all rows matching the query
39 | ```tut
40 | val selectStatement = sSession.selectStar("mytable", Query("a str"))
41 | selectStatement.execute()
42 | ```
43 | ##### Retrieve a single row matching the query
44 | ```tut
45 | val selectOneStatement = sSession.selectOneStar("mytable", Query("a str"))
46 | selectOneStatement.execute()
47 | ```
48 |
49 | ## Select Columns
50 |
51 | For selects that only retrieve certain columns of that row, specify the columns as a case class. However, you will not
52 | actually use an instance of the case class in the statement; just pass it in as a type parameter:
53 |
54 | ```tut
55 | case class ColumnsToRetrieve(s: String, l: Long)
56 | ```
57 | ```tut
58 | val selectColumnsStatement = sSession.select[ColumnsToRetrieve]("mytable", Query("a str"))
59 | selectColumnsStatement.execute()
60 |
61 | val selectColumnsOneStatement = sSession.selectOne[ColumnsToRetrieve]("mytable", Query("a str"))
62 | selectColumnsOneStatement.execute()
63 | ```
64 |
65 | ## Allow Filtering
66 |
67 | You can `ALLOW FILTERING` on the request (read more about ["allow filtering" here](https://www.datastax.com/dev/blog/allow-filtering-explained-2))
68 |
69 | ```tut
70 | val selectAllowFiltering = selectStatement.allowFiltering
71 | selectAllowFiltering.execute()
72 |
73 | val selectOneAllowFiltering = selectOneStatement.allowFiltering
74 | selectOneAllowFiltering.execute()
75 | ```
76 |
77 | You can remove the allow filtering option:
78 |
79 | ```tut
80 | val selectNoAllowFiltering = selectAllowFiltering.noAllowFiltering
81 | selectNoAllowFiltering.execute()
82 |
83 | val selectOneNoAllowFiltering = selectOneAllowFiltering.noAllowFiltering
84 | selectOneNoAllowFiltering.execute()
85 | ```
86 |
87 | ## Limit
88 |
89 | For queries that will return an iterator of responses (i.e., not `selectOne` statements), you can impose a limit on the
90 | number of responses:
91 |
92 | ```tut
93 | val selectLimit = selectStatement.limit(100)
94 | selectLimit.execute()
95 | ```
96 |
97 | Finally, you can disable the imposed limit:
98 |
99 | ```tut
100 | val selectNoLimit = selectLimit.noLimit
101 | selectNoLimit.execute()
102 | ```
103 |
104 | ## Reading from the `Row`s
105 |
106 | Scala-Cass provides a Scala-style method of extraction for `Row`, either into Scala values, or directly into case
107 | classes.
108 |
109 | * [Click here](/cass3/row-extraction.html) for a tutorial on how to extract values from `Row`
110 | * [Click here](/cass3/type-mappings.html) for a mapping of Cassandra types to Scala types
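
As a quick, hypothetical sketch of what those pages cover (assuming `row` is a `Row` produced by one of the
selects above):

```scala
import com.weather.scalacass.syntax._

// extract a single column: `as` throws on a missing or mistyped column,
// while `getAs` returns an Option instead
val s: String = row.as[String]("s")
val maybeL: Option[Long] = row.getAs[Long]("l")

// or extract the whole row into a case class
val myTable: MyTable = row.as[MyTable]
```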
111 | ```tut:invisible
112 | sSession.dropKeyspace.execute()
113 | sSession.close()
114 | cluster.close()
115 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/scalasession/update.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Update"
4 | section: "c3"
5 | ---
6 | ```tut:invisible
7 | import com.datastax.driver.core.{Cluster, Session}
8 | import com.weather.scalacass.ScalaSession
9 |
10 | implicit val cluster = Cluster.builder.addContactPoint("localhost").build()
11 | implicit val session: Session = cluster.connect()
12 |
13 | val sSession: ScalaSession = ScalaSession("mykeyspace")
14 | sSession.createKeyspace("replication = {'class':'SimpleStrategy', 'replication_factor' : 1}").execute()
15 |
16 | case class MyTable(s: String, i: Int, l: Long, li: List[String])
17 | val createStatement = sSession.createTable[MyTable]("mytable", 1, 0)
18 | createStatement.execute()
19 | ```
20 | # Update
21 |
22 | Use case classes to model both the query and the new value for updates:
23 |
24 | ```tut
25 | case class Query(s: String)
26 | case class NewValue(i: Int, l: Long)
27 | val updateStatement = sSession.update("mytable", NewValue(1234, 5678L), Query("some str"))
28 | updateStatement.execute()
29 | ```
30 |
31 | ## Add/Subtract
32 |
33 | There are special classes available to specify that you would like to either add elements to or subtract elements
34 | from a Cassandra collection: namely, `UpdateBehavior.Add` and `UpdateBehavior.Subtract`.
35 |
36 | ```tut
37 | import com.weather.scalacass.syntax._
38 | case class NewValueList(li: UpdateBehavior[List, String])
39 | val updateStatementAdd = sSession.update("mytable", NewValueList(UpdateBehavior.Add(List("a", "b", "c"))), Query("some str"))
40 | updateStatementAdd.execute()
41 | sSession.selectOneStar("mytable", Query("some str")).execute()
42 | ```
43 |
44 | ```tut
45 | val updateStatementSubtract = sSession.update("mytable", NewValueList(UpdateBehavior.Subtract(List("a", "b"))), Query("some str"))
46 | updateStatementSubtract.execute()
47 | sSession.selectOneStar("mytable", Query("some str")).execute()
48 | ```
49 |
50 | For parity, there is also `UpdateBehavior.Replace`, but simply using the collection type directly acts in the same way.
51 |
52 | Using `UpdateBehavior.Replace`:
53 |
54 | ```tut
55 | val updateStatementReplace = sSession.update("mytable", NewValueList(UpdateBehavior.Replace(List("d", "e", "f"))), Query("some str"))
56 | updateStatementReplace.execute()
57 | sSession.selectOneStar("mytable", Query("some str")).execute()
58 | ```
59 |
60 | Using regular `List`:
61 |
62 | ```tut
63 | case class NewValueListRegular(li: List[String])
64 | val updateStatementRegular = sSession.update("mytable", NewValueListRegular(List("g", "h", "i")), Query("some str"))
65 | updateStatementRegular.execute()
66 | sSession.selectOneStar("mytable", Query("some str")).execute()
67 | ```
68 |
69 | ## If Statement
70 |
71 | You can use case classes to model IF statements. For now, only equality is possible, meaning values
72 | in the if statement are translated to an `=` comparison. If you need a different comparison operation,
73 | see [raw statements](/cass3/scalasession/raw.html):
74 |
75 | ```tut
76 | case class If(l: Long)
77 | val updateWithIf = updateStatement.`if`(If(5678L))
78 | updateWithIf.execute()
79 | ```
80 |
81 | You can also specify just `IF EXISTS`:
82 |
83 | ```tut
84 | val updateWithIfExists = updateStatement.ifExists
85 | updateWithIfExists.execute()
86 | ```
87 |
88 | You can remove any if clause:
89 |
90 | ```tut
91 | val updateWithoutIf = updateWithIf.noConditional
92 | updateWithoutIf.execute()
93 | ```
94 |
95 | ## TTL
96 |
97 | You can add a TTL:
98 |
99 | ```tut
100 | val ttlUpdate = updateStatement.usingTTL(12345)
101 | ttlUpdate.execute()
102 | ```
103 |
104 | and remove the TTL:
105 |
106 | ```tut
107 | val noTtlUpdate = ttlUpdate.noTTL
108 | noTtlUpdate.execute()
109 | ```
110 |
111 | ## Timestamp
112 |
113 | You can specify a timestamp:
114 |
115 | ```tut
116 | val timestampUpdate = updateStatement.usingTimestamp(System.currentTimeMillis)
117 | timestampUpdate.execute()
118 | ```
119 |
120 | or use shorthand for current time:
121 |
122 | ```tut
123 | val timestampNowUpdate = updateStatement.usingTimestampNow
124 | timestampNowUpdate.execute()
125 | ```
126 |
127 | and finally, remove a timestamp from the statement:
128 |
129 | ```tut
130 | val noTimestampUpdate = timestampUpdate.noTimestamp
131 | noTimestampUpdate.execute()
132 | ```
133 | ```tut:invisible
134 | sSession.dropKeyspace.execute()
135 | sSession.close()
136 | cluster.close()
137 | ```
--------------------------------------------------------------------------------
/docs/cass3/src/main/tut/cass3/type-mappings.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Type Mappings"
4 | section: "c3"
5 | ---
6 |
7 | # Type Mappings
8 |
9 | ## Cassandra 3.0+
10 |
11 | | Cassandra Type | Scala Type |
12 | |:--------------:|:--------------------------------------:|
13 | | varchar | String |
14 | | uuid | java.util.UUID |
15 | | inet | java.net.InetAddress |
16 | | int | Int |
17 | | bigint | Long |
18 | | boolean | Boolean |
19 | | double | Double |
20 | | varint | BigInt |
21 | | decimal | BigDecimal |
22 | | float | Float |
23 | | blob | Array[Byte] |
24 | | list | List |
25 | | map | Map |
26 | | set | Set |
27 | | tuple | Tuple* |
28 | | **timestamp** | **java.util.Date** |
29 | | **date** | **com.datastax.driver.core.LocalDate** |
30 | | **time** | **Time** |
31 |
32 | * `Time` is a type specific to this library, so as not to conflict with `bigint` and `Long`. It is defined as:
33 |
34 | ```tut:silent
35 | final case class Time(millis: Long)
36 | ```
37 |
38 | * There are overrides for both the joda library and the jdk8 time library that take advantage of Cassandra's new codecs.
39 | These codecs have to be registered with your `Cluster` instance; [see date codecs](/cass3/date-codecs.html) for more information.
40 |
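A brief, hypothetical sketch of these mappings in action (assuming `row` is a `Row` whose table has columns
`ts timestamp`, `d date`, and `t time`):

```scala
import com.datastax.driver.core.LocalDate
import com.weather.scalacass.Time
import com.weather.scalacass.syntax._

val ts: java.util.Date = row.as[java.util.Date]("ts") // timestamp
val d: LocalDate = row.as[LocalDate]("d")             // date
val t: Time = row.as[Time]("t")                       // time, wrapping a Long
```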
--------------------------------------------------------------------------------
/docs/root/css/override.css:
--------------------------------------------------------------------------------
1 | .technologies {
2 | display: none;
3 | }
--------------------------------------------------------------------------------
/docs/root/data/menu.yaml:
--------------------------------------------------------------------------------
1 | options:
2 |
3 | # C* 3.0
4 |
5 | - title: Cassandra 3.0+
6 | url: cassandra-30.html
7 | menu_type: c3
8 |
9 | - title: ScalaSession
10 | url: cass3/scalasession/getting-started.html
11 | menu_type: c3
12 | menu_section: ss3
13 |
14 | nested_options:
15 | - title: Getting Started
16 | url: cass3/scalasession/getting-started.html
17 | menu_section: ss3
18 | - title: Creating a Table
19 | url: cass3/scalasession/create-table.html
20 | menu_section: ss3
21 | - title: Insert
22 | url: cass3/scalasession/insert.html
23 | menu_section: ss3
24 | - title: Update
25 | url: cass3/scalasession/update.html
26 | menu_section: ss3
27 | - title: Delete
28 | url: cass3/scalasession/delete.html
29 | menu_section: ss3
30 | - title: Select
31 | url: cass3/scalasession/select.html
32 | menu_section: ss3
33 | - title: Batch
34 | url: cass3/scalasession/batch.html
35 | menu_section: ss3
36 | - title: Raw Statements
37 | url: cass3/scalasession/raw.html
38 | menu_section: ss3
39 | - title: Consistency Level
40 | url: cass3/scalasession/consistency-level.html
41 | menu_section: ss3
42 |
43 | - title: Row Extraction
44 | url: cass3/row-extraction.html
45 | menu_type: c3
46 |
47 | - title: Type Mappings
48 | url: cass3/type-mappings.html
49 | menu_type: c3
50 |
51 | - title: Date Codecs
52 | url: cass3/date-codecs.html
53 | menu_type: c3
54 |
55 | - title: Custom Encoders/Decoders
56 | url: cass3/custom-encoders-decoders.html
57 | menu_type: c3
58 |
59 | - title: Caching Implicits
60 | url: cass3/caching-implicits.html
61 | menu_type: c3
62 |
63 |
64 |
65 | # C* 2.1
66 |
67 | - title: Cassandra 2.1
68 | url: cassandra-21.html
69 | menu_type: c21
71 |
72 | - title: ScalaSession
73 | url: cass21/scalasession/getting-started.html
74 | menu_type: c21
75 | menu_section: ss21
76 |
77 | nested_options:
78 | - title: Getting Started
79 | url: cass21/scalasession/getting-started.html
80 | menu_section: ss21
81 | - title: Creating a Table
82 | url: cass21/scalasession/create-table.html
83 | menu_section: ss21
84 | - title: Insert
85 | url: cass21/scalasession/insert.html
86 | menu_section: ss21
87 | - title: Update
88 | url: cass21/scalasession/update.html
89 | menu_section: ss21
90 | - title: Delete
91 | url: cass21/scalasession/delete.html
92 | menu_section: ss21
93 | - title: Select
94 | url: cass21/scalasession/select.html
95 | menu_section: ss21
96 | - title: Batch
97 | url: cass21/scalasession/batch.html
98 | menu_section: ss21
99 | - title: Raw Statements
100 | url: cass21/scalasession/raw.html
101 | menu_section: ss21
102 | - title: Consistency Level
103 | url: cass21/scalasession/consistency-level.html
104 | menu_section: ss21
105 |
106 | - title: Row Extraction
107 | url: cass21/row-extraction.html
108 | menu_type: c21
109 |
110 | - title: Type Mappings
111 | url: cass21/type-mappings.html
112 | menu_type: c21
113 |
114 | - title: Date Codecs
115 | url: cass21/date-codecs.html
116 | menu_type: c21
117 |
118 | - title: Custom Encoders/Decoders
119 | url: cass21/custom-encoders-decoders.html
120 | menu_type: c21
121 |
122 | - title: Caching Implicits
123 | url: cass21/caching-implicits.html
124 | menu_type: c21
125 |
--------------------------------------------------------------------------------
/docs/root/src/main/tut/cassandra-21.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Cassandra 2.1"
4 | section: "c21"
5 | position: 3
6 | ---
7 | # Cassandra 2.1
--------------------------------------------------------------------------------
/docs/root/src/main/tut/cassandra-30.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: docs
3 | title: "Cassandra 3.0+"
4 | section: "c3"
5 | position: 2
6 | ---
7 | # Cassandra 3.0+
--------------------------------------------------------------------------------
/docs/root/src/main/tut/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: home
3 | title: "Home"
4 | section: "home"
5 | position: 1
6 | ---
7 | Scala-Cass is a library that makes working with the [Cassandra Java driver](https://github.com/datastax/java-driver) in
8 | Scala better. It provides type class instances mapping all of Cassandra's types to Scala types so that you can
9 | retrieve data from the database more easily. It also uses the Shapeless library to provide type class instances for case
10 | classes so you can read and write data more conveniently.
11 |
12 | # Getting Scala-Cass
13 |
14 | You can find it on [bintray](https://bintray.com/thurstonsand/maven/scala-cass).
15 |
16 | Supports **Scala 2.10**, **Scala 2.11**, and **Scala 2.12** and
17 |
18 | * Cassandra 2.1 on Java 8
19 | * Cassandra 3.0+ on Java 8
20 |
21 | #### SBT
22 |
23 | Add the jcenter resolver
24 |
25 | ```scala
26 | resolvers += Resolver.jcenterRepo
27 | ```
28 |
29 | Add the appropriate version of the library
30 |
31 | ##### Cassandra 3.0+
32 |
33 | ```scala
34 | libraryDependencies += "com.github.thurstonsand" %% "scala-cass" % "{{ site.baseVersion }}-{{ site.cassandra3Version }}"
35 | ```
36 |
37 | ##### Cassandra 2.1
38 |
39 | ```scala
40 | libraryDependencies += "com.github.thurstonsand" %% "scala-cass" % "{{ site.baseVersion }}-{{ site.cassandra2Version }}"
41 | ```
42 |
43 | #### Maven
44 |
45 | Add the jcenter resolver
46 |
47 | ```xml
48 | <repositories>
49 |   <repository>
50 |     <id>central</id>
51 |     <name>bintray</name>
52 |     <url>http://jcenter.bintray.com</url>
53 |   </repository>
54 | </repositories>
55 | ```
56 |
57 | Pick a version
58 |
59 | ##### Cassandra 3.0+
60 |
61 | ```xml
62 | <properties>
63 |   <scalaCass.version>{{ site.baseVersion }}-{{ site.cassandra3Version }}</scalaCass.version>
64 | </properties>
65 | ```
66 |
67 | ##### Cassandra 2.1
68 |
69 | ```xml
70 | <properties>
71 |   <scalaCass.version>{{ site.baseVersion }}-{{ site.cassandra2Version }}</scalaCass.version>
72 | </properties>
73 | ```
74 |
75 | Include the dependency
76 |
77 | ```xml
78 | <dependency>
79 |   <groupId>com.github.thurstonsand</groupId>
80 |   <artifactId>scala-cass_${scala.version}</artifactId>
81 |   <version>${scalaCass.version}</version>
82 |   <type>pom</type>
83 | </dependency>
84 | ```
85 |
--------------------------------------------------------------------------------
/project/Boilerplate.scala:
--------------------------------------------------------------------------------
1 | import sbt._
2 |
3 | /** Generate boilerplate classes for TupleXX
4 | *
5 | * Copied, with some modifications, from
6 | * [[https://github.com/milessabin/shapeless/blob/master/project/Boilerplate.scala Shapeless]]
7 | *
8 | * @author Miles Sabin
9 | * @author Kevin Wright
10 | * @author Travis Brown
11 | */
12 | object Boilerplate {
13 | import scala.StringContext._
14 |
15 | implicit class BlockHelper(val sc: StringContext) extends AnyVal {
16 | def block(args: Any*): String = {
17 | val interpolated = sc.standardInterpolator(treatEscapes, args)
18 | val rawLines = interpolated.split('\n')
19 | val trimmedLines = rawLines.map(_.dropWhile(_.isWhitespace))
20 | trimmedLines.mkString("\n")
21 | }
22 | }
23 |
24 | val header = "// auto-generated boilerplate"
25 | val maxArity = 22
26 |
27 | class TemplateVals(val arity: Int) {
28 | val synTypes = (0 until arity).map(n => s"A$n")
29 | val synVals = (0 until arity).map(n => s"a$n")
30 | val `A..N` = synTypes.mkString(", ")
31 | val `a..n` = synVals.mkString(", ")
32 | val `_.._` = Seq.fill(arity)("_").mkString(", ")
33 | val `(A..N)` = if (arity == 1) "Tuple1[A0]" else synTypes.mkString("(", ", ", ")")
34 | val `(_.._)` = if (arity == 1) "Tuple1[_]" else Seq.fill(arity)("_").mkString("(", ", ", ")")
35 | val `(a..n)` = if (arity == 1) "Tuple1(a)" else synVals.mkString("(", ", ", ")")
36 | }
37 |
38 | /** Blocks in the templates below use a custom interpolator, combined with post-processing to
39 | * produce the body.
40 | *
41 | * - The contents of the `header` val is output first
42 | * - Then the first block of lines beginning with '|'
43 | * - Then the block of lines beginning with '-' is replicated once for each arity,
44 | * with the `templateVals` already pre-populated with relevant vals for that arity
45 | * - Then the last block of lines prefixed with '|'
46 | *
47 | * The block otherwise behaves as a standard interpolated string with regards to variable
48 | * substitution.
49 | */
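  // A hypothetical illustration of the mechanism described above (not a
  // template from this project): with `maxArity = 2`, a `content` of
  //   |trait Instances {
  //   -  def instance$arity: Int = $arity
  //   |}
  // expands to `trait Instances` containing `instance1` and `instance2`,
  // because the '-' line is replicated once per arity while the '|' lines
  // appear exactly once.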
50 | trait Template {
51 | def filename(root: File): File
52 | def content(tv: TemplateVals): String
53 | def range: IndexedSeq[Int] = 1 to maxArity
54 | def body: String = {
55 | val headerLines = header.split('\n')
56 | val raw = range.map(n => content(new TemplateVals(n)).split('\n').filterNot(_.isEmpty))
57 | val preBody = raw.head.takeWhile(_.startsWith("|")).map(_.tail)
58 | val instances = raw.flatMap(_.filter(_.startsWith("-")).map(_.tail))
59 | val postBody = raw.head.dropWhile(_.startsWith("|")).dropWhile(_.startsWith("-")).map(_.tail)
60 | (headerLines ++ preBody ++ instances ++ postBody).mkString("\n")
61 | }
62 | }
63 |
64 | object GenProductDecoders extends Template {
65 | override def range: IndexedSeq[Int] = 1 to maxArity
66 |
67 | def filename(root: File): File = root / "com" / "weather" / "scalacass" / "ProductCCCassFormatDecoders.scala"
68 |
69 | def content(tv: TemplateVals): String = {
70 | import tv._
71 |
72 | val instances = synTypes.map(tpe => s"decode$tpe")
73 |
74 | val instanceMembers = synTypes.map(tpe => s"decode$tpe: CassFormatDecoder[$tpe]").mkString(", ")
75 | val names = synTypes.map(tpe => s"name$tpe")
76 | val memberNames = names.map(n => s"$n: String").mkString(", ")
77 | val results = (synVals zip instances zip names).map { case ((v, i), name) => s"$v <- $i.decode(r, $name)" }.mkString("; ")
78 | val fnCombine = s"f(${`a..n`})"
79 |
80 | block"""
81 | |package com.weather.scalacass
82 | |
83 | |import com.datastax.driver.core.Row
84 | |import scsession.SCStatement.RightBiasedEither
85 | |
86 | |private[scalacass] trait ProductCCCassFormatDecoders {
87 | - /**
88 | - * @group Product
89 | - */
90 | - final def forProduct$arity[${`A..N`}, Target]($memberNames)(f: (${`A..N`}) => Target)(implicit $instanceMembers): CCCassFormatDecoder[Target] =
91 | - new CCCassFormatDecoder[Target] {
92 | - def decode(r: Row): Result[Target] = for {
93 | - $results
94 | - } yield $fnCombine
95 | - }
96 | |}
97 | """
98 | }
99 | }
100 |
101 | object GenProductEncoders extends Template {
102 | override def range: IndexedSeq[Int] = 1 to maxArity
103 |
104 | def filename(root: File): File = root / "com" / "weather" / "scalacass" / "ProductCCCassFormatEncoders.scala"
105 |
106 | def content(tv: TemplateVals): String = {
107 | import tv._
108 |
109 | val names = synTypes.map(tpe => s"name$tpe")
110 | val encodedTypes = synTypes.map(tpe => s"encoded$tpe")
111 | val instances = synTypes.map(tpe => s"encode$tpe")
112 |
113 | val memberNames = names.map(n => s"$n: String").mkString(", ")
114 | val instanceMembers = synTypes.map(tpe => s"encode$tpe: CassFormatEncoder[$tpe]").mkString(", ")
115 | val cassTypes = instances.map(i => s"$i.cassType").mkString(", ")
116 | val results = (encodedTypes zip instances zip synVals).map { case ((encodedTpe, i), v) => s"$encodedTpe <- $i.encode($v)" }.mkString("; ")
117 | val namesCombined = (names zip encodedTypes).map { case (n, encodedTpe) => s"($n, $encodedTpe)" }.mkString(", ")
118 | val queryCombined = (instances zip synVals zip names zip encodedTypes).map { case (((i, v), name), encodedTpe) => s"($i.withQuery($v, $name), $encodedTpe)" }.mkString(", ")
119 |
120 | block"""
121 | |package com.weather.scalacass
122 | |
123 | |import scsession.SCStatement.RightBiasedEither
124 | |
125 | |private[scalacass] trait ProductCCCassFormatEncoders {
126 | - /**
127 | - * @group Product
128 | - */
129 | - final def forProduct$arity[${`A..N`}, Source]($memberNames)(f: Source => (${`A..N`}))(implicit $instanceMembers): CCCassFormatEncoder[Source] =
130 | - new CCCassFormatEncoder[Source] {
131 | - val names = List($memberNames)
132 | - val types = List($cassTypes)
133 | - def encodeWithName(from: Source): Result[List[(String, AnyRef)]] = {
134 | - val (${`a..n`}) = f(from)
135 | - for {
136 | - $results
137 | - } yield List($namesCombined)
138 | - }
139 | - def encodeWithQuery(from: Source): Result[List[(String, AnyRef)]] = {
140 | - val (${`a..n`}) = f(from)
141 | - for {
142 | - $results
143 | - } yield List($queryCombined)
144 | - }
145 | - }
146 | |}
147 | """
148 | }
149 | }
150 |
151 | val templates: Seq[Template] = Seq(
152 | GenProductDecoders,
153 | GenProductEncoders
154 | )
155 |
156 | def gen(dir: File): Seq[File] = templates.map { template =>
157 | val tgtFile = template.filename(dir)
158 | IO.write(tgtFile, template.body)
159 | tgtFile
160 | }
161 | }
162 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.3.2
2 |
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | resolvers ++= Seq(
2 | Resolver.sonatypeRepo("releases")
3 | )
4 |
5 | addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.4")
6 |
7 | addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.8.3")
8 |
9 | addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.3.7")
10 |
11 | addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.6")
12 |
--------------------------------------------------------------------------------
/scripts/makeMicrosite.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ue
3 |
4 | enable_cassandra_2=-1
5 | enable_cassandra_3=-1
6 | enable_jekyll=1
7 | clean_workspace=0
8 | publish=0
9 |
10 | function help {
11 | echo "how to use:"
12 | echo "must be run from project home (same level as build.sbt file)."
13 | echo "first time running will download cassandra binaries. This will require an internet connection"
14 | echo "must use one of:"
15 | echo " -0 -- only combine existing docs"
16 | echo " -2 -- compile cassandra 2 docs"
17 | echo " -3 -- compile cassandra 3 docs"
18 | echo " -23 -- compile cassandra 2 and 3 docs"
19 | echo " -h -- print out this message"
20 | echo "may optionally include any of:"
21 | echo " -x -- disable start up of jekyll at the end of the script"
22 | echo " -c -- clean the workspace first"
23 | echo " -p -- publish the microsite instead of starting jekyll"
24 | exit 1
25 | }
26 |
27 | function in_right_location {
28 | if [[ ! -f build.sbt || ! -n $(cat build.sbt | grep 'com.github.thurstonsand') ]]; then
29 | echo "not in root project folder!"
30 | echo
31 | help
32 | fi
33 | }
34 | function parse_inputs {
35 | while getopts ":023xcph" opt; do
36 | case $opt in
37 | 0)
38 | enable_cassandra_2=0
39 | enable_cassandra_3=0
40 | ;;
41 | 2)
42 | enable_cassandra_2=1
43 | ;;
44 | 3)
45 | enable_cassandra_3=1
46 | ;;
47 | x)
48 | enable_jekyll=0
49 | ;;
50 | c)
51 | clean_workspace=1
52 | ;;
53 | p)
54 | publish=1
55 | enable_jekyll=0
56 | ;;
57 | h)
58 | help
59 | ;;
60 | \?)
61 | echo "Invalid option: -$OPTARG" >&2
62 | help
63 | ;;
64 | :)
65 | echo "Option -$OPTARG requires an argument." >&2
66 | help
67 | ;;
68 | esac
69 | done
70 | if [[ $enable_cassandra_2 -ne -1 && $enable_cassandra_3 -eq -1 ]]; then
71 | enable_cassandra_3=0
72 | elif [[ $enable_cassandra_2 -eq -1 && $enable_cassandra_3 -ne -1 ]]; then
73 | enable_cassandra_2=0
74 | elif [[ $enable_cassandra_2 -eq -1 && $enable_cassandra_3 -eq -1 ]]; then
75 | help
76 | fi
77 | }
78 |
79 | function setup_cassandra {
80 | version=$1
81 | cassandra_path="cassandra-docs/cassandra-$version"
82 | if [[ ! -d $cassandra_path ]]; then
83 | wget -P cassandra-docs "http://archive.apache.org/dist/cassandra/$version/apache-cassandra-$version-bin.tar.gz"
84 | tar -xzf "cassandra-docs/apache-cassandra-$version-bin.tar.gz" -C cassandra-docs
85 | mv "cassandra-docs/apache-cassandra-$version" $cassandra_path
86 | mkdir -p $cassandra_path/data
87 | mkdir -p $cassandra_path/commitlog
88 | echo "data_file_directories:" >> "$cassandra_path/conf/cassandra.yaml"
89 | echo " - $PWD/$cassandra_path/data" >> "$cassandra_path/conf/cassandra.yaml"
90 | echo "commitlog_directory: $PWD/$cassandra_path/commitlog" >> "$cassandra_path/conf/cassandra.yaml"
91 | fi
92 | }
93 |
94 | function clear_cassandra {
95 | rm -rf $cassandra_path/data/*
96 | rm -rf $cassandra_path/commitlog/*
97 | }
98 |
99 | function wait_for_cassandra {
100 | for i in $(seq 1 60); do
101 | if $1/nodetool status 2>/dev/null | grep "^UN" >/dev/null; then
102 | $1/nodetool setlogginglevel ERROR 2>/dev/null
103 | echo "cassandra is running"
104 | return 0
105 | else
106 | echo "waiting on cassandra..."
107 | sleep 2
108 | fi
109 | done
110 | echo "cassandra did not start successfully"
111 | return 1
112 | }
113 |
114 | function run_cassandra {
115 | local folder_ext=$1
116 | local cassandra_version=""
117 | if [[ "$folder_ext" -eq "21" ]]; then
118 | cassandra_version="2.1.10.3"
119 | else
120 | cassandra_version="3.5.0"
121 | fi
122 |
123 | if ./$cassandra_path/bin/nodetool status 2>/dev/null | grep "^UN" >/dev/null; then
124 | echo "cassandra is already running. you must stop that instance first"
125 | exit 1
126 | fi
127 | echo "starting cassandra $version"
128 | trap 'if [[ -n "$cass_pid" ]]; then kill $cass_pid; fi' INT TERM EXIT
129 | ./$cassandra_path/bin/cassandra -f >/dev/null &
130 | cass_pid=$!
131 | wait_for_cassandra "./$cassandra_path/bin"
132 |
133 | if [[ clean_workspace -gt 0 ]]; then
134 | echo "cleaning and compiling cassandra $folder_ext docs"
135 | sbt -Dcassandra-driver.version=$cassandra_version "tut-cass$folder_ext/clean" "tut-cass$folder_ext/tut"
136 | else
137 | echo "compiling cassandra $folder_ext docs"
138 | sbt -Dcassandra-driver.version=$cassandra_version "tut-cass$folder_ext/tut"
139 | fi
140 | kill $cass_pid
141 |
142 | unset cass_pid
143 | trap - INT TERM EXIT
144 | rm -rf docs/root/src/main/tut/cass$folder_ext
145 | cp -r "docs/cass$folder_ext/target/scala-2.12/resource_managed/main/jekyll/cass$folder_ext" docs/root/src/main/tut/
146 | }
147 |
148 | function compile_results {
149 | echo "compiling docs"
150 | sbt -Dmicrosite.baseurl="" "docs/clean" "docs/makeMicrosite"
151 | }
152 |
153 | function run_jekyll {
154 | trap "rm -rf _site" INT TERM EXIT
155 | jekyll serve -s docs/root/target/jekyll/
156 | }
157 |
158 | function publish_site {
159 | echo "publishing docs"
160 | sbt "docs/clean" "docs/publishMicrosite"
161 | }
162 |
163 | mkdir -p cassandra-docs
164 |
165 | in_right_location
166 | parse_inputs $@
167 |
168 | if [[ enable_cassandra_2 -gt 0 ]]; then
169 | setup_cassandra "2.1.20"
170 | clear_cassandra
171 |
172 | run_cassandra 21
173 | fi
174 |
175 | if [[ enable_cassandra_3 -gt 0 ]]; then
176 | setup_cassandra "3.5"
177 | clear_cassandra
178 |
179 | run_cassandra 3
180 | fi
181 |
182 | if [[ publish -gt 0 ]]; then
183 | publish_site
184 | else
185 | compile_results
186 |
187 | if [[ enable_jekyll -gt 0 ]]; then
188 | run_jekyll
189 | fi
190 | fi
191 |
--------------------------------------------------------------------------------
/scripts/sbt-test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ue
3 |
4 | if [[ ! -f build.sbt || ! -n $(cat build.sbt | grep 'com.github.thurstonsand') ]]; then
5 | echo "not in root project folder!"
6 | echo
7 | exit 1
8 | fi
9 |
10 | old_j_version=$(sh ./scripts/util/change_j_version.sh)
11 |
12 | function do_test {
13 | local c_version=$1
14 |
15 | sbt -Dcassandra.version=$c_version +test
16 | }
17 |
18 | do_test "2.1"
19 | do_test "3.5"
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/CCCassFormatDecoder.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.Row
4 | import shapeless.labelled.{ FieldType, field }
5 | import shapeless.{ ::, HList, HNil, LabelledGeneric, Lazy, Witness }
6 |
7 | abstract class DerivedCCCassFormatDecoder[T] extends CCCassFormatDecoder[T]
8 |
9 | object DerivedCCCassFormatDecoder {
10 | implicit val hNilDecoder: DerivedCCCassFormatDecoder[HNil] = new DerivedCCCassFormatDecoder[HNil] {
11 | def decode(r: Row): Result[HNil] = Right(HNil)
12 | }
13 |
14 | implicit def hConsDecoder[K <: Symbol, H, T <: HList](implicit w: Witness.Aux[K], tdH: Lazy[CassFormatDecoder[H]], tdT: Lazy[DerivedCCCassFormatDecoder[T]]): DerivedCCCassFormatDecoder[FieldType[K, H] :: T] =
15 | new DerivedCCCassFormatDecoder[FieldType[K, H] :: T] {
16 | def decode(r: Row) = for {
17 | h <- tdH.value.decode(r, w.value.name.toString).right
18 | t <- tdT.value.decode(r).right
19 | } yield field[K](h) :: t
20 | }
21 |
22 | implicit def ccConverter[T, Repr](implicit gen: LabelledGeneric.Aux[T, Repr], hListDecoder: Lazy[DerivedCCCassFormatDecoder[Repr]]): DerivedCCCassFormatDecoder[T] =
23 | new DerivedCCCassFormatDecoder[T] {
24 | def decode(r: Row): Result[T] = hListDecoder.value.decode(r).right.map(gen.from)
25 | }
26 | }
27 |
28 | trait CCCassFormatDecoder[T] { self =>
29 | private[scalacass] def decode(r: Row): Result[T]
30 | final def map[U](f: T => U): CCCassFormatDecoder[U] = new CCCassFormatDecoder[U] {
31 | def decode(r: Row): Result[U] = self.decode(r).right.map(f)
32 | }
33 | final def flatMap[U](f: T => Result[U]): CCCassFormatDecoder[U] = new CCCassFormatDecoder[U] {
34 | def decode(r: Row): Result[U] = self.decode(r).right.flatMap(f)
35 | }
36 |
37 | final def as(r: Row): T = decode(r) match {
38 | case Right(v) => v
39 | case Left(exc) => throw exc
40 | }
41 | final def getOrElse(r: Row)(default: => T): T = decode(r).right.getOrElse(default)
42 | final def attemptAs(r: Row): Result[T] = decode(r)
43 | }
44 |
45 | object CCCassFormatDecoder extends ProductCCCassFormatDecoders {
46 | implicit def derive[T](implicit derived: Lazy[DerivedCCCassFormatDecoder[T]]): CCCassFormatDecoder[T] = derived.value
47 | def apply[T](implicit decoder: CCCassFormatDecoder[T]) = decoder
48 |
49 | implicit def optionalCodec[T](implicit decoder: CCCassFormatDecoder[T]): CCCassFormatDecoder[Option[T]] =
50 | new CCCassFormatDecoder[Option[T]] {
51 | private[scalacass] def decode(r: Row): Result[Option[T]] = decoder.decode(r) match {
52 | case Left(Recoverable(_)) => Right(None)
53 | case other => other.right.map(Option.apply)
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/CCCassFormatEncoder.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import shapeless.labelled.FieldType
4 | import shapeless.{ ::, HList, HNil, LabelledGeneric, Lazy, Witness }
5 |
6 | abstract class DerivedCCCassFormatEncoder[F] extends CCCassFormatEncoder[F]
7 |
8 | object DerivedCCCassFormatEncoder {
9 | implicit val hNilEncoder: DerivedCCCassFormatEncoder[HNil] = new DerivedCCCassFormatEncoder[HNil] {
10 | def encodeWithName(f: HNil) = Right(Nil)
11 | def encodeWithQuery(f: HNil) = Right(Nil)
12 |
13 | val names = Nil
14 | val types = Nil
15 | }
16 |
17 | implicit def hConsEncoder[K <: Symbol, H, T <: HList](implicit w: Witness.Aux[K], tdH: Lazy[CassFormatEncoder[H]], tdT: Lazy[DerivedCCCassFormatEncoder[T]]): DerivedCCCassFormatEncoder[FieldType[K, H] :: T] =
18 | new DerivedCCCassFormatEncoder[FieldType[K, H] :: T] {
19 | def encodeWithName(f: FieldType[K, H] :: T) = for {
20 | h <- tdH.value.encode(f.head).right
21 | t <- tdT.value.encodeWithName(f.tail).right
22 | } yield (w.value.name.toString, h) :: t
23 | def encodeWithQuery(f: FieldType[K, H] :: T) = for {
24 | h <- tdH.value.encode(f.head).right
25 | t <- tdT.value.encodeWithQuery(f.tail).right
26 | } yield (tdH.value.withQuery(f.head, w.value.name.toString), h) :: t
27 | def names = w.value.name.toString :: tdT.value.names
28 | def types = tdH.value.cassType :: tdT.value.types
29 | }
30 |
31 | implicit def ccConverter[T, Repr <: HList](implicit gen: LabelledGeneric.Aux[T, Repr], hListDecoder: Lazy[DerivedCCCassFormatEncoder[Repr]]): DerivedCCCassFormatEncoder[T] =
32 | new DerivedCCCassFormatEncoder[T] {
33 | def encodeWithName(f: T) = hListDecoder.value.encodeWithName(gen.to(f))
34 | def encodeWithQuery(f: T) = hListDecoder.value.encodeWithQuery(gen.to(f))
35 | def names = hListDecoder.value.names
36 | def types = hListDecoder.value.types
37 | }
38 | }
39 |
40 | trait CCCassFormatEncoder[F] { self =>
41 | def encodeWithName(f: F): Result[List[(String, AnyRef)]]
42 | def encodeWithQuery(f: F): Result[List[(String, AnyRef)]]
43 | def names: List[String]
44 | def types: List[String]
45 | def namesAndTypes: List[(String, String)] = names zip types
46 |
47 | final def map[G](fn: G => F): CCCassFormatEncoder[G] = new CCCassFormatEncoder[G] {
48 | def encodeWithName(f: G): Result[List[(String, AnyRef)]] = self.encodeWithName(fn(f))
49 | def encodeWithQuery(f: G): Result[List[(String, AnyRef)]] = self.encodeWithQuery(fn(f))
50 | def names = self.names
51 | def types = self.types
52 | }
53 | final def flatMap[G](fn: G => Result[F]): CCCassFormatEncoder[G] = new CCCassFormatEncoder[G] {
54 | def encodeWithName(f: G): Result[List[(String, AnyRef)]] = fn(f).right.flatMap(self.encodeWithName)
55 | def encodeWithQuery(f: G): Result[List[(String, AnyRef)]] = fn(f).right.flatMap(self.encodeWithQuery)
56 | def names = self.names
57 | def types = self.types
58 | }
59 | }
60 |
61 | object CCCassFormatEncoder extends ProductCCCassFormatEncoders {
62 | implicit def derive[T](implicit derived: Lazy[DerivedCCCassFormatEncoder[T]]): CCCassFormatEncoder[T] = derived.value
63 | def apply[T](implicit instance: CCCassFormatEncoder[T]): CCCassFormatEncoder[T] = instance
64 | }
65 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/CassFormatEncoder.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.DataType
4 | import ScalaSession.UpdateBehavior
5 |
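// A CassFormatEncoder converts a Scala value of type F into the Java type
// (From) that the Cassandra driver accepts, and carries the corresponding
// Cassandra DataType plus the "name=?" fragment used when the value appears
// in a query clause.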
6 | trait CassFormatEncoder[F] { self =>
7 | type From <: AnyRef
8 | def cassDataType: DataType
9 | def encode(f: F): Result[From]
10 | def withQuery(instance: F, name: String) = s"$name=?"
11 | def cassType: String = cassDataType.toString
12 |
13 | final def map[G](fn: G => F): CassFormatEncoder[G] = new CassFormatEncoder[G] {
14 | type From = self.From
15 | val cassDataType = self.cassDataType
16 | def encode(f: G): Result[From] = self.encode(fn(f))
17 | }
18 |
19 | final def flatMap[G](fn: G => Result[F]): CassFormatEncoder[G] = new CassFormatEncoder[G] {
20 | type From = self.From
21 | val cassDataType = self.cassDataType
22 | def encode(f: G): Result[From] = fn(f).right.flatMap(self.encode)
23 | }
24 | }
25 |
26 | object CassFormatEncoder extends CassFormatEncoderVersionSpecific {
27 | type Aux[F, From0] = CassFormatEncoder[F] { type From = From0 }
28 | def apply[T](implicit encoder: CassFormatEncoder[T]) = encoder
29 |
30 | private[scalacass] def sameTypeCassFormatEncoder[F <: AnyRef](_cassDataType: DataType): CassFormatEncoder[F] = new CassFormatEncoder[F] {
31 | type From = F
32 | val cassDataType = _cassDataType
33 | def encode(f: F) = Right(f)
34 | }
35 | private[scalacass] def transCassFormatEncoder[F, T <: AnyRef](_cassDataType: DataType, _encode: F => T): CassFormatEncoder[F] = new CassFormatEncoder[F] {
36 | type From = T
37 | val cassDataType = _cassDataType
38 | def encode(f: F) = Right(_encode(f))
39 | }
40 |
41 | // encoders
42 |
43 | implicit val stringFormat: CassFormatEncoder[String] = sameTypeCassFormatEncoder[String](DataType.varchar)
44 | implicit val uuidFormat: CassFormatEncoder[java.util.UUID] = sameTypeCassFormatEncoder[java.util.UUID](DataType.uuid)
45 | implicit val iNetFormat: CassFormatEncoder[java.net.InetAddress] = sameTypeCassFormatEncoder[java.net.InetAddress](DataType.inet)
46 |
47 | implicit val intFormat: CassFormatEncoder[Int] = transCassFormatEncoder(DataType.cint, Int.box)
48 | implicit val longFormat: CassFormatEncoder[Long] = transCassFormatEncoder(DataType.bigint, Long.box)
49 | implicit val booleanFormat: CassFormatEncoder[Boolean] = transCassFormatEncoder(DataType.cboolean, Boolean.box)
50 | implicit val doubleFormat: CassFormatEncoder[Double] = transCassFormatEncoder(DataType.cdouble, Double.box)
51 | implicit val bigIntegerFormat: CassFormatEncoder[BigInt] = transCassFormatEncoder(DataType.varint, (_: BigInt).underlying)
52 | implicit val bigDecimalFormat: CassFormatEncoder[BigDecimal] = transCassFormatEncoder(DataType.decimal, (_: BigDecimal).underlying)
53 | implicit val floatFormat: CassFormatEncoder[Float] = transCassFormatEncoder(DataType.cfloat, Float.box)
54 | implicit val blobFormat: CassFormatEncoder[Array[Byte]] = transCassFormatEncoder(DataType.blob, java.nio.ByteBuffer.wrap)
55 |
56 | def updateBehaviorListEncoder[A, UB <: UpdateBehavior[List, A]](implicit underlying: CassFormatEncoder[A]) = new CassFormatEncoder[UB] {
57 | type From = java.util.List[underlying.From]
58 | val cassDataType = DataType.list(underlying.cassDataType)
59 | def encode(f: UB): Result[java.util.List[underlying.From]] = {
60 | val acc = new java.util.ArrayList[underlying.From]()
61 | @scala.annotation.tailrec
62 | def process(l: List[A]): Result[java.util.List[underlying.From]] = l.headOption.map(underlying.encode(_)) match {
63 | case Some(Left(ff)) => Left(ff)
64 | case Some(Right(n)) =>
65 | acc.add(n)
66 | process(l.tail)
67 | case None => Right(acc)
68 | }
69 | process(f.coll)
70 | }
71 | override def withQuery(instance: UB, name: String): String = instance withQuery name
72 | }
73 |
74 | def updateBehaviorSetEncoder[A, UB <: UpdateBehavior[Set, A]](implicit underlying: CassFormatEncoder[A]) = new CassFormatEncoder[UB] {
75 | type From = java.util.Set[underlying.From]
76 | val cassDataType = DataType.set(underlying.cassDataType)
77 | def encode(f: UB): Result[java.util.Set[underlying.From]] = {
78 | val acc = new java.util.HashSet[underlying.From]()
79 | @scala.annotation.tailrec
80 | def process(s: Set[A]): Result[java.util.Set[underlying.From]] = s.headOption.map(underlying.encode(_)) match {
81 | case Some(Left(ff)) => Left(ff)
82 | case Some(Right(n)) =>
83 | acc.add(n)
84 | process(s.tail)
85 | case None => Right(acc)
86 | }
87 | process(f.coll)
88 | }
89 | override def withQuery(instance: UB, name: String): String = instance withQuery name
90 | }
91 |
92 | implicit def listFormatAdd[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Add[List, A]] =
93 | updateBehaviorListEncoder[A, UpdateBehavior.Add[List, A]]
94 | implicit def listFormatSubtract[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Subtract[List, A]] =
95 | updateBehaviorListEncoder[A, UpdateBehavior.Subtract[List, A]]
96 | implicit def listFormatReplace[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Replace[List, A]] =
97 | updateBehaviorListEncoder[A, UpdateBehavior.Replace[List, A]]
98 | implicit def listFormatUpdateBehavior[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior[List, A]] =
99 | updateBehaviorListEncoder[A, UpdateBehavior[List, A]]
100 |
101 | implicit def listFormat[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[List[A]] =
102 | updateBehaviorListEncoder[A, UpdateBehavior.Replace[List, A]](underlying).map[List[A]](UpdateBehavior.Replace(_))
103 |
104 | implicit def setFormatAdd[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Add[Set, A]] =
105 | updateBehaviorSetEncoder[A, UpdateBehavior.Add[Set, A]]
106 | implicit def setFormatSubtract[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Subtract[Set, A]] =
107 | updateBehaviorSetEncoder[A, UpdateBehavior.Subtract[Set, A]]
108 | implicit def setFormatReplace[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior.Replace[Set, A]] =
109 | updateBehaviorSetEncoder[A, UpdateBehavior.Replace[Set, A]]
110 | implicit def setFormatUpdateBehavior[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[UpdateBehavior[Set, A]] =
111 | updateBehaviorSetEncoder[A, UpdateBehavior[Set, A]]
112 |
113 | implicit def setFormat[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[Set[A]] =
114 | updateBehaviorSetEncoder[A, UpdateBehavior.Replace[Set, A]](underlying).map[Set[A]](UpdateBehavior.Replace(_))
115 |
116 | implicit def mapFormat[A, B](implicit underlyingA: CassFormatEncoder[A], underlyingB: CassFormatEncoder[B]): CassFormatEncoder[Map[A, B]] =
117 | new CassFormatEncoder[Map[A, B]] {
118 | type From = java.util.Map[underlyingA.From, underlyingB.From]
119 | val cassDataType = DataType.map(underlyingA.cassDataType, underlyingB.cassDataType)
120 | def encode(f: Map[A, B]): Result[java.util.Map[underlyingA.From, underlyingB.From]] = {
121 | val acc = new java.util.HashMap[underlyingA.From, underlyingB.From]()
122 | @scala.annotation.tailrec
123 | def process(l: Iterable[(A, B)]): Result[java.util.Map[underlyingA.From, underlyingB.From]] = l.headOption.map {
124 | case (k, v) => for {
125 | kk <- underlyingA.encode(k).right
126 | vv <- underlyingB.encode(v).right
127 | } yield (kk, vv)
128 | } match {
129 | case Some(Left(ff)) => Left(ff)
130 | case Some(Right(n)) =>
131 | acc.put(n._1, n._2)
132 | process(l.tail)
133 | case None => Right(acc)
134 | }
135 | process(f)
136 | }
137 | }
138 |
139 | implicit def optionFormat[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[Option[A]] = new CassFormatEncoder[Option[A]] {
140 | type From = Option[underlying.From]
141 | val cassDataType = underlying.cassDataType
142 | def encode(f: Option[A]): Result[Option[underlying.From]] = f.map(underlying.encode(_)) match {
143 | case None => Right(None)
144 | case Some(Left(_)) => Right(None)
145 | case Some(Right(n)) => Right(Some(n))
146 | }
147 | }
148 | implicit def eitherFormat[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[Result[A]] = new CassFormatEncoder[Result[A]] {
149 | type From = Result[underlying.From]
150 | val cassDataType = underlying.cassDataType
151 | def encode(f: Result[A]) = f.right.map(underlying.encode(_)) match {
152 | case Left(ff) => Right(Left(ff))
153 | case other => other
154 | }
155 | }
156 |
157 | implicit val nothingFormat: CassFormatEncoder[Nothing] = new CassFormatEncoder[Nothing] {
158 | type From = Nothing
159 | def cassDataType = throw new IllegalArgumentException("Nothing isn't a real type!")
160 | def encode(f: Nothing): Result[From] = throw new IllegalArgumentException("Nothing isn't a real type!")
161 | }
162 | }
163 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/Exceptions.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.exceptions.QueryExecutionException
4 |
5 | class WrongPrimaryKeySizeException(m: String) extends QueryExecutionException(m)
6 |
7 | class ValueNotDefinedException(m: String) extends QueryExecutionException(m)
8 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/LRUCache.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
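// A minimal LRU cache: the `true` flag passed to the LinkedHashMap
// super-constructor switches it to access order, and removeEldestEntry
// evicts the least-recently-used entry once the map's size reaches cacheSize.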
3 | class LRUCache[K, V](cacheSize: Int) extends java.util.LinkedHashMap[K, V](cacheSize, 0.75f, true) {
4 | override protected def removeEldestEntry(eldest: java.util.Map.Entry[K, V]): Boolean = size >= cacheSize
5 |
6 | def get(key: K, fn: => V): V =
7 | if (containsKey(key)) get(key)
8 | else {
9 | val res = fn
10 | put(key, res)
11 | res
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/Nullable.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ DataType, Row, TupleValue }
4 |
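// Nullable models three states for a value sent to or read from Cassandra:
// a present value (Is), an explicit null (IsNull), and "is not null"
// (IsNotNull), which is only meaningful in query position; see the
// encoder's withQuery below.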
5 | sealed trait Nullable[+A] {
6 | def toOption: Option[A]
7 | }
8 | final case class Is[+A](x: A) extends Nullable[A] {
9 | def toOption: Option[A] = Some(x)
10 | }
11 | case object IsNotNull extends Nullable[Nothing] {
12 | def toOption: Option[Nothing] = None
13 | }
14 | case object IsNull extends Nullable[Nothing] {
15 | def toOption: Option[Nothing] = None
16 | }
17 |
18 | object Nullable {
19 | def apply[A](x: A): Nullable[A] = if (x.isNull) IsNull else Is(x)
20 | def empty[A]: Nullable[A] = IsNull
21 | implicit def nullable2iterable[A](xo: Nullable[A]): Iterable[A] = xo.toOption.toList
22 |
23 | implicit class NullableOption[+A](val opt: Option[A]) extends AnyVal {
24 | def toNullable: Nullable[A] = opt.fold[Nullable[A]](IsNull)(Is.apply)
25 | }
26 | implicit def option2nullable[A](opt: Option[A]): Nullable[A] = opt.toNullable
27 | implicit def nullable2option[A](nullable: Nullable[A]): Option[A] = nullable.toOption
28 |
29 | implicit def encoder[A](implicit underlying: CassFormatEncoder[A]): CassFormatEncoder[Nullable[A]] = new CassFormatEncoder[Nullable[A]] {
30 | type From = Nullable[underlying.From]
31 |
32 | def cassDataType: DataType = underlying.cassDataType
33 |
34 | def encode(f: Nullable[A]): Result[Nullable[underlying.From]] = f match {
35 | case Is(x) => underlying.encode(x).right.map(Is.apply)
36 | case IsNotNull => Right(IsNotNull)
37 | case IsNull => Right(IsNull)
38 | }
39 |
40 | override def withQuery(instance: Nullable[A], name: String): String = instance match {
41 | case v: Is[A] => super.withQuery(v, name)
42 | case IsNotNull => s"$name!=NULL"
43 | case IsNull => s"$name=NULL"
44 | }
45 | }
46 |
47 | implicit def decoder[A](implicit underlying: CassFormatDecoder[A]): CassFormatDecoder[Nullable[A]] = new CassFormatDecoder[Nullable[A]] {
48 | type From = underlying.From
49 | val typeToken = underlying.typeToken
50 | def f2t(f: From): Result[Nullable[A]] = underlying.f2t(f).right.map(Is.apply)
51 | def extract(r: Row, name: String): From = underlying.extract(r, name)
52 |
53 | override def decode(r: Row, name: String): Result[Nullable[A]] = super.decode(r, name) match {
54 | case Left(Recoverable(_)) => Right(IsNull)
55 | case other => other
56 | }
57 | def tupleExtract(tup: TupleValue, pos: Int): From = underlying.tupleExtract(tup, pos)
58 |
59 | override def tupleDecode(tup: TupleValue, pos: Int): Result[Nullable[A]] = super.tupleDecode(tup, pos) match {
60 | case Left(Recoverable(_)) => Right(IsNull)
61 | case other => other
62 | }
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/Recoverable.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | object NotRecoverable extends NotRecoverableVersionSpecific {
4 | def unapply(t: Throwable): Option[Throwable] = if (apply(t)) Some(t) else None
5 |
6 | implicit class Try2Either[T](val t: scala.util.Try[T]) extends AnyVal {
7 | def unwrap[S](implicit ev: T =:= Result[S]): Result[S] = t match {
8 | case scala.util.Success(res) => res
9 | case scala.util.Failure(NotRecoverable(exc)) => throw exc
10 | case scala.util.Failure(exc) => Left(exc)
11 | }
12 | def toEither: Result[T] = t match {
13 | case scala.util.Success(res) => Right(res)
14 | case scala.util.Failure(NotRecoverable(exc)) => throw exc
15 | case scala.util.Failure(exc) => Left(exc)
16 | }
17 | }
18 | }
19 |
20 | object Recoverable {
21 | def unapply(t: Throwable): Option[Throwable] = if (apply(t)) Some(t) else None
22 |
23 | def apply(t: Throwable) = t match {
24 | case _: ValueNotDefinedException => true
25 | case _ => false
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/ScalaSession.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import java.util.concurrent.Callable
4 |
5 | import com.datastax.driver.core._
6 | import com.google.common.cache.{ Cache, CacheBuilder }
7 | import com.weather.scalacass.scsession._
8 | import org.slf4j.LoggerFactory
9 |
10 | object ScalaSession {
11 | private implicit def Fn02Callable[V](f: => V): Callable[V] = new Callable[V] {
12 | override def call(): V = f
13 | }
14 |
15 | final case class Star(`*`: Nothing)
16 | object Star {
17 | implicit val ccCassEncoder: CCCassFormatEncoder[Star] = CCCassFormatEncoder.derive
18 | }
19 | final case class NoQuery()
20 | object NoQuery {
21 | implicit val ccCassEncoder: CCCassFormatEncoder[NoQuery] = CCCassFormatEncoder.derive
22 | }
23 | final case class NoUpdate()
24 | object NoUpdate {
25 | implicit val ccCassEncoder: CCCassFormatEncoder[NoUpdate] = CCCassFormatEncoder.derive
26 | }
27 |
28 | sealed trait UpdateBehavior[F[_], A] {
29 | def coll: F[A]
30 | def withQuery(name: String): String
31 | }
32 | object UpdateBehavior {
33 | final case class Add[F[_], A] private (coll: F[A]) extends UpdateBehavior[F, A] {
34 | def withQuery(name: String): String = s"$name=$name+?"
35 | }
36 | object Add {
37 | def apply[A](coll: List[A]): Add[List, A] = new Add(coll)
38 | def apply[A](coll: Set[A]): Add[Set, A] = new Add(coll)
39 | implicit def liftList[A](l: List[A]): Add[List, A] = apply(l)
40 | implicit def listSet[A](s: Set[A]): Add[Set, A] = apply(s)
41 | }
42 | final case class Subtract[F[_], A] private (coll: F[A]) extends UpdateBehavior[F, A] {
43 | def withQuery(name: String): String = s"$name=$name-?"
44 | }
45 | object Subtract {
46 | def apply[A](coll: List[A]): Subtract[List, A] = new Subtract(coll)
47 | def apply[A](coll: Set[A]): Subtract[Set, A] = new Subtract(coll)
48 | implicit def liftList[A](l: List[A]): Subtract[List, A] = apply(l)
49 | implicit def listSet[A](s: Set[A]): Subtract[Set, A] = apply(s)
50 | }
51 | final case class Replace[F[_], A] private (coll: F[A]) extends UpdateBehavior[F, A] {
52 | def withQuery(name: String): String = s"$name=?"
53 | }
54 | object Replace {
55 | def apply[A](coll: List[A]) = new Replace(coll)
56 | def apply[A](coll: Set[A]) = new Replace(coll)
57 | implicit def liftList[A](l: List[A]): Replace[List, A] = apply(l)
58 | implicit def listSet[A](s: Set[A]): Replace[Set, A] = apply(s)
59 | }
60 | }
61 | }
62 |
63 | final case class ScalaSession(keyspace: String)(implicit val session: Session) {
64 | import ScalaSession.{ Fn02Callable, Star, NoQuery }
65 |
66 | private val logger = LoggerFactory.getLogger(getClass.getName)
67 |
68 | private[this] val queryCache: Cache[String, Either[Throwable, PreparedStatement]] =
69 | CacheBuilder.newBuilder().maximumSize(1000).build[String, Either[Throwable, PreparedStatement]]()
70 |
71 | private[scalacass] def getFromCacheOrElse(key: String, statement: => PreparedStatement) = {
72 | def onCacheMiss: Either[Throwable, PreparedStatement] = {
73 | logger.debug("cache miss for key {}", key)
74 | try Right(statement) catch { case ex: Throwable => Left(ex) }
75 | }
76 | queryCache.get(key, onCacheMiss)
77 | }
78 | def invalidateCache(): Unit = queryCache.invalidateAll()
79 |
80 | def close(): Unit = session.close()
81 |
82 | def createKeyspace(properties: String): SCCreateKeyspaceStatement = SCCreateKeyspaceStatement(keyspace, properties, this)
83 |
84 | def dropKeyspace(): SCDropKeyspaceStatement = SCDropKeyspaceStatement(keyspace, this)
85 |
86 | def createTable[T : CCCassFormatEncoder](name: String, numPartitionKeys: Int, numClusteringKeys: Int): SCCreateTableStatement =
87 | SCCreateTableStatement[T](keyspace, name, numPartitionKeys, numClusteringKeys, this)
88 |
89 | def truncateTable(table: String): SCTruncateTableStatement = SCTruncateTableStatement(keyspace, table, this)
90 | def dropTable(table: String): SCDropTableStatement = SCDropTableStatement(keyspace, table, this)
91 |
92 | def insert[I : CCCassFormatEncoder](table: String, insertable: I): SCInsertStatement = SCInsertStatement(keyspace, table, insertable, this)
93 |
94 | def update[U : CCCassFormatEncoder, Q : CCCassFormatEncoder](table: String, updateable: U, query: Q): SCUpdateStatement =
95 | SCUpdateStatement(keyspace, table, updateable, query, this)
96 |
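  // delete and select are split into partially-applied helper objects so a
  // caller can name only the columns/result type and let the query type be
  // inferred, e.g. (with hypothetical table and case class names):
  //   sSession.delete[ColumnsToDelete]("mytable", Query("a str"))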
97 | object delete {
98 | @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
99 | def apply[D] = partiallyApplied.asInstanceOf[PartiallyApplied[D]]
100 | final class PartiallyApplied[D] {
101 | def apply[Q : CCCassFormatEncoder](table: String, where: Q)(implicit dEncoder: CCCassFormatEncoder[D]): SCDeleteStatement =
102 | SCDeleteStatement[D, Q](keyspace, table, where, ScalaSession.this)
103 | }
104 | private val partiallyApplied = new PartiallyApplied[Nothing]
105 | }
106 | def deleteRow = delete[NoQuery]
107 |
108 | def batch(batches: List[SCStatement.SCBatchableStatement]): SCBatchStatement = SCBatchStatement(batches, this)
109 | def batchOf(batch: SCStatement.SCBatchableStatement, batches: SCStatement.SCBatchableStatement*): SCBatchStatement =
110 | SCBatchStatement((batch +: batches).toList, this)
111 |
112 | object select {
113 | @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
114 | def apply[S] = partiallyApplied.asInstanceOf[PartiallyApplied[S]]
115 | final class PartiallyApplied[S] {
116 | def apply[Q : CCCassFormatEncoder](table: String, where: Q)(implicit sEncoder: CCCassFormatEncoder[S]): SCSelectItStatement =
117 | SCSelectStatement.apply[S, Q](keyspace, table, where, ScalaSession.this)
118 | }
119 | private val partiallyApplied = new PartiallyApplied[Nothing]
120 | }
121 | def selectStar = select[Star]
122 |
123 | object selectOne {
124 | @SuppressWarnings(Array("org.wartremover.warts.AsInstanceOf"))
125 | def apply[S] = partiallyApplied.asInstanceOf[PartiallyApplied[S]]
126 | final class PartiallyApplied[S] {
127 | def apply[Q : CCCassFormatEncoder](table: String, where: Q)(implicit sEncoder: CCCassFormatEncoder[S]): SCSelectOneStatement =
128 | SCSelectStatement.applyOne[S, Q](keyspace, table, where, ScalaSession.this)
129 | }
130 | private val partiallyApplied = new PartiallyApplied[Nothing]
131 | }
132 | def selectOneStar = selectOne[Star]
133 |
134 | def rawStatement(query: String, anyrefArgs: AnyRef*): SCRawStatement =
135 | SCRaw.apply(query, anyrefArgs.toList, this)
136 | def rawSelect(query: String, anyrefArgs: AnyRef*): SCRawSelectStatement[Iterator] =
137 | SCRaw.applyIterator(query, anyrefArgs.toList, this)
138 | def rawSelectOne(query: String, anyrefArgs: AnyRef*): SCRawSelectStatement[Option] =
139 | SCRaw.applyOne(query, anyrefArgs.toList, this)
140 | }
141 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/TupleCassFormatDecoder.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.TupleValue
4 | import com.datastax.driver.core.exceptions.InvalidTypeException
5 | import shapeless.{ ::, Generic, HList, HNil, IsTuple, Lazy }
6 |
7 | abstract class DerivedTupleCassFormatDecoder[T] extends TupleCassFormatDecoder[T]
8 |
9 | object DerivedTupleCassFormatDecoder {
10 | implicit val hNilDecoder: DerivedTupleCassFormatDecoder[HNil] = new DerivedTupleCassFormatDecoder[HNil] {
11 | def decode(tup: TupleValue, n: Int) = {
12 | val arity = tup.getType.getComponentTypes.size
13 | if (arity !== n) Left(new InvalidTypeException(s"tuple of wrong arity: expecting arity of $n but found $arity"))
14 | else Right(HNil)
15 | }
16 | }
17 |
18 | implicit def hConsDecoder[H, T <: HList](implicit tdH: CassFormatDecoder[H], tdT: DerivedTupleCassFormatDecoder[T]): DerivedTupleCassFormatDecoder[::[H, T]] =
19 | new DerivedTupleCassFormatDecoder[H :: T] {
20 |
21 | def decode(tup: TupleValue, n: Int) = for {
22 | h <- tdH.tupleDecode(tup, n).right
23 | t <- tdT.decode(tup, n + 1).right
24 | } yield h :: t
25 | }
26 |
27 | implicit def tupleDecoder[T <: Product : IsTuple, Repr <: HList](implicit gen: Generic.Aux[T, Repr], hListDecoder: DerivedTupleCassFormatDecoder[Repr]): DerivedTupleCassFormatDecoder[T] =
28 | new DerivedTupleCassFormatDecoder[T] {
29 | def decode(tup: TupleValue, n: Int): Result[T] = {
30 | hListDecoder.decode(tup, n).right.map(gen.from)
31 | }
32 | }
33 | }
34 |
35 | trait TupleCassFormatDecoder[T] {
36 | def decode(tup: TupleValue, n: Int): Result[T]
37 | }
38 |
39 | object TupleCassFormatDecoder {
40 | implicit def derive[T](implicit derived: Lazy[DerivedTupleCassFormatDecoder[T]]): TupleCassFormatDecoder[T] = derived.value
41 | def apply[T](implicit decoder: TupleCassFormatDecoder[T]) = decoder
42 | }
43 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/TupleCassFormatEncoder.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.DataType
4 | import shapeless.{ ::, Generic, HList, HNil, IsTuple, Lazy }
5 |
6 | abstract class DerivedTupleCassFormatEncoder[T] extends TupleCassFormatEncoder[T]
7 |
8 | object DerivedTupleCassFormatEncoder {
9 | implicit val hNilEncoder: DerivedTupleCassFormatEncoder[HNil] = new DerivedTupleCassFormatEncoder[HNil] {
10 | def encode(tup: HNil) = Right(Nil)
11 | def types = Nil
12 | def dataTypes = Nil
13 | }
14 |
15 | implicit def hConsEncoder[H, T <: HList](implicit tdH: CassFormatEncoder[H], tdT: DerivedTupleCassFormatEncoder[T]): DerivedTupleCassFormatEncoder[H :: T] =
16 | new DerivedTupleCassFormatEncoder[H :: T] {
17 | def encode(tup: H :: T): Result[List[AnyRef]] = for {
18 | h <- tdH.encode(tup.head).right
19 | t <- tdT.encode(tup.tail).right
20 | } yield h :: t
21 | def types = tdH.cassType :: tdT.types
22 | def dataTypes = tdH.cassDataType :: tdT.dataTypes
23 | }
24 |
25 | implicit def tupleEncoder[T <: Product : IsTuple, Repr <: HList](implicit gen: Generic.Aux[T, Repr], hListEncoder: DerivedTupleCassFormatEncoder[Repr]): DerivedTupleCassFormatEncoder[T] =
26 | new DerivedTupleCassFormatEncoder[T] {
27 | def encode(tup: T): Result[List[AnyRef]] = hListEncoder.encode(gen.to(tup))
28 | def types = hListEncoder.types
29 | def dataTypes = hListEncoder.dataTypes
30 | }
31 | }
32 |
33 | trait TupleCassFormatEncoder[T] {
34 | def encode(tup: T): Result[List[AnyRef]]
35 | def types: List[String]
36 | def dataTypes: List[DataType]
37 | }
38 |
39 | object TupleCassFormatEncoder {
40 | implicit def derive[T](implicit derived: Lazy[DerivedTupleCassFormatEncoder[T]]): TupleCassFormatEncoder[T] = derived.value
41 | def apply[T](implicit encoder: TupleCassFormatEncoder[T]) = encoder
42 | }
43 |
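Taken together, the decoder and encoder above derive Cassandra tuple codecs for any Scala tuple by folding over its shapeless `Generic` representation: `hConsDecoder`/`hConsEncoder` consume one tuple position at a time, and the `IsTuple`-bounded instances convert between the tuple and its `HList`. A minimal sketch of summoning and using the derived instances for a concrete pair; the commented results are illustrative, assuming the usual `CassFormatEncoder`/`CassFormatDecoder` companion instances for `Int` and `String`:

    import com.weather.scalacass._

    object TupleCodecSketch {
      // summoned via the apply helpers in the companion objects above
      val enc = TupleCassFormatEncoder[(Int, String)]
      val dec = TupleCassFormatDecoder[(Int, String)]

      // per-element CQL type names, accumulated head-to-tail by hConsEncoder
      val cqlTypes: List[String] = enc.types // e.g. List("int", "varchar")

      // encoding walks the HList and yields each element boxed as AnyRef
      val encoded: Result[List[AnyRef]] = enc.encode((42, "hello"))
    }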
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/package.scala:
--------------------------------------------------------------------------------
1 | package com.weather
2 |
3 | package object scalacass {
4 | type Result[T] = Either[Throwable, T]
5 | @SuppressWarnings(Array("org.wartremover.warts.Equals"))
6 | implicit final class AnyOps[A](self: A) {
7 | def ===(other: A): Boolean = self == other
8 | def !==(other: A): Boolean = self != other
9 | def isNull: Boolean = self == null
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/scala/com/weather/scalacass/syntax.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.Row
4 |
5 | object syntax {
6 | implicit class RichRow(val r: Row) extends AnyVal {
7 | def as[T](name: String)(implicit d: CassFormatDecoder[T]): T = d.as(r)(name)
8 | def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Option[T] = d.as(r)(name)
9 | def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): T = d.as(r)(name).getOrElse(default)
10 | def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Result[T] = d.attemptAs(r)(name)
11 |
12 | def as[T](implicit ccd: CCCassFormatDecoder[T]): T = ccd.as(r)
13 | def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Option[T] = ccd.as(r)
14 | def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): T = ccd.as(r).getOrElse(default)
15 | def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Result[T] = ccd.decode(r)
16 | }
17 |
18 | implicit class RichIterator(val it: Iterator[Row]) extends AnyVal {
19 | def as[T](name: String)(implicit d: CassFormatDecoder[T]): Iterator[T] = it.map(r => d.as(r)(name))
20 | def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Iterator[Option[T]] = it.map(r => d.as(r)(name))
21 | def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): Iterator[T] = it.map(r => d.as(r)(name).getOrElse(default))
22 | def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Iterator[Result[T]] = it.map(r => d.attemptAs(r)(name))
23 |
24 | def as[T](implicit ccd: CCCassFormatDecoder[T]): Iterator[T] = it.map(r => ccd.as(r))
25 | def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Iterator[Option[T]] = it.map(r => ccd.as(r))
26 | def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): Iterator[T] = it.map(r => ccd.as(r).getOrElse(default))
27 | def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Iterator[Result[T]] = it.map(r => ccd.attemptAs(r))
28 | }
29 |
30 | implicit class RichOption(val opt: Option[Row]) extends AnyVal {
31 | def as[T](name: String)(implicit d: CassFormatDecoder[T]): Option[T] = opt.map(r => d.as(r)(name))
32 | def getAs[T](name: String)(implicit d: CassFormatDecoder[Option[T]]): Option[Option[T]] = opt.map(r => d.as(r)(name))
33 | def getOrElse[T](name: String, default: => T)(implicit d: CassFormatDecoder[Option[T]]): Option[T] = opt.map(r => d.as(r)(name).getOrElse(default))
34 | def attemptAs[T](name: String)(implicit d: CassFormatDecoder[T]): Option[Result[T]] = opt.map(r => d.attemptAs(r)(name))
35 |
36 | def as[T](implicit ccd: CCCassFormatDecoder[T]): Option[T] = opt.map(r => ccd.as(r))
37 | def getAs[T](implicit ccd: CCCassFormatDecoder[Option[T]]): Option[Option[T]] = opt.map(r => ccd.as(r))
38 | def getOrElse[T](default: => T)(implicit ccd: CCCassFormatDecoder[Option[T]]): Option[T] = opt.map(r => ccd.as(r).getOrElse(default))
39 | def attemptAs[T](implicit ccd: CCCassFormatDecoder[T]): Option[Result[T]] = opt.map(r => ccd.attemptAs(r))
40 | }
41 |
42 | type UpdateBehavior[F[_], A] = ScalaSession.UpdateBehavior[F, A]
43 | val UpdateBehavior = ScalaSession.UpdateBehavior
44 |
45 | type Star = ScalaSession.Star
46 | val Star = ScalaSession.Star
47 |
48 | type NoQuery = ScalaSession.NoQuery
49 | val NoQuery = ScalaSession.NoQuery
50 |
51 | type NoUpdate = ScalaSession.NoUpdate
52 | val NoUpdate = ScalaSession.NoUpdate
53 | }
54 |
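These enrichments are the read-side entry points, with one method per failure mode: `as` throws on a missing or mistyped column, `getAs` gives `None` instead, `getOrElse` substitutes a default, and `attemptAs` captures the error in a `Result`. A brief usage sketch against a single `Row`; the column names and `Person` case class are made up for illustration:

    import com.datastax.driver.core.Row
    import com.weather.scalacass.Result
    import com.weather.scalacass.syntax._

    final case class Person(name: String, age: Option[Int])

    // one hypothetical row, extracted four ways
    def readPerson(row: Row): Result[Person] = {
      val name: String     = row.as[String]("name")       // throws if absent or mistyped
      val age: Option[Int] = row.getAs[Int]("age")        // None instead of throwing
      val nameOr: String   = row.getOrElse("name", "n/a") // default on absence
      row.attemptAs[Person]                               // whole row as a case class, errors in Left
    }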
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/CassFormatDecoderVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.google.common.reflect.TypeToken
4 |
5 | object CassFormatDecoderVersionSpecific extends CassFormatDecoderVersionSpecific
6 |
7 | trait CassFormatDecoderVersionSpecific extends LowPriorityCassFormatDecoder {
8 | import CassFormatDecoder.sameTypeCassFormatDecoder
9 |
10 | implicit val dateFormat: CassFormatDecoder[java.util.Date] =
11 | sameTypeCassFormatDecoder(TypeToken.of(classOf[java.util.Date]), _ getDate _, _ getDate _)
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/CassFormatEncoderVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ DataType, TupleType, TupleValue }
4 |
5 | trait LowPriorityCassFormatEncoderVersionSpecific {
6 | implicit def tupleFormat[TUP <: Product](implicit underlying: TupleCassFormatEncoder[TUP]): CassFormatEncoder[TUP] = new CassFormatEncoder[TUP] {
7 | type From = TupleValue
8 | val cassDataType = TupleType.of(underlying.dataTypes: _*)
9 | def encode(f: TUP): Result[From] = underlying.encode(f).right.map(ar => cassDataType.newValue(ar: _*))
10 | }
11 | }
12 |
13 | trait CassFormatEncoderVersionSpecific extends LowPriorityCassFormatEncoderVersionSpecific {
14 | import CassFormatEncoder.sameTypeCassFormatEncoder
15 |
16 | implicit val dateFormat: CassFormatEncoder[java.util.Date] = sameTypeCassFormatEncoder(DataType.timestamp)
17 | }
18 |
19 | object CassFormatEncoderVersionSpecific extends CassFormatEncoderVersionSpecific
20 |
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/NotRecoverableVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.exceptions.{ NoHostAvailableException, QueryExecutionException, DriverInternalError, PagingStateException, UnsupportedFeatureException }
4 |
5 | trait NotRecoverableVersionSpecific {
6 | def apply(t: Throwable): Boolean = t match {
7 | case _: NoHostAvailableException | _: QueryExecutionException | _: DriverInternalError | _: PagingStateException |
8 | _: UnsupportedFeatureException => true
9 | case _ => false
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/jdk8/Implicits.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.jdk8
2 |
3 | import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
4 | import CassFormatEncoder.transCassFormatEncoder
5 | import CassFormatDecoder.safeConvertCassFormatDecoder
6 | import java.time.Instant
7 |
8 | import com.datastax.driver.core.DataType
9 | import com.google.common.reflect.TypeToken
10 |
11 | object Implicits {
12 | implicit val instantEncoder: CassFormatEncoder[Instant] = transCassFormatEncoder(DataType.timestamp, java.util.Date.from)
13 | implicit val instantDecoder: CassFormatDecoder[Instant] = safeConvertCassFormatDecoder[Instant, java.util.Date](TypeToken.of(classOf[java.util.Date]), _.toInstant, _ getDate _, _ getDate _)
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/joda/Implicits.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.joda
2 |
3 | import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
4 | import CassFormatEncoder.transCassFormatEncoder
5 | import CassFormatDecoder.safeConvertCassFormatDecoder
6 | import com.datastax.driver.core.DataType
7 | import com.google.common.reflect.TypeToken
8 | import org.joda.time.Instant
9 |
10 | object Implicits {
11 | implicit val instantEncoder: CassFormatEncoder[Instant] =
12 | transCassFormatEncoder(DataType.timestamp, i => new java.util.Date(i.getMillis))
13 | implicit val instantDecoder: CassFormatDecoder[Instant] =
14 | safeConvertCassFormatDecoder[Instant, java.util.Date](TypeToken.of(classOf[java.util.Date]), new Instant(_), _ getDate _, _ getDate _)
15 | }
16 |
--------------------------------------------------------------------------------
/src/main/scala_cass21/com/weather/scalacass/scsession/SCStatementVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | trait SCUpdateStatementVersionSpecific { this: SCUpdateStatement =>
4 | }
5 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/CassFormatDecoderVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ Row, TupleValue }
4 | import com.google.common.reflect.TypeToken
5 |
6 | object CassFormatDecoderVersionSpecific extends CassFormatDecoderVersionSpecific {
7 | def codecCassFormatDecoder[T <: AnyRef](_typeToken: TypeToken[T]) = new CassFormatDecoder[T] {
8 | type From = T
9 | val typeToken = _typeToken
10 | def f2t(f: From) = Right(f)
11 | def extract(r: Row, name: String) = r get (name, typeToken)
12 | def tupleExtract(tup: TupleValue, pos: Int) = tup get (pos, typeToken)
13 | }
14 | }
15 | trait CassFormatDecoderVersionSpecific extends LowPriorityCassFormatDecoder {
16 | import CassFormatDecoder.{ sameTypeCassFormatDecoder, safeConvertCassFormatDecoder }
17 | implicit val dateFormat: CassFormatDecoder[java.util.Date] =
18 | sameTypeCassFormatDecoder[java.util.Date](TypeToken.of(classOf[java.util.Date]), _ getTimestamp _, _ getTimestamp _)
19 | implicit val datastaxLocalDateFormat: CassFormatDecoder[com.datastax.driver.core.LocalDate] =
20 | sameTypeCassFormatDecoder[com.datastax.driver.core.LocalDate](TypeToken.of(classOf[com.datastax.driver.core.LocalDate]), _ getDate _, _ getDate _)
21 | implicit val timeFormat: CassFormatDecoder[Time] = safeConvertCassFormatDecoder[Time, java.lang.Long](TypeToken.of(classOf[java.lang.Long]), Time(_), _ getTime _, _ getTime _)
22 | }
23 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/CassFormatEncoderVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ Cluster, DataType, TupleValue }
4 |
5 | trait LowPriorityCassFormatEncoderVersionSpecific {
6 | implicit def tupleFormat[TUP <: Product](implicit cluster: Cluster, underlying: TupleCassFormatEncoder[TUP]): CassFormatEncoder[TUP] = new CassFormatEncoder[TUP] {
7 | type From = TupleValue
8 | val cassDataType = cluster.getMetadata.newTupleType(underlying.dataTypes: _*)
9 | def encode(f: TUP): Result[From] = underlying.encode(f).right.map(ar => cassDataType.newValue(ar: _*))
10 | }
11 | }
12 | trait CassFormatEncoderVersionSpecific extends LowPriorityCassFormatEncoderVersionSpecific {
13 | import CassFormatEncoder.{ sameTypeCassFormatEncoder, transCassFormatEncoder }
14 |
15 | implicit val dateFormat: CassFormatEncoder[java.util.Date] = sameTypeCassFormatEncoder(DataType.timestamp)
16 | implicit val datastaxLocalDateFormat: CassFormatEncoder[com.datastax.driver.core.LocalDate] =
17 | sameTypeCassFormatEncoder(DataType.date)
18 | implicit val timeFormat: CassFormatEncoder[Time] = transCassFormatEncoder(DataType.time, time => Long.box(time.millis))
19 | }
20 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/NotRecoverableVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.exceptions.{ TransportException, QueryExecutionException, NoHostAvailableException, BusyConnectionException, ConnectionException, DriverInternalError, PagingStateException, UnsupportedFeatureException, UnsupportedProtocolVersionException }
4 |
5 | trait NotRecoverableVersionSpecific {
6 | def apply(t: Throwable) = t match {
7 | case _: TransportException | _: QueryExecutionException | _: NoHostAvailableException |
8 | _: BusyConnectionException | _: ConnectionException | _: DriverInternalError |
9 | _: PagingStateException | _: UnsupportedFeatureException |
10 | _: UnsupportedProtocolVersionException => true
11 | case _ => false
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/Time.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | final case class Time(millis: Long)
4 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/jdk8/Implicits.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.jdk8
2 |
3 | import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
4 | import com.weather.scalacass.CassFormatDecoderVersionSpecific.codecCassFormatDecoder
5 | import CassFormatEncoder.sameTypeCassFormatEncoder
6 | import java.time.{ Instant, LocalDate, LocalTime, ZonedDateTime }
7 |
8 | import com.datastax.driver.core.{ Cluster, DataType }
9 | import com.google.common.reflect.TypeToken
10 |
11 | object Implicits {
12 | implicit val timeEncoder: CassFormatEncoder[LocalTime] = sameTypeCassFormatEncoder(DataType.time)
13 | implicit val timeDecoder: CassFormatDecoder[LocalTime] = codecCassFormatDecoder(TypeToken.of(classOf[LocalTime]))
14 |
15 | implicit val dateEncoder: CassFormatEncoder[LocalDate] = sameTypeCassFormatEncoder(DataType.date)
16 | implicit val dateDecoder: CassFormatDecoder[LocalDate] = codecCassFormatDecoder(TypeToken.of(classOf[LocalDate]))
17 |
18 | implicit val instantEncoder: CassFormatEncoder[Instant] = sameTypeCassFormatEncoder(DataType.timestamp)
19 | implicit val instantDecoder: CassFormatDecoder[Instant] = codecCassFormatDecoder(TypeToken.of(classOf[Instant]))
20 |
21 | implicit def zonedDateTimeEncoder(implicit cluster: Cluster): CassFormatEncoder[ZonedDateTime] =
22 | sameTypeCassFormatEncoder(cluster.getMetadata.newTupleType(DataType.timestamp, DataType.varchar))
23 | implicit val zonedDateTimeDecoder: CassFormatDecoder[ZonedDateTime] = codecCassFormatDecoder(TypeToken.of(classOf[ZonedDateTime]))
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/jdk8/package.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ Cluster, DataType }
4 | import com.datastax.driver.extras.codecs.jdk8.{ InstantCodec, LocalDateCodec, LocalTimeCodec, ZonedDateTimeCodec }
5 |
6 | package object jdk8 {
7 | def register(c: Cluster): Unit = {
8 | val tt = c.getMetadata.newTupleType(DataType.timestamp, DataType.varchar)
9 | c.getConfiguration.getCodecRegistry.register(new ZonedDateTimeCodec(tt), LocalDateCodec.instance, LocalTimeCodec.instance, InstantCodec.instance)
10 | (): Unit
11 | }
12 | }
13 |
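`register` must run once per `Cluster` before the `Implicits` above are usable, since decoding goes through the driver's extras codecs (including the tuple-backed `ZonedDateTimeCodec` built here). A sketch of the intended wiring; the contact point, keyspace, and table are made up:

    import com.datastax.driver.core.Cluster
    import com.weather.scalacass.jdk8
    import com.weather.scalacass.jdk8.Implicits._
    import com.weather.scalacass.syntax._

    object Jdk8Wiring {
      val cluster = Cluster.builder().addContactPoint("127.0.0.1").build()
      jdk8.register(cluster) // install the java.time codecs on this cluster's registry

      // afterwards java.time values decode straight out of rows
      val row = cluster.connect().execute("SELECT ts FROM ks.tbl LIMIT 1").one()
      val ts: java.time.Instant = row.as[java.time.Instant]("ts")
    }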
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/joda/Implicits.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.joda
2 |
3 | import com.datastax.driver.core.{ Cluster, DataType }
4 | import com.google.common.reflect.TypeToken
5 | import com.weather.scalacass.{ CassFormatDecoder, CassFormatEncoder }
6 | import com.weather.scalacass.CassFormatEncoder.sameTypeCassFormatEncoder
7 | import com.weather.scalacass.CassFormatDecoderVersionSpecific.codecCassFormatDecoder
8 | import org.joda.time.{ DateTime, Instant, LocalDate, LocalTime }
9 |
10 | object Implicits {
11 | implicit val timeEncoder: CassFormatEncoder[LocalTime] = sameTypeCassFormatEncoder(DataType.time)
12 | implicit val timeDecoder: CassFormatDecoder[LocalTime] = codecCassFormatDecoder(TypeToken.of(classOf[LocalTime]))
13 |
14 | implicit val dateEncoder: CassFormatEncoder[LocalDate] = sameTypeCassFormatEncoder(DataType.date)
15 | implicit val dateDecoder: CassFormatDecoder[LocalDate] = codecCassFormatDecoder(TypeToken.of(classOf[LocalDate]))
16 |
17 | implicit val instantEncoder: CassFormatEncoder[Instant] = sameTypeCassFormatEncoder(DataType.timestamp)
18 | implicit val instantDecoder: CassFormatDecoder[Instant] = codecCassFormatDecoder(TypeToken.of(classOf[Instant]))
19 |
20 | implicit def timestampEncoder(implicit cluster: Cluster): CassFormatEncoder[DateTime] =
21 | sameTypeCassFormatEncoder(cluster.getMetadata.newTupleType(DataType.timestamp, DataType.varchar))
22 | implicit val timestampDecoder: CassFormatDecoder[DateTime] = codecCassFormatDecoder(TypeToken.of(classOf[DateTime]))
23 | }
24 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/joda/package.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.{ Cluster, DataType }
4 | import com.datastax.driver.extras.codecs.joda.{ DateTimeCodec, InstantCodec, LocalDateCodec, LocalTimeCodec }
5 |
6 | package object joda {
7 | def register(c: Cluster): Unit = {
8 | val tt = c.getMetadata.newTupleType(DataType.timestamp, DataType.varchar)
9 | c.getConfiguration.getCodecRegistry.register(new DateTimeCodec(tt), LocalDateCodec.instance, LocalTimeCodec.instance, InstantCodec.instance)
10 | (): Unit
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/src/main/scala_cass3/com/weather/scalacass/scsession/SCStatementVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | import QueryBuildingBlock.If
4 |
5 | trait SCUpdateStatementVersionSpecific { this: SCUpdateStatement =>
6 | def ifNotExists: SCUpdateStatement = copy(ifBlock = If.IfNotExists)
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |
3 |   <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
4 |     <encoder>
5 |       <pattern>
6 |         %d{HH:mm:ss.SSS} [%thread] %-5level %logger{5} - %msg%n
7 |       </pattern>
8 |     </encoder>
9 |   </appender>
10 |
11 |   <root level="INFO">
12 |     <appender-ref ref="STDOUT" />
13 |   </root>
14 | </configuration>
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/CaseClassUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.weather.scalacass.util.CassandraWithTableTester
4 | import syntax._
5 | import org.scalatest.OptionValues
6 |
7 | class CaseClassUnitTests extends CassandraWithTableTester("testDB", "testTable", List("str varchar", "str2 ascii", "b blob",
8 | "d decimal", "f float", "net inet", "tid timeuuid", "vi varint", "i int", "bi bigint", "bool boolean", "dub double",
9 | "l list", "m map", "s set", "ts timestamp", "id uuid", "sblob set"), List("str")) with OptionValues {
10 | case class Everything(str: String, d: BigDecimal, f: Float, net: java.net.InetAddress, l: Option[List[String]])
11 | case class Everything2(str2: String, d: BigDecimal, f: Float, net: java.net.InetAddress, l: Option[List[String]])
12 |
13 | "case class with Everything" should "materialize" in {
14 | val e = Everything("asdf", BigDecimal(0), 12.0f, java.net.InetAddress.getByName("localhost"), None)
15 | val e2 = Everything2(e.str, e.d, e.f, e.net, e.l)
16 | insert(Seq(("str", e.str), ("d", e.d.underlying), ("f", Float.box(e.f)), ("net", e.net)))
17 | getOne.as[Everything] shouldBe e
18 | getOne.getAs[Everything] shouldBe Some(e)
19 | getOne.getOrElse(e.copy(str = "asdf2")) shouldBe e
20 | getOne.getOrElse(e2) shouldBe e2
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/ConsistencyLevelUnitTest.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.ConsistencyLevel
4 | import com.weather.scalacass.scsession.{ SCBatchStatement, SCStatement }, SCStatement.RightBiasedEither
5 | import com.weather.scalacass.util.CassandraWithTableTester
6 | import org.scalatest.{ Assertion, OptionValues }
7 |
8 | object ConsistencyLevelUnitTest {
9 | val db = "actionsdb"
10 | val table = "actionstable"
11 | }
12 |
13 | class ConsistencyLevelUnitTest extends CassandraWithTableTester(ConsistencyLevelUnitTest.db, ConsistencyLevelUnitTest.table,
14 | List("str varchar", "otherstr varchar", "d double"), List("str")) with OptionValues {
15 | import ConsistencyLevelUnitTest.{ db, table }
16 | lazy val ss = ScalaSession(ConsistencyLevelUnitTest.db)(client.session)
17 |
18 | case class Query(str: String)
19 | case class Insert(str: String, otherstr: String, d: Double)
20 | case class Update(otherstr: String, d: Double)
21 |
22 | val insertValue = Insert("str", "otherstr", 1234.0)
23 | val queryValue = Query(insertValue.str)
24 | val updateValue = Update("updatedStr", 4321.0)
25 |
26 | def checkConsistency[T <: SCStatement[_]](statement: T, clOpt: Option[ConsistencyLevel]): Assertion = {
27 | clOpt match {
28 |       case Some(cl) => statement.toString should include(s"<CONSISTENCY $cl>")
29 |       case None     => statement.toString should not include "<CONSISTENCY"
30 |     }
31 |   }
32 |
33 |   def fullCheck[T <: SCStatement[_]](statement: T)(
34 |     plusConsistency: (T, ConsistencyLevel) => T, minusConsistency: T => T, cl: ConsistencyLevel
35 |   ): Assertion = {
36 | val statementWithConsistency = plusConsistency(statement, cl)
37 | val statementWithNoConsistency = minusConsistency(statement)
38 |
39 | checkConsistency(statement, None)
40 | checkConsistency(statementWithConsistency, Some(cl))
41 | checkConsistency(statement, None)
42 | checkConsistency(statementWithConsistency, Some(cl))
43 | checkConsistency(statementWithNoConsistency, None)
44 | }
45 |
46 | "setting consistency" should "work with inserts" in {
47 | fullCheck(ss.insert(table, insertValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.ONE)
48 | }
49 |
50 | it should "work with updates" in {
51 | fullCheck(ss.update(table, updateValue, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_ONE)
52 | }
53 |
54 | it should "work with selects" in {
55 | fullCheck(ss.selectStar(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
56 | fullCheck(ss.select[Update](table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
57 | fullCheck(ss.selectOneStar(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
58 | fullCheck(ss.selectOne[Update](table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
59 | }
60 |
61 | it should "work with deletes" in {
62 | fullCheck(ss.deleteRow(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.ANY)
63 | }
64 |
65 | it should "work with raw" in {
66 | fullCheck(ss.rawStatement(s"INSERT INTO $db.$table (str, otherstr, d) VALUES (?, ?, ?)"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_QUORUM)
67 | fullCheck(ss.rawSelectOne(s"SELECT * FROM $db.$table WHERE str=? LIMIT 1"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_SERIAL)
68 | fullCheck(ss.rawSelect(s"SELECT * FROM $db.$table WHERE str=?"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_SERIAL)
69 | }
70 |
71 | it should "work with batches" in {
72 | def checkConsistencyBatch(statement: SCBatchStatement, clOpt: Option[ConsistencyLevel]): Assertion = {
73 | clOpt match {
74 |         case Some(cl) => statement.toString should include(s"<CONSISTENCY $cl>")
75 |         case None     => statement.toString should not include "<CONSISTENCY"
76 |       }
77 |     }
78 |
79 |     val statement = ss.batchOf(ss.insert(table, insertValue))
80 |     checkConsistencyBatch(statement, None)
81 |     checkConsistencyBatch(statement.consistency(ConsistencyLevel.EACH_QUORUM), Some(ConsistencyLevel.EACH_QUORUM))
82 |     checkConsistencyBatch(statement.consistency(ConsistencyLevel.EACH_QUORUM).defaultConsistency, None)
83 |   }
84 | }
85 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/ForProductTest.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.weather.scalacass.scsession.ActionUnitTests
4 | import syntax._
5 |
6 | case class CustomTable(myStr: String, myLong: Long, someiValue: Int)
7 | object CustomTable {
8 |   implicit val encoder: CCCassFormatEncoder[CustomTable] = CCCassFormatEncoder.forProduct3("str", "l", "i")(ct => (ct.myStr, ct.myLong, ct.someiValue))
9 | implicit val decoder: CCCassFormatDecoder[CustomTable] = CCCassFormatDecoder.forProduct3("str", "l", "i")((myStr: String, myLong: Long, someiValue: Int) => CustomTable(myStr, myLong, someiValue))
10 | }
11 |
12 | case class CustomSelect(myStr: String)
13 | object CustomSelect {
14 | implicit val encoder: CCCassFormatEncoder[CustomSelect] = CCCassFormatEncoder.forProduct1("str")(cs => cs.myStr)
15 | implicit val decoder: CCCassFormatDecoder[CustomSelect] = CCCassFormatDecoder.forProduct1("str")((myStr: String) => CustomSelect(myStr))
16 | }
17 |
18 | class ForProductTest extends ActionUnitTests {
19 | "forProduct encoder/decoder" should "work even with different names" in {
20 | val row = CustomTable("mystr", 1234L, 4321)
21 | val insertStatement = ss.insert(table, row)
22 | insertStatement.execute()
23 |
24 | val selectStatement = ss.selectOneStar(table, CustomSelect("mystr"))
25 | selectStatement.execute().right.value.value.as[CustomTable] shouldBe row
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/PerfTest.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import org.scalatest.Tag
4 | import com.weather.scalacass.util.CassandraUnitTester
5 | import syntax._
6 |
7 | object LongRunning extends Tag("LongRunning")
8 |
9 | class PerfTest extends CassandraUnitTester {
10 | val db = "perfdb"
11 | val table = "perftable"
12 |
13 | ignore /* "string repeats" */ should "be decent" taggedAs LongRunning in {
14 | val th = ichi.bench.Thyme.warmed(verbose = print)
15 | session.execute(s"CREATE KEYSPACE $db WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1};")
16 | session.execute(s"CREATE TABLE $db.$table (str varchar, str2 varchar, str3 varchar, str4 varchar, PRIMARY KEY ((str)))")
17 | def n = java.util.UUID.randomUUID.toString
18 | session.execute(s"INSERT INTO $db.$table (str, str2, str3, str4) VALUES (?,?,?,?)", n, n, n, n)
19 | val row = session.execute(s"SELECT * FROM $db.$table").one()
20 |
21 | th.pbenchOffWarm(title = "compare implicit and native get")(th.Warm(List.fill(100000)(row.as[String]("str"))), 2048, "withImplicit")(th.Warm(List.fill(100000)(if (row.isNull("str")) throw new IllegalArgumentException(s"""Cassandra: "str" was not defined in ${row.getColumnDefinitions.getTable("str")}""") else row.getString("str"))), 2048, "native")
22 |
23 | th.pbenchOffWarm(title = "compare implicit and native getAs")(th.Warm(List.fill(100000)(row.getAs[String]("str"))), 2048, "with implicit")(th.Warm(List.fill(100000)(if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None)), 2048, "native")
24 |
25 | case class Strings(str: String, str2: String, str3: String, str4: Option[String])
26 | def g(name: String) = if (row.isNull(name)) throw new IllegalArgumentException(s"""Cassandra: "$name" was not defined in ${row.getColumnDefinitions.getTable(name)}""") else row.getString(name)
27 | th.pbenchOffWarm(title = "compare implicit and native case class as")(th.Warm(List.fill(100000)(row.as[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(Strings(g("str"), g("str2"), g("str3"), if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None))), 2048, "native")
28 |
29 | def fAs() = {
30 | implicit val c: CCCassFormatDecoder[Strings] = shapeless.cachedImplicit
31 | th.pbenchOffWarm(title = "compare implicit and native case class as with cachedImplicit")(th.Warm(List.fill(100000)(row.as[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(Strings(g("str"), g("str2"), g("str3"), if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None))), 2048, "native")
32 | }
33 |
34 | fAs()
35 |
36 | def ga(name: String) = if (row.getColumnDefinitions.contains(name) && !row.isNull(name)) Some(row.getString(name)) else None
37 | def getAs = for {
38 | s1 <- ga("str")
39 | s2 <- ga("str2")
40 | s3 <- ga("str3")
41 | s4 = ga("str4")
42 | } yield Strings(s1, s2, s3, s4)
43 | th.pbenchOffWarm(title = "compare implicit and native case class getAs")(th.Warm(List.fill(100000)(row.getAs[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(getAs)), 2048, "native")
44 |
45 | def fgetAs() = {
46 | implicit val c: CCCassFormatDecoder[Strings] = shapeless.cachedImplicit
47 | th.pbenchOffWarm(title = "compare implicit and native case class getAs with cachedImplicit")(th.Warm(List.fill(100000)(row.getAs[Strings])), 2048, "with cachedImplicit")(th.Warm(List.fill(100000)(getAs)), 2038, "native")
48 | }
49 |
50 | fgetAs()
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/ScalaCassUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.weather.scalacass.ScalaCassUnitTestsVersionSpecific.BadTypeException
4 | import org.scalatest.OptionValues
5 | import com.weather.scalacass.util.CassandraWithTableTester
6 | import com.datastax.driver.core.exceptions.InvalidTypeException
7 | import syntax._
8 |
9 | abstract class ScalaCassUnitTests extends CassandraWithTableTester("testDB", "testTable", ScalaCassUnitTestsVersionSpecific.extraHeaders ::: List("str varchar", "str2 ascii", "b blob",
10 | "d decimal", "f float", "net inet", "tid timeuuid", "vi varint", "i int", "bi bigint", "bool boolean", "dub double",
11 | "l list", "m map", "s set", "id uuid", "sblob set, tup tuple", "nest map>>"), List("str")) with OptionValues {
12 | def testType[GoodType : CassFormatDecoder, BadType : CassFormatDecoder](k: String, v: GoodType, default: GoodType, testCC: Boolean = true)(implicit goodCF: CassFormatEncoder[GoodType]) = {
13 | val args = {
14 | val converted = goodCF.encode(v).fold(throw _, identity).asInstanceOf[AnyRef]
15 | if (k == "str") Seq((k, converted)) else Seq((k, converted), ("str", "asdf"))
16 | }
17 | insert(args)
18 | val res = getOne
19 | k match {
20 | case "sblob" =>
21 | val known = v.asInstanceOf[Iterable[Array[Byte]]].head
22 | res.as[GoodType](k).asInstanceOf[Iterable[Array[Byte]]].head should contain theSameElementsInOrderAs known
23 | res.getAs[GoodType](k).map(_.asInstanceOf[Iterable[Array[Byte]]].head).value should contain theSameElementsInOrderAs known
24 |
25 | res.getOrElse(k, default).asInstanceOf[Iterable[Array[Byte]]].head should contain theSameElementsInOrderAs known
26 | an[IllegalArgumentException] should be thrownBy res.getOrElse(s"not$k", default).asInstanceOf[Iterable[Array[Byte]]].head
27 |
28 | res.attemptAs[GoodType](k).right.toOption.map(_.asInstanceOf[Iterable[Array[Byte]]].head).value should contain theSameElementsInOrderAs known
29 | case _ =>
30 | res.as[GoodType](k) shouldBe v
31 | res.getAs[GoodType](k).value shouldBe v
32 | res.getOrElse(k, default) shouldBe v
33 | an[IllegalArgumentException] should be thrownBy res.getOrElse(s"not$k", default)
34 | res.attemptAs[GoodType](k).right.value shouldBe v
35 | }
36 |
37 | an[IllegalArgumentException] should be thrownBy res.as[GoodType](s"not$k")
38 | a[BadTypeException] should be thrownBy res.as[BadType](k)
39 | an[IllegalArgumentException] should be thrownBy res.as[BadType](s"not$k")
40 |
41 | an[IllegalArgumentException] should be thrownBy res.getAs[GoodType](s"not$k")
42 | a[BadTypeException] should be thrownBy res.getAs[BadType](k)
43 | an[IllegalArgumentException] should be thrownBy res.getAs[BadType](s"not$k")
44 |
45 | res.attemptAs[GoodType](s"not$k").left.value shouldBe an[IllegalArgumentException]
46 | res.attemptAs[BadType](k).left.value shouldBe a[BadTypeException]
47 | res.attemptAs[BadType](s"not$k").left.value shouldBe an[IllegalArgumentException]
48 |
49 | if (testCC) {
50 | case class TestCC(pkField: String, refField: GoodType)
51 | case class QueryCC(pkField: String)
52 | val ss = new ScalaSession(dbName)
53 | val tname = s"testdb${scala.util.Random.alphanumeric.take(12).mkString}"
54 | ss.createTable[TestCC](tname, 1, 0)(CCCassFormatEncoder[TestCC]).execute()
55 | val t1 = TestCC("t1", v)
56 | val q1 = QueryCC(t1.pkField)
57 | ss.insert(tname, t1)(CCCassFormatEncoder[TestCC]).execute()
58 | k match {
59 | case "b" =>
60 | ss.selectOneStar(tname, ScalaSession.NoQuery()).execute().right.toOption.flatten.flatMap(_.getAs[TestCC]).map(_.refField.asInstanceOf[Array[Byte]]).value should contain theSameElementsInOrderAs t1.refField.asInstanceOf[Array[Byte]]
61 | case "sblob" =>
62 | ss.selectOneStar(tname, ScalaSession.NoQuery()).execute().right.toOption.flatten.flatMap(_.getAs[TestCC]).flatMap(_.refField.asInstanceOf[Set[Array[Byte]]].headOption).value should contain theSameElementsInOrderAs t1.refField.asInstanceOf[Set[Array[Byte]]].head
63 | case _ =>
64 | ss.selectOneStar(tname, q1).execute().right.toOption.flatten.flatMap(_.getAs[TestCC]).value shouldBe t1
65 | }
66 | ss.delete[ScalaSession.NoQuery](tname, q1).execute()
67 | ss.select[ScalaSession.Star](tname, q1).execute().right.value.toList.map(_.as[TestCC]) shouldBe empty
68 | ss.dropTable(tname).execute()
69 | }
70 | }
71 | }
72 | class ScalaCassUnitTestsAll extends ScalaCassUnitTests with ScalaCassUnitTestsVersionSpecific {
73 | "strings" should "be extracted correctly" in testType[String, Int]("str", "asdf", "qwerty")
74 | "ints" should "be extracted correctly" in testType[Int, String]("i", 1234, 9876)
75 | "bigints" should "be extracted correctly" in testType[Long, String]("bi", 1234, 9876)
76 | "boolean" should "be extracted correctly" in testType[Boolean, Int]("bool", true, false)
77 | "double" should "be extracted correctly" in testType[Double, String]("dub", 123.4, 987.6)
78 | "list" should "be extracted correctly (wrong basic)" in testType[List[String], String]("l", List("as", "df"), List("fd", "sa"))
79 | "list" should "be extracted correctly (wrong type param)" in testType[List[String], List[Int]]("l", List("as", "df"), List("fd", "sa"))
80 | "map" should "be extracted correctly (wrong basic)" in testType[Map[String, Long], String]("m", Map("asdf" -> 10L), Map("fdsa" -> -10L))
81 | "map" should "be extracted correctly (wrong 1st type param)" in testType[Map[String, Long], Map[Long, Long]]("m", Map("asdf" -> 10L), Map("fdsa" -> -10L))
82 | "map" should "be extracted correctly (wrong 2nd type param)" in testType[Map[String, Long], Map[String, Int]]("m", Map("asdf" -> 10L), Map("fdsa" -> -10L))
83 | // for the moment, createTable does not work because the nested type needs to be frozen, which is currently not possible with the library.
84 | // this is a low-use case, and will eventually be fixed by introducing a `Frozen` case class that will add that in
85 | "map>" should "be extracted correctly" in testType[Map[String, Set[Int]], Map[String, Int]]("nest", Map("asdf" -> Set(1)), Map("fdsa" -> Set(2)), false)
86 | "set" should "be extracted correctly (wrong basic)" in testType[Set[Double], String]("s", Set(123.4), Set(987.6))
87 | "set" should "be extracted correctly (wrong type param)" in testType[Set[Double], Set[String]]("s", Set(123.4), Set(987.6))
88 | "uuid" should "be extracted correctly" in testType[java.util.UUID, String]("id", java.util.UUID.randomUUID, java.util.UUID.randomUUID)
89 | "ascii" should "be extracted correctly" in testType[String, Int]("str2", "asdf", "fdsa")
90 | "blob" should "be extracted correctly (wrong basic)" in testType[Array[Byte], String]("b", "asdf".getBytes, "fdsa".getBytes)
91 | "inet" should "be extracted correctly" in testType[java.net.InetAddress, String]("net", java.net.InetAddress.getByName("localhost"), java.net.InetAddress.getByName("192.168.1.2"))
92 | "decimal" should "be extracted correctly" in testType[BigDecimal, Double]("d", BigDecimal(3.0), BigDecimal(2.0))
93 | "varint" should "be extracted correctly" in testType[BigInt, Long]("vi", 3, 2)
94 | "float" should "be extracted correctly" in testType[Float, Double]("f", 123.4f, 987.6f)
95 | "set" should "be extracted correctly" in testType[Set[Array[Byte]], Set[Double]]("sblob", Set("asdf".getBytes), Set("fdsa".getBytes))
96 | "tup" should "be extracted correctly (wrong basic)" in testType[(Int, String), String]("tup", (4, "fdsa"), (5, "asas"))
97 | "tup" should "be extracted correctly (wrong 1st type)" in testType[(Int, String), (String, String)]("tup", (4, "fdsa"), (5, "qqwer"))
98 | "tup" should "be extracted correctly (wrong arity)" in {
99 | val goodValue = (4, "fdsa")
100 | val args = Seq(("tup", implicitly[CassFormatEncoder[(Int, String)]].encode(goodValue).fold(throw _, identity).asInstanceOf[AnyRef]), ("str", "asdf"))
101 | insert(args)
102 | val res = getOne
103 | res.as[(Int, String)]("tup") shouldBe goodValue
104 | res.getAs[(Int, String)]("tup").value shouldBe goodValue
105 | res.getOrElse("tup", (5, "qqwe")) shouldBe goodValue
106 | an[IllegalArgumentException] should be thrownBy res.getOrElse("nottup", (5, "qqwe"))
107 |
108 | an[IllegalArgumentException] should be thrownBy res.as[(Int, String)]("nottup")
109 | an[InvalidTypeException] should be thrownBy res.as[(Int, String, String)]("tup")
110 | an[InvalidTypeException] should be thrownBy res.as[Tuple1[Int]]("tup")
111 | a[BadTypeException] should be thrownBy res.as[(String, Int)]("tup")
112 | }
113 | "counter" should "be extracted correctly" in {
114 | val pKey = "str"
115 | val k = "count"
116 | val counterTable = "counterTable"
117 | client.session.execute(s"CREATE TABLE $dbName.$counterTable ($pKey varchar, $k counter, PRIMARY KEY (($pKey)))")
118 | client.session.execute(s"UPDATE $dbName.$counterTable SET $k = $k + ? WHERE $pKey='asdf'", Long.box(1L))
119 |
120 | val res = client.session.execute(s"SELECT * FROM $dbName.$counterTable").one()
121 | res.as[Long](k) shouldBe 1
122 | an[IllegalArgumentException] should be thrownBy res.as[Long](s"not$k")
123 | a[BadTypeException] should be thrownBy res.as[String](k)
124 | an[IllegalArgumentException] should be thrownBy res.as[String](s"not$k")
125 |
126 | res.getAs[Long](k).value shouldBe 1
127 | an[IllegalArgumentException] should be thrownBy res.getAs[Long](s"not$k")
128 | a[BadTypeException] should be thrownBy res.getAs[String](k)
129 | an[IllegalArgumentException] should be thrownBy res.getAs[String](s"not$k")
130 |
131 | case class CounterCC(str: String, count: Long)
132 | case class QueryCC(str: String)
133 | val tname = "derivedtable"
134 | val ss = ScalaSession(dbName)
135 | ss.createTable[CounterCC](tname, 1, 0).execute()
136 | val t1 = CounterCC("t1", 1)
137 | val q1 = QueryCC(t1.str)
138 | ss.insert(tname, t1).execute()
139 | ss.selectOneStar(tname, q1).execute().right.toOption.flatten.value.as[CounterCC] shouldBe t1
140 | ss.delete[ScalaSession.NoQuery](tname, q1).execute()
141 | ss.select[ScalaSession.Star](tname, q1).execute().right.value.toList shouldBe empty
142 | }
143 | }
144 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/UpdateBehaviorTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | import com.datastax.driver.core.ResultSet
4 | import com.weather.scalacass.ScalaSession.UpdateBehavior
5 | import com.weather.scalacass.util.CassandraWithTableTester
6 | import org.scalatest.OptionValues
7 | import com.weather.scalacass.syntax._
8 |
9 | object UpdateBehaviorTests {
10 | val db = "testDB"
11 | val table = "testTable"
12 | }
13 | class UpdateBehaviorTests extends CassandraWithTableTester(UpdateBehaviorTests.db, UpdateBehaviorTests.table,
14 | List("str varchar", "l list", "s set"),
15 | List("str")) with OptionValues {
16 | import UpdateBehaviorTests.table
17 | lazy val ss = ScalaSession(UpdateBehaviorTests.db)(client.session)
18 |
19 | case class Query(str: String)
20 | case class Insert(str: String, l: List[String], s: Set[Double])
21 | val baseStr = "some item"
22 | val base = Insert(baseStr, List("asdf"), Set(1.0))
23 | val baseQuery = Query(baseStr)
24 | def insertOne(i: Insert = base): Result[ResultSet] = ss.insert(table, i).execute()
25 |
26 | "explicit replacement" should "act as before" in {
27 | case class Replacing(l: UpdateBehavior.Replace[List, String], s: UpdateBehavior.Replace[Set, Double])
28 | val instance = Replacing(List("fdsa"), Set(2.0))
29 |
30 | insertOne()
31 | ss.update(table, instance, baseQuery).execute()
32 |
33 | val res = ss.selectOneStar(table, baseQuery).execute().right.toOption.flatten.value.as[Insert]
34 | res.str shouldBe baseStr
35 | res.l should contain theSameElementsAs instance.l.coll
36 | res.s should contain theSameElementsAs instance.s.coll
37 | }
38 |
39 | "implicit replacement" should "also act as before" in {
40 | case class ReplacingImplicit(l: List[String], s: Set[Double])
41 | val instance = ReplacingImplicit(List("fafa"), Set(3.0))
42 |
43 | insertOne()
44 | ss.update(table, instance, baseQuery).execute()
45 |
46 | val res = ss.selectOneStar(table, baseQuery).execute().right.toOption.flatten.value.as[Insert]
47 | res.str shouldBe baseStr
48 | res.l should contain theSameElementsAs instance.l
49 | res.s should contain theSameElementsAs instance.s
50 | }
51 |
52 | "add" should "combine the two entries" in {
53 | case class Adding(l: UpdateBehavior.Add[List, String], s: UpdateBehavior.Add[Set, Double])
54 | val instance = Adding(List("afaf"), Set(4.0))
55 |
56 | insertOne()
57 | ss.update(table, instance, baseQuery).execute()
58 |
59 | val res = ss.selectOneStar(table, baseQuery).execute().right.toOption.flatten.value.as[Insert]
60 | res.str shouldBe baseStr
61 | res.l should contain theSameElementsAs base.l ::: instance.l.coll
62 | res.s should contain theSameElementsAs base.s ++ instance.s.coll
63 | }
64 |
65 | "subtract" should "subtract from the original entry" in {
66 | case class Subtracting(l: UpdateBehavior.Subtract[List, String], s: UpdateBehavior.Subtract[Set, Double])
67 | val instance = Subtracting(List("another str"), Set(5.0))
68 |
69 | val expandedBase = base.copy(l = instance.l.coll ::: base.l, s = instance.s.coll ++ base.s)
70 | insertOne(expandedBase)
71 |
72 | val preres = ss.selectOneStar(table, baseQuery).execute().right.toOption.flatten.value.as[Insert]
73 | preres.str shouldBe baseStr
74 | preres.l should contain theSameElementsAs expandedBase.l
75 | preres.s should contain theSameElementsAs expandedBase.s
76 |
77 | ss.update(table, instance, baseQuery).execute()
78 |
79 | val res = ss.selectOneStar(table, baseQuery).execute().right.toOption.flatten.value.as[Insert]
80 | res.str shouldBe baseStr
81 | res.l should contain theSameElementsAs base.l
82 | res.s should contain theSameElementsAs base.s
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/scsession/ActionUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | import com.weather.scalacass.ScalaSession
4 | import com.weather.scalacass.util.CassandraUnitTester
5 |
6 | trait ActionUnitTests extends CassandraUnitTester {
7 | protected val keyspace = "mykeyspace"
8 | private var _table: String = _
9 | private var _ss: ScalaSession = _
10 |
11 | case class Table(str: String, l: Long, i: Option[Int])
12 |
13 | protected def table = _table
14 |
15 | protected def ss = _ss
16 |
17 | override def beforeAll(): Unit = {
18 | super.beforeAll()
19 | _ss = ScalaSession(keyspace)
20 | _ss.createKeyspace("replication = {'class': 'SimpleStrategy', 'replication_factor': 1}").execute()
21 | _table = "mytable" // s"mytable_${java.util.UUID.randomUUID.toString.take(5)}"
22 | ss.createTable[Table](table, 1, 0).execute()
23 | ()
24 | }
25 | override def beforeEach(): Unit = {
26 | super.beforeEach()
27 | ss.truncateTable(table).execute()
28 | ()
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/scsession/DeleteUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | import com.datastax.driver.core.exceptions.InvalidQueryException
4 | import com.weather.scalacass.{ Result, ScalaSession }
5 |
6 | import concurrent.ExecutionContext.Implicits.global
7 | import scala.concurrent.{ Await, ExecutionContext }
8 | import scala.concurrent.duration._
9 |
10 | class DeleteUnitTests extends ActionUnitTests {
11 | case class SelectiveDelete(i: Int)
12 | case class Query(str: String)
13 | case class IfS(l: Long)
14 |
15 | def executeAsync[T](q: SCStatement[T], shouldSucceed: Boolean = true)(implicit ec: ExecutionContext): Result[T] = {
16 | val res = Await.result(q.executeAsync()(ec), 3.seconds)
17 | res.isRight shouldBe shouldSucceed
18 | res
19 | }
20 |
21 | "delete" should "use selective columns" in {
22 | val query = ss.delete[SelectiveDelete](table, Query("asdf"))
23 | println(query.getStringRepr)
24 | println(executeAsync(query))
25 | }
26 | it should "not use selective columns" in {
27 | val query = ss.delete[ScalaSession.NoQuery](table, Query("asdf"))
28 | println(query.getStringRepr)
29 | println(executeAsync(query))
30 | }
31 | it should "use timestamp" in {
32 | val query = ss.delete[ScalaSession.NoQuery](table, Query("asdf")).usingTimestamp(12345L)
33 | println(query.getStringRepr)
34 | println(executeAsync(query))
35 | }
36 | it should "use if exists" in {
37 | val query = ss.delete[ScalaSession.NoQuery](table, Query("asdf")).ifExists
38 | println(query.getStringRepr)
39 | println(executeAsync(query))
40 | }
41 | it should "use if statement" in {
42 | val query = ss.delete[ScalaSession.NoQuery](table, Query("asdf")).`if`(IfS(1234L))
43 | println(query.getStringRepr)
44 | println(executeAsync(query))
45 | }
46 | it should "use everything" in {
47 | val query = ss.delete[SelectiveDelete](table, Query("asdf")).`if`(IfS(1234L)).usingTimestamp(12345L)
48 | val executed = query.executeAsync()
49 | Await.ready(executed, 3.seconds)
50 | executed.value.value.failure.exception shouldBe an[InvalidQueryException]
51 | query.execute().left.value shouldBe an[InvalidQueryException]
52 |
53 | println(s"broke: ${query.getStringRepr}")
54 | val fixedQuery = query.noTimestamp
55 | println(fixedQuery.getStringRepr)
56 | println(fixedQuery.execute())
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/scsession/InsertUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | class InsertUnitTests extends ActionUnitTests {
4 | "insert" should "use IF NOT EXISTS" in {
5 | val query = ss.insert(table, Table("some str", 1234, None)).ifNotExists
6 | println(query.getStringRepr)
7 | println(query.execute().right.value)
8 | }
9 |
10 | it should "use TIMESTAMP" in {
11 | val query = ss.insert(table, Table("some str", 1234, Some(123))).usingTimestamp(System.currentTimeMillis)
12 | println(query.getStringRepr)
13 | println(query.execute().right.value)
14 | }
15 |
16 | it should "use TTL" in {
17 | val query = ss.insert(table, Table("some str", 1234, Some(123))).usingTTL(12345)
18 | println(query.getStringRepr)
19 | println(query.execute().right.value)
20 | }
21 |
22 | it should "use everything" in {
23 | val query = ss.insert(table, Table("some str", 1234, Some(123))).ifNotExists.usingTTL(12345)
24 | val query2 = ss.insert(table, Table("some str", 1234, Some(123))).usingTimestampNow.usingTTL(12345)
25 | println(query.getStringRepr)
26 | println(query.execute().right.value)
27 | println(query2.getStringRepr)
28 | println(query2.execute().right.value)
29 | }
30 |
31 | it should "insert where a row has a string with a $ in it" in {
32 | val query = ss.insert(table, Table("""{ "$regex": /yeppers/ }""", 1234, Some(123)))
33 | println(query.getStringRepr)
34 | println(query.execute().right.value)
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/scsession/SelectUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | import com.weather.scalacass.ScalaSession
4 |
5 | class SelectUnitTests extends ActionUnitTests {
6 | case class SelectiveSelect(i: Int)
7 | case class Query(str: String)
8 |
9 | "select" should "be selective" in {
10 | val query = ss.select[SelectiveSelect](table, Query("asdf"))
11 | println(query.getStringRepr.right.value)
12 | println(query.execute().right.value)
13 | }
14 | it should "not need to be selective" in {
15 | val query = ss.select[ScalaSession.Star](table, Query("asdf"))
16 | println(query.getStringRepr.right.value)
17 | println(query.execute().right.value)
18 | }
19 | it should "limit" in {
20 | val query = ss.select[ScalaSession.Star](table, Query("asdf")).limit(100)
21 | println(query.getStringRepr.right.value)
22 | println(query.execute().right.value)
23 | }
24 | it should "allow filtering" in {
25 | val query = ss.select[ScalaSession.Star](table, Query("asdf")).allowFiltering
26 | println(query.getStringRepr.right.value)
27 | println(query.execute().right.value)
28 | }
29 | it should "do everything" in {
30 | val query = ss.select[SelectiveSelect](table, Query("asdf")).limit(100).allowFiltering
31 | println(query.getStringRepr.right.value)
32 | println(query.execute().right.value)
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/scsession/UpdateUnitTests.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.scsession
2 |
3 | import com.datastax.driver.core.exceptions.InvalidQueryException
4 | import com.weather.scalacass.Result
5 |
6 | import concurrent.ExecutionContext.Implicits.global
7 | import scala.concurrent.{ Await, ExecutionContext }
8 | import scala.concurrent.duration._
9 |
10 | class UpdateUnitTests extends ActionUnitTests {
11 |
12 | case class Query(str: String)
13 | case class Update(l: Long, i: Option[Int])
14 | case class IfS(l: Long)
15 |
16 | def executeAsync[T](q: SCStatement[T], shouldSucceed: Boolean = true)(implicit ec: ExecutionContext): Result[T] = {
17 | val res = Await.result(q.executeAsync()(ec), 3.seconds)
18 | res.isRight shouldBe shouldSucceed
19 | res
20 | }
21 |
22 | "update" should "use IF EXISTS" in {
23 | val query = ss.update(table, Update(123, None), Query("asdf")).ifExists
24 | println(query.getStringRepr)
25 | println(executeAsync(query))
26 | }
27 | it should "use ttl" in {
28 | val query = ss.update(table, Update(123, None), Query("asdf")).usingTTL(1234)
29 | println(query.getStringRepr)
30 | println(executeAsync(query))
31 | }
32 | it should "use timestamp" in {
33 | val query = ss.update(table, Update(123, None), Query("asdf")).usingTimestamp(12345L)
34 | println(query.getStringRepr)
35 | println(executeAsync(query))
36 | }
37 | it should "use if statement" in {
38 | val query = ss.update(table, Update(123, Some(123)), Query("asdf")).`if`(IfS(123L))
39 | println(query.getStringRepr)
40 | println(executeAsync(query))
41 | }
42 | it should "combine all of them" in {
43 | val query = ss.update(table, Update(123, None), Query("asdf")).usingTTL(1234).`if`(IfS(123L)).usingTimestamp(12345L)
44 | val executed = query.executeAsync()
45 | Await.ready(executed, 3.seconds)
46 | executed.value.value.failure.exception shouldBe an[InvalidQueryException]
47 | query.execute().left.value shouldBe an[InvalidQueryException]
48 |
49 | println(s"broke: ${query.getStringRepr}")
50 | val fixedQuery = query.noTimestamp
51 | println(fixedQuery.getStringRepr)
52 | println(executeAsync(fixedQuery.noTimestamp))
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/util/CassandraClient.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | import com.datastax.driver.core.Cluster
4 |
5 | case class CassandraClient(hosts: List[String], port: Option[Int]) extends CassandraClientVersionSpecific {
6 | val cluster = {
7 | val c = Cluster.builder().addContactPoints(hosts: _*)
8 | port.foreach(c.withPort)
9 | clusterStartup(c)
10 | c.build()
11 | }
12 | val session = cluster.connect()
13 |
14 | def close() = cluster.close()
15 | }
16 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/util/CassandraTester.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | import org.scalatest.{ FlatSpec, Matchers, BeforeAndAfter, BeforeAndAfterAll, OptionValues, EitherValues, TryValues }
4 |
5 | abstract class CassandraTester extends FlatSpec with Matchers with BeforeAndAfter with BeforeAndAfterAll with OptionValues with EitherValues with TryValues {
6 | private[util] var _client: Option[CassandraClient] = _
7 |   def client = _client getOrElse sys.error("client must only be used after beforeAll. Did you override it?")
8 | implicit def cluster = client.cluster
9 | implicit def session = client.session
10 | }
11 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/util/CassandraUnitTester.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | import org.cassandraunit.utils.EmbeddedCassandraServerHelper
4 |
5 | abstract class CassandraUnitTester extends CassandraTester {
6 | override def beforeAll() = {
7 | super.beforeAll()
8 | EmbeddedCassandraServerHelper.startEmbeddedCassandra(EmbeddedCassandraServerHelper.CASSANDRA_RNDPORT_YML_FILE, 30000L)
9 | _client = Some(CassandraClient(List("localhost"), Some(EmbeddedCassandraServerHelper.getNativeTransportPort)))
10 | }
11 |
12 | override def afterAll() = {
13 | super.afterAll()
14 | EmbeddedCassandraServerHelper.cleanEmbeddedCassandra()
15 | }
16 |
17 | def beforeEach(): Unit = {}
18 | def afterEach(): Unit = {}
19 |
20 | before {
21 | beforeEach()
22 | }
23 |
24 | after {
25 | afterEach()
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/test/scala/com/weather/scalacass/util/CassandraWithTableTester.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | abstract class CassandraWithTableTester(val dbName: String, protected val tableName: String, tableColumns: List[String], primaryKeys: List[String]) extends CassandraUnitTester {
4 | override def beforeAll(): Unit = {
5 | super.beforeAll()
6 | client.session.execute(s"CREATE KEYSPACE $dbName WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1};")
7 | client.session.execute(s"CREATE TABLE $dbName.$tableName ${tableColumns.mkString("(", ", ", ",")} PRIMARY KEY ${primaryKeys.mkString("((", ", ", "))")})")
8 | ()
9 | }
10 | override def afterEach(): Unit = {
11 | client.session.execute(s"TRUNCATE TABLE $dbName.$tableName")
12 | super.afterEach()
13 | }
14 |
15 | protected def insert(pairs: Seq[(String, AnyRef)]) = {
16 | val (strs, objs) = pairs.unzip
17 | client.session.execute(s"INSERT INTO $dbName.$tableName ${strs.mkString("(", ",", ")")} VALUES ${objs.map(_ => "?").mkString("(", ",", ")")}", objs: _*)
18 | }
19 | protected def getOne = client.session.execute(s"SELECT * FROM $dbName.$tableName").one()
20 | }
21 |
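A sketch of a concrete suite built on the harness above; the suite, keyspace, table, and column names are hypothetical. The harness creates the keyspace and table once in beforeAll and truncates the table after each test, so every case starts from an empty table:

```scala
import com.weather.scalacass.util.CassandraWithTableTester

class ExampleTableTest extends CassandraWithTableTester(
  "exampledb", "users",
  tableColumns = List("name varchar", "age int"),
  primaryKeys = List("name")
) {
  "a row" should "round-trip through insert and getOne" in {
    // insert takes column-name/value pairs; values must be AnyRef
    insert(Seq("name" -> "alice", "age" -> Int.box(30)))
    getOne.getString("name") shouldBe "alice"
  }
}
```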
--------------------------------------------------------------------------------
/src/test/scala_cass21/com/weather/scalacass/ScalaCassUnitTestsVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | object ScalaCassUnitTestsVersionSpecific {
4 | type BadTypeException = com.datastax.driver.core.exceptions.InvalidTypeException
5 | val extraHeaders = List("ts timestamp")
6 | }
7 |
8 | trait ScalaCassUnitTestsVersionSpecific { this: ScalaCassUnitTests =>
9 | "timestamp (java util date)" should "be extracted correctly" in testType[java.util.Date, Int]("ts", new java.util.Date(56565L), new java.util.Date(65656L))
10 | }
11 |
12 | class JodaScalaCassUnitTests extends ScalaCassUnitTests {
13 | import com.weather.scalacass.joda.Implicits._
14 |
15 | "timestamp (joda instant)" should "be extracted correctly" in testType[org.joda.time.Instant, Int]("ts", org.joda.time.Instant.now, org.joda.time.Instant.now.plus(12345L))
16 | }
17 |
18 | class Jdk8ScalaCassUnitTests extends ScalaCassUnitTests {
19 | import com.weather.scalacass.jdk8.Implicits._
20 |
21 | "timestamp (jdk instant)" should "be extracted correctly" in testType[java.time.Instant, Int]("ts", java.time.Instant.now, java.time.Instant.now.plus(12345L, java.time.temporal.ChronoUnit.MILLIS))
22 | }
23 |
--------------------------------------------------------------------------------
/src/test/scala_cass21/com/weather/scalacass/util/CassandraClientVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | import com.datastax.driver.core.Cluster
4 |
5 | trait CassandraClientVersionSpecific {
6 | def clusterStartup(cb: Cluster.Builder): Cluster.Builder = cb
7 | }
8 |
--------------------------------------------------------------------------------
/src/test/scala_cass21/com/weather/scalacass/util/CassandraUnitInfo.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | object CassandraUnitInfo {
4 | val cassYaml = "cu-cassandra-rndport-with-fix-22.yaml"
5 | }
6 |
--------------------------------------------------------------------------------
/src/test/scala_cass3/com/weather/scalacass/ScalaCassUnitTestsVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass
2 |
3 | object ScalaCassUnitTestsVersionSpecific {
4 | type BadTypeException = com.datastax.driver.core.exceptions.CodecNotFoundException
5 | val extraHeaders = List("ts timestamp", "dt date", "t time", "specialdt tuple<timestamp,varchar>")
6 | }
7 |
8 | trait ScalaCassUnitTestsVersionSpecific { this: ScalaCassUnitTests =>
9 | "date (datastax date)" should "be extracted correctly" in testType[com.datastax.driver.core.LocalDate, Int]("dt", com.datastax.driver.core.LocalDate.fromDaysSinceEpoch(1000), com.datastax.driver.core.LocalDate.fromDaysSinceEpoch(10000))
10 | "time (long)" should "be extracted correctly" in testType[Time, Int]("t", Time(12345L), Time(54321L))
11 | "timestamp (java util date)" should "be extracted correctly" in testType[java.util.Date, Int]("ts", new java.util.Date(56565L), new java.util.Date(65656L))
12 | }
13 |
14 | class JodaScalaCassUnitTests extends ScalaCassUnitTests {
15 | override def beforeAll(): Unit = {
16 | super.beforeAll()
17 | com.weather.scalacass.joda.register(client.cluster)
18 | }
19 | import com.weather.scalacass.joda.Implicits._
20 |
21 | "date (joda date)" should "be extracted correctly" in testType[org.joda.time.LocalDate, Int]("dt", org.joda.time.LocalDate.now, org.joda.time.LocalDate.now.plusDays(1))
22 | "time (joda time)" should "be extracted correctly" in testType[org.joda.time.LocalTime, Int]("t", org.joda.time.LocalTime.MIDNIGHT, org.joda.time.LocalTime.MIDNIGHT.plusMinutes(4))
23 | "timestamp (joda instant)" should "be extracted correctly" in testType[org.joda.time.Instant, Int]("ts", org.joda.time.Instant.now, org.joda.time.Instant.now.plus(12345L))
24 | "datetime (joda datetime)" should "be extracted correctly" in testType[org.joda.time.DateTime, Int]("specialdt", org.joda.time.DateTime.now, org.joda.time.DateTime.now.plusHours(4))
25 | }
26 |
27 | class Jdk8ScalaCassUnitTests extends ScalaCassUnitTests {
28 | override def beforeAll(): Unit = {
29 | super.beforeAll()
30 | com.weather.scalacass.jdk8.register(client.cluster)
31 | }
32 | import com.weather.scalacass.jdk8.Implicits._
33 |
34 | "date (jdk8 date)" should "be extracted correctly" in testType[java.time.LocalDate, Int]("dt", java.time.LocalDate.now, java.time.LocalDate.now.plusDays(1))
35 | "time (jdk8 time)" should "be extracted correctly" in testType[java.time.LocalTime, Int]("t", java.time.LocalTime.NOON, java.time.LocalTime.MIDNIGHT)
36 | "timestamp (jdk8 instant)" should "be extracted correctly" in testType[java.time.Instant, Int]("ts", java.time.Instant.now, java.time.Instant.now.plusSeconds(56L))
37 | "datetime (jdk8 datetime)" should "be extracted correctly" in testType[java.time.ZonedDateTime, Int]("specialdt", java.time.ZonedDateTime.now, java.time.ZonedDateTime.now.plusHours(4))
38 | }
39 |
--------------------------------------------------------------------------------
/src/test/scala_cass3/com/weather/scalacass/util/CassandraClientVersionSpecific.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | import com.datastax.driver.core.{ Cluster, CodecRegistry }
4 | import com.datastax.driver.extras.codecs
5 |
6 | trait CassandraClientVersionSpecific {
7 | def clusterStartup(cb: Cluster.Builder): Cluster.Builder = {
8 | val registry = CodecRegistry.DEFAULT_INSTANCE
9 | registry.register(codecs.jdk8.InstantCodec.instance, codecs.jdk8.LocalDateCodec.instance,
10 | codecs.jdk8.LocalTimeCodec.instance, codecs.joda.InstantCodec.instance, codecs.joda.LocalDateCodec.instance,
11 | codecs.joda.LocalTimeCodec.instance)
12 | cb.withCodecRegistry(registry)
13 | }
14 | }
15 |
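What the codec registration above enables, shown as a hypothetical helper (table and column names are placeholders): jdk8 and joda temporal types can be extracted straight from a driver Row. Without InstantCodec registered, the `get` call below would fail with CodecNotFoundException:

```scala
import com.datastax.driver.core.Session

object CodecUsageExample {
  // reads a java.time.Instant directly; relies on jdk8.InstantCodec above
  def readInstant(session: Session): java.time.Instant =
    session.execute("SELECT ts FROM exampledb.events LIMIT 1")
      .one()
      .get("ts", classOf[java.time.Instant])
}
```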
--------------------------------------------------------------------------------
/src/test/scala_cass3/com/weather/scalacass/util/CassandraUnitInfo.scala:
--------------------------------------------------------------------------------
1 | package com.weather.scalacass.util
2 |
3 | object CassandraUnitInfo {
4 | val cassYaml = "cu-cassandra-rndport-with-fix-3.yaml"
5 | }
6 |
--------------------------------------------------------------------------------