├── .gitignore
├── .travis.yml
├── README.md
├── build.sbt
├── project
│   ├── build.properties
│   └── plugins.sbt
├── src
├── main
│ ├── resources
│ │ └── reference.conf
│ └── scala
│ │ └── org
│ │ └── eigengo
│ │ └── scalad
│ │ ├── experimental
│ │ ├── MongoNativeRestrictions.scala
│ │ └── querydsl.scala
│ │ └── mongo
│ │ ├── MongoAggregate.scala
│ │ ├── MongoCount.scala
│ │ ├── MongoCreate.scala
│ │ ├── MongoCreateOrUpdate.scala
│ │ ├── MongoDelete.scala
│ │ ├── MongoFind.scala
│ │ ├── MongoModify.scala
│ │ ├── MongoRead.scala
│ │ ├── MongoSearch.scala
│ │ ├── MongoUpdate.scala
│ │ ├── ProducerConsumer.scala
│ │ ├── package.scala
│ │ ├── sprayjson
│ │ ├── SprayMongo.scala
│ │ ├── dsl.scala
│ │ ├── marshalling.scala
│ │ └── serialisation.scala
│ │ └── support.scala
└── test
│ └── scala
│ └── org
│ └── eigengo
│ └── scalad
│ ├── experimental
│ └── RestrictionsSpec.scala
│ └── mongo
│ ├── mongodbtests.scala
│ └── sprayjson
│ ├── JsonDslSpec.scala
│ ├── PersistenceSpec.scala
│ ├── SerialisationSpec.scala
│ └── support.scala
└── version.sbt
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | ._.DS_Store*
3 | .metadata
4 | .project
5 | .classpath
6 | .settings
7 | gen
8 | **/*.swp
9 | **/*~.nib
10 | **/build/
11 | **/*.pbxuser
12 | **/*.perspective
13 | **/*.perspectivev3
14 | **/*.xcodeproj/xcuserdata/*
15 | **/*.xcodeproj/project.xcworkspace/xcuserdata/*
16 | **/target
17 | target
18 | *.iml
19 | project/*.ipr
20 | project/*.iml
21 | project/*.iws
22 | project/out
23 | project/*/target
24 | project/target
25 | project/*/bin
26 | project/*/build
27 | project/*.iml
28 | project/*/*.iml
29 | project/.idea
30 | project/.idea/*
31 | .idea
32 | .idea/*
33 | .idea/**/*
34 | .DS_Store
35 | project/.DS_Store
36 | project/*/.DS_Store
37 | tm.out
38 | tmlog*.log
39 | *.tm*.epoch
40 | out
41 | fabfile.py
42 | *.pyc
43 | *.swp
44 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # See http://about.travis-ci.org/docs/user/build-configuration/
2 | language: scala
3 |
4 | scala:
5 | - 2.10.1
6 |
7 | # Testing with OpenJDK7 as well just to be adventurous!
8 | jdk:
9 | - oraclejdk7
10 | - openjdk7
11 |
12 | # Enabling services by requiring MongoDB
13 | services:
14 | - mongodb
15 |
16 | # Custom notification settings
17 | notifications:
18 | email:
19 | recipients:
20 | - anirvanchakraborty@gmail.com
21 | - jan.machacek@gmail.com
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ScalaD [![Build Status](https://travis-ci.org/eigengo/scalad.png?branch=master)](https://travis-ci.org/eigengo/scalad)
2 |
3 | Reuse [Spray JSON](http://github.com/spray/spray-json/) formatters as serialisers for [MongoDB](http://www.mongodb.org) persistence in Scala, and get access to a useful CRUD for performing simple searches.
4 |
5 | ScalaD is an implicit heavy API: users are advised to re-read the Implicits chapter from [Odersky's Book](http://www.amazon.com/dp/0981531644) if feeling overwhelmed.
6 |
7 |
8 | When all relevant marshallers and mongo settings are implicitly in scope, using ScalaD is as simple as:
9 |
10 | ```scala
11 | val entity = ...
12 |
13 | val crud = new SprayMongo
14 |
15 | crud.insert(entity)
16 | crud.findAndUpdate("id":>entity.id, "$set":>{"name":>"Bar"})
17 | val update = crud.findOne("id":>entity.id)
18 |
19 | val popular = crud.find("count":> {"$gte":> update.count}) // awesome DSL for JSON
20 | ```
21 |
22 | However, anybody using this library is strongly encouraged to read the [MongoDB Documentation](http://docs.mongodb.org/manual/) as it is often necessary to get close to the raw queries to understand what is happening, especially the [Aggregation Framework](http://docs.mongodb.org/manual/applications/aggregation/).
23 |
24 | The best places to find more examples are the specs and the akka-patterns project:
25 |
26 | * [PersistenceSpec.scala](src/test/scala/org/eigengo/scalad/mongo/sprayjson/PersistenceSpec.scala)
27 | * [Akka Patterns](https://github.com/janm399/akka-patterns)
28 |
29 | ## Dependencies
30 |
31 | Add the dependency to your build file. In SBT, write
32 |
33 | ```scala
34 | "org.eigengo" %% "scalad" % "1.3.1"
35 | ```
36 |
37 | or, if you must use Maven, write
38 |
39 | ```xml
40 | <dependency>
41 |   <groupId>org.eigengo</groupId>
42 |   <artifactId>scalad_2.10</artifactId>
43 |   <version>1.3.1</version>
44 | </dependency>
45 | ```
46 |
47 | ## Special Types
48 |
49 | Because we're using Spray JSON to do the marshalling, it means that only JSON compatible objects are naturally supported by ScalaD.
50 |
51 | However, JSON is missing a few key object types, such as: `Date`, `UUID` and any distinction between number types.
52 |
53 | MongoDB BSON is also missing a few key object types, such as: `UUID`, `BigInt` and `BigDecimal`. Indeed, MongoDB treats numbers as primitive types and has no support for arbitrary precision numbers.
54 |
55 |
56 | We provide JSON marshallers (`UuidMarshalling` and `DateMarshalling`) which create JSON marshalled forms of `UUID` and `Date` objects that look like this (so they can still be understood by endpoint clients)
57 |
58 | ```
59 | {"$uuid": "550e8400-e29b-41d4-a716-446655440000"}
60 | {"$date": "2013-02-04T17:51:35.479+0000"} // "yyyy-MM-dd'T'HH:mm:ss.SSSZ"
61 | ```
62 |
63 | the serialisation layer will ensure that these are saved as `BinData` and `DateFormat` accordingly.
64 |
65 |
66 | If you want to use arbitrary precision numbers, we provide case classes (and Spray JSON marshallers) called `StringBigInt` and `StringBigDecimal` which marshall to `String`. Hopefully Spray JSON will address this magically with a fix to their [issue #44](https://github.com/spray/spray-json/issues/44).
67 |
68 |
69 | Be warned that although Spray JSON will correctly marshall raw `BigInt`s and `BigDecimal`s, MongoDB will silently drop the precision (ScalaD will detect this and create a log for every object that loses precision, so hopefully this is caught at development time).
70 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | import sbtrelease._
2 |
3 | /** Project */
4 | name := "Scalad"
5 |
6 | organization := "org.eigengo"
7 |
8 | scalaVersion := "2.10.1"
9 |
10 | /** Shell */
11 | shellPrompt := { state => System.getProperty("user.name") + "> " }
12 |
13 | shellPrompt in ThisBuild := { state => Project.extract(state).currentRef.project + "> " }
14 |
15 | /** Dependencies */
16 | resolvers += "spray repo" at "http://repo.spray.io"
17 |
18 | resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
19 |
20 | resolvers += "Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases/"
21 |
22 | publishTo <<= version { v: String =>
23 | val nexus = "https://oss.sonatype.org/"
24 | if (v.trim.endsWith("SNAPSHOT")) Some("snapshots" at nexus + "content/repositories/snapshots")
25 | else Some("releases" at nexus + "service/local/staging/deploy/maven2")
26 | }
27 |
28 | publishMavenStyle := true
29 |
30 | publishArtifact in Test := false
31 |
32 | pomIncludeRepository := { x => false }
33 |
34 | pomExtra := (
35 |   <url>http://www.eigengo.org/scalad.html</url>
36 |   <licenses>
37 |     <license>
38 |       <name>BSD-style</name>
39 |       <url>http://www.opensource.org/licenses/bsd-license.php</url>
40 |       <distribution>repo</distribution>
41 |     </license>
42 |   </licenses>
43 |   <scm>
44 |     <url>git@github.com:janm399/scalad.git</url>
45 |     <connection>scm:git:git@github.com:janm399/scalad.git</connection>
46 |   </scm>
47 |   <developers>
48 |     <developer>
49 |       <id>janmachacek</id>
50 |       <name>Jan Machacek</name>
51 |       <url>http://www.eigengo.org</url>
52 |     </developer>
53 |     <developer>
54 |       <id>anirvanchakraborty</id>
55 |       <name>Anirvan Chakraborty</name>
56 |       <url>http://www.eigengo.org</url>
57 |     </developer>
58 |   </developers>
59 | )
60 |
61 | credentials += Credentials(Path.userHome / ".sonatype")
62 |
63 | libraryDependencies <<= scalaVersion { scala_version =>
64 | Seq(
65 | "com.github.fommil" % "java-logging" % "1.0",
66 | "com.typesafe.akka" %% "akka-actor" % "2.1.2",
67 | "com.typesafe.akka" %% "akka-contrib" % "2.1.2" intransitive(), // JUL only
68 | "org.mongodb" % "mongo-java-driver" % "2.10.1",
69 | "com.typesafe" % "config" % "1.0.0",
70 | "io.spray" %% "spray-json" % "1.2.3",
71 | "org.specs2" %% "specs2" % "1.13" % "test",
72 | "org.scalacheck" %% "scalacheck" % "1.10.0" % "test"
73 | )
74 | }
75 |
76 | /** Compilation */
77 | javacOptions ++= Seq("-Xmx1812m", "-Xms512m", "-Xss6m")
78 |
79 | javaOptions += "-Xmx2G"
80 |
81 | scalacOptions ++= Seq("-deprecation", "-unchecked", "-feature")
82 |
83 | maxErrors := 20
84 |
85 | pollInterval := 1000
86 |
87 | logBuffered := false
88 |
89 | cancelable := true
90 |
91 | testOptions := Seq(Tests.Filter(s =>
92 | Seq("Spec", "Suite", "Test", "Unit", "all").exists(s.endsWith(_)) &&
93 | !s.endsWith("FeaturesSpec") ||
94 | s.contains("UserGuide") ||
95 | s.contains("index") ||
96 | s.matches("org.specs2.guide.*")))
97 |
98 | /** Console */
99 | initialCommands in console := "import org.eigengo.scalad._"
100 |
101 |
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=0.12.2
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.2.0")
2 |
3 | addSbtPlugin("com.github.gseitz" % "sbt-release" % "0.6")
4 |
5 | addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.7")
6 |
7 |
--------------------------------------------------------------------------------
/src/main/resources/reference.conf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eigengo/scalad/f1874a4657c2b895aa4200769149b3d82ff88470/src/main/resources/reference.conf
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/experimental/MongoNativeRestrictions.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.experimental
2 |
3 | import org.eigengo.scalad._
4 | import com.mongodb.{BasicDBObjectBuilder, DBObject}
5 | import mongo.MongoSerialiser
6 |
7 | /**
8 | * Native restrictions for JsObject and MongoDB
9 | */
10 | trait MongoJsonNativeRestrictions extends NativeRestrictions with MongoNativeRestrictionMarshallers {
11 | type NativeRestriction = DBObject
12 |
13 | def convertToNative(restriction: Restriction) = {
14 | def convert0(builder: BasicDBObjectBuilder, r: Restriction) {
15 | r match {
16 | case EqualsRestriction(path: String, value) => builder.add(path, value)
17 | case NotEqualsRestriction(path: String, value) => builder.add(path, value)
18 | case ConjunctionRestriction(lhs, rhs) => convert0(builder, lhs); convert0(builder, rhs)
19 | case DisjunctionRestriction(lhs, rhs) => convert0(builder, lhs); convert0(builder, rhs)
20 | case _ => ???
21 | }
22 | }
23 |
24 | val builder = BasicDBObjectBuilder.start()
25 | convert0(builder, restriction)
26 | builder.get()
27 | }
28 |
29 | }
30 |
31 | private[experimental] trait MongoNativeRestrictionMarshallers {
32 |
33 | implicit def getNativeRestrictionsMarshaller[A: MongoSerialiser]: NativeRestrictionsMarshaller[A] = new NativeRestrictionsMarshaller[A] {
34 | val serialiser = implicitly[MongoSerialiser[A]]
35 |
36 | type NativeRestrictionValue = DBObject
37 |
38 | def marshal(value: A) = serialiser.serialiseDB(value)
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/experimental/querydsl.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.experimental
2 |
3 | import scala.language.implicitConversions
4 |
5 | sealed trait Restriction
6 | case class EqualsRestriction[A, Path](path: Path, value: A) extends Restriction with RestrictionOps
7 | case class NotEqualsRestriction[A, Path](path: Path, value: A) extends Restriction with RestrictionOps
8 | case class OrdRestriction[A : Ordering, Path](path: Path, ord: Symbol, value: A) extends Restriction with RestrictionOps
9 |
10 | case class NotRestriction(restriction: Restriction) extends Restriction
11 |
12 | case class ConjunctionRestriction(lhs: Restriction, rhs: Restriction) extends Restriction with RestrictionOps
13 | case class DisjunctionRestriction(lhs: Restriction, rhs: Restriction) extends Restriction with RestrictionOps
14 |
15 | case object ContradictionRestriction extends Restriction
16 | case object TautologyRestriction extends Restriction
17 |
18 | /**
19 | * Contains functions that construct trees of expressions
20 | */
21 | trait RestrictionOps {
22 | this: Restriction =>
23 |
24 | /**
25 | * Combines this restriction with that restriction in a conjunction
26 | *
27 | * @param that the right hand side
28 | * @return this && that
29 | */
30 | def &&(that: Restriction) = ConjunctionRestriction(this, that)
31 |
32 | /**
33 | * Combines this restriction with that restriction in a disjunction
34 | *
35 | * @param that the right hand side
36 | * @return this || that
37 | */
38 | def ||(that: Restriction) = DisjunctionRestriction(this, that)
39 |
40 | }
41 |
42 | /**
43 | * Contains functions to simplify restrictions
44 | */
45 | trait RestrictionSimplification {
46 |
47 | private def simplifyConjunction(conjunction: ConjunctionRestriction): Restriction = conjunction match {
48 | case ConjunctionRestriction(lhs, rhs) if lhs == rhs => lhs
49 | case ConjunctionRestriction(_, ContradictionRestriction) => ContradictionRestriction
50 | case ConjunctionRestriction(ContradictionRestriction, _) => ContradictionRestriction
51 | case ConjunctionRestriction(EqualsRestriction(p1, v1), NotEqualsRestriction(p2, v2)) if p1 == p2 && v1 == v2 => ContradictionRestriction
52 | case ConjunctionRestriction(NotEqualsRestriction(p1, v1), EqualsRestriction(p2, v2)) if p1 == p2 && v1 == v2 => ContradictionRestriction
53 | case ConjunctionRestriction(lhs, rhs) =>
54 | val simplerLhs = simplify(lhs)
55 | val simplerRhs = simplify(rhs)
56 | if (simplerLhs != lhs || simplerRhs != rhs) simplify(ConjunctionRestriction(simplerLhs, simplerRhs)) else conjunction
57 | }
58 |
59 | private def simplifyDisjunction(disjunction: DisjunctionRestriction): Restriction = disjunction match {
60 | case DisjunctionRestriction(lhs, rhs) if lhs == rhs => lhs
61 | case DisjunctionRestriction(_, TautologyRestriction) => TautologyRestriction
62 | case DisjunctionRestriction(TautologyRestriction, _) => TautologyRestriction
63 | case DisjunctionRestriction(EqualsRestriction(p1, v1), NotEqualsRestriction(p2, v2)) if p1 == p2 && v1 == v2 => TautologyRestriction
64 | case DisjunctionRestriction(NotEqualsRestriction(p1, v1), EqualsRestriction(p2, v2)) if p1 == p2 && v1 == v2 => TautologyRestriction
65 | case DisjunctionRestriction(lhs, rhs) =>
66 | val simplerLhs = simplify(lhs)
67 | val simplerRhs = simplify(rhs)
68 | if (simplerLhs != lhs || simplerRhs != rhs) simplify(DisjunctionRestriction(simplerLhs, simplerRhs)) else disjunction
69 | }
70 |
71 | final def simplify(restriction: Restriction): Restriction = restriction match {
72 | case c: ConjunctionRestriction => simplifyConjunction(c)
73 | case d: DisjunctionRestriction => simplifyDisjunction(d)
74 | case _ => restriction
75 | }
76 |
77 | }
78 |
79 | /**
80 | * Defines the ``RestrictionPath`` to be string, which most databases are happy with:
81 | * - in SQL databases, the ``RestrictionPath`` is the column name,
82 | * - in Mongo, the ``RestrictionPath`` is the property name,
83 | * - ...
84 | */
85 | trait StringRestrictionsPaths {
86 | type RestrictionPath = String
87 | }
88 |
89 | trait NativeRestrictions extends RestrictionSimplification {
90 | type NativeRestriction
91 |
92 | implicit final def doConvertToNative(restriction: Restriction) = convertToNative(simplify(restriction))
93 |
94 | def convertToNative(restriction: Restriction): NativeRestriction
95 |
96 | }
97 |
98 | /**
99 | * Provides the starting point for the restrictions DSL
100 | */
101 | trait Restrictions {
102 | type RestrictionPath
103 |
104 | import language.implicitConversions
105 |
106 | /**
107 | * Begins constructing the restriction by turning the ``RestrictionPath`` into an instance of the
108 | * ``RestrictionBuilder``. You can then call its ``equalTo``, ``lessThan`` and other methods.
109 | *
110 | * @param path the starting path
111 | * @return the ``RestrictionBuilder`` starting from the path
112 | */
113 | implicit final def beginRestriction(path: RestrictionPath): RestrictionBuilder[RestrictionPath] = new RestrictionBuilder(path)
114 |
115 | }
116 |
117 | trait NativeRestrictionsMarshaller[A] {
118 |
119 | type NativeRestrictionValue
120 |
121 | def marshal(value: A): NativeRestrictionValue
122 |
123 | }
124 |
125 | /**
126 | * Begins the construction of the restrictions so that you can construct the entire query tree
127 | *
128 | * @param path the starting path, i.e. "username" ... so that you can construct things like "username" == "foo"
129 | * @tparam Path the type of the path
130 | */
131 | class RestrictionBuilder[Path](path: Path) {
132 |
133 | /**
134 | * Property is equal to the given value
135 | *
136 | * @param value the value the ``path`` must be equal to
137 | * @tparam A the type of the value
138 | * @return the == restriction
139 | */
140 | def equalTo[A](value: A)(implicit marshaller: NativeRestrictionsMarshaller[A]) = EqualsRestriction(path, marshaller.marshal(value))
141 |
142 | /**
143 | * Property is not equal to the given value
144 | *
145 | * @param value the value the ``path`` must not be equal to
146 | * @tparam A the type of the value
147 | * @return the != restriction
148 | */
149 | def notEqualTo[A](value: A)(implicit marshaller: NativeRestrictionsMarshaller[A]) = NotEqualsRestriction(path, marshaller.marshal(value))
150 |
151 | // def lessThan[A : Ordering](value: A) = OrdRestriction(path, '<, nativeValue(value))
152 |
153 | }
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoAggregate.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.{AggregationOutput, BasicDBObject, DBObject}
4 | import collection.convert.{WrapAsScala, WrapAsJava}
5 |
6 | trait MongoAggregate {
7 |
8 | def aggregate[T: CollectionProvider](pipeline: DBObject*): List[DBObject] = {
9 | require(!pipeline.isEmpty)
10 | val collection = implicitly[CollectionProvider[T]].getCollection
11 | val command = new BasicDBObject("aggregate", collection.getName)
12 | command.put("pipeline", WrapAsJava.seqAsJavaList(pipeline))
13 | val res = collection.getDB.command(command)
14 | res.throwOnError()
15 | val results = new AggregationOutput(command, res).results
16 | WrapAsScala.iterableAsScalaIterable(results).toList
17 | }
18 |
19 | private val countCommand = Implicits.JSON2DBObject("""{"$group": {"_id": null, "count": {"$sum": 1 }}}""")
20 |
21 | def aggregateCount[T: CollectionProvider](pipeline: DBObject*): Long = {
22 | val parts = pipeline.toList ::: countCommand :: Nil
23 | aggregate(parts: _*).toList match {
24 | case Nil => 0L
25 | case res :: Nil => res.asInstanceOf[BasicDBObject].getLong("count")
26 | case multi => throw new IllegalStateException(multi.toString)
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoCount.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.DBObject
4 |
5 | trait MongoCount {
6 |
7 | def count[T](query: DBObject)
8 | (implicit provider: CollectionProvider[T]): Long =
9 | provider.getCollection.count(query)
10 |
11 | }
12 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoCreate.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.WriteConcern
4 |
5 | /** `CREATE` operations. */
6 | trait MongoCreate {
7 |
8 | /** Use unique indices in MongoDB to ensure that duplicate entries are not created
9 | * (`CollectionProvider` is a good place to do this).
10 | * @return the parameter, or `None` if not added.
11 | */
12 | def create[T: CollectionProvider : MongoSerialiser](entity: T, concern: WriteConcern = null): Option[T] = {
13 | val collection = implicitly[CollectionProvider[T]].getCollection
14 | val serialiser = implicitly[MongoSerialiser[T]]
15 |
16 | val serialised = serialiser serialiseDB entity
17 | val result =
18 | if (concern == null ) collection.insert(serialised).getLastError
19 | else collection.insert(serialised, concern).getLastError
20 |
21 | if (result.ok()) Some(entity)
22 | else None
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoCreateOrUpdate.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | /** `UPDATE OR CREATE` Operations. */
4 | trait MongoCreateOrUpdate {
5 | this: MongoUpdate with MongoCreate =>
6 |
7 | /** Updates the first entry that matches the identity query or creates a new entry if
8 | * none was found. Involves two hits to the DB.
9 | * @return the parameter or `None` if the create failed.
10 | */
11 | def createOrUpdateFirst[T: CollectionProvider : MongoSerialiser : IdentityQueryBuilder](entity: T): Option[T] = {
12 | val updated = updateFirst(entity)
13 | updated match {
14 | case Some(_) =>
15 | updated
16 | case None =>
17 | create(entity)
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoDelete.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | /** `DELETE` operations. */
4 | trait MongoDelete {
5 |
6 | /** @return `None` if the delete failed, otherwise the parameter. */
7 | def deleteFirst[T: CollectionProvider : IdentityQueryBuilder](entity: T): Option[T] = {
8 | val collection = implicitly[CollectionProvider[T]].getCollection
9 | val id = implicitly[IdentityQueryBuilder[T]].createIdQuery(entity)
10 |
11 | if (collection.findAndRemove(id) != null) Some(entity)
12 | else None
13 | }
14 |
15 | }
16 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoFind.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.BasicDBObject
4 |
5 | /** `FIND` operations – i.e. require an "example entity" to query the database.
6 | */
7 | trait MongoFind {
8 | this: MongoSearch =>
9 |
10 | /** @return the found entity or `None` if the entity was not found in the database.
11 | * @throws TooManyResults if more than one result.
12 | */
13 | def findUnique[T: CollectionProvider : MongoSerialiser : IdentityQueryBuilder](entity: T): Option[T] = {
14 | val id = implicitly[IdentityQueryBuilder[T]].createIdQuery(entity)
15 | searchUnique(id)
16 | }
17 |
18 | /** @return the found entity or `None` if the entity was not found in the database. */
19 | def findFirst[T: CollectionProvider : MongoSerialiser : IdentityQueryBuilder](entity: T): Option[T] = {
20 | val id = implicitly[IdentityQueryBuilder[T]].createIdQuery(entity)
21 | searchFirst(id)
22 | }
23 |
24 | /** @return all results of the query. */
25 | def findAll[T: CollectionProvider : MongoSerialiser : IdentityQueryBuilder](entity: T): ConsumerIterator[T] = {
26 | val id = implicitly[IdentityQueryBuilder[T]].createIdQuery(entity)
27 | searchAll(id)
28 | }
29 |
30 | /** @return all results of the query. */
31 | def findAll[T: CollectionProvider : MongoSerialiser]: ConsumerIterator[T] = {
32 | searchAll(new BasicDBObject())
33 | }
34 |
35 | }
36 |
37 | // TODO: selective FIND
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoModify.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.DBObject
4 |
5 |
6 | trait MongoModify {
7 |
8 | def modify[T, K](id: K, rule: DBObject)
9 | (implicit provider: CollectionProvider[T],
10 | builder: KeyQueryBuilder[T, K]) {
11 | val col = provider.getCollection
12 | val query = builder.createKeyQuery(id)
13 | col.findAndModify(query, rule)
14 | }
15 |
16 | def findAndModify[T](query: DBObject, rule: DBObject)
17 | (implicit provider: CollectionProvider[T]) {
18 | val col = provider.getCollection
19 | col.findAndModify(query, rule)
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoRead.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | /** `READ` operations – i.e. require a key to query the database.
4 | */
5 | trait MongoRead {
6 | this: MongoSearch =>
7 |
8 | /** @return the only entity matching the key-based search, or `None`.
9 | * @throws TooManyResults if more than one result.
10 | */
11 | def readUnique[T, K](key: K)(implicit keyBuilder: KeyQueryBuilder[T, K],
12 | collectionProvider: CollectionProvider[T],
13 | serialiser: MongoSerialiser[T]): Option[T] = {
14 | val query = keyBuilder.createKeyQuery(key)
15 | searchUnique(query)
16 | }
17 |
18 | /** @return the first entity matching the key-based search, or `None`. */
19 | def readFirst[T, K](key: K)(implicit keyBuilder: KeyQueryBuilder[T, K],
20 | collectionProvider: CollectionProvider[T],
21 | serialiser: MongoSerialiser[T]): Option[T] = {
22 | val query = keyBuilder.createKeyQuery(key)
23 | searchFirst(query)
24 | }
25 |
26 | /** @return all entities matching the key-based search. */
27 | def readAll[T, K](key: K)(implicit keyBuilder: KeyQueryBuilder[T, K],
28 | collectionProvider: CollectionProvider[T],
29 | serialiser: MongoSerialiser[T]): ConsumerIterator[T] = {
30 | val query = keyBuilder.createKeyQuery(key)
31 | searchAll(query)
32 | }
33 | }
34 |
35 | // TODO: selective READ
36 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoSearch.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb.DBObject
4 | import concurrent.{ExecutionContext, Future}
5 | import akka.contrib.jul.JavaLogging
6 |
7 | /** Search returned too many results.
8 | */
9 | case class TooManyResults(query: DBObject) extends Exception
10 |
11 | /** Search using MongoDB `DBObject`s.
12 | *
13 | * `searchAll` returns immediately and builds up the results into an `Iterable`
14 | * as they are found.
15 | *
16 | * Implicit conversions from JSON syntax or DSLs bring these methods within reach of
17 | * most users.
18 | */
19 | trait MongoSearch extends JavaLogging {
20 |
21 | /** @return the first result from the result of the query, or `None` if nothing found. */
22 | def searchFirst[T: CollectionProvider : MongoSerialiser](query: DBObject): Option[T] = {
23 | val collection = implicitly[CollectionProvider[T]].getCollection
24 | val serialiser = implicitly[MongoSerialiser[T]]
25 |
26 | val cursor = collection.find(query)
27 | try
28 | if (cursor.hasNext) Some(serialiser deserialise cursor.next())
29 | else None
30 | finally
31 | cursor.close()
32 | }
33 |
34 | /** @return all results from the query. */
35 | def searchAll[T: CollectionProvider : MongoSerialiser](query: DBObject): ConsumerIterator[T] = {
36 | // val iterable = new NonblockingProducerConsumer[T]
37 | val iterable = new BlockingProducerConsumer[T](100)
38 |
39 | import ExecutionContext.Implicits.global
40 | Future {
41 | val collection = implicitly[CollectionProvider[T]].getCollection
42 | val serialiser = implicitly[MongoSerialiser[T]]
43 | val cursor = collection find query
44 |
45 | try {
46 | while (!iterable.stopped && cursor.hasNext) {
47 | val found = serialiser deserialise cursor.next()
48 | iterable.produce(found)
49 | }
50 | } finally {
51 | iterable.close()
52 | cursor.close()
53 | }
54 | }.onFailure {
55 | case t => log.error(t, "Future failed")
56 | }
57 |
58 | iterable
59 | }
60 |
61 | /** @return the only found entry, or `None` if nothing found.
62 | * @throws TooManyResults if more than one result.
63 | */
64 | def searchUnique[T: CollectionProvider : MongoSerialiser](query: DBObject): Option[T] = {
65 | val results = searchAll(query).toList // blocks
66 | if (results.isEmpty) None
67 | else if (results.tail.isEmpty) Some(results.head)
68 | else throw new TooManyResults(query)
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/MongoUpdate.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 |
4 | /** `UPDATE` Operations. */
5 | trait MongoUpdate {
6 |
7 | /** Updates the first entry that matches the identity query.
8 | * @return the parameter or `None` if the entity was not found in the database.
9 | */
10 | def updateFirst[T: CollectionProvider : MongoSerialiser : IdentityQueryBuilder](entity: T): Option[T] = {
11 | val collection = implicitly[CollectionProvider[T]].getCollection
12 | val serialiser = implicitly[MongoSerialiser[T]]
13 | val id = implicitly[IdentityQueryBuilder[T]].createIdQuery(entity)
14 |
15 | if (collection.findAndModify(id, serialiser serialiseDB entity) == null) None
16 | else Some(entity)
17 | }
18 |
19 | /** Find the old entry in the database by comparing it to the first parameter,
20 | * and update it with the new one. Appropriate when an identity field is changed.
21 | */
22 | def updateFirst[T: IdentityQueryBuilder: MongoSerialiser: CollectionProvider](old: T, update: T): Option[T] = {
23 | val col = implicitly[CollectionProvider[T]].getCollection
24 | val query = implicitly[IdentityQueryBuilder[T]].createIdQuery(old)
25 | val existing = col.findOne(query)
26 | if (existing == null) return None
27 | val updateDb = implicitly[MongoSerialiser[T]].serialiseDB(update)
28 | if (col.update(existing, updateDb) == null) None
29 | else Some(update)
30 | }
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/ProducerConsumer.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import collection.mutable
4 | import collection.parallel.ThreadPoolTaskSupport
5 | import annotation.tailrec
6 | import concurrent.duration.Duration
7 | import java.util.concurrent.atomic.AtomicBoolean
8 | import java.util.concurrent.{LinkedBlockingQueue, ConcurrentLinkedQueue}
9 | import java.util.concurrent.locks.ReentrantLock
10 |
11 | trait Paging[T] {
12 | this: Iterator[T] =>
13 |
14 | /** Upgrades `Iterator`s to return results in batches of a given size.
15 | *
16 | * (Note that paging does not imply anything on the buffering strategy)
17 | */
18 | def page(entries: Int)(f: List[T] => Unit) {
19 | require(entries > 1)
20 | val buffer = new mutable.ListBuffer[T]
21 | while (hasNext) {
22 | buffer append next()
23 | if (buffer.size % entries == 0) {
24 | f(buffer.toList)
25 | buffer.clear()
26 | }
27 | }
28 | if (!buffer.isEmpty) f(buffer.toList)
29 | }
30 | }
31 |
trait ParallelPaging[T] extends Paging[T] {
  this: Iterator[T] =>

  // one pool shared by all pages produced from this iterator
  private val pool = new ThreadPoolTaskSupport

  /** Consumes the iterator page by page, applying `f` to the elements
    * of each page in parallel (pages themselves are processed sequentially).
    *
    * @param f    side-effecting callback; must be thread safe
    * @param size the page size (default 100)
    */
  def foreachpage(f: T => Unit, size: Int = 100) {
    page(size) {
      p =>
        val par = p.par
        par.tasksupport = pool
        par.foreach(i => f(i))
    }
  }

}
47 |
48 | /** A very clean `Iterator` realisation of the
49 | * Producer / Consumer pattern where the producer and
50 | * consumer run in separate threads.
51 | *
52 | * Both the `hasNext` and `next` methods of the `Iterator`
53 | * may block (e.g. when the consumer catches up with the
54 | * producer).
55 | *
56 | * This is best used by a single producer and single
57 | * consumer but can be extended to multiple consumers
58 | * under the caveat that `next` may return `null` following
59 | * a successful `hasNext` (another thread
60 | * may have grabbed it first).
61 | *
62 | * If the client wishes to cancel iteration early, the
63 | * `stop` method may be called to free up resources.
64 | *
65 | * Functional purists may use this in their `Iteratees`
66 | * patterns.
67 | *
68 | * This is a multi-threaded alternative to the
69 | * [[http://en.wikipedia.org/wiki/Coroutine co-routine]]
70 | * pattern.
71 | *
72 | *
73 | * It is a common misconception that `Iterator.hasNext` is
74 | * not allowed to block.
75 | * However, the API documentation does not preclude
76 | * blocking behaviour. Indeed, the
77 | * Scala standard library encourages consumer blocking in
78 | * the XML Pull API: [[scala.xml.pull.ProducerConsumerIterator]].
79 | */
trait ConsumerIterator[T] extends Iterator[T] with ParallelPaging[T] {

  /** Latched once the consumer requests early termination; producers poll it. */
  protected val stopSignal = new AtomicBoolean

  /** Instruct the implementation to truncate at its
    * earliest convenience and dispose of resources.
    */
  def stop() {
    stopSignal.set(true)
  }
}
91 |
/** The producer's side of
  * [[org.eigengo.scalad.mongo.ConsumerIterator]].
  *
  * Implementations should extend this and be thread safe for
  * multiple producer threads and may assume a single
  * consumer thread.
  */
trait ProducerConsumerIterator[T] extends ConsumerIterator[T] {

  /** Make an element available for the consumer.
    * May block, depending on the implementation's buffering strategy.
    */
  def produce(el: T)

  /** Finish producing. No further `produce` calls are expected afterwards.
    */
  def close()

  /** @return `true` if the consumer instructed the producer to stop.
    */
  def stopped() = stopSignal.get
}
113 |
/** Shared plumbing for producer/consumer implementations: a pluggable queue
  * plus the lock/condition pair used to park the consumer until elements
  * arrive or the producer closes.
  */
abstract protected class AbstractProducerConsumer[T] extends ProducerConsumerIterator[T] {

  /** Backing buffer; subclasses choose bounded vs unbounded behaviour. */
  protected val queue: java.util.Queue[T]

  // set exactly once, by close()
  private val closed = new AtomicBoolean

  // guards the signalling between produce()/close() and hasNext
  protected val lock = new ReentrantLock()

  // signalled whenever new elements arrive or the producer closes
  protected val change = lock.newCondition()

  override def close() {
    lock lock()
    try {
      closed set true
      change signalAll()
    } finally
      lock unlock()
  }

  @tailrec
  override final def hasNext: Boolean =
    if (!queue.isEmpty) true
    else if (closed.get) !queue.isEmpty // non-locking optimisation
    else {
      lock lock()
      try {
        // re-check under the lock: close() may have won the race since the fast path
        if (closed.get) return !queue.isEmpty
        change await()
      } finally
        lock unlock()
      // woken up: loop to re-examine the queue/closed state
      hasNext
    }
}
147 |
148 |
/** Appropriate in cases where the producer is not expected to
  * create enough data to cause memory problems, regardless
  * of consumption rate.
  *
  * Has an effectively infinite buffer.
  */
final class NonblockingProducerConsumer[T] extends AbstractProducerConsumer[T] {

  // ConcurrentLinkedQueue over mutable.SynchronizedQueue: it is lock-free,
  // unbounded and ridiculously efficient, so producers never block on the
  // buffer itself.
  protected val queue = new ConcurrentLinkedQueue[T]()

  override def produce(el: T) {
    queue.add(el)
    lock.lock()
    try {
      change.signalAll()
    } finally {
      lock.unlock()
    }
  }

  override def next() = queue.poll()
}
171 |
/** Appropriate for memory constrained environments.
 *
 * Uses a finitely sized buffer to block the producer from adding
 * elements onto a `Queue` when the consumer is slow.
 *
 * Has an optional timeout on the block operation, at which point an
 * exception is raised from all `Iterator` methods. If no timeout
 * is used, the producer may block forever. If a zero timeout is given,
 * the buffer must never overflow (and the producer will never be
 * blocked).
 */
final class BlockingProducerConsumer[T](buffer: Int, timeout: Option[Duration] = None) extends AbstractProducerConsumer[T] {
  require(buffer > 0)

  // bounded: put() blocks the producer once the consumer lags `buffer` behind
  protected val queue = new LinkedBlockingQueue[T](buffer)

  // latched when an offer() failed; surfaced later via timeoutCheck()
  private val timedout = new AtomicBoolean

  override def produce(el: T) {
    if (timeout.isDefined) {
      val duration = timeout.get
      val taken = if (duration.length == 0) {
        // zero timeout: non-blocking offer, fails immediately when the buffer is full
        queue.offer(el)
      } else
        queue.offer(el, duration.length, duration.unit)
      if (!taken)
        timedout set true
    }
    else
      queue.put(el)

    // wake a consumer that may be parked in hasNext
    lock lock()
    try change signalAll()
    finally lock unlock()
    timeoutCheck()
  }

  // raises if an offer timed out, unless the consumer already asked us to stop
  private def timeoutCheck() {
    if (!stopped && timedout.get) throw new IllegalStateException(getClass + " timed out.")
  }

  override def next() = {
    timeoutCheck()
    queue.poll()
  }
}
218 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/package.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import scala.collection._
4 | import com.mongodb._
5 | import akka.contrib.jul.JavaLogging
6 |
/** These implicits make the MongoDB API nicer to use, for example by allowing
  * JSON search queries to be passed instead of `DBObject`s.
  */
object Implicits {
  import scala.language.implicitConversions

  /** Parses a JSON `String` into a `DBObject` so raw JSON can be passed to driver calls.
    *
    * `val` rather than `var`: an implicit conversion must not be reassignable at runtime.
    */
  implicit val JSON2DBObject = (json: String) => util.JSON.parse(json).asInstanceOf[DBObject]
}
15 |
/** Mechanism for finding an entry in the database
 * which matches a query built from an archetype entity.
 */
trait IdentityQueryBuilder[T] {
  /** @return the query uniquely identifying `entity`. */
  def createIdQuery(entity: T): DBObject
}

/** Mechanism for finding an entry in the database
 * which matches the query built up from a key.
 */
trait KeyQueryBuilder[T, K] {
  /** @return the query selecting entities by `key`. */
  def createKeyQuery(key: K): DBObject
}

/** Mechanism for converting to/from Scala types and MongoDB `DBObject`s.
 *
 * Unfortunately, the actual signatures needs to be `Object` so that
 * "primitive" types (`String`, `java.lang.Long`, etc) are supported.
 */
trait MongoSerialiser[T] {
  /** Only to be used when the entity is known to serialise non-trivially. */
  def serialiseDB(entity: T) = serialise(entity).asInstanceOf[DBObject]

  /** @return the database representation of `entity`. */
  def serialise(entity: T): Object

  /** @return the entity reconstructed from its database representation. */
  def deserialise(dbObject: Object): T
}

/** Access to a MongoDB `DBCollection`.
 * Here is a good place to add an index.
 */
trait CollectionProvider[T] {
  /** @return the backing collection; callers may invoke this on every operation. */
  def getCollection: DBCollection
}
50 |
/** Provides CRUD access to a MongoDB collection using client-provided implicits to:
 *
 * 1. provide the backing MongoDB `DBCollection`.
 * 2. serialise/deserialise the MongoDB representation.
 * 3. provide a concept of identity for UPDATE/DELETE operations.
 * 4. provide a concept of a key for READ operations.
 *
 * MongoDB adds an internal `_id` field to every object that is persisted in the
 * database. It is bad practice to use this `_id` field as the MongoDB documentation
 * notes it is possible it may change under highly distributed circumstances.
 *
 * All methods throw [[com.mongodb.MongoException]] if something bad happened that we
 * didn't expect (e.g. I/O or config).
 *
 * @author Sam Halliday
 * @author Jan Machacek
 * @see Thinking notes on the API design
 */
class MongoCrud extends MongoCreate
  with MongoSearch
  with MongoUpdate
  with MongoDelete
  with MongoRead
  with MongoFind
  with MongoCreateOrUpdate
  with MongoModify
  with MongoAggregate
  with MongoCount
79 |
// enables cross-instance concurrent DB indexing
protected object IndexedCollectionProvider {

  // all access MUST be synchronised on `indexed` itself
  private val indexed = new mutable.WeakHashMap[DBCollection, Boolean]()

  /** Grants index-building privilege for a collection at most once.
    *
    * @return `true` iff the calling thread is the first to ask about this
    *         collection; such callers should proceed immediately to build the
    *         indexes as it is possible that no other thread will be granted
    *         the privilege.
    */
  def privilegedIndexing(collection: DBCollection): Boolean = indexed.synchronized {
    // put returns the previous binding: None means we won the race
    indexed.put(collection, true).isEmpty
  }
}
97 |
98 |
/** Easy way to add unique indexes to a Mongo collection. */
trait IndexedCollectionProvider[T] extends CollectionProvider[T] with JavaLogging {

  doIndex()

  /** Ensures the indexes exist; only the first provider to see the collection builds them. */
  def doIndex() {
    import Implicits._
    if (IndexedCollectionProvider.privilegedIndexing(getCollection)) {
      log.debug("Ensuring indexes exist on " + getCollection)
      uniqueFields.foreach(field => getCollection.ensureIndex(field, null, true))
      indexFields.foreach(field => getCollection.ensureIndex(field, null, false))
    }
  }

  /** `String`s containing the JSON definition of the unique indexes to build. */
  protected def uniqueFields: List[String] = Nil

  /** `String`s containing the JSON definition of the (non-unique) indexes to build. */
  protected def indexFields: List[String] = Nil
}
119 |
120 |
/** Provides a `read` query that resembles SQL's ``SELECT a WHERE a.field = ...``.
 *
 * The key must not require any special serialisation.
 */
trait FieldQueryBuilder[T, K] extends KeyQueryBuilder[T, K] {
  def createKeyQuery(key: K): DBObject = new BasicDBObject(field, key)

  /** The database field matched against the key. */
  def field: String
}

/** Provides a concept of identity that resembles a SQL `field` column
 *
 * The key must not require any special serialisation.
 */
trait FieldIdentityQueryBuilder[T, K] extends IdentityQueryBuilder[T] {
  def createIdQuery(entity: T): DBObject = new BasicDBObject(field, id(entity))

  /** The database field holding the identity. */
  def field: String

  /** Extracts the identity value from the entity. */
  def id(entity: T): K
}
142 |
/** Syntactic sugar for [[org.eigengo.scalad.mongo.FieldQueryBuilder]] keyed by `String`. */
class StringFieldQuery[T](val field: String) extends FieldQueryBuilder[T, String]

/** Syntactic sugar for [[org.eigengo.scalad.mongo.FieldQueryBuilder]] keyed by `Long`. */
class LongFieldQuery[T](val field: String) extends FieldQueryBuilder[T, Long]
148 |
/** Provides a `read` query using serialised fields. */
class SerialisedFieldQueryBuilder[T, K](val field: String)
                                       (implicit serialiser: MongoSerialiser[K])
  extends FieldQueryBuilder[T, K] {
  // unlike the parent, the key is serialised before being matched
  override def createKeyQuery(key: K): DBObject = new BasicDBObject(field, serialiser.serialise(key))
}

/** Provides a concept of identity that resembles a SQL `field` column,
 * with serialisation on the field.
 */
abstract class SerialisedIdentityQueryBuilder[T, K](val field: String)
                                                   (implicit serialiser: MongoSerialiser[K])
  extends FieldIdentityQueryBuilder[T, K] {
  // unlike the parent, the identity is serialised before being matched
  override def createIdQuery(entity: T) = new BasicDBObject(field, serialiser.serialise(id(entity)))
}
164 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/sprayjson/SprayMongo.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import org.eigengo.scalad.mongo._
4 | import spray.json.{JsValue, JsObject, JsonFormat}
5 | import akka.contrib.jul.JavaLogging
6 | import com.mongodb.{WriteConcern, DB, DBObject}
7 | import spray.json.pimpAny
8 | import scala.language.implicitConversions
9 |
trait Implicits extends SprayJsonConvertors {
  this: JavaLogging =>

  /** Any type with a `JsonFormat` gets a spray-json backed [[MongoSerialiser]]. */
  protected implicit def serialiser[T: JsonFormat] = new SprayJsonSerialisation[T]

  /** Converts a spray-json value into a driver `DBObject` for querying. */
  protected implicit def SprayJsonToDBObject(json: JsValue) = js2db(json).asInstanceOf[DBObject]

}
18 |
/** MongoDB format for indexing fields, e.g. {"key": 1} */
class SprayMongoCollection[T](db: DB,
                              name: String,
                              uniqueIndexes: JsObject*)
  extends CollectionProvider[T] with Implicits with JavaLogging {

  def getCollection = db.getCollection(name)

  // indexes are built at construction time, by the first provider to see the collection
  if (IndexedCollectionProvider.privilegedIndexing(getCollection)) {
    log.debug("Ensuring indexes exist on " + getCollection)
    uniqueIndexes.foreach(field => getCollection.ensureIndex(field, null, true))
    indexes.foreach(field => getCollection.ensureIndex(field, null, false))
  }

  /** Override to provide non-unique indexes, in the MongoDB index format. */
  def indexes: List[JsObject] = Nil
}
35 |
/** Forwards all requests to the ScalaD API, independent of the Java MongoDB API.
  * Not all MongoCrud operations are exposed, in an effort to encourage good practice
  * when using Spray JSON. For example, it is easier to use `findAndModify` with the
  * DSL than to define an abstract identity extractor.
  */
class SprayMongo extends Implicits with JavaLogging {

  private val scalad = new MongoCrud

  def insert[T: CollectionProvider : JsonFormat](entity: T): Option[T] = scalad.create(entity)

  // good for fire-and-forget writes that happen often, e.g. writing logs
  def insertFast[T: CollectionProvider : JsonFormat](entity: T): Option[T] =
    scalad.create(entity, WriteConcern.UNACKNOWLEDGED)

  def findOne[T: CollectionProvider : JsonFormat](query: JsObject): Option[T] = scalad.searchFirst(query)

  def find[T: CollectionProvider : JsonFormat](query: JsObject): ConsumerIterator[T] = scalad.searchAll(query)

  def findAndModify[T: CollectionProvider](query: JsObject, rule: JsObject) = scalad.findAndModify(query, rule)

  def findAndReplace[T: CollectionProvider : JsonFormat](query: JsObject, update: T) =
    scalad.findAndModify(query, update.toJson)

  /** Atomically removes the first match, returning it deserialised (the driver
    * returns null when nothing matched, mapped here to `None`).
    */
  def removeOne[T: CollectionProvider : JsonFormat](query: JsObject): Option[T] =
    Option(implicitly[CollectionProvider[T]].getCollection.findAndRemove(query))
      .map(found => serialiser[T].deserialise(found))

  def count[T: CollectionProvider : JsonFormat](query: JsObject): Long =
    implicitly[CollectionProvider[T]].getCollection.count(query)

  def count[T: CollectionProvider : JsonFormat](): Long =
    implicitly[CollectionProvider[T]].getCollection.count()

  // note, mongodb 2.3.x introduced a lot of fixes to the aggregation framework,
  // e.g. allowing for binary data to be included in pipelines.
  // https://github.com/janm399/scalad/issues/63
  def aggregate[T: CollectionProvider](pipeline: JsObject*): List[JsValue] =
    scalad.aggregate(pipeline.map(SprayJsonToDBObject): _*).map(obj2js)
}
82 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/sprayjson/dsl.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
object `package` {
  import language.postfixOps
  import spray.json._

  /** Identity writer so already-built objects can be nested in the DSL. */
  implicit def jsonObjectWriter = new JsonWriter[JsObject] {
    def write(value: JsObject) = value
  }

  /** Identity writer so already-built arrays can be nested in the DSL. */
  implicit def jsonArrayWriter = new JsonWriter[JsArray] {
    def write(value: JsArray) = value
  }

  /** Builds a JSON array from the given elements. */
  def $[V: JsonFormat](elements: V*): JsArray = {
    val cf = new CollectionFormats {}
    cf.listFormat.write(elements.toList)
  }

  /** Enables `"key" :> value` to build a single-field JSON object. */
  implicit class JsObjectBuilder[V: JsonWriter](key: String) extends DefaultJsonProtocol {
    val writer = implicitly[JsonWriter[V]]
    def :>(that: V): JsObject = new JsObject(Map(key -> writer.write(that)))
  }

  /** Enables merging of objects: `a <> b` (alias `a ~ b`); on clashing keys
    * the right-hand object's fields win (plain `Map ++` semantics).
    */
  implicit class JsObjectMonoidalMappend(obj: JsObject) extends DefaultJsonProtocol {
    def <>(that: JsObject) = (obj.fields ++ that.fields).toJson
    def ~(that: JsObject) = obj <> that
  }
}
30 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/sprayjson/marshalling.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import spray.json._
4 | import java.util.{UUID, Date}
5 | import java.net.URI
6 | import org.eigengo.scalad.mongo.{UuidChecker, IsoDateChecker}
7 |
/** Convenient implicit conversions */
object BigNumberMarshalling {

  import language.implicitConversions

  // unwrapping conversions
  implicit def StringBigDecimalToBigDecimal(value: StringBigDecimal) = value.value

  implicit def StringBigIntBigDecimal(value: StringBigInt) = value.value

  // wrapping conversions from literals and standard numeric types
  implicit def StringToStringBigDecimal(value: String) = StringBigDecimal(value)

  implicit def StringToStringBigInt(value: String) = StringBigInt(value)

  implicit def IntToStringBigDecimal(value: Int) = StringBigDecimal(BigDecimal(value))

  implicit def IntToStringBigInt(value: Int) = StringBigInt(BigInt(value))

  implicit def BigDecimalToStringBigDecimal(value: BigDecimal) = StringBigDecimal(value)

  implicit def BigIntToStringBigInt(value: BigInt) = StringBigInt(value)
}
29 |
/** Alternative to [[spray.json.BasicFormats]] `JsNumber` marshalling. */
trait BigNumberMarshalling {

  /** Persists `StringBigDecimal` as a JSON string, avoiding precision loss. */
  implicit object StringBigDecimalJsonFormat extends RootJsonFormat[StringBigDecimal] {
    def write(obj: StringBigDecimal) = JsString(obj.value.toString())

    def read(json: JsValue) = json match {
      case JsString(value) => StringBigDecimal(value)
      case _ => deserializationError("Expected String for StringBigDecimal")
    }
  }

  /** Persists `StringBigInt` as a JSON string, avoiding precision loss. */
  implicit object StringBigIntJsonFormat extends RootJsonFormat[StringBigInt] {

    def write(obj: StringBigInt) = JsString(obj.value.toString())

    def read(json: JsValue) = json match {
      case JsString(value) => StringBigInt(value)
      case _ => deserializationError("Expected String for StringBigInt")
    }
  }

}
53 |
trait DateMarshalling {

  /** Marshals `Date` via the BSON meta-JSON convention `{"$date": "<ISO string>"}`. */
  implicit object DateJsonFormat extends BsonMarshalling[Date] with IsoDateChecker {

    override val key = "$date"

    override def writeString(obj: Date) = dateToIsoString(obj)

    override def readString(value: String) = parseIsoDateString(value) match {
      case None => deserializationError("Expected ISO Date format, got %s" format (value))
      case Some(date) => date
    }
  }

}
69 |
/** [[scala.math.BigDecimal]] wrapper that is marshalled to `String`
 * and can therefore be persisted into MongoDB */
final case class StringBigDecimal(value: BigDecimal)

object StringBigDecimal {
  /** Convenience constructor from the `String` representation. */
  def apply(value: String) = new StringBigDecimal(BigDecimal(value))
}

/** [[scala.math.BigInt]] wrapper that is marshalled to `String`
 * and can therefore be persisted into MongoDB */
final case class StringBigInt(value: BigInt)

object StringBigInt {
  /** Convenience constructor from the `String` representation. */
  def apply(value: String) = new StringBigInt(BigInt(value))
}
85 |
/** Allows special types to be marshalled into a meta JSON language
 * which allows ScalaD Mongo serialisation to convert into the correct
 * BSON representation for database persistence.
 */
trait BsonMarshalling[T] extends RootJsonFormat[T] {

  /** Discriminator field name, e.g. `$date` or `$uuid`. */
  val key: String

  /** Renders the payload stored under [[key]]. */
  def writeString(obj: T): String

  /** Parses the payload stored under [[key]]. */
  def readString(value: String): T

  def write(obj: T) = JsObject(key -> JsString(writeString(obj)))

  def read(json: JsValue) = json match {
    case JsObject(map) => map.get(key) match {
      case Some(JsString(text)) => readString(text)
      case x => deserializationError("Expected %s, got %s" format(key, x))
    }
    case x => deserializationError("Expected JsObject, got %s" format (x))
  }

}
109 |
trait UuidMarshalling {

  /** Marshals `UUID` via the BSON meta-JSON convention `{"$uuid": "<canonical string>"}`. */
  implicit object UuidJsonFormat extends BsonMarshalling[UUID] with UuidChecker {

    override val key = "$uuid"

    override def writeString(obj: UUID) = obj.toString

    override def readString(value: String) = parseUuidString(value) match {
      case None => deserializationError("Expected UUID format, got %s" format (value))
      case Some(uuid) => uuid
    }
  }

}
125 |
126 |
trait UriMarshalling {

  /** Marshals `java.net.URI` as a plain JSON string. */
  implicit protected object UriJsonFormat extends RootJsonFormat[URI] {
    def write(x: URI) = JsString(x.toString)

    def read(value: JsValue) = value match {
      case JsString(x) => new URI(x)
      case x => deserializationError("Expected URI as JsString, but got " + x)
    }
  }
}
138 |
139 |
140 | /**
141 | * Flattens the JSON representation of a case class that contains a single `value`
142 | * element from:
143 | *
144 | * {"value": "..."}
145 | *
146 | * to `"..."`
147 | */
148 | case class SingleValueCaseClassFormat[T <: {def value : V}, V](construct: V => T)(implicit delegate: JsonFormat[V]) extends RootJsonFormat[T] {
149 |
150 | import scala.language.reflectiveCalls
151 | override def write(obj: T) = delegate.write(obj.value)
152 |
153 | override def read(json: JsValue) = construct(delegate.read(json))
154 | }
155 |
156 |
// Marshaller for innocent case classes that don't have any parameters
// assumes that the case classes behave like singletons
// https://github.com/spray/spray-json/issues/41
case class NoParamCaseClassFormat[T](instance: T) extends RootJsonFormat[T] {

  // NOTE(review): getSimpleName of a case *object* includes a trailing '$' —
  // confirm this is only used with parameterless case classes, as the comment
  // above implies.
  override def write(obj: T) = JsString(instance.getClass.getSimpleName)

  override def read(json: JsValue) = json match {
    case JsString(x) =>
      // only the exact class name round-trips; anything else is an error
      if(x != instance.getClass.getSimpleName)
        deserializationError("Expected %s, but got %s" format (instance.getClass.getSimpleName, x))
      instance
    case x => deserializationError("Expected JsString, but got " + x)
  }
}
172 |
// nulls are used in some Mongo Queries, so don't forget to import this
trait NullMarshalling {
  /** Maps `null` <-> `JsNull` so queries can match absent values. */
  implicit protected val NullFormat = new RootJsonFormat[Null] {
    def write(obj: Null) = JsNull
    def read(json: JsValue) = null
  }
}
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/sprayjson/serialisation.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import spray.json._
4 | import scala._
5 | import com.mongodb.{BasicDBObject, BasicDBList}
6 | import java.util.{UUID, Date}
7 | import org.bson.types.ObjectId
8 | import org.eigengo.scalad.mongo.{UuidChecker, MongoSerialiser}
9 | import akka.contrib.jul.JavaLogging
10 |
/** Uses `spray-json` to serialise/deserialise database objects
 * directly from `JsObject` -> `DBObject`.
 *
 * 1. `UUID` and `Date` are treated as special cases and stored using native types.
 * 2. MongoDB does not have support for arbitrary precision numbers, see
 * [[org.eigengo.scalad.mongo.sprayjson.BigNumberMarshalling]].
 */
class SprayJsonSerialisation[T: JsonFormat] extends MongoSerialiser[T] with SprayJsonConvertors with JavaLogging {

  /** Entity -> spray-json AST -> driver object graph. */
  override def serialise(entity: T): Object = js2db(implicitly[JsonFormat[T]].write(entity))

  /** Driver object graph -> spray-json AST -> entity. */
  override def deserialise(found: Object): T = implicitly[JsonFormat[T]].read(obj2js(found))
}
24 |
trait SprayJsonConvertors extends UuidChecker with UuidMarshalling with DateMarshalling {
  this: JavaLogging =>

  /** Recursively converts a spray-json AST into the Java object graph
    * understood by the MongoDB driver.
    */
  protected def js2db(jsValue: JsValue): Object = {
    import scala.collection.convert.WrapAsJava._

    jsValue match {
      case JsString(s) => s
      case JsNumber(n) =>
        // MongoDB doesn't support arbitrary precision numbers
        if (n.isValidLong)
          new java.lang.Long(n.toLong)
        else {
          // https://issues.scala-lang.org/browse/SI-6699
          val d = n.toDouble
          if (n != BigDecimal(d))
            log.info("Lost precision from " + n + " to " + d)
          new java.lang.Double(d)
        }
      case JsNull => null
      case JsBoolean(b) => Boolean.box(b)
      case a: JsArray =>
        val list = new BasicDBList()
        list.addAll(a.elements.map(f => js2db(f)))
        list
      case o: JsObject =>
        val fields = o.fields
        // the meta-JSON wrappers ($date / $uuid) become native BSON types
        if (fields.contains("$date")) o.convertTo[Date]
        else if (fields.contains("$uuid")) o.convertTo[UUID]
        else new BasicDBObject(fields.map(f => (f._1, js2db(f._2))).toMap)
    }
  }

  /** Recursively converts a driver object graph back into a spray-json AST.
    * Throws `UnsupportedOperationException` for values with no JSON mapping.
    */
  protected def obj2js(obj: Object): JsValue = {
    import scala.language.postfixOps
    import scala.collection.convert.WrapAsScala._

    obj match {
      case a: BasicDBList => JsArray(a.toList.map {
        f => obj2js(f)
      })
      case dbObj: BasicDBObject =>
        val javaMap = dbObj.toMap.asInstanceOf[java.util.Map[String, Object]]
        JsObject(javaMap.map {
          f => (f._1, obj2js(f._2))
        } toMap)
      // ObjectIds have no JSON equivalent, so only their string form survives
      case objId: ObjectId => JsString(objId.toString)
      case s: java.lang.String => JsString(s)
      case b: java.lang.Boolean => JsBoolean(b)
      case i: java.lang.Integer => JsNumber(i)
      case l: java.lang.Long => JsNumber(l)
      case d: java.lang.Double => JsNumber(d)
      case date: java.util.Date => date.toJson
      case uuid: java.util.UUID => uuid.toJson
      case null => JsNull
      case unsupported =>
        throw new UnsupportedOperationException("Deserialising " + unsupported.getClass + ": " + unsupported)
    }
  }
}
85 |
86 |
--------------------------------------------------------------------------------
/src/main/scala/org/eigengo/scalad/mongo/support.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import java.util.{Date, UUID}
4 | import java.text.{ParseException, SimpleDateFormat}
5 |
trait UuidChecker {
  /** Parses the canonical 36-character UUID representation.
    *
    * @return `Some(uuid)` when `token` is a valid UUID string, `None` otherwise.
    */
  def parseUuidString(token: String): Option[UUID] = {
    // cheap length check before attempting the full parse
    if (token.length != 36) None
    else
      try Some(UUID.fromString(token))
      catch {
        // redundant `return` removed: the case expression is the result
        case _: IllegalArgumentException => None
      }
  }
}
15 |
trait IsoDateChecker {
  // one formatter per thread: SimpleDateFormat is not thread-safe
  private val localIsoDateFormatter = new ThreadLocal[SimpleDateFormat] {
    override def initialValue() = {
      val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")
      // SimpleDateFormat is lenient by default, silently accepting impossible
      // dates such as month 13; strict parsing rejects them
      format.setLenient(false)
      format
    }
  }

  /** Renders the date in the ISO 8601 format used throughout the database. */
  def dateToIsoString(date: Date) = localIsoDateFormatter.get().format(date)

  /** @return `Some(date)` iff `date` is a 28-character ISO 8601 timestamp
    *         (`yyyy-MM-dd'T'HH:mm:ss.SSSZ` with a numeric zone), else `None`.
    */
  def parseIsoDateString(date: String): Option[Date] =
    if (date.length != 28) None
    else try Some(localIsoDateFormatter.get().parse(date))
    catch {
      case p: ParseException => None
    }
}
30 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/experimental/RestrictionsSpec.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.experimental
2 |
3 | import org.specs2.mutable.Specification
4 | import org.eigengo.scalad.mongo.LongEntity
5 |
6 |
/** Exercises the experimental restrictions DSL against the trivial
  * [[SpecNativeRestriction]] back-end.
  */
class RestrictionsSpec extends Specification with Restrictions with StringRestrictionsPaths with SpecNativeRestrictions {

  // exercises the no-op marshaller for Any; the result is discarded
  noopNativeRestrictionMarshaller[Any]

  "trivial query" in {
    val le = LongEntity(1, "foo")
    val query: SpecNativeRestriction = "someobj" equalTo le
    query.r mustEqual EqualsRestriction("someobj", le)
  }

  "conjunctions and disjunctions" should {

    "combine simple restrictions" in {
      val query: SpecNativeRestriction = ("username" equalTo "foo") && ("password" equalTo "merde embulante")
      query.r mustEqual ConjunctionRestriction(EqualsRestriction("username", "foo"),
        EqualsRestriction("password", "merde embulante"))
    }

    "simplify well" in {
      // `x == a && x != a` can never hold, so the conjunction collapses
      val query: SpecNativeRestriction = ("username" equalTo "foo") && ("username" notEqualTo "foo")
      query.r mustEqual ContradictionRestriction
    }

  }

}
33 |
/** Trivial native representation: just carries the [[Restriction]] verbatim. */
case class SpecNativeRestriction(r: Restriction)

trait SpecNativeRestrictions extends NativeRestrictions {

  type NativeRestriction = SpecNativeRestriction

  def convertToNative(restriction: Restriction) = SpecNativeRestriction(restriction)

  /** Marshals any value as itself — no conversion needed for the tests. */
  implicit def noopNativeRestrictionMarshaller[A] = new NativeRestrictionsMarshaller[A] {
    type NativeRestrictionValue = A
    def marshal(value: A) = value
  }
}
47 |
48 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/mongo/mongodbtests.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo
2 |
3 | import com.mongodb._
4 | import org.specs2.mutable.Specification
5 | import java.util.concurrent.atomic.AtomicInteger
6 |
/** Connects to a *local* MongoDB and wipes the test database up front. */
trait MongoCrudTestAccess {
  val m = new Mongo()
  m.setWriteConcern(WriteConcern.SAFE)
  // write concern needed to catch constraint violations (Mongo Magic)
  val db = m.getDB("MongoCrudTest")
  db.dropDatabase()
}
14 |
case class LongEntity(id: Long, word: String)

/** Implicit plumbing (serialiser, indexed collection, key/identity queries)
  * needed to persist [[LongEntity]] in the tests.
  */
trait LongEntityPersistence extends MongoCrudTestAccess {

  implicit val LongEntitySerialiser = new MongoSerialiser[LongEntity] {
    def serialise(entity: LongEntity) = new BasicDBObjectBuilder().append("id", entity.id).append("word", entity.word).get()

    def deserialise(dbObject: Object) = {
      val o = dbObject.asInstanceOf[BasicDBObject]
      LongEntity(o.getLong("id"), o.getString("word"))
    }
  }
  implicit val LongEntityCollectionProvider = new IndexedCollectionProvider[LongEntity] {
    def getCollection = db.getCollection("long_entities")

    /** `String`s containing the JSON definition of the unique index to build. */
    override protected def uniqueFields = "{id:1}" :: Nil
  }
  // look-ups by the `id` field, both as key and as identity
  implicit val LongEntityLongKey = new LongFieldQuery[LongEntity]("id")
  implicit val LongEntityIdentity = new FieldIdentityQueryBuilder[LongEntity, Long]{
    def field = "id"
    def id(entity: LongEntity) = entity.id
  }
  implicit val StringSerialiser = new MongoSerialiser[String] {
    def serialise(entity: String) = entity

    def deserialise(dbObject: Object) = dbObject.toString
  }
  // look-ups by the (serialised) `word` field
  implicit val ReadByWord = new SerialisedFieldQueryBuilder[LongEntity, String]("word")
}
45 |
46 | /**
47 | * MongoDB *must* be running locally.
48 | *
49 | * Start mongodb with `mongod --dbpath mongodb` (after creating the dir).
50 | */
51 | class MongoCrudTest extends Specification with LongEntityPersistence {
52 | sequential
53 |
54 | "Spray String JSON serialisation for Long ids" should {
55 | sequential
56 |
57 | val crud = new MongoCrud
58 | val long = 13L
59 | val jsonQuery = "{'id': 13}"
60 | val entity = LongEntity(long, "original")
61 | val update = LongEntity(long, "update")
62 |
63 | "return self from create()" in {
64 | crud.create(entity).get mustEqual (entity)
65 | }
66 |
67 | "throw MongoException when create() violates constraint" in {
68 | crud.create(entity) must throwA[MongoException]
69 | }
70 |
71 | "be searchable by field" in {
72 | crud.readFirst("original").get mustEqual (entity)
73 | }
74 |
75 | "be searchable by identity field" in {
76 | crud.readUnique(long).get mustEqual (entity)
77 | }
78 |
79 | "be searchable by JSON query" in {
80 | import Implicits._
81 | crud.searchFirst[LongEntity](jsonQuery).get mustEqual (entity)
82 | }
83 |
84 | "be searchable by example" in {
85 | crud.findUnique(entity).get mustEqual (entity)
86 | }
87 |
88 | "be updatable by example" in {
89 | crud.updateFirst(update).get mustEqual (update)
90 | }
91 |
92 | "be searchable with restrictions" in {
93 | todo
94 | }
95 |
96 | "be pageable in searches" in {
97 | val counter = new AtomicInteger()
98 | import Implicits._
99 | crud.searchAll[LongEntity](jsonQuery).page(10) {
100 | e => counter.addAndGet(1)
101 | }
102 | counter.get mustEqual 1
103 | }
104 |
105 | "be stress tested in situations that use the ConsumerIterable" in {
106 | // not implemented yet
107 | todo
108 | }
109 | }
110 |
111 | }
112 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/mongo/sprayjson/JsonDslSpec.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import spray.json._
4 | import org.specs2.mutable.Specification
5 |
6 | /**
7 |  * Tests for the spray-json building DSL: `:>` (key/value pair), `<>`
8 |  * (object merge), `~` (object combination) and `$` (array literal).
9 |  *
10 |  * DSL expressions under test are parenthesised explicitly so that the
11 |  * comparison via `===` does not rely on Scala's infix precedence table
12 |  * (`:`-operators such as `:>` happen to bind tighter than `=`-operators
13 |  * such as `===`, but that is easy to misread).
14 |  */
15 | class JsonDslSpec extends Specification with DefaultJsonProtocol with NullMarshalling {
16 |
17 |   import org.eigengo.scalad.mongo.sprayjson._
18 |
19 |   sequential
20 |
21 |   // NB: this previously read `"the JSON DSL should" should`, which produced
22 |   // the duplicated phrase "the JSON DSL should should" in the spec output.
23 |   "the JSON DSL" should {
24 |
25 |     "produce a valid JsValue out of a simple key/String mapping" in {
26 |       ("foo" :> "bar") === JsonParser( """{"foo":"bar"}""")
27 |     }
28 |
29 |     "produce a valid JsValue out of a simple key/Int mapping" in {
30 |       ("foo" :> 10) === JsonParser( """{"foo":10}""")
31 |     }
32 |
33 |     "produce a valid JsValue out of a simple key/Boolean mapping" in {
34 |       ("foo" :> true) === JsonParser( """{"foo":true}""")
35 |     }
36 |
37 |     "produce a valid JsValue out of a nested mapping" in {
38 |       ("foo" :> {
39 |         "bar" :> 10
40 |       }) === JsonParser( """{"foo": {"bar":10}}""")
41 |     }
42 |
43 |     "allow monoid-like mappending of objects" in {
44 |       ({
45 |         "foo" :> {
46 |           "bar" :> 10
47 |         }
48 |       } <> {
49 |         "age" :> 45
50 |       }) === JsonParser( """{"foo":{"bar":10},"age":45}""")
51 |     }
52 |
53 |     "correctly handle JSON arrays" in {
54 |       $(1, 2, 3) === JsonParser("[1,2,3]")
55 |     }
56 |
57 |     "Correctly handle combination of nested object and arrays" in {
58 |       val a1 = {
59 |         "lorem" :> $("ipsum", "lorem ipsum")
60 |       }
61 |       ({
62 |         "foo" :> 10
63 |       } ~ {
64 |         "bar" :> a1
65 |       }) === JsonParser( """{"foo":10,"bar":{"lorem":["ipsum","lorem ipsum"]}}""")
66 |     }
67 |
68 |     "correctly handle null" in {
69 |       ("foo" :> null) === JsonParser( """{"foo":null}""")
70 |     }
71 |
72 |   }
73 | }
69 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/mongo/sprayjson/PersistenceSpec.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import java.util.UUID
4 | import org.specs2.mutable.Specification
5 | import spray.json.{JsObject, JsNull}
6 | import com.mongodb.MongoException
7 |
8 | /**
9 |  * End-to-end CRUD tests for [[Student]] documents driven through [[SprayMongo]]
10 |  * against a live MongoDB instance (connection provided by the test support mixin).
11 |  *
12 |  * The examples run `sequential`ly and share the "students" collection: each
13 |  * example depends on the documents left behind by the previous ones (e.g. the
14 |  * final count of 5 relies on the inserts performed by the aggregate example).
15 |  */
16 | class PersistenceSpec extends Specification with SprayJsonTestSupport with NullMarshalling {
17 |
18 |   sequential
19 |
20 |   val crud = new SprayMongo
21 |
22 |   // Shared fixture: one student, plus the same student after graduation.
23 |   val student = Student(
24 |     101287,
25 |     UUID.randomUUID(),
26 |     "Alfredo",
27 |     List(Person("John", "Doe"), Person("Mary", "Lamb")),
28 |     Address("Foo Rd.", 91),
29 |     graduated = false
30 |   )
31 |
32 |   val modified = student.copy(graduated = true)
33 |
34 |   // "students" collection with a unique index on "id" and an additional
35 |   // (non-unique) index on "collegeUuid".
36 |   implicit val StudentCollectionProvider = new SprayMongoCollection[Student](db, "students", "id":>1) {
37 |     override def indexes: List[JsObject] = {"collegeUuid":>1} :: Nil
38 |   }
39 |
40 |   "SprayJsonSerialisation" should {
41 |
42 |     "ensure a Student is created" in {
43 |       crud.insert(student) === Some(student)
44 |     }
45 |
46 |     // Re-inserting the same id must trip the unique index declared above.
47 |     "ensure the uniqueness constraint is respected" in {
48 |       crud.insert(student) should throwA[MongoException]
49 |     }
50 |
51 |     "ensure a Student is searchable by id" in {
52 |       crud.findOne[Student]("id":>student.id) === Some(student)
53 |     }
54 |
55 |     "ensure a Student is searchable by name" in {
56 |       crud.findOne[Student]("name":>student.name) === Some(student)
57 |     }
58 |
59 |     "ensure a Student is searchable by UUID" in {
60 |       crud.findOne[Student]("collegeUuid":>student.collegeUuid) === Some(student)
61 |     }
62 |
63 |     "ensure a Student is searchable by nested JSON query" in {
64 |       crud.findOne[Student]("address":> student.address) === Some(student)
65 |       crud.findOne[Student]("address":> {"road":> student.address.road <> "number":> student.address.number}) === Some(student)
66 |     }
67 |
68 |     // NB: example description previously misspelt as "modifyable".
69 |     "ensure a Student is modifiable" in {
70 |       crud.findAndModify[Student]("id":>student.id, "$set":>{"graduated":> modified.graduated})
71 |       crud.findOne[Student]("id":>student.id) === Some(modified)
72 |     }
73 |
74 |     "ensure a Student is replaceable" in {
75 |       crud.findAndReplace[Student]("id":>student.id, student)
76 |       crud.findOne[Student]("id":>student.id) === Some(student)
77 |     }
78 |
79 |     "ensure a Student can be deleted" in {
80 |       crud.removeOne[Student]("id":>student.id)
81 |       crud.findOne[Student]("id":>student.id) === None
82 |     }
83 |
84 |     "ensure we can run aggregate queries on Students" in {
85 |       // Repopulate the (now empty) collection with five students; exactly
86 |       // three of them share both the original name and college UUID.
87 |       crud.insert(student)
88 |       crud.insert(student.copy(id = 1))
89 |       crud.insert(student.copy(id = 2))
90 |       crud.insert(student.copy(id = 3, name = "Evil Alfredo"))
91 |       crud.insert(student.copy(id = 4, collegeUuid = UUID.randomUUID()))
92 |
93 |       // this could be achieved with a count()... it's just a POC
94 |       crud.aggregate[Student](
95 |         "$match":> {"name":> "Alfredo"},
96 |         "$match":> {"collegeUuid" :> student.collegeUuid},
97 |         "$group":> {"_id" :> null <> {"count":> {"$sum":> 1}}},
98 |         "$project" :> {"_id":> 0 <> "count":> 1}
99 |       ) === List({"count":> 3})
100 |       // if this fails, you might be running < mongo 2.3.x
101 |     }
102 |
103 |     "ensure Students can be counted" in {
104 |       // 5 documents were inserted by the aggregate example above.
105 |       crud.count[Student]("id":>{"$exists":>true}) === 5
106 |     }
107 |   }
108 | }
93 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/mongo/sprayjson/SerialisationSpec.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import org.specs2.mutable.Specification
4 | import com.mongodb.{DBObject, BasicDBList, BasicDBObject}
5 | import scala.collection.convert.WrapAsJava._
6 | import scala.collection.convert.WrapAsScala._
7 | import java.util.{Date, UUID}
8 | import spray.json.{JsonFormat, JsNull}
9 |
10 | /**
11 |  * Unit tests for `SprayJsonSerialisation`: converting between spray-json
12 |  * formatted Scala values and the Java driver's `BasicDBObject`/`BasicDBList`
13 |  * representation, including the special `$uuid`/`$date` wrapper objects.
14 |  */
15 | class SerialisationSpec extends Specification with SprayJsonTestSupport {
16 |
17 | // Serialising `entity` must produce exactly `expected` (a driver-side value).
18 | def mustSerialise[T: JsonFormat](entity: T, expected: Object) {
19 | val serialiser = new SprayJsonSerialisation[T]
20 | serialiser.serialise(entity) must beEqualTo(expected)
21 | }
22 |
23 | /* Use this if you want to make sure you get back the same entity
24 | * from the entire serialisation/deserialisation process.
25 | */
26 | def mustSerialiseAndDeserialise[T: JsonFormat](entity: T) {
27 | val serialiser = new SprayJsonSerialisation[T]
28 | serialiser.deserialise(serialiser.serialise(entity)) must beEqualTo(entity)
29 | }
30 |
31 | // Deserialising the driver-side `entity` must yield `expected`.
32 | def mustDeserialise[T: JsonFormat](entity: Object, expected: T) {
33 | val serialiser = new SprayJsonSerialisation[T]
34 | serialiser.deserialise(entity) must beEqualTo(expected)
35 | }
36 |
37 | // Deserialising `entity` must fail with an exception of type `E`.
38 | def mustFailToDeserialiseWith[T: JsonFormat, E <: Throwable : Manifest](entity: DBObject) {
39 | val serialiser = new SprayJsonSerialisation[T]
40 | serialiser.deserialise(entity) must throwA[E]
41 | }
42 |
43 | "Spray-Json-base serialiser" should {
44 |
45 | "be able to serialise an Int" in {
46 | val original = Map("value" -> 20)
47 | mustSerialise(original, new BasicDBObject(original))
48 | }
49 |
50 | "be able to deserialise an Int" in {
51 | val original = Map("v" -> 20)
52 | mustDeserialise(new BasicDBObject(original), original)
53 | }
54 |
55 | "be able to serialise a Double" in {
56 | val original = Map("v" -> 1.23)
57 | mustSerialise(original, new BasicDBObject(original))
58 | }
59 |
60 | "be able to serialise a complex Double" in {
61 | val original = Map("v" -> 3.141592653589793238462643383279502884197169399)
62 | mustSerialise(original, new BasicDBObject(original))
63 | }
64 |
65 |
66 | "be able to deserialise a raw Double" in {
67 | val original = Map("value" -> 10.1)
68 | mustDeserialise(new BasicDBObject(original), original)
69 | }
70 |
71 | "be able to serialise a Long" in {
72 | val original = Map("value" -> 200.toLong)
73 | mustSerialise(original, new BasicDBObject(original))
74 | }
75 |
76 | "be able to deserialise a Long" in {
77 | val original = Map("v" -> 200.toLong)
78 | mustDeserialise(new BasicDBObject(original), original)
79 | }
80 |
81 | "be able to serialise a Boolean" in {
82 | val original = Map("v" -> true)
83 | mustSerialise(original, new BasicDBObject(original))
84 | }
85 |
86 | "be able to deserialise an Boolean" in {
87 | val original = Map("v" -> true)
88 | mustDeserialise(new BasicDBObject(original), original)
89 | }
90 |
91 | "be able to serialise a String" in {
92 | val original = Map("value" -> "hello")
93 | mustSerialise(original, new BasicDBObject(original))
94 | }
95 |
96 | "be able to deserialise a String" in {
97 | val original = Map("v" -> "hello")
98 | mustDeserialise(new BasicDBObject(original), original)
99 | }
100 |
101 | // "$uuid"-wrapped strings map to native java.util.UUID values, and back.
102 | "be able to serialise a UUID" in {
103 | val string = "550e8400-e29b-41d4-a716-446655440000"
104 | val json = Map("$uuid" -> string)
105 | mustSerialise(json, UUID.fromString(string))
106 | }
107 |
108 | "be able to deserialise a UUID" in {
109 | val string = "550e8400-e29b-41d4-a716-446655440000"
110 | val json = Map("$uuid" -> string)
111 | mustDeserialise(UUID.fromString(string), json)
112 | }
113 |
114 | // "$date"-wrapped ISO timestamps map to native java.util.Date values, and back.
115 | "be able to serialise a Date" in {
116 | val json = Map("$date" -> "2013-02-04T17:51:35.479+0000")
117 | mustSerialise(json, new Date(1360000295479L))
118 | }
119 |
120 | "be able to deserialise a Date" in {
121 | val json = Map("$date" -> "2013-02-04T17:51:35.479+0000")
122 | mustDeserialise(new Date(1360000295479L), json)
123 | }
124 |
125 | // Legacy expectations (DBObject-wrapped big numbers), superseded by the
126 | // plain-string round-trips further below; kept commented out for reference.
127 | // "be able to serialise a StringBigDecimal" in {
128 | // val string = "100000000000000.00000000000001"
129 | // val original = StringBigDecimal(string)
130 | // val expected:DBObject = new BasicDBObject("StringBigDecimal", string)
131 | // mustSerialise(original, expected)
132 | // }
133 | //
134 | // "be able to deserialise a StringBigDecimal" in {
135 | // val string = "100000000000000.00000000000001"
136 | // val original = StringBigDecimal(string)
137 | // val expected = new BasicDBObject("StringBigDecimal", string)
138 | // mustDeserialise(expected, original)
139 | // }
140 | //
141 | // "be able to serialise a StringBigInt" in {
142 | // val string = "10000000000000000000000000001"
143 | // val original = StringBigInt(string)
144 | // val expected:DBObject = new BasicDBObject("StringBigInt", string)
145 | // mustSerialise(original, expected)
146 | // }
147 | //
148 | // "be able to deserialise a StringBigInt" in {
149 | // val string = "10000000000000000000000000001"
150 | // val original = StringBigInt(string)
151 | // val expected = new BasicDBObject("StringBigInt", string)
152 | // mustDeserialise(expected, original)
153 | // }
154 |
155 | // Big numbers now serialise to their plain string representation.
156 | "be able to serialise a StringBigDecimal" in {
157 | val string = "100000000000000.00000000000001"
158 | val original = StringBigDecimal(string)
159 | mustSerialise(original, string)
160 | }
161 |
162 | "be able to serialise a StringBigInt" in {
163 | val string = "10000000000000000000000000001"
164 | val original = StringBigInt(string)
165 | mustSerialise(original, string)
166 | }
167 |
168 | // Scala Lists round-trip through BasicDBList.
169 | "be able to serialise an homogeneous List" in {
170 | val a1 = List("a", "b", "c")
171 | val dbList = new BasicDBList()
172 | dbList.addAll(a1)
173 | val expected = new BasicDBObject(Map("value" -> dbList))
174 | mustSerialise(Map("value" -> a1), expected)
175 | }
176 |
177 | "be able to deserialise an homogeneous List" in {
178 | val a1 = List("a", "b", "c")
179 | val dbList = new BasicDBList()
180 | dbList.addAll(a1)
181 | val original = dbList
182 | mustDeserialise(original, a1)
183 | }
184 |
185 | "be able to serialise a Map" in {
186 | val original = Map("key" -> "value")
187 | mustSerialise(original, new BasicDBObject(original))
188 | }
189 |
190 | "be able to deserialise a Map" in {
191 | val original = Map("key" -> "value")
192 | mustDeserialise(new BasicDBObject(original), original)
193 | }
194 |
195 | "be able to ONLY serialise a nested Map" in {
196 | //Caveat: If you want to test in isolation only the serialiser,
197 | //you can't simply create a new BasicDBObject out of the Scala
198 | //nested Map. You need to embed the nested Map into a BasicDBObject as well!
199 | val nested = Map("b" -> "c")
200 | val original = Map("a" -> nested)
201 | val expected = new BasicDBObject()
202 | expected.put("a", new BasicDBObject(nested))
203 | mustSerialise(original, expected)
204 | }
205 |
206 | "be able to serialise/deserialise a nested Map" in {
207 | val original = Map("a" -> Map("b" -> Map("c" -> "!")))
208 | mustSerialiseAndDeserialise(original)
209 | }
210 |
211 | // Case classes serialise field-by-field via their jsonFormatN formatter.
212 | "be able to serialise a Person" in {
213 | val original = Person("John", "Doe")
214 | val expected = new BasicDBObject(Map("name" -> "John", "surname" -> "Doe", "id" -> original.id))
215 | mustSerialise(original, expected)
216 | }
217 |
218 | "be able to deserialise a Person" in {
219 | mustSerialiseAndDeserialise(Person("John", "Doe"))
220 | }
221 |
222 | "be able to serialise/deserialise a Student" in {
223 | val uuid = UUID.randomUUID()
224 | val original = Student(101287
225 | ,uuid
226 | ,"Alfredo"
227 | ,List(Person("John", "Doe"), Person("Mary", "Lamb"))
228 | ,Address("Foo Rd.", 91)
229 | ,graduated = false
230 | )
231 | mustSerialiseAndDeserialise(original)
232 | }
233 | }
234 |
235 | }
221 |
--------------------------------------------------------------------------------
/src/test/scala/org/eigengo/scalad/mongo/sprayjson/support.scala:
--------------------------------------------------------------------------------
1 | package org.eigengo.scalad.mongo.sprayjson
2 |
3 | import java.util.UUID
4 | import org.eigengo.scalad.mongo.MongoCrudTestAccess
5 | import spray.json.{JsValue, JsonFormat, DefaultJsonProtocol}
6 | import akka.contrib.jul.JavaLogging
7 | import com.mongodb.DBObject
8 | import org.specs2.mutable.Specification
9 |
10 | // Test-only ("bogus") domain entities shared by the spray-json specs.
11 |
12 | /** A person; `id` defaults to a freshly generated random UUID per instance. */
13 | case class Person(name: String, surname: String, id: UUID = UUID.randomUUID())
14 |
15 | /** A postal address. */
16 | case class Address(road: String, number: Int)
17 |
18 | /** A student record aggregating the entities above. */
19 | case class Student(
20 |   id: Long,
21 |   collegeUuid: UUID,
22 |   name: String,
23 |   parents: List[Person],
24 |   address: Address,
25 |   graduated: Boolean)
26 |
27 | /**
28 |  * Mixes the MongoDB test connection together with the marshalling traits
29 |  * and the `JsonFormat` instances for the test entities defined above.
30 |  */
31 | trait SprayJsonTestSupport
32 |   extends MongoCrudTestAccess
33 |   with DefaultJsonProtocol
34 |   with UuidMarshalling
35 |   with DateMarshalling
36 |   with BigNumberMarshalling
37 |   with JavaLogging {
38 |   this: Specification =>
39 |
40 |   implicit val PersonFormatter = jsonFormat3(Person)
41 |   implicit val AddressFormatter = jsonFormat2(Address)
42 |   implicit val StudentFormatter = jsonFormat6(Student)
43 | }
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 |
2 | // Single source of truth for the artifact version of every module in this build.
3 | version in ThisBuild := "1.3.2"
4 |
--------------------------------------------------------------------------------