├── .gitignore
├── LICENSE.txt
├── README.md
├── pom.xml
└── src
    ├── etc
    │   └── header.txt
    ├── main
    │   └── scala
    │       └── com
    │           └── osinka
    │               ├── mongodb.scala
    │               └── mongodb
    │                   ├── DBObjectCollection.scala
    │                   ├── Implicits.scala
    │                   ├── MongoCollection.scala
    │                   ├── MongoObject.scala
    │                   ├── Query.scala
    │                   ├── Serializer.scala
    │                   ├── shape
    │                   │   ├── Field.scala
    │                   │   ├── FieldCond.scala
    │                   │   ├── FieldModify.scala
    │                   │   ├── Implicits.scala
    │                   │   ├── Shape.scala
    │                   │   ├── ShapeQuery.scala
    │                   │   └── ShapedCollection.scala
    │                   └── wrapper
    │                       ├── DBCollectionWrapper.scala
    │                       ├── DBObj.scala
    │                       ├── DBObjectIterator.scala
    │                       ├── MongoCondition.scala
    │                       └── MongoOp.scala
    └── test
        └── scala
            └── com
                └── osinka
                    └── mongodb
                        ├── Config.scala
                        ├── Helper.scala
                        ├── baseSpec.scala
                        ├── benchmark
                        │   ├── BenchmarkSuite.scala
                        │   ├── ConstraintOverhead.scala
                        │   ├── Domain.scala
                        │   ├── SerializationOverhead.scala
                        │   └── overhead.scala
                        ├── collection.scala
                        ├── conversions.scala
                        ├── orig
                        │   ├── origSpec.scala
                        │   └── plain.scala
                        ├── query.scala
                        └── shape
                            ├── ArrayModel.scala
                            ├── ComplexModel.scala
                            ├── MapModel.scala
                            ├── OptModel.scala
                            ├── RefModel.scala
                            ├── SimpleModel.scala
                            ├── collection.scala
                            ├── fields.scala
                            ├── query.scala
                            ├── serializer.scala
                            ├── shapeSpec.scala
                            └── update.scala
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *~
2 | /target
3 | /lib_managed/
4 | /src_managed/
5 | /project/boot/
6 | /project/build/target/
7 | /src/test/resources/database.properties
8 | /.ensime
9 | .DS_Store
10 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | ====
2 | Copyright (C) 2009 Osinka
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | ====
16 |
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # mongo-scala-driver is deprecated
2 |
3 | `mongo-scala-driver` is a library that implements both field mapping
4 | capabilities and database access to MongoDB from Scala (see the usage sketch below).
5 |
6 | It is deprecated. Instead, use:
7 |
8 | * [Subset](https://github.com/osinka/subset) is the next-generation
9 |   library that helps you work with documents and sub-document fields
10 |   and build queries with them.
11 |
12 | * [Mongo Java Driver](http://www.mongodb.org/display/DOCS/Java+Tutorial)
13 |   and [Casbah](https://github.com/mongodb/casbah) are the official Java
14 |   and Scala drivers for MongoDB.
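As a minimal sketch of the usage the driver provided, reconstructed from the sources in this repository: the `User` class, field names, collection name and connection settings below are illustrative only, not part of the project.

```scala
import com.mongodb.{DBObject, Mongo}
import com.osinka.mongodb._
import com.osinka.mongodb.shape._

// Illustrative domain class carrying a MongoDB identity
class User(var name: String) extends MongoObject

// Shape: declares how a User maps to a MongoDB document
object UserShape extends MongoObjectShape[User] {
  lazy val name = Field.scalar("name", (u: User) => u.name, (u: User, n: String) => u.name = n)

  override lazy val * = List(name)
  override def factory(dbo: DBObject) = for {name(n) <- Some(dbo)} yield new User(n)
}

object Example {
  def main(args: Array[String]) {
    val db = new Mongo("localhost").getDB("test")

    // Untyped view: any DBCollection as a Scala collection of DBObject
    val plain = db.getCollection("users").asScala
    plain << Map("name" -> "Joe")

    // Typed view: the same collection seen through the shape
    val users = db.getCollection("users") of UserShape
    users += new User("Jane")
    users foreach { u => println(u.name) }

    // Queries are immutable values that can be applied to a collection
    val firstTwo = (Query() take 2) in plain
    println(firstTwo.size)
  }
}
```

The implicit conversions used above (`asScala`, `of`, `in`, and `Map` to `DBObject`) come from
`com.osinka.mongodb.Implicits` and `com.osinka.mongodb.shape.Implicits`, both mixed into the
`com.osinka.mongodb` package object.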
15 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | com.osinka 5 | mongo-scala-driver 6 | 0.8.16-SNAPSHOT 7 | bundle 8 | MongoDB driver for Scala 9 | 2009 10 | 11 | 12 | Osinka.com 13 | http://www.osinka.com 14 | 15 | 16 | 17 | Apache License, Version 2.0 18 | http://www.apache.org/licenses/LICENSE-2.0 19 | 20 | 21 | 22 | 23 | 24 | alaz 25 | azarov@osinka.com 26 | Alexander Azarov 27 | +4 28 | 29 | 30 | 31 | 32 | 33 | mongo-scala-driver Developer mailing list 34 | http://groups.google.com/group/mongodb-scala/topics 35 | mongodb-scala@googlegroups.com 36 | mongodb-scala+subscribe@googlegroups.com 37 | mongodb-scala+unsubscribe@googlegroups.com 38 | 39 | 40 | 41 | 42 | scm:git:git://github.com/osinka/mongo-scala-driver.git 43 | scm:git:git@github.com:osinka/mongo-scala-driver.git 44 | http://github.com/osinka/mongo-scala-driver 45 | 46 | 47 | 48 | github 49 | http://github.com/osinka/mongo-scala-driver/issues 50 | 51 | 52 | 53 | 54 | nexus.scala-tools.org 55 | Scala-Tools Distribution Repository for Releases 56 | http://nexus.scala-tools.org/content/repositories/releases 57 | 58 | 59 | nexus.scala-tools.org 60 | Scala-Tools Distribution Repository for Snapshots 61 | http://nexus.scala-tools.org/content/repositories/snapshots 62 | false 63 | 64 | 65 | 66 | 67 | 68 | scala-tools.org 69 | Scala-Tools Maven2 Repository 70 | http://scala-tools.org/repo-releases 71 | 72 | 73 | 74 | 75 | 76 | scala-tools.org 77 | Scala-Tools Maven2 Repository 78 | http://scala-tools.org/repo-releases 79 | 80 | 81 | 82 | 83 | 84 | org.scala-lang 85 | scala-library 86 | ${scala.version} 87 | 88 | 89 | org.mongodb 90 | mongo-java-driver 91 | 2.7.2 92 | 93 | 94 | org.scala-tools.testing 95 | specs_2.8.1 96 | 1.6.7.2 97 | test 98 | 99 | 100 | junit 101 | junit 102 | 4.10 103 | test 104 | 105 | 106 | org.scala-lang 107 | scala-compiler 108 | ${scala.version} 109 | test 110 | 111 | 112 | 113 | 114 | src/main/scala 115 | src/test/scala 116 | 117 | 118 | org.scala-tools 119 | maven-scala-plugin 120 | 2.15.2 121 | 122 | 123 | 124 | compile 125 | testCompile 126 | 127 | 128 | 129 | 130 | ${scala.version} 131 | 132 | 133 | -Xmx256m 134 | 135 | 136 | -unchecked 137 | -deprecation 138 | -g:vars 139 | -explaintypes 140 | -deprecation 141 | -make:transitivenocp 142 | -dependencyfile 143 | ${project.build.directory}/.scala_dependencies 144 | 145 | 146 | 147 | 148 | overhead 149 | com.osinka.mongodb.benchmark.overhead 150 | 151 | -Xshare:off 152 | 153 | 154 | 155 | 50000 156 | 4 157 | 158 | 159 | 160 | 161 | 162 | 163 | org.apache.maven.plugins 164 | maven-surefire-plugin 165 | 2.10 166 | 167 | 168 | com/osinka/mongodb/benchmark/** 169 | 170 | 171 | 172 | 173 | com.mycila.maven-license-plugin 174 | maven-license-plugin 175 | 1.9.0 176 | 177 |
${basedir}/src/etc/header.txt
178 | false 179 | true 180 | true 181 | 182 | .git/** 183 | .manager/** 184 | *.gitignore 185 | etc/*.txt 186 | *.xml 187 | *.textile 188 | project/** 189 | 190 | true 191 | 192 | JAVADOC_STYLE 193 | 194 | true 195 | UTF-8 196 | 197 | ${project.inceptionYear} 198 | http://osinka.ru 199 | Osinka 200 | 201 |
202 |
203 | 204 | org.apache.felix 205 | maven-bundle-plugin 206 | 2.0.1 207 | true 208 | 209 | 210 | <_versionpolicy>[$(@),$(version;+;$(@))) 211 | com.osinka.mongodb.* 212 | 213 | 214 | 215 | 216 | org.apache.maven.plugins 217 | maven-release-plugin 218 | 2.2.1 219 | 220 | 221 | org.apache.maven.scm 222 | maven-scm-provider-gitexe 223 | 1.5 224 | 225 | 226 | 227 | deploy 228 | release 229 | 230 | 231 |
232 |
233 | 234 | 235 | 236 | 237 | 238 | org.scala-tools 239 | maven-scala-plugin 240 | 2.15.2 241 | 242 | 243 | 244 | 245 | 246 | 247 | coverage 248 | 249 | 250 | BUILD_NUMBER 251 | 252 | 253 | 254 | 255 | 256 | org.codehaus.mojo 257 | cobertura-maven-plugin 258 | 2.5.1 259 | 260 | true 261 | 262 | xml 263 | html 264 | 265 | 266 | 267 | 268 | package 269 | 270 | cobertura 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | 279 | release 280 | 281 | 282 | 283 | org.apache.maven.plugins 284 | maven-source-plugin 285 | 2.1.2 286 | 287 | 288 | attach-sources 289 | 290 | jar 291 | 292 | 293 | 294 | 295 | 296 | org.apache.maven.plugins 297 | maven-scm-plugin 298 | 1.5 299 | 300 | 301 | org.apache.maven.scm 302 | maven-scm-provider-gitexe 303 | 1.5 304 | 305 | 306 | 307 | 308 | 309 | 310 | 311 | 312 | 313 | UTF-8 314 | 2.8.2 315 | 316 | 317 | 318 | 2.0.9 319 | 320 |
321 | 322 | -------------------------------------------------------------------------------- /src/etc/header.txt: -------------------------------------------------------------------------------- 1 | Copyright (C) ${year} ${author} <${email}> 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka 17 | 18 | import com.osinka.mongodb._ 19 | 20 | package object mongodb extends Implicits with shape.Implicits -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/DBObjectCollection.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb 17 | 18 | import com.mongodb.{DBCollection, DBObject} 19 | 20 | /** 21 | * Collection of DBObjects 22 | * 23 | * @author Alexander Azarov 24 | * @see PlainDBOSerializer 25 | */ 26 | class DBObjectCollection(override val underlying: DBCollection) 27 | extends MongoCollection[DBObject] 28 | with QueriedCollection[DBObject, DBObjectCollection] { 29 | 30 | override val serializer: Serializer[DBObject] = PlainDBOSerializer 31 | 32 | // -- QueriedCollection[T] 33 | override val query: Query = Query.empty 34 | override def applied(q: Query): DBObjectCollection = new DBObjectCollection(underlying) { 35 | override val query = q 36 | } 37 | 38 | // -- MongoCollection 39 | override def stringPrefix: String = "DBObjectCollection("+getName+")" 40 | 41 | override def <<(o: DBObject) = underlying.insert(o) 42 | 43 | override def <<(objs: Seq[DBObject]) = underlying.insert(objs.toArray, underlying.getWriteConcern) 44 | 45 | override def < Some(obj) 49 | case msg: String => None 50 | } 51 | } 52 | 53 | override def +=(obj: DBObject) { underlying.save(obj) } 54 | 55 | override def -=(obj: DBObject) { underlying.remove(obj) } 56 | } 57 | 58 | /** 59 | * Serializer of DBObject to DBObject: does nothing and passes the same 60 | * DBObject through 61 | * 62 | * @author Alexander Azarov 63 | */ 64 | object PlainDBOSerializer extends Serializer[DBObject] { 65 | override def in(obj: DBObject) = obj 66 | override def out(dbo: DBObject) = Some(dbo) 67 | override def mirror(x: DBObject)(dbo: DBObject) = dbo 68 | } 69 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/Implicits.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | /* 17 | * Implicits.scala 18 | * 19 | * To change this template, choose Tools | Template Manager 20 | * and open the template in the editor. 
21 | */ 22 | 23 | package com.osinka.mongodb 24 | 25 | import com.mongodb.{DBObject, DBCollection} 26 | 27 | trait Implicits { 28 | import wrapper._ 29 | 30 | implicit def collAsScala(coll: DBCollection) = new { 31 | def asScala = new DBObjectCollection(coll) 32 | } 33 | 34 | implicit def queryToColl(q: Query) = new { 35 | def in[T, Self <: QueriedCollection[T, Self]](coll: QueriedCollection[T, Self]): Self = coll.applied(q) 36 | } 37 | 38 | implicit def wrapperToDBO(coll: DBCollectionWrapper): DBCollection = coll.underlying 39 | 40 | implicit def mapToDBObject(m: Map[String, Any]): DBObject = DBO.fromMap(m) 41 | } 42 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/MongoCollection.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import util.control.Exception._ 19 | import org.bson.types.ObjectId 20 | import com.mongodb._ 21 | import wrapper._ 22 | 23 | /** 24 | * Scala collection of objects T backed by MongoDB DBCollection. 25 | * 26 | * @author Alexander Azarov 27 | * @see com.osinka.mongodb.shape.ShapedCollection 28 | */ 29 | trait MongoCollection[T] extends PartialFunction[ObjectId, T] with Iterable[T] with DBCollectionWrapper { 30 | /** 31 | * Serializer for objects of type T 32 | */ 33 | def serializer: Serializer[T] 34 | 35 | protected def cursor(q: Query) = { 36 | val cursor = find(q.query) 37 | for {n <- q.skip } cursor.skip(n) 38 | for {n <- q.limit} cursor.limit(n) 39 | for {sort <- q.sorting} cursor.sort(sort) 40 | // TODO: snapshot mode 41 | cursor 42 | } 43 | 44 | protected def find(q: Query): Iterator[T] = 45 | new DBObjectIterator(cursor(q)).flatMap{serializer.out(_).toList.iterator} 46 | 47 | protected def findOne(q: Query): Option[T] = 48 | if (q.slice_?) find(q take 1).toSeq.headOption 49 | else Option(findOne(q.query)).flatMap{serializer.out} 50 | 51 | protected def getCount(q: Query): Long = { 52 | def lim(n: Int) = q.limit map{_ min n} getOrElse n 53 | def skp(n: Int) = q.skip map{x => (n - x) max 0} getOrElse n 54 | 55 | if (q.slice_?) lim(skp(cursor(q).count)) 56 | else getCount(q.query) 57 | } 58 | 59 | /** 60 | * Wrap related operations, so that they get executed via the same 61 | * database connection 62 | */ 63 | protected def related[T](f: => T) = { 64 | val db = underlying.getDB 65 | try { 66 | db.requestStart 67 | f 68 | } finally { 69 | db.requestDone 70 | } 71 | } 72 | 73 | /** 74 | * Generic update method, not for public usage. 
See MongoDB's update 75 | */ 76 | protected def update(q: DBObject, op: DBObject, multi: Boolean): Boolean = 77 | related { 78 | underlying.update(q, op, false, multi).getField("updatedExisting") match { 79 | case null => false 80 | case b: java.lang.Boolean => b.booleanValue 81 | } 82 | } 83 | 84 | protected def findAndRemove(q: DBObject): Option[T] = 85 | Option(underlying.findAndRemove(q)) flatMap serializer.out 86 | 87 | protected def findAndModify(q: DBObject, sorting: Option[DBObject], op: DBObject, remove: Boolean, returnNew: Boolean, upsert: Boolean): Option[T] = 88 | Option(underlying.findAndModify(q, null, sorting.orNull, remove, op, returnNew, upsert)) flatMap serializer.out 89 | 90 | protected def remove(q: DBObject) { 91 | underlying remove q 92 | } 93 | 94 | /** 95 | * Returns iterator through collection objects. 96 | * 97 | * This method should not be used 98 | * explicitly, use Scala Iterable[T] methods instead. 99 | */ 100 | protected def find: Iterator[T] = find(Query.empty) 101 | 102 | /** 103 | * Collection size estimate. Rough size estimates the collection size: it does 104 | * not take object shape into account. Do not use this method, use methods from 105 | * Scala Iterable[T] 106 | */ 107 | protected def sizeEstimate = getCount(Query.empty) 108 | 109 | /** 110 | * MongoDB insert method 111 | * @param x object to insert into the collection 112 | * @return WriteResult 113 | */ 114 | def <<(x: T) = { 115 | val dbo = serializer.in(x) 116 | val result = underlying insert dbo 117 | serializer.mirror(x)(dbo) 118 | result 119 | } 120 | 121 | /** 122 | * MongoDB batch insert method 123 | * @param xs sequence of objects to insert into the collection in a batch. 124 | * @return WriteResult 125 | */ 126 | def <<(xs: Seq[T]) = { 127 | val dboList = xs map {serializer.in} 128 | val result = underlying.insert(dboList.toArray, underlying.getWriteConcern) 129 | (xs zip dboList) map {Function.uncurried(serializer.mirror _).tupled} 130 | result 131 | } 132 | 133 | /** 134 | * MongoDB insert with subsequent check for object existance 135 | * @param x object to insert into the collection 136 | * @return None if such object exists already (with the same identity); 137 | * Some(x) in the case of success. 138 | */ 139 | def < Some( serializer.mirror(x)(dbo) ) 144 | case false => None 145 | } 146 | } 147 | } 148 | 149 | /** 150 | * MongoDB DBCollection.save method 151 | * @param x object to save to the collection 152 | */ 153 | def +=(x: T) { 154 | val dbo = serializer.in(x) 155 | underlying save dbo 156 | serializer.mirror(x)(dbo) 157 | } 158 | 159 | /** 160 | * MongoDB DBCollection.remove method 161 | * @param x object to remove from the collection 162 | */ 163 | def -=(x: T) { underlying remove serializer.in(x) } 164 | 165 | /** 166 | * MongoDB DBCollection.remove method 167 | * @param q query 168 | */ 169 | def -=(q: Query) { remove(q.query) } 170 | 171 | /** 172 | * MongoDB DBCollection update method 173 | * @param q filter, which object to update 174 | * @param x new object 175 | */ 176 | def update(q: Query, x: T): Boolean = update(q.query, serializer.in(x), false) 177 | 178 | /** 179 | * MongoDB DBCollection.update method 180 | * @param q filter, which objects to update 181 | * @param op set of modify operations in the form of Scala Map 182 | * @param multi update only one object if false or update 183 | * all matching objects if true 184 | * @return true if any objects has been updated 185 | * @todo TODO: update -> foreach?.. 
186 | */ 187 | def update(q: Query, op: Map[String,Any], multi: Boolean): Boolean = update(q.query, DBO.fromMap(op), multi) 188 | 189 | def findAndRemove(q: Query): Option[T] = findAndRemove(q.query) 190 | 191 | def findAndModify(q: Query, op: Map[String, Any]): Option[T] = 192 | findAndModify(q, op, false, false, false) 193 | 194 | def findAndModify(q: Query, op: Map[String,Any], remove: Boolean, returnNew: Boolean, upsert: Boolean): Option[T] = 195 | findAndModify(q.query, q.sorting, DBO.fromMap(op), remove, returnNew, upsert) 196 | 197 | def get(oid: ObjectId): Option[T] = findOne(Query byId oid) 198 | 199 | // -- PartialFunction[ObjectId, T] 200 | override def isDefinedAt(oid: ObjectId) = getCount(Query byId oid) > 0 201 | 202 | override def apply(oid: ObjectId) = get(oid).get 203 | 204 | // -- Collection[T] 205 | override def iterator: Iterator[T] = find 206 | 207 | override def headOption = findOne(Query.empty) 208 | 209 | /** 210 | * Size of the collection. Note: Original MongoDB cursor 211 | * reports collection's size regardless 212 | * of skip and limit modificators. This implementation takes these into 213 | * account: you may expect to get accurate collection length when 214 | * you called drop and take on Query object. 215 | *
216 | * Beware: narrowing as Long value of getCount is cast to Int 217 | */ 218 | override def size: Int = length 219 | def length: Int = sizeEstimate.toInt 220 | 221 | override def stringPrefix: String = "MongoCollection" 222 | 223 | /** 224 | * toString method in Iterable lists all the elements, which can be a 225 | * problem: collections can store a lot of documents in MongoDB. Thus the 226 | * method is overridden to display the collection's size only 227 | */ 228 | override def toString = stringPrefix+"("+getName+"):"+size 229 | } 230 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/MongoObject.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.bson.types.ObjectId 19 | 20 | /** 21 | * Domain object with MongoDB identity 22 | * 23 | * Domain object which should be aware of their identity in MongoDB 24 | * should mix this trait 25 | * 26 | * @author Alexander Azarov 27 | * @see com.osinka.mongodb.shape.MongoObjectShape 28 | */ 29 | trait MongoObject { 30 | /** 31 | * MongoDB's OID field (Object ID) 32 | */ 33 | var mongoOID: Option[ObjectId] = None 34 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/Query.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.bson.types.ObjectId 19 | import com.mongodb.{DBObject, BasicDBObject} 20 | import wrapper._ 21 | 22 | /** 23 | * Immutable MongoDB query object. 24 | * 25 | * @author Alexander Azarov 26 | */ 27 | case class Query(final val query: DBObject, 28 | val skip: Option[Int], 29 | val limit: Option[Int], 30 | val sorting: Option[DBObject]) { 31 | 32 | def slice_? = skip.isDefined || limit.isDefined || sorting.isDefined 33 | 34 | /** 35 | * @return the query where n first elements are dropped. 
36 | * @param n sets the skip parameter if Some; removes the parameter if None 37 | */ 38 | def drop(n: Option[Int]) = copy(skip = n) 39 | 40 | /** 41 | * @return the query limited by first n elements 42 | * @param n sets the limit parameter if Some; removes the parameter if None 43 | */ 44 | def take(n: Option[Int]) = copy(limit = n) 45 | 46 | /** 47 | * @return the query where n first elements are dropped. 48 | */ 49 | def drop(n: Int): Query = drop(Some(n)) 50 | 51 | /** 52 | * @return the query limited by first n elements 53 | */ 54 | def take(n: Int): Query = take(Some(n)) 55 | 56 | /** 57 | * @return the query with sorting 58 | * @param s sets the sorting if Some; removes the sorting if None 59 | */ 60 | def sort(s: Option[DBObject]): Query = copy(sorting = s) 61 | 62 | /** 63 | * @return the query with sorting 64 | */ 65 | def sort(s: DBObject): Query = sort(Some(s)) 66 | 67 | /** 68 | * Merge two queries. The filter part (query) is merged, while skip, limit and 69 | * sorting are taken from q 70 | */ 71 | def *(q: Query): Query = ++(q.query) drop q.skip take q.limit sort q.sorting 72 | 73 | /** 74 | * Add filtering 75 | */ 76 | def ++(q: DBObject): Query = Query(DBO.merge(query, q), skip, limit, sorting) 77 | } 78 | 79 | /** 80 | * Query factory 81 | * 82 | * @author Alexander Azarov 83 | */ 84 | object Query { 85 | /** 86 | * Empty query 87 | */ 88 | final val empty: Query = Query(DBO.empty, None, None, None) 89 | 90 | /** 91 | * @return empty query 92 | */ 93 | def apply(): Query = empty 94 | 95 | /** 96 | * @return the query with filters defined by q 97 | */ 98 | def apply(q: DBObject) = new Query(q, None, None, None) 99 | 100 | /** 101 | * @return the query to find the object by its ID 102 | */ 103 | def byId(oid: ObjectId) = apply(DBO.fromMap(Map("_id" -> oid))) 104 | } 105 | 106 | /** 107 | * @author Alexander Azarov 108 | */ 109 | sealed case class QueryBuilder(val m: Map[String, Any]) { 110 | def dbo = DBO.fromMap(m) 111 | def and(q: QueryBuilder) = { 112 | def mergeMaps(ms: Map[String,Any]*)(f: (Any, Any) => Any) = 113 | (Map[String,Any]() /: ms.flatMap{x => x}) { (m, kv) => 114 | m + (if (m contains kv._1) kv._1 -> f(m(kv._1), kv._2) 115 | else kv) 116 | } 117 | 118 | def coincidence(v1: Any, v2: Any): Any = (v1, v2) match { 119 | case (m1: Map[_,_], m2: Map[_,_]) => 120 | mergeMaps(m1.asInstanceOf[Map[String,Any]], m2.asInstanceOf[Map[String,Any]]) {coincidence} 121 | // case (m1: Map[String,Any], m2: Map[String,Any]) => 122 | // mergeMaps(m1, m2) {coincidence} 123 | case _ => v2 124 | } 125 | 126 | new QueryBuilder( mergeMaps(m, q.m) {coincidence} ) 127 | } 128 | } 129 | 130 | /** 131 | * @author Alexander Azarov 132 | */ 133 | object QueryBuilder { 134 | def apply() = new QueryBuilder(Map.empty[String, Any]) 135 | def apply(tuple: (String, Any)) = new QueryBuilder(Map(tuple)) 136 | } 137 | 138 | 139 | /** 140 | * Mix-in for MongoCollection descendants. 
Modifies the behavior so that the query is 141 | * applied 142 | * 143 | * @author Alexander Azarov 144 | */ 145 | trait QueriedCollection[T, This <: QueriedCollection[T, This]] extends MongoCollection[T] { 146 | /** 147 | * @return the query to apply 148 | */ 149 | def query: Query 150 | 151 | /** 152 | * @return new collection with the query q 153 | */ 154 | def applied(q: Query): This 155 | 156 | // -- MongoCollection[T] 157 | override def find = find(query) 158 | override def headOption = findOne(query) 159 | override def sizeEstimate = getCount(query) 160 | } 161 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/Serializer.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import com.mongodb.DBObject 19 | 20 | /** 21 | * Generic serializer of objects to/from {@link DBObject} 22 | * 23 | * @author Alexander Azarov 24 | * @param T type T is meant to be a Scala object reprensenting 25 | * MongoDB's DBObject. 26 | * @see com.osinka.mongodb.MongoCollection 27 | */ 28 | trait Serializer[T] { 29 | /** 30 | * convert a domain object of type T to DBObject 31 | * 32 | * @param obj Scala object of type T 33 | * @return DBObject which holds object obj 34 | */ 35 | def in(obj: T): DBObject 36 | 37 | /** 38 | * convert a DBObject to domain object 39 | * 40 | * @param dbo DBObject 41 | * @return object read from dbo 42 | */ 43 | def out(dbo: DBObject): Option[T] 44 | 45 | /** 46 | * Modify object to save DBObject identity. DBObjects store identity about the 47 | * document in the DB, this method mirrors this identity information onto the 48 | * domain object. 49 | * 50 | * @param x object which represents dbo in Scala world 51 | * @param dbo DBObject 52 | * @return object x 53 | */ 54 | def mirror(x: T)(dbo: DBObject): T = x 55 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/Field.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import org.bson.types.ObjectId 19 | import com.mongodb.DBObject 20 | import com.osinka.mongodb._ 21 | import wrapper.DBO 22 | 23 | /** 24 | * Field declaration in a Shape 25 | * 26 | * @author Alexander Azarov 27 | */ 28 | trait ObjectField { 29 | /** 30 | * Field name. It will be the key in MongoDB document 31 | */ 32 | def mongoFieldName: String 33 | 34 | private[shape] def mongoFieldPath: List[String] = List(mongoFieldName) 35 | 36 | /** 37 | * Long field names separated by dot are required for queries and modificators 38 | */ 39 | lazy val longFieldName = DBO.dotNotation(mongoFieldPath) 40 | 41 | override def hashCode = longFieldName.hashCode 42 | 43 | override def equals(other: Any): Boolean = other match { 44 | case that: ObjectField => (that canEqual this) && this.longFieldName == that.longFieldName 45 | case _ => false 46 | } 47 | 48 | def canEqual(other: Any): Boolean = true 49 | } 50 | 51 | /** 52 | * Abstract field container. Sometimes documents are nested one inside another. 53 | * 54 | * @author Alexander Azarov 55 | */ 56 | trait FieldContainer { 57 | private[shape] def containerPath: List[String] = Nil 58 | } 59 | 60 | /** 61 | * Field declaration builders for a Shape 62 | * 63 | * @author Alexander Azarov 64 | */ 65 | trait ShapeFields[T, QueryType] extends FieldContainer 66 | with FieldQueryConditions[T, QueryType] with FieldModifyOperations[T, QueryType] { parent => 67 | 68 | /** 69 | * MongoDB field 70 | * 71 | * @see ObjectShape 72 | */ 73 | trait MongoField[A] extends ObjectField with FieldConditions[A] with BaseFieldModifyOp { storage: FieldContent[A] => 74 | /** 75 | * @return true if the field is internal MongoDB's field 76 | */ 77 | def mongoInternal_? : Boolean = mongoFieldName startsWith "_" 78 | 79 | /** 80 | * @return constraint for the field to be part of Shape (currently it's the key existance) 81 | */ 82 | def mongoConstraints: QueryTerm[QueryType] = rep postprocess storage.contentConstraints 83 | 84 | private[shape] def mongoReadFrom(x: T): Option[Any] 85 | private[shape] def mongoWriteTo(x: T, v: Option[Any]) 86 | 87 | /** 88 | * @return Scala field representation object 89 | */ 90 | def rep: FieldRep[_] 91 | 92 | def kindString: String 93 | override def toString: String = 94 | getClass.getName+"{"+kindString+"/"+storage.contentString+">"+rep.toString+"}("+longFieldName+")" 95 | 96 | // -- ObjectField 97 | override def mongoFieldPath: List[String] = parent.containerPath ::: super.mongoFieldPath 98 | } 99 | 100 | /** 101 | * Scalar MongoDB field 102 | */ 103 | trait MongoScalar[A] extends MongoField[A] { storage: FieldContent[A] => 104 | /** 105 | * useful method for field, like 106 | * dbo match { case field(value) => ... } 107 | * 108 | * or in case of mandatory constructor argument 109 | * for {field(v) <- Some(dbo)} yield new Obj(...., v, ...) 110 | */ 111 | def unapply(dbo: DBObject): Option[A] = Option(dbo get mongoFieldName) flatMap storage.deserialize 112 | 113 | /** 114 | * in case of optional field 115 | * new Obj(..., field from dbo, ...) 
116 | */ 117 | def from(dbo: DBObject) = unapply(dbo) 118 | 119 | // -- MongoField[A] 120 | override def rep: FieldRep[A] 121 | 122 | private[shape] def mongoReadFrom(x: T): Option[Any] = 123 | rep.get(x) flatMap storage.serialize 124 | 125 | private[shape] def mongoWriteTo(x: T, v: Option[Any]) { 126 | rep.put(x)(v flatMap storage.deserialize) 127 | } 128 | 129 | override def kindString = "Scalar" 130 | } 131 | 132 | /** 133 | * Array MongoDB field 134 | */ 135 | trait MongoArray[A] extends MongoField[A] { storage: FieldContent[A] => 136 | protected def unpackField(dbo: DBObject) = DBO.toArray(dbo).flatMap{Option[Any](_)}.flatMap{storage.deserialize} 137 | 138 | /** 139 | * useful method for field, like 140 | * dbo match { case field(value) => ... } 141 | * 142 | * or in case of mandatory constructor argument 143 | * for {field(v) <- Some(dbo)} yield new Obj(...., v, ...) 144 | */ 145 | def unapply(dbo: DBObject): Option[Seq[A]] = Option(dbo get mongoFieldName) map { case dbo: DBObject => unpackField(dbo) } 146 | 147 | /** 148 | * in case of optional field 149 | * new Obj(..., field from dbo, ...) 150 | */ 151 | def from(dbo: DBObject) = unapply(dbo) 152 | 153 | // -- MongoField[A] 154 | override def rep: FieldRep[Seq[A]] 155 | 156 | // An array constraints only array field existance and nothing more. 157 | // It could set a constraint on the array's values, but array can be empty 158 | override def mongoConstraints = exists 159 | 160 | private[shape] def mongoReadFrom(x: T): Option[Any] = 161 | rep.get(x) map { _ map storage.serialize } 162 | 163 | private[shape] def mongoWriteTo(x: T, v: Option[Any]) { 164 | rep.put(x)(v map { case dbo: DBObject => unpackField(dbo) }) 165 | } 166 | 167 | override def kindString = "Array" 168 | } 169 | 170 | trait MongoMap[A] extends MongoField[A] { storage: FieldContent[A] => 171 | // TODO: condition on maps value: it depends on Content type. 172 | 173 | protected def unpackField(dbo: DBObject) = { 174 | def jclMap(dbo: DBObject) = { 175 | import scala.collection.mutable.{Map => MMap} 176 | import scala.collection.JavaConversions._ 177 | 178 | val m: MMap[String,Any] = dbo.toMap.asInstanceOf[java.util.Map[String,Any]] 179 | m 180 | } 181 | 182 | (jclMap(dbo) foldLeft Map[String,A]() ) {(m,e) => 183 | storage.deserialize(e._2) match { 184 | case Some(v) => m + (e._1 -> v) 185 | case None => m 186 | } 187 | } 188 | } 189 | 190 | /** 191 | * useful method for field, like 192 | * dbo match { case field(value) => ... } 193 | * 194 | * or in case of mandatory constructor argument 195 | * for {field(v) <- Some(dbo)} yield new Obj(...., v, ...) 196 | */ 197 | def unapply(dbo: DBObject): Option[Map[String,A]] = Option(dbo get mongoFieldName) map { 198 | case dbo: DBObject => unpackField(dbo) 199 | } 200 | 201 | /** 202 | * in case of optional field 203 | * new Obj(..., field from dbo, ...) 
204 | */ 205 | def from(dbo: DBObject) = unapply(dbo) 206 | 207 | // -- MongoField[A] 208 | override def rep: FieldRep[Map[String,A]] 209 | 210 | // Constraints only the field existance (we do not know keys) 211 | override def mongoConstraints = exists 212 | 213 | private[shape] def mongoReadFrom(x: T): Option[Any] = { 214 | def serializeValues(f: Map[String,A]) = (f foldLeft Map[String,Any]() ) { (m,e) => 215 | storage.serialize(e._2) match { 216 | case Some(v) => m + (e._1 -> v) 217 | case None => m 218 | } 219 | } 220 | 221 | rep.get(x) map { m => DBO.fromMap( serializeValues(m) ) } 222 | } 223 | 224 | private[shape] def mongoWriteTo(x: T, v: Option[Any]) { 225 | rep.put(x)(v map { case dbo: DBObject => unpackField(dbo) }) 226 | } 227 | 228 | override def kindString = "Map" 229 | } 230 | 231 | /** 232 | * Field content: scalar, ref, embedded 233 | */ 234 | trait FieldContent[A] { self: ObjectField => 235 | /** 236 | * serializes a field into DBObject value 237 | * @return None if cannot serialize 238 | */ 239 | protected def serialize(x: A): Option[Any] 240 | 241 | /** 242 | * Reads field value from a DBObject value 243 | * @return None if cannot deserialize 244 | */ 245 | protected def deserialize(v: Any): Option[A] 246 | 247 | /** 248 | * Constraints on the content. MongoField will call it, see the 249 | * description there. 250 | * @see MongoField 251 | */ 252 | protected def contentConstraints: QueryTerm[QueryType] 253 | 254 | def contentString: String 255 | } 256 | 257 | /** 258 | * Scalar field content 259 | */ 260 | trait ScalarContent[A] extends FieldContent[A] with ScalarContentConditions[A] { self: MongoField[A] => 261 | // -- FieldContent[A] 262 | override def contentConstraints = exists 263 | 264 | override def serialize(a: A): Option[Any] = Some(a) 265 | override def deserialize(v: Any): Option[A] = Some(v.asInstanceOf[A]) 266 | 267 | override def contentString = "Scalar" 268 | } 269 | 270 | /** 271 | * Reference field content 272 | */ 273 | trait RefContent[V <: MongoObject] extends FieldContent[V] with RefContentConditions[V] { self: MongoField[V] => 274 | protected val coll: MongoCollection[V] 275 | 276 | // -- FieldContent[A] 277 | override def serialize(a: V): Option[Any] = a.mongoOID map {oid => 278 | DBO.fromMap(Map( 279 | "_ref" -> coll.getName, 280 | "_id" -> oid 281 | )) 282 | } 283 | override def deserialize(v: Any): Option[V] = v match { 284 | case dbo: DBObject if dbo.containsField("_id") => 285 | dbo.get("_id") match { 286 | case oid: ObjectId if coll.isDefinedAt(oid) => Some(coll(oid)) 287 | case _ => None 288 | } 289 | case _ => None 290 | } 291 | override def contentConstraints = exists 292 | 293 | override def contentString = "Ref" 294 | } 295 | 296 | /** 297 | * Embedded (nested document) field content 298 | */ 299 | trait EmbeddedContent[V] extends FieldContent[V] with FieldContainer { objectShape: MongoField[V] with ObjectIn[V, QueryType] => 300 | // -- FieldContainer 301 | override def containerPath = mongoFieldPath 302 | 303 | // -- FieldContent[A] 304 | override def contentConstraints = objectShape.constraints 305 | 306 | override def serialize(a: V): Option[Any] = Some(objectShape.in(a)) 307 | override def deserialize(v: Any): Option[V] = v match { 308 | case dbo: DBObject => objectShape.out(dbo) 309 | case _ => None 310 | } 311 | 312 | override def contentString = "Embedded" 313 | } 314 | 315 | /** 316 | * Representation of a field in Scala objects 317 | */ 318 | trait FieldRep[A] { 319 | /** 320 | * Post-process the content's field constraint 
as/if needed 321 | */ 322 | def postprocess(constraints: QueryTerm[QueryType]) = constraints 323 | 324 | /** 325 | * Getter: get a field value from the object 326 | */ 327 | def get[A1>:A](x: T): Option[A1] 328 | 329 | /** 330 | * Setter: set a field in the object 331 | */ 332 | def put[A2<:A](x: T)(a: Option[A2]) 333 | } 334 | 335 | /** 336 | * typical representation implementations 337 | */ 338 | object Represented { 339 | /** 340 | * FieldRep implemented as field getter and setter 341 | */ 342 | def by[A](g: T => A, p: Option[(T, A) => Unit]) = new FieldRep[A] { 343 | override def get[A1>:A](x: T): Option[A1] = Some(g(x)) 344 | override def put[A2<:A](x: T)(a: Option[A2]) { 345 | for {func <- p; value <- a} func(x, value) 346 | } 347 | 348 | override def toString = "field" 349 | } 350 | 351 | /** 352 | * FieldRep implemented as Option[A] field getter and setter 353 | */ 354 | def byOption[A](g: T => Option[A], p: Option[(T, Option[A]) => Unit]) = new FieldRep[A] { 355 | override def postprocess(constraints: QueryTerm[QueryType]) = QueryTerm[QueryType]() 356 | override def get[A1>:A](x: T): Option[A1] = g(x) 357 | override def put[A2<:A](x: T)(a: Option[A2]) { 358 | for {func <- p} func(x, a) 359 | } 360 | 361 | override def toString = "Option" 362 | } 363 | } 364 | 365 | /** 366 | * Scalar field 367 | * @param mongoFieldName document key 368 | * @param g field getter 369 | * @param p optional field setter 370 | */ 371 | class ScalarField[A](override val mongoFieldName: String, val g: T => A, val p: Option[(T,A) => Unit]) 372 | extends MongoScalar[A] with ScalarContent[A] with ScalarFieldModifyOp[A] { 373 | override val rep = Represented.by(g, p) 374 | override def canEqual(other: Any): Boolean = other.isInstanceOf[ScalarField[_]] 375 | } 376 | 377 | /* 378 | * Optional field 379 | * @param mongoFieldName document key 380 | * @param g field getter 381 | * @param p optional field setter 382 | */ 383 | class OptionalField[A](override val mongoFieldName: String, val g: T => Option[A], val p: Option[(T,Option[A]) => Unit]) 384 | extends MongoScalar[A] with ScalarContent[A] with ScalarFieldModifyOp[A] with Optional[A] { 385 | override val rep = Represented.byOption(g, p) 386 | override def canEqual(other: Any): Boolean = other.isInstanceOf[OptionalField[_]] 387 | } 388 | 389 | /** 390 | * Shape object living in a field. 391 | * 392 | * For instantiation as an object: ObjectIn should be mixed in. 
393 | * @param mongoFieldName document key 394 | * @param g field getter 395 | * @param p optional field setter 396 | */ 397 | class EmbeddedField[V](override val mongoFieldName: String, val g: T => V, val p: Option[(T,V) => Unit]) 398 | extends MongoScalar[V] with EmbeddedContent[V] with FieldModifyOp[V] { 399 | self: MongoField[V] with ObjectIn[V, QueryType] => 400 | 401 | override val rep = parent.Represented.by(g, p) 402 | override def canEqual(other: Any): Boolean = other.isInstanceOf[EmbeddedField[_]] 403 | } 404 | 405 | /** 406 | * Reference 407 | * @param mongoFieldName document key 408 | * @param coll MongoCollection (typically ShapedCollection) where objects V live 409 | * @param g field getter 410 | * @param p optional field setter 411 | */ 412 | class RefField[V <: MongoObject](override val mongoFieldName: String, override val coll: MongoCollection[V], 413 | val g: T => V, val p: Option[(T,V) => Unit]) 414 | extends MongoScalar[V] with RefContent[V] { 415 | 416 | override val rep = parent.Represented.by(g, p) 417 | override def canEqual(other: Any): Boolean = other.isInstanceOf[RefField[_]] 418 | } 419 | 420 | /** 421 | * Optional reference 422 | * @param mongoFieldName document key 423 | * @param coll MongoCollection (typically ShapedCollection) where objects V live 424 | * @param g field getter 425 | * @param p optional field setter 426 | */ 427 | class OptionalRefField[V <: MongoObject](override val mongoFieldName: String, override val coll: MongoCollection[V], 428 | val g: T => Option[V], val p: Option[(T,Option[V]) => Unit]) 429 | extends MongoScalar[V] with RefContent[V] with Optional[V] { 430 | 431 | override val rep = parent.Represented.byOption(g, p) 432 | override def canEqual(other: Any): Boolean = other.isInstanceOf[OptionalRefField[_]] 433 | } 434 | 435 | /** 436 | * Array of scalars 437 | * @param mongoFieldName document key 438 | * @param g field getter 439 | * @param p optional field setter 440 | */ 441 | class ArrayField[A](override val mongoFieldName: String, val g: T => Seq[A], val p: Option[(T,Seq[A]) => Unit]) 442 | extends MongoArray[A] with ScalarContent[A] with ArrayFieldModifyOp[A] { 443 | 444 | override val rep = Represented.by[Seq[A]](g, p) 445 | override def canEqual(other: Any): Boolean = other.isInstanceOf[ArrayField[_]] 446 | } 447 | 448 | /** 449 | * Array of embedded objects. Must be subclassed: ObjectIn should be mixed in. 
450 | * @param mongoFieldName document key 451 | * @param g field getter 452 | * @param p optional field setter 453 | */ 454 | class ArrayEmbeddedField[V](override val mongoFieldName: String, val g: T => Seq[V], val p: Option[(T,Seq[V]) => Unit]) 455 | extends MongoArray[V] with EmbeddedContent[V] with ArrayFieldModifyOp[V] { 456 | self: MongoField[V] with ObjectIn[V, QueryType] => 457 | 458 | override val rep = parent.Represented.by(g, p) 459 | override def canEqual(other: Any): Boolean = other.isInstanceOf[ArrayEmbeddedField[_]] 460 | 461 | def where(q: QueryTerm[V]) = { 462 | import wrapper.MongoCondition._ 463 | 464 | mkCond(elemMatch, Some(q.m)) 465 | } 466 | } 467 | 468 | /** 469 | * Array of references 470 | * @param mongoFieldName document key 471 | * @param coll MongoCollection (typically ShapedCollection) where objects V live 472 | * @param g field getter 473 | * @param p optional field setter 474 | */ 475 | class ArrayRefField[V <: MongoObject](override val mongoFieldName: String, override val coll: MongoCollection[V], 476 | val g: T => Seq[V], val p: Option[(T,Seq[V]) => Unit]) 477 | extends MongoArray[V] with RefContent[V] { 478 | 479 | override val rep = parent.Represented.by(g, p) 480 | override def canEqual(other: Any): Boolean = other.isInstanceOf[ArrayRefField[_]] 481 | } 482 | 483 | /** 484 | * Map of scalars 485 | * @param mongoFieldName document key 486 | * @param g field getter 487 | * @param p optional field setter 488 | */ 489 | class MapField[A](override val mongoFieldName: String, 490 | val g: T => Map[String,A], val p: Option[(T,Map[String,A]) => Unit]) 491 | extends MongoMap[A] with ScalarContent[A] { field => 492 | 493 | def apply(key: String) = new ScalarField[A](key, (x: T) => g(x)(key), None) { 494 | override def mongoFieldPath = field.mongoFieldPath ::: super.mongoFieldPath 495 | } 496 | 497 | override val rep = Represented.by[Map[String,A]](g, p) 498 | override def canEqual(other: Any): Boolean = other.isInstanceOf[MapField[_]] 499 | } 500 | 501 | /** 502 | * Map of embedded objects. Must be subclassed: ObjectIn should be mixed in. 
503 | * @param mongoFieldName document key 504 | * @param g field getter 505 | * @param p optional field setter 506 | */ 507 | class MapEmbeddedField[V](override val mongoFieldName: String, 508 | val g: T => Map[String,V], val p: Option[(T,Map[String,V]) => Unit]) 509 | extends MongoMap[V] with EmbeddedContent[V] { 510 | self: MongoField[V] with ObjectIn[V, QueryType] => 511 | 512 | override val rep = parent.Represented.by(g, p) 513 | override def canEqual(other: Any): Boolean = other.isInstanceOf[MapEmbeddedField[_]] 514 | } 515 | 516 | /** 517 | * Map of references 518 | * @param mongoFieldName document key 519 | * @param coll MongoCollection (typically ShapedCollection) where objects V live 520 | * @param g field getter 521 | * @param p optional field setter 522 | */ 523 | class MapRefField[V <: MongoObject](override val mongoFieldName: String, override val coll: MongoCollection[V], 524 | val g: T => Map[String,V], val p: Option[(T,Map[String,V]) => Unit]) 525 | extends MongoMap[V] with RefContent[V] { 526 | 527 | override val rep = parent.Represented.by(g, p) 528 | override def canEqual(other: Any): Boolean = other.isInstanceOf[MapRefField[_]] 529 | } 530 | 531 | /** 532 | * Factory methods to build pre-cooked field declarations 533 | */ 534 | object Field { 535 | def scalar[A](fieldName: String, getter: T => A) = 536 | new ScalarField[A](fieldName, getter, None) 537 | 538 | def scalar[A](fieldName: String, getter: T => A, setter: (T, A) => Unit) = 539 | new ScalarField[A](fieldName, getter, Some(setter)) 540 | 541 | def optional[A](fieldName: String, getter: T => Option[A]) = 542 | new OptionalField[A](fieldName, getter, None) 543 | 544 | def optional[A](fieldName: String, getter: T => Option[A], setter: (T, Option[A]) => Unit) = 545 | new OptionalField[A](fieldName, getter, Some(setter)) 546 | 547 | def ref[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => V) = 548 | new RefField[V](fieldName, coll, getter, None) 549 | 550 | def ref[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => V, setter: (T, V) => Unit) = 551 | new RefField[V](fieldName, coll, getter, Some(setter)) 552 | 553 | def optionalRef[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => Option[V]) = 554 | new OptionalRefField[V](fieldName, coll, getter, None) 555 | 556 | def optionalRef[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => Option[V], setter: (T, Option[V]) => Unit) = 557 | new OptionalRefField[V](fieldName, coll, getter, Some(setter)) 558 | 559 | def array[A](fieldName: String, getter: T => Seq[A]) = 560 | new ArrayField[A](fieldName, getter, None) 561 | 562 | def array[A](fieldName: String, getter: T => Seq[A], setter: (T, Seq[A]) => Unit) = 563 | new ArrayField[A](fieldName, getter, Some(setter)) 564 | 565 | def arrayRef[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => Seq[V]) = 566 | new ArrayRefField[V](fieldName, coll, getter, None) 567 | 568 | def arrayRef[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => Seq[V], setter: (T, Seq[V]) => Unit) = 569 | new ArrayRefField[V](fieldName, coll, getter, Some(setter)) 570 | 571 | def map[A](fieldName: String, getter: T => Map[String,A]) = 572 | new MapField[A](fieldName, getter, None) 573 | 574 | def map[A](fieldName: String, getter: T => Map[String,A], setter: (T, Map[String,A]) => Unit) = 575 | new MapField[A](fieldName, getter, Some(setter)) 576 | 577 | def mapRef[V <: MongoObject](fieldName: String, coll: 
MongoCollection[V], getter: T => Map[String,V]) = 578 | new MapRefField[V](fieldName, coll, getter, None) 579 | 580 | def mapRef[V <: MongoObject](fieldName: String, coll: MongoCollection[V], getter: T => Map[String,V], setter: (T, Map[String,V]) => Unit) = 581 | new MapRefField[V](fieldName, coll, getter, Some(setter)) 582 | } 583 | 584 | /** 585 | * Optional field means it imposes no constraint 586 | */ 587 | trait Optional[A] extends MongoField[A] { self: FieldContent[A] => 588 | override def mongoConstraints = QueryTerm[QueryType]() 589 | } 590 | } 591 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/FieldCond.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import java.util.regex.Pattern 19 | import scala.util.matching.Regex 20 | import com.osinka.mongodb._ 21 | import wrapper.{MongoCondition,DBO} 22 | import MongoCondition._ 23 | 24 | /** 25 | * Order direction base trait 26 | * 27 | * @author Alexander Azarov 28 | */ 29 | sealed trait SortOrder { 30 | def mongoOrder: Int 31 | } 32 | 33 | /** 34 | * Methods to build constraints on fields. 35 | * 36 | * @author Alexander Azarov 37 | */ 38 | trait FieldQueryConditions[T, QueryType] { shape: ShapeFields[T, QueryType] => 39 | 40 | /** 41 | * Basic all-purpose field conditions. 
Applicable to a field of any kind 42 | */ 43 | trait FieldConditions[A] { self: MongoField[A] => 44 | protected def mkCond(f: (String,Any) => (String,Any), x: Option[Any]) = 45 | x map {v => QueryTerm[QueryType](f(longFieldName, v)) } getOrElse QueryTerm[QueryType]() 46 | 47 | def exists = QueryTerm[QueryType](MongoCondition.exists(longFieldName, true)) 48 | 49 | def notExists = QueryTerm[QueryType](MongoCondition.exists(longFieldName, false)) 50 | } 51 | 52 | 53 | /** 54 | * Field conditions applicable to scalar fields 55 | */ 56 | trait ScalarContentConditions[A] extends FieldConditions[A] { self: MongoField[A] with ScalarContent[A] => 57 | // Conditions 58 | def is_<(x: A) = mkCond(lt, serialize(x)) 59 | 60 | def is_<=(x: A) = mkCond(le, serialize(x)) 61 | 62 | def is_>(x: A) = mkCond(gt, serialize(x)) 63 | 64 | def is_>=(x: A) = mkCond(ge, serialize(x)) 65 | 66 | def is(x: A) = mkCond(eqTest, serialize(x)) 67 | def is_==(x: A) = is(x) 68 | def eq_?(x: A) = is(x) 69 | def has(x: A) = is(x) // same for occurence in array 70 | 71 | def isNot(x: A) = mkCond(neTest, serialize(x)) 72 | def not_==(x: A) = isNot(x) 73 | def ne_?(x: A) = not_==(x) 74 | 75 | def isIn(x: List[A]) = mkCond(MongoCondition.in, Some(x flatMap { serialize }) ) 76 | def in(x: List[A]) = isIn(x) 77 | 78 | def notIn(x: List[A]) = mkCond(MongoCondition.nin, Some(x flatMap { serialize }) ) 79 | def nin(x: List[A]) = notIn(x) 80 | 81 | def hasAll(x: List[A]) = mkCond(MongoCondition.all, Some(x flatMap { serialize }) ) 82 | def all(x: List[A]) = hasAll(x) 83 | 84 | def hasSize(x: Int) = mkCond(size, Some(x)) 85 | def ofSize(x: Int) = hasSize(x) 86 | 87 | def like(x: Pattern) = QueryTerm[QueryType](regex(longFieldName, x)) 88 | def is_~(x: Pattern) = like(x) 89 | 90 | def like(x: Regex) = QueryTerm[QueryType](regex(longFieldName, x)) 91 | def is_~(x: Regex) = like(x) 92 | 93 | // Sorting 94 | case object Asc extends SortOrder { override val mongoOrder = 1} 95 | case object Desc extends SortOrder { override val mongoOrder = -1 } 96 | 97 | /** 98 | * ascending order by this field 99 | */ 100 | def ascending = this -> Asc 101 | 102 | /** 103 | * descending order by this field 104 | */ 105 | def descending = this -> Desc 106 | } 107 | 108 | /** 109 | * Field conditions applicable to reference fields 110 | */ 111 | trait RefContentConditions[V <: MongoObject] extends FieldConditions[V] { self: MongoField[V] with RefContent[V] => 112 | // Conditions 113 | def is(x: V) = x.mongoOID map { oid => 114 | val fieldId = longFieldName+"._id" 115 | QueryTerm[QueryType]( eqTest(fieldId, oid) ) 116 | } getOrElse notExists 117 | def is_==(x: V) = is(x) 118 | def eq_?(x: V) = is(x) 119 | def has(x: V) = is(x) // same for occurence in array 120 | 121 | def isNot(x: V) = x.mongoOID map { oid => 122 | val fieldId = longFieldName+"._id" 123 | QueryTerm[QueryType]( neTest(fieldId, oid) ) 124 | } getOrElse exists 125 | def not_==(x: V) = isNot(x) 126 | def ne_?(x: V) = not_==(x) 127 | 128 | def isIn(x: List[V]) = { 129 | val fieldId = longFieldName+"._id" 130 | QueryTerm[QueryType]( MongoCondition.in(fieldId, x flatMap {_.mongoOID}) ) 131 | } 132 | def in(x: List[V]) = isIn(x) 133 | 134 | def notIn(x: List[V]) = { 135 | val fieldId = longFieldName+"._id" 136 | QueryTerm[QueryType]( MongoCondition.nin(fieldId, x flatMap {_.mongoOID}) ) 137 | } 138 | def nin(x: List[V]) = notIn(x) 139 | } 140 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/FieldModify.scala: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.osinka.mongodb._ 19 | import wrapper.MongoOp 20 | 21 | /** 22 | * Update operations on fields 23 | * 24 | * @author Alexander Azarov 25 | */ 26 | trait FieldModifyOperations[T, QueryType] { shape: ShapeFields[T, QueryType] => 27 | 28 | /** 29 | * Basic field update operations trait. 30 | */ 31 | trait BaseFieldModifyOp { self: ObjectField => 32 | protected def mkOp(f: (String,Any) => (String,Any), x: Option[Any]) = 33 | x map {v => ModifyOp[QueryType](f(longFieldName, v)) } getOrElse ModifyOp[QueryType]() 34 | 35 | def unset: ModifyOp[QueryType] = mkOp(MongoOp.unset, Some(1)) 36 | } 37 | 38 | /** 39 | * Field update operations, applicable to fields of any kind 40 | */ 41 | trait FieldModifyOp[A] extends BaseFieldModifyOp { self: MongoField[A] with FieldContent[A] => 42 | def set(x: A): ModifyOp[QueryType] = mkOp(MongoOp.set, serialize(x)) 43 | } 44 | 45 | /** 46 | * Field update operations, applicable to scalar fields 47 | */ 48 | trait ScalarFieldModifyOp[A] extends FieldModifyOp[A] { self: MongoScalar[A] with ScalarContent[A] => 49 | def inc(x: A): ModifyOp[QueryType] = mkOp(MongoOp.inc, serialize(x)) 50 | } 51 | 52 | /** 53 | * Field update operations, applicable to array fields 54 | */ 55 | trait ArrayFieldModifyOp[A] extends BaseFieldModifyOp { self: MongoArray[A] with FieldContent[A] => 56 | def set(x: Seq[A]): ModifyOp[QueryType] = mkOp(MongoOp.set, Some(x flatMap { serialize }) ) 57 | def push(x: A): ModifyOp[QueryType] = mkOp(MongoOp.push, serialize(x)) 58 | def pushAll(x: Iterable[A]): ModifyOp[QueryType] = mkOp(MongoOp.pushAll, Some(x flatMap { serialize }) ) 59 | def popHead: ModifyOp[QueryType] = mkOp(MongoOp.pop, Some(-1)) 60 | def popTail: ModifyOp[QueryType] = mkOp(MongoOp.pop, Some(1) ) 61 | def pull(x: A): ModifyOp[QueryType] = mkOp(MongoOp.pull, serialize(x)) 62 | def pullAll(x: Iterable[A]): ModifyOp[QueryType] = mkOp(MongoOp.pullAll, Some(x flatMap { serialize }) ) 63 | def addToSet(x: A): ModifyOp[QueryType] = mkOp(MongoOp.addToSet, serialize(x) ) 64 | def addToSet(x: Iterable[A]): ModifyOp[QueryType] = mkOp(MongoOp.addEachToSet, Some(x flatMap { serialize }) ) 65 | } 66 | } 67 | 68 | /** 69 | * @author Alexander Azarov 70 | */ 71 | // TODO: Monadic query? http://github.com/alaz/mongo-scala-driver/issues#issue/13 72 | // TODO: unified ModifyOp with QueryTerm?? 
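// Usage sketch (hypothetical names, not from the original sources): assuming a shape `UserShape`
// for a `User` class with scalar fields `name: String` and `visits: Int`, the field-level
// operations declared above return ModifyOp values that can be chained with `and`; `dbo` then
// yields the update document, roughly { $set: { name: "Joe" }, $inc: { visits: 1 } } for:
//
//   val op = (UserShape.name set "Joe") and (UserShape.visits inc 1)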
73 | sealed case class ModifyOp[+T](val qb: QueryBuilder) { 74 | def dbo = qb.dbo 75 | def query = Query(dbo) 76 | def and[B >: T](q: ModifyOp[B]) = new ModifyOp[T](qb and q.qb) 77 | } 78 | 79 | /** 80 | * @author Alexander Azarov 81 | */ 82 | object ModifyOp { 83 | def apply[T]() = new ModifyOp[T]( QueryBuilder() ) 84 | def apply[T](tuple: (String, Any)) = new ModifyOp[T]( QueryBuilder(tuple) ) 85 | } 86 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/Implicits.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb.DBCollection 19 | import com.osinka.mongodb._ 20 | 21 | /** 22 | * @author Alexander Azarov 23 | */ 24 | trait Implicits { 25 | implicit def collOfShape(coll: DBCollection) = new { 26 | def of[T](element: ObjectShape[T]) = element.collection(coll) 27 | } 28 | 29 | implicit def collWithQuery[T](qt: QueryTerm[T]) = new { 30 | def in(coll: ShapedCollection[T]): ShapedCollection[T] = coll.shape where qt in coll 31 | } 32 | 33 | implicit def queryTofilters[T](q: Queriable[T]#ShapeQuery): QueryTerm[T] = q.filters 34 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/Shape.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import scala.reflect.Manifest 19 | import com.mongodb.{DBObject, DBCollection} 20 | import com.osinka.mongodb._ 21 | import wrapper.DBO 22 | 23 | /** 24 | * Shape of an object held in some other object (be it a Shape or a Query). This is the 25 | * most generic trait and is mostly used to declare embedded fields. 26 | * 27 | * @author Alexander Azarov 28 | */ 29 | trait ObjectIn[T, QueryType] extends Serializer[T] with ShapeFields[T, QueryType] { 30 | /** 31 | * Every Shape must provide the list of the fields in the documents of this shape. 
32 | */ 33 | def * : List[MongoField[_]] 34 | 35 | /** 36 | * Every Shape must provide the factory to create object T 37 | * @param dbo the document in MongoDB 38 | * @return None if it's impossible to retrieve object T from dbo 39 | */ 40 | def factory(dbo: DBObject): Option[T] 41 | 42 | protected def fieldList: List[MongoField[_]] = * 43 | 44 | /** 45 | * Document constraint 46 | */ 47 | lazy val constraints = fieldList filterNot {_.mongoInternal_?} map {_.mongoConstraints} reduceLeft {_ and _} 48 | 49 | private[shape] def packFields(x: T, fields: Seq[MongoField[_]]): DBObject = 50 | DBO.fromMap( (fields foldLeft Map[String,Any]() ) { (m,f) => 51 | f.mongoReadFrom(x) match { 52 | case Some(v) => m + (f.mongoFieldName -> v) 53 | case None => m 54 | } 55 | } ) 56 | 57 | private[shape] def updateFields(x: T, dbo: DBObject, fields: Seq[MongoField[_]]) { 58 | fields foreach { f => f.mongoWriteTo(x, Option(dbo get f.mongoFieldName)) } 59 | } 60 | 61 | // -- Serializer[T] 62 | override def in(x: T): DBObject = packFields(x, fieldList) 63 | 64 | override def out(dbo: DBObject) = factory(dbo) map { x => 65 | updateFields(x, dbo, fieldList) 66 | x 67 | } 68 | 69 | override def mirror(x: T)(dbo: DBObject) = { 70 | updateFields(x, dbo, fieldList filter { _.mongoInternal_? }) 71 | x 72 | } 73 | } 74 | 75 | /** 76 | * Shape of an object backed by DBObject ("hosted in") 77 | * 78 | * @author Alexander Azarov 79 | */ 80 | trait ObjectShape[T] extends ObjectIn[T, T] with Queriable[T] { 81 | /** 82 | * Make a collection of T elements 83 | * @param underlying MongoDB collection 84 | * @return ShapedCollection based on this ObjectShape 85 | */ 86 | def collection(underlying: DBCollection) = new ShapedCollection[T](underlying, this) 87 | } 88 | 89 | /** 90 | * Mix-in to make a shape functional 91 | * 92 | * FunctionalShape gives a shape convenient syntactic sugar 93 | * for converting an object to a DBObject (apply) and an extractor for the opposite (unapply) 94 | * 95 | * E.g. 96 | * val dbo = UserShape(u) 97 | * dbo match { 98 | * case UserShape(u) => 99 | * } 100 | * 101 | * @author Alexander Azarov 102 | */ 103 | trait FunctionalShape[T] { self: ObjectShape[T] => 104 | def apply(x: T): DBObject = in(x) 105 | def unapply(rep: DBObject): Option[T] = out(rep) 106 | } 107 | 108 | /** 109 | * Shape of a MongoObject child. 110 | * 111 | * It has mandatory _id and _ns fields 112 | * 113 | * @author Alexander Azarov 114 | */ 115 | trait MongoObjectShape[T <: MongoObject] extends ObjectShape[T] { 116 | import org.bson.types.ObjectId 117 | 118 | /** 119 | * MongoDB internal Object ID field declaration 120 | */ 121 | lazy val oid = Field.optional("_id", (x: T) => x.mongoOID, (x: T, oid: Option[ObjectId]) => x.mongoOID = oid) 122 | 123 | // -- ObjectShape[T] 124 | override def fieldList : List[MongoField[_]] = oid :: super.fieldList 125 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/ShapeQuery.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
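The shape traits above are easiest to see in a concrete declaration. The following sketch is hypothetical (the User class and its field names are not in this repository) but follows the same pattern as the T1/T2/T3 shapes in the benchmark's Domain.scala: list the fields in *, declare them with Field.scalar, and provide a factory that rebuilds the object from a DBObject.

import com.mongodb.DBObject
import com.osinka.mongodb._
import com.osinka.mongodb.shape._

// Hypothetical domain object; extending MongoObject adds the _id / _ns plumbing.
class User(val name: String, var age: Int) extends MongoObject

object UserShape extends MongoObjectShape[User] with FunctionalShape[User] {
  // name is read-only (set via the factory), age is updatable in place
  lazy val name = Field.scalar("name", _.name)
  lazy val age  = Field.scalar("age", _.age, (u: User, a: Int) => u.age = a)

  override lazy val * = name :: age :: Nil

  // the factory recovers the immutable part; updatable fields are filled in by out()
  override def factory(dbo: DBObject) = for {name(n) <- Some(dbo)} yield new User(n, 0)
}

// FunctionalShape sugar: UserShape(u) serializes to a DBObject,
// and `case UserShape(u)` extracts a User back.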
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.osinka.mongodb._ 19 | 20 | /** 21 | * Query factory for ObjectShape 22 | * 23 | * @author Alexander Azarov 24 | */ 25 | trait Queriable[T] { self: ObjectShape[T] => 26 | type SortableFieldType = ObjectField with ScalarContentConditions[_] 27 | 28 | /** 29 | * Empty query: any document qualifies 30 | */ 31 | def any = ShapeQuery() 32 | 33 | /** 34 | * Query based on field conditions 35 | */ 36 | def where(query: QueryTerm[T]): ShapeQuery = ShapeQuery() where query 37 | 38 | /** 39 | * Query that drops n first documents 40 | */ 41 | def drop(n: Int) = ShapeQuery() drop n 42 | 43 | /** 44 | * Query that limits the resulting collection by n documents 45 | */ 46 | def take(n: Int) = ShapeQuery() take n 47 | 48 | /** 49 | * Query where results are sorted 50 | */ 51 | def sortBy(sorting: (SortableFieldType, SortOrder)*) = ShapeQuery().sortBy(sorting:_*) 52 | 53 | /** 54 | * Immutable query to apply to ShapedCollection 55 | */ 56 | sealed case class ShapeQuery(val filters: QueryTerm[T], val sortBy: List[(SortableFieldType, SortOrder)], private val q: Query) { 57 | /** 58 | * Apply the query to collection 59 | */ 60 | def in[Coll <: QueriedCollection[T, Coll]](coll: Coll): Coll = coll.applied(query) 61 | 62 | def where(filter: QueryTerm[T]): ShapeQuery = copy(filters = filters and filter) 63 | 64 | def drop(n: Int): ShapeQuery = drop(Some(n)) 65 | def drop(n: Option[Int]): ShapeQuery = copy(q = q drop n) 66 | 67 | def take(n: Int): ShapeQuery = take(Some(n)) 68 | def take(n: Option[Int]): ShapeQuery = copy(q = q take n) 69 | 70 | /** 71 | * modified query, no sorting 72 | */ 73 | def noSort = ShapeQuery(filters, sortBy, q sort None) 74 | def sortBy(s: (SortableFieldType, SortOrder)*): ShapeQuery = copy(sortBy = s.toList ::: sortBy) 75 | 76 | def query: Query = { 77 | val s = (Map.empty[String, Int] /: sortBy) { (m, x) => 78 | m + (x._1.longFieldName -> x._2.mongoOrder) 79 | } 80 | q ++ filters.dbo sort s 81 | } 82 | } 83 | 84 | /** 85 | * Factory of queries 86 | */ 87 | object ShapeQuery { 88 | /** 89 | * Empty query 90 | */ 91 | def apply() = new ShapeQuery(QueryTerm[T], Nil, Query()) 92 | 93 | /** 94 | * Query based on field conditions 95 | */ 96 | def apply(qt: QueryTerm[T]) = new ShapeQuery(qt, Nil, Query()) 97 | } 98 | 99 | // TODO: Monadic query? 
http://github.com/alaz/mongo-scala-driver/issues#issue/13 100 | } 101 | 102 | /** 103 | * @author Alexander Azarov 104 | */ 105 | sealed case class QueryTerm[+T](val qb: QueryBuilder) { 106 | def m = qb.m 107 | 108 | def dbo = qb.dbo 109 | 110 | def query = Query(dbo) 111 | 112 | def and[B >: T](q: QueryTerm[B]) = new QueryTerm[T](qb and q.qb) 113 | } 114 | 115 | /** 116 | * @author Alexander Azarov 117 | */ 118 | object QueryTerm { 119 | def apply[T]() = new QueryTerm[T]( QueryBuilder() ) 120 | def apply[T](tuple: (String, Any)) = new QueryTerm[T]( QueryBuilder(tuple) ) 121 | def apply[T](m: Map[String,Any]) = new QueryTerm[T]( QueryBuilder(m) ) 122 | } 123 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/shape/ShapedCollection.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb.{DBCollection, DBObject} 19 | import com.osinka.mongodb._ 20 | import wrapper._ 21 | 22 | /** 23 | * Collection of type T elements, whose serializer is an ObjectShape 24 | * 25 | * @author Alexander Azarov 26 | */ 27 | class ShapedCollection[T](override val underlying: DBCollection, val shape: ObjectShape[T]) 28 | extends MongoCollection[T] 29 | with QueriedCollection[T, ShapedCollection[T]] { 30 | 31 | private lazy val shapeConstraints = shape.constraints.dbo 32 | private def embedShapeConstraints(q: DBObject) = DBO.merge(shapeConstraints, q) 33 | 34 | /** 35 | * Replace the document matching the query with x 36 | */ 37 | def update(filters: QueryTerm[T], x: T): Boolean = update(filters.query, x) 38 | 39 | /** 40 | * Update elements 41 | * @param multi should update all elements 42 | */ 43 | def update(filters: QueryTerm[T], op: ModifyOp[T], multi: Boolean): Boolean = update(filters.dbo, op.dbo, multi) 44 | 45 | /** 46 | * Update only one element 47 | */ 48 | def updateOne(filters: QueryTerm[T], op: ModifyOp[T]): Boolean = update(filters, op, false) 49 | 50 | /** 51 | * Update all matching elements 52 | */ 53 | def update(filters: QueryTerm[T], op: ModifyOp[T]): Boolean = update(filters, op, true) 54 | 55 | /** 56 | * Remove many elements 57 | */ 58 | def -=(filters: QueryTerm[T]) { 59 | remove(embedShapeConstraints(filters.dbo)) 60 | } 61 | 62 | /** 63 | * Find and remove the first found document 64 | */ 65 | def findAndRemove(filters: QueryTerm[T]): Option[T] = findAndRemove(embedShapeConstraints(filters.dbo)) 66 | 67 | def findAndModify(q: ObjectShape[T]#ShapeQuery, op: ModifyOp[T]): Option[T] = 68 | findAndModify(q, op, false, false, false) 69 | 70 | /** 71 | * Find and modify the first matching document (optionally creating it when upsert is requested) 72 | */ 73 | def findAndModify(q: ObjectShape[T]#ShapeQuery, op: ModifyOp[T], remove: Boolean, returnNew: Boolean, upsert: Boolean): Option[T] = { 74 | val query = q.query 75 | 
findAndModify(embedShapeConstraints(query.query), query.sorting, op.dbo, remove, returnNew, upsert) 76 | } 77 | 78 | def findAndModify(qt: QueryTerm[T], op: ModifyOp[T]): Option[T] = 79 | findAndModify(qt, op, false, false, false) 80 | 81 | def findAndModify(qt: QueryTerm[T], op: ModifyOp[T], remove: Boolean, returnNew: Boolean, upsert: Boolean): Option[T] = 82 | findAndModify(shape.ShapeQuery(qt), op, remove, returnNew, upsert) 83 | 84 | // -- MongoCollection[T] 85 | override val serializer: Serializer[T] = shape 86 | 87 | // -- QueriedCollection[T] 88 | override val query: Query = Query.empty 89 | override def applied(q: Query): ShapedCollection[T] = new ShapedCollection[T](underlying, shape) { 90 | override val query = q 91 | } 92 | 93 | // -- MongoCollection 94 | override def find(q: DBObject) = underlying.find(embedShapeConstraints(q)) 95 | override def findOne(q: DBObject) = underlying.findOne(embedShapeConstraints(q)) 96 | override def getCount(q: DBObject) = find(q).count 97 | override def update(q: DBObject, op: DBObject, multi: Boolean) = super.update(embedShapeConstraints(q), op, multi) 98 | 99 | override def stringPrefix: String = "ShapedCollection["+shape.getClass.getName+"]("+getName+")" 100 | } 101 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/wrapper/DBCollectionWrapper.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.wrapper 17 | 18 | import com.mongodb.{DBCollection, DBObject} 19 | 20 | /** 21 | * Wrapper for MongoDB DBCollection 22 | * 23 | * @author Alexander Azarov 24 | */ 25 | trait DBCollectionWrapper { 26 | /** 27 | * Actual DBCollection object behind 28 | */ 29 | val underlying: DBCollection 30 | 31 | protected def find(dbo: DBObject) = underlying find dbo 32 | protected def findOne(dbo: DBObject) = underlying findOne dbo 33 | protected def getCount(dbo: DBObject) = underlying getCount dbo 34 | 35 | /** 36 | * MongoDB collection name 37 | */ 38 | def getName = underlying.getName 39 | 40 | /** 41 | * MongoDB full collection name 42 | */ 43 | def getFullName = underlying.getFullName 44 | 45 | /** 46 | * Remove collection 47 | */ 48 | def drop: Unit = underlying.drop 49 | 50 | override def equals(obj: Any) = obj match { 51 | case other: DBCollectionWrapper => underlying.equals(other.underlying) 52 | case _ => false 53 | } 54 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/wrapper/DBObj.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
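A sketch (not in the original sources) tying the pieces together: the implicits from shape/Implicits.scala attach a shape to a raw DBCollection, Queriable/ShapeQuery builds the query, and ShapedCollection applies QueryTerm filters and ModifyOps. T1 is again the benchmark shape from Domain.scala; the database and collection names are illustrative only.

import com.mongodb.Mongo
import com.osinka.mongodb._
import com.osinka.mongodb.shape._

val db   = new Mongo("localhost").getDB("test")
val coll = db.getCollection("t1") of T1            // ShapedCollection[T1]

coll << new T1(1)                                  // insert via the shape serializer

// query DSL: where / drop / take / sortBy, then `in` the collection
val page = T1 where (T1.a is_>= 0) take 10 sortBy (T1.a descending) in coll
page foreach { t => println(t.a) }

// field conditions and modify operators drive updates and removals
coll.updateOne(T1.a is 1, T1.a inc 10)
coll.findAndRemove(T1.a is 11)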
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.wrapper 17 | 18 | import annotation.tailrec 19 | import com.mongodb.{DBObject, BasicDBObject} 20 | 21 | /** 22 | * @author Alexander Azarov 23 | */ 24 | object DBO { 25 | def empty = new BasicDBObject 26 | 27 | def dotNotation(l: List[String]) = l.mkString(".") 28 | 29 | /** 30 | * Build DBObject from Map[String,Any]. The method descends, i.e. converts 31 | * Map values as well 32 | */ 33 | def fromMap(m: Map[String,Any]): DBObject = { 34 | import com.mongodb.{BasicDBObjectBuilder, BasicDBList} 35 | 36 | @tailrec 37 | def wrap(obj: Any): Option[Any] = obj match { 38 | case m: Map[_, _] => 39 | // to avoid type erasure warning 40 | Some( fromMap(m.asInstanceOf[Map[String, Any]]) ) 41 | case iterable: Iterable[_] => 42 | val ret = new BasicDBList 43 | for {(v, i) <- iterable.toList.zipWithIndex 44 | wrapped <- wrap(v)} 45 | ret.put(i, wrapped) 46 | Some(ret) 47 | // case ref: Ref[_] => 48 | case None => None 49 | case Some(v) => wrap(v) 50 | case _ => Some(obj) 51 | } 52 | 53 | def acc(dbo: BasicDBObjectBuilder, leaf: (String, Any)): BasicDBObjectBuilder = 54 | wrap(leaf._2).map{dbo.append(leaf._1, _)} getOrElse dbo 55 | 56 | (m foldLeft BasicDBObjectBuilder.start)(acc(_, _)).get 57 | } 58 | 59 | /** 60 | * Interpret DBObject as Array and return it as a Seq 61 | */ 62 | def toArray(dbo: DBObject): Seq[Any] = 63 | Iterator from(0) map {_.toString} takeWhile {dbo.containsField} map{dbo.get} toList 64 | 65 | /** 66 | * Merge many DBObjects into one. The latter can override keys in the former 67 | * ones. 68 | */ 69 | def merge(dbo: DBObject*) = { 70 | val result = empty 71 | dbo foreach result.putAll 72 | result 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/wrapper/DBObjectIterator.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
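A quick sketch (not in the sources) of what the DBO helpers above produce; the nested-map behaviour matches the conversionsSpec tests further down.

import com.mongodb.DBObject
import com.osinka.mongodb.wrapper.DBO

// fromMap descends into Maps and Iterables and silently drops None values:
val dbo = DBO.fromMap(Map(
  "user"  -> Map("name" -> "joe", "tags" -> List("a", "b")),   // nested document + list
  "maybe" -> None                                              // omitted from the result
))

// toArray reads an "array-like" DBObject (keys "0", "1", ...) back into a Seq:
val xs = DBO.toArray(DBO.fromMap(Map("xs" -> List(1, 2, 3))).get("xs").asInstanceOf[DBObject])

// merge overlays documents left to right; later keys override earlier ones:
val merged = DBO.merge(DBO.fromMap(Map("a" -> 1)), DBO.fromMap(Map("a" -> 2, "b" -> 3)))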
15 | */ 16 | package com.osinka.mongodb.wrapper 17 | 18 | import com.mongodb.{DBObject, DBCursor} 19 | 20 | /** 21 | * Wrapper around DBCursor 22 | * 23 | * @author Alexander Azarov 24 | */ 25 | private[mongodb] class DBObjectIterator(val cursor: DBCursor) extends Iterator[DBObject] { 26 | override def hasNext: Boolean = cursor.hasNext 27 | override def next: DBObject = cursor.next 28 | } -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/wrapper/MongoCondition.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.wrapper 17 | 18 | import java.util.regex.Pattern 19 | import scala.util.matching.Regex 20 | 21 | import com.mongodb.QueryOperators._ 22 | 23 | /** 24 | * Helper methods to create conditions on fields to build queries 25 | * 26 | * @author Alexander Azarov 27 | */ 28 | object MongoCondition { 29 | def cond[T](field: String, x: T) = field -> x 30 | 31 | def op[T](op: String)(field: String, x: T) = cond(field, Map(op -> x)) 32 | 33 | def eqTest[T](field: String, x: T) = cond(field, x) 34 | lazy val neTest = op[Any](NE) _ 35 | 36 | lazy val lt = op[Any](LT) _ 37 | lazy val le = op[Any](LTE) _ 38 | lazy val gt = op[Any](GT) _ 39 | lazy val ge = op[Any](GTE) _ 40 | lazy val in = op[Any](IN) _ 41 | lazy val nin = op[Any](NIN) _ 42 | lazy val all = op[Any](ALL) _ 43 | // def mod 44 | lazy val size = op[Any](SIZE) _ 45 | lazy val elemMatch = op[Any]("$elemMatch") _ 46 | def exists(field: String, b: Boolean) = op(EXISTS)(field, b) 47 | 48 | def regex(field: String, x: Regex): (String, Pattern) = regex(field, x.pattern) 49 | def regex(field: String, x: Pattern): (String, Pattern) = eqTest(field, x) 50 | } 51 | -------------------------------------------------------------------------------- /src/main/scala/com/osinka/mongodb/wrapper/MongoOp.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
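For reference, a small sketch (not in the sources) of the tuples these helpers return; FieldCond.scala wraps exactly these pairs into QueryTerm values.

import com.osinka.mongodb.wrapper.MongoCondition._

eqTest("name", "joe")      // ("name", "joe")
lt("age", 18)              // ("age", Map("$lt" -> 18))
in("age", List(1, 2, 3))   // ("age", Map("$in" -> List(1, 2, 3)))
exists("age", true)        // ("age", Map("$exists" -> true))
regex("name", "^jo.*".r)   // ("name", <compiled java.util.regex.Pattern>)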
15 | */ 16 | package com.osinka.mongodb.wrapper 17 | 18 | /** 19 | * Helper methods to create update operations 20 | * 21 | * @author Alexander Azarov 22 | */ 23 | object MongoOp { 24 | def op[T](op: String)(field: String, x: T) = op -> Map(field -> x) 25 | 26 | lazy val inc = op[Any]("$inc") _ 27 | lazy val set = op[Any]("$set") _ 28 | lazy val unset = op[Any]("$unset") _ 29 | lazy val push = op[Any]("$push") _ 30 | lazy val pushAll = op[Any]("$pushAll") _ 31 | lazy val pop = op[Any]("$pop") _ 32 | lazy val pull = op[Any]("$pull") _ 33 | lazy val pullAll = op[Any]("$pullAll") _ 34 | lazy val addToSet = op[Any]("$addToSet") _ 35 | lazy val addEachToSet = (field: String, x: Any) => "$addToSet" -> Map(field -> Map("$each" -> x)) 36 | } 37 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/Config.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import java.util.Properties 19 | 20 | object Config { 21 | private val fileName = "database.properties" 22 | 23 | private lazy val properties: List[Properties] = 24 | for {classLoader <- getClass.getClassLoader :: Thread.currentThread.getContextClassLoader :: Nil 25 | stream <- Option(classLoader.getResourceAsStream(fileName))} 26 | yield 27 | try { 28 | val props = new Properties 29 | props.load(stream) 30 | props 31 | } finally { 32 | stream.close 33 | } 34 | 35 | private def property[T](k: String)(implicit conv: String => T): Option[T] = 36 | properties flatMap{p => Option(p getProperty k)} map {conv} headOption 37 | 38 | implicit val stringToInt = (s: String) => s.toInt 39 | 40 | lazy val Host = property[String]("mongoHost") getOrElse "localhost" 41 | lazy val Port = property[Int]("mongoPort") getOrElse 27017 42 | lazy val Database = property[String]("mongoDB") getOrElse "test" 43 | } 44 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/Helper.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
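Similarly, a sketch (not in the sources) of the update-operator tuples MongoOp builds; FieldModify.scala turns them into ModifyOp values.

import com.osinka.mongodb.wrapper.MongoOp._

inc("age", 1)                    // ("$inc", Map("age" -> 1))
set("name", "joe")               // ("$set", Map("name" -> "joe"))
push("tags", "new")              // ("$push", Map("tags" -> "new"))
addEachToSet("tags", List("a"))  // ("$addToSet", Map("tags" -> Map("$each" -> List("a"))))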
15 | */ 16 | package com.osinka.mongodb 17 | 18 | import com.osinka.mongodb._ 19 | 20 | object Helper { 21 | def fillWith[T : Manifest](coll: MongoCollection[T], n: Int)(factory: (Int => T)) { 22 | Array.tabulate(n)(factory) foreach { coll << _ } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/baseSpec.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.specs._ 19 | import org.specs.runner._ 20 | 21 | class baseTest extends JUnit4(baseSpec) with Console 22 | object baseTestRunner extends ConsoleRunner(baseSpec) 23 | 24 | object baseSpec extends Specification { 25 | "Base".isSpecifiedBy(conversionsSpec, collectionSpec, querySpec) 26 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/benchmark/BenchmarkSuite.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.benchmark 17 | 18 | import scala.testing.Benchmark 19 | 20 | /** 21 | * @author Alexander Azarov 22 | */ 23 | abstract class BenchmarkSuite(val name: String) { 24 | def benchmarks: List[Benchmark] 25 | 26 | def setUp(collSize: Int): Unit 27 | def tearDown: Unit 28 | 29 | def runOn[R](collSize: Int)(f: (Benchmark => R)): List[R] = 30 | try { 31 | System.err.println(name) 32 | setUp(collSize) 33 | benchmarks map f 34 | } finally { 35 | tearDown 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/benchmark/ConstraintOverhead.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
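A one-line sketch of how a suite is driven; this is essentially what overhead.scala further down does (runBenchmark comes from scala.testing.Benchmark and returns the elapsed times), and it assumes a MongoDB instance reachable via the test Config.

// run every benchmark of the suite against a 10000-document collection,
// timing 3 repetitions of each; setUp/tearDown bracket the whole run
val latencies: List[List[Long]] = SerializationOverhead.runOn(10000) { b => b.runBenchmark(3) }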
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.benchmark 17 | 18 | import com.mongodb._ 19 | import scala.testing._ 20 | import com.osinka.mongodb._ 21 | import Config._ 22 | 23 | /** 24 | * @author Alexander Azarov 25 | */ 26 | 27 | object ConstraintOverheadNoIndex extends AbstractConstraintOverhead("no indexes") { 28 | override def ensureIndex(maxArity: Int) {} 29 | } 30 | 31 | /** 32 | * @author Alexander Azarov 33 | */ 34 | object ConstraintOverheadWithIndex extends AbstractConstraintOverhead("with indexes") { 35 | override def ensureIndex(maxArity: Int) { 36 | // Single index per field 37 | for {n <- 0 until maxArity} 38 | collection ensureIndex Map("f"+n -> 1) 39 | } 40 | } 41 | 42 | /** 43 | * The idea is to test whether MongoDB's "$exists" constraints lead to 44 | * any overhead in reading the data. We create a collection of documents with 45 | * 10 fields and request the elements using 1-, 5- and 10-field 46 | * shapes. 47 | * 48 | * @author Alexander Azarov 49 | */ 50 | abstract class AbstractConstraintOverhead(val extraText: String) extends BenchmarkSuite("Constraints overhead, "+extraText) { suite => 51 | override val benchmarks = new FieldRead(1) :: new FieldRead(5) :: new FieldRead(10) :: Nil 52 | 53 | var collectionSize: Int = _ 54 | 55 | val mongo = new Mongo(Host, Port).getDB(Database) 56 | def collection = mongo.getCollection("constraints") 57 | 58 | def ensureIndex(maxArity: Int): Unit 59 | 60 | override def setUp(collSize: Int) { 61 | suite.collectionSize = collSize 62 | 63 | val coll = collection.asScala 64 | coll.drop 65 | 66 | val maxArity = benchmarks.map{_.arity}.reduceLeft{_ max _} 67 | for {i <- 0 until collSize} 68 | coll += ( List.range(0,maxArity).map{n => "f"+n -> i*n} foldLeft Map.empty[String,Int] ) {(m,f) => m + f} 69 | ensureIndex(maxArity) 70 | } 71 | 72 | override def tearDown { 73 | collection.drop 74 | } 75 | 76 | class FieldRead(val arity: Int) extends Benchmark with SUnit.Assert { 77 | object model extends NFieldsTest(arity) 78 | import model._ 79 | 80 | override val prefix = model.arity+" field(s)" 81 | def run { 82 | assertEquals("Model arity", model.Ta.`*`.size, model.arity) 83 | 84 | var i = 0 85 | for {t <- collection of Ta} { 86 | for {n <- 0 until model.arity} 87 | assertEquals("Object field", i*n, t.f(n)) 88 | i += 1 89 | } 90 | assertEquals("complete walk through the collection", suite.collectionSize, i) 91 | } 92 | } 93 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/benchmark/Domain.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
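To make the benchmark above concrete, here is a rough sketch (an assumption based on Shape.constraints and MongoCondition.exists, not taken verbatim from the sources) of what a two-field variant of the generated shape implies; the real shapes use 1, 5 and 10 fields.

import com.osinka.mongodb.wrapper.DBO

// roughly the constraint ShapedCollection embeds into every query for a 2-field shape:
val constraintDbo = DBO.fromMap(Map(
  "f0" -> Map("$exists" -> true),
  "f1" -> Map("$exists" -> true)
))

// and a document generated by setUp for i = 2 (it stores "f"+n -> i*n):
val doc = DBO.fromMap(Map("f0" -> 0, "f1" -> 2))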
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.benchmark 17 | 18 | import com.mongodb.DBObject 19 | import com.osinka.mongodb._ 20 | import com.osinka.mongodb.shape._ 21 | 22 | trait TestObj { 23 | def a: Int 24 | } 25 | 26 | /** 27 | * T1 is a typical case class with immutable field 28 | */ 29 | case class T1(val a: Int) extends MongoObject with TestObj 30 | 31 | object T1 extends MongoObjectShape[T1] { 32 | override lazy val * = a :: Nil 33 | 34 | override def factory(dbo: DBObject) = for {a(x) <- Some(dbo)} yield new T1(x) 35 | 36 | lazy val a = Field.scalar("a", _.a) 37 | } 38 | 39 | /** 40 | * T2 does not inherit MongoObject, thus there is no overhead for 41 | * deserializing MongoDB's OID and NS fields. 42 | * 43 | * T2 is completely immutable 44 | */ 45 | class T2(val a: Int) extends TestObj 46 | 47 | object T2 extends ObjectShape[T2] { 48 | override lazy val * = a :: Nil 49 | override def factory(dbo: DBObject) = for {a(x) <- Some(dbo)} yield new T2(x) 50 | 51 | lazy val a = Field.scalar("a", _.a) 52 | } 53 | 54 | /** 55 | * T3 does not inherit MongoObject and does not have functional field, thus 56 | * uses update to populate the field. 57 | */ 58 | class T3 extends TestObj { 59 | var a: Int = _ 60 | } 61 | 62 | object T3 extends ObjectShape[T3] { 63 | override lazy val * = a :: Nil 64 | override def factory(dbo: DBObject) = Some(new T3) 65 | 66 | lazy val a = Field.scalar("a", _.a, (x: T3, a: Int) => x.a = a) 67 | } 68 | 69 | /** 70 | * Model for arity tests. 71 | * 72 | * the model object hosts an array for fields and its companion object 73 | * is able to update this array 74 | */ 75 | class NFieldsTest(val arity: Int) { 76 | class Ta extends MongoObject { 77 | val f = new Array[Int](arity) 78 | } 79 | 80 | object Ta extends MongoObjectShape[Ta] { 81 | override lazy val * : List[MongoField[_]] = List.range(0,arity).map(fieldObj) 82 | override def factory(dbo: DBObject) = Some(new Ta) 83 | 84 | def fieldObj(i: Int) = { 85 | def get(o: Ta): Int = o.f(i) 86 | def update(o: Ta, x: Int) { 87 | o.f(i) = x 88 | } 89 | 90 | new ScalarField[Int]("f"+i, get _, Some(update _)) 91 | } 92 | } 93 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/benchmark/SerializationOverhead.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.benchmark 17 | 18 | import com.mongodb._ 19 | import scala.testing._ 20 | import com.osinka.mongodb._ 21 | import com.osinka.mongodb.shape._ 22 | import Config._ 23 | import wrapper.DBO 24 | 25 | /** 26 | * Serialization overhead benchmark 27 | * 28 | * The idea is to insert N objects into a collection and retrieve it using 29 | * pure Java API and using shapes. No sorting, no indexes -- I'd like to 30 | * measure de-serialization overhead only 31 | * 32 | * @author Alexander Azarov 33 | */ 34 | object SerializationOverhead extends BenchmarkSuite("Serialization Overhead") { suite => 35 | override val benchmarks = List(JavaRead, DBORead, ShapeCaseFuncRead, ShapeNoMongoFuncRead, ShapeNoMongoUpdateRead) 36 | 37 | val constraint = DBO.fromMap(T1.constraints.m) 38 | 39 | var collectionSize: Int = _ 40 | 41 | val mongo = new Mongo(Host, Port).getDB(Database) 42 | def collection = mongo.getCollection("deserialize") 43 | 44 | override def setUp(collSize: Int) { 45 | suite.collectionSize = collSize 46 | 47 | val coll = collection.asScala 48 | coll.drop 49 | for {i <- 0 until collSize} coll += Map("a" -> i) 50 | } 51 | 52 | override def tearDown { 53 | collection.drop 54 | } 55 | 56 | object JavaRead extends Benchmark with SUnit.Assert { 57 | override val prefix = "reading using Java" 58 | def run { 59 | val cursor = collection.find(constraint) 60 | var i = 0 61 | while (cursor.hasNext) { 62 | val dbo = cursor.next 63 | assertEquals("Object field", i, dbo.get("a")) 64 | i += 1 65 | } 66 | assertEquals("complete walk through the collection", collectionSize, i) 67 | } 68 | } 69 | 70 | object DBORead extends Benchmark with SUnit.Assert { 71 | override val prefix = "reading from Scala collection" 72 | def run { 73 | var i = 0 74 | for {dbo <- Query(constraint) in collection.asScala} { 75 | assertEquals("Object field", i, dbo.get("a")) 76 | i += 1 77 | } 78 | assertEquals("complete walk through the collection", collectionSize, i) 79 | } 80 | } 81 | 82 | abstract class ShapeRead[T <: TestObj](val shape: ObjectShape[T]) extends Benchmark with SUnit.Assert { 83 | type ObjectType 84 | 85 | def run { 86 | var i = 0 87 | for {t <- collection of shape} { 88 | assertEquals("Object field", i, t.a) 89 | i += 1 90 | } 91 | assertEquals("complete walk through the collection", collectionSize, i) 92 | } 93 | } 94 | 95 | object ShapeCaseFuncRead extends ShapeRead[T1](T1) { 96 | override val prefix = "reading case functional Shapes" 97 | } 98 | 99 | object ShapeNoMongoFuncRead extends ShapeRead[T2](T2) { 100 | override def prefix = "reading non-mongo functional Shapes" 101 | } 102 | 103 | object ShapeNoMongoUpdateRead extends ShapeRead[T3](T3) { 104 | override val prefix = "reading non-mongo updatable Shapes" 105 | } 106 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/benchmark/overhead.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.benchmark 17 | 18 | import scala.testing.Benchmark 19 | import org.specs.util.SimpleTimer 20 | 21 | object overhead { 22 | val defaultArgs = List(10000, 1) 23 | val benchmarkSuites = SerializationOverhead :: ConstraintOverheadNoIndex :: ConstraintOverheadWithIndex :: Nil 24 | 25 | def main(args: Array[String]) { 26 | val List(collectionSize, repeat) = args.toList.map{_.toInt} ::: defaultArgs.drop(args.size).take(defaultArgs.size-args.size) 27 | 28 | for (suite <- benchmarkSuites) 29 | suite.runOn(collectionSize) {benchmark => report(benchmark.prefix, benchmark.runBenchmark(repeat)) } 30 | } 31 | 32 | def report(name: String, latencies: => List[Long]) { 33 | implicit def longToTimer(l: Long): SimpleTimer = { val t = new SimpleTimer; t.elapsed = l; t } 34 | 35 | // The first run is taken out, it's a warm up 36 | val sorted = latencies.tail.sortWith( (a,b) => a < b ) 37 | 38 | val (total, count, min, max) = ( 39 | (0L /: sorted)((x, y) => x + y), 40 | sorted.size, 41 | sorted.head, 42 | sorted.last 43 | ) 44 | val avg = total / count 45 | 46 | System.err.println(name + ", " + count + " iterations:" 47 | + " total=[" + total.time + "]" 48 | + ", min=[" + min.time + "]" 49 | + ", avg=[" + avg.time + "]" 50 | + ", max=[" + max.time + "]") 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/collection.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
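The argument handling in main above is terse; the following sketch rewrites the defaults expression as a standalone function purely for illustration (the function name is made up), showing how missing positions are padded from defaultArgs.

def effectiveArgs(args: Array[String], defaults: List[Int] = List(10000, 1)): List[Int] =
  args.toList.map(_.toInt) ::: defaults.drop(args.size).take(defaults.size - args.size)

effectiveArgs(Array())             // List(10000, 1)  -> collectionSize = 10000, repeat = 1
effectiveArgs(Array("50000"))      // List(50000, 1)
effectiveArgs(Array("50000", "4")) // List(50000, 4)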
15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.specs._ 19 | import org.bson.types.ObjectId 20 | import com.mongodb._ 21 | 22 | import Config._ 23 | import wrapper.DBO 24 | 25 | object collectionSpec extends Specification("Scala way Mongo collections") { 26 | val mongo = new Mongo(Host, Port).getDB(Database) 27 | 28 | doAfter { mongo.dropDatabase } 29 | 30 | "Empty DBOCollection" should { 31 | val coll = mongo.getCollection("test1").asScala 32 | 33 | "have proper inheritance" in { 34 | coll must haveSuperClass[Iterable[DBObject]] 35 | coll must haveSuperClass[MongoCollection[DBObject]] 36 | } 37 | "support Iterable methods" in { 38 | coll.isEmpty must beTrue 39 | coll must beEmpty 40 | } 41 | "support Collection methods" in { 42 | coll must beEmpty 43 | coll.headOption must beNone 44 | } 45 | } 46 | "DBOCollection" should { 47 | val dbColl = mongo.getCollection("test") 48 | val coll = dbColl.asScala 49 | 50 | doAfter { coll.drop } 51 | 52 | "be equal only when DBCollection equals" in { 53 | dbColl.asScala must be_==(coll) 54 | mongo.getCollection("test1").asScala must be_!=(coll) 55 | } 56 | "proxy to DBCollection" in { 57 | coll.getName must be_==(dbColl.getName) 58 | coll.getFullName must be_==(dbColl.getFullName) 59 | } 60 | } 61 | "DBOCollection" can { 62 | val dbColl = mongo.getCollection("test") 63 | val coll = dbColl.asScala 64 | 65 | doAfter { coll.drop } 66 | 67 | "insert" in { 68 | coll must beEmpty 69 | ( coll << Map("key" -> 10) ).getLastError.ok must beTrue 70 | coll must haveSize(1) 71 | val dbo: DBObject = Map("key" -> 10) 72 | ( coll << dbo ).getLastError.ok must beTrue 73 | coll must haveSize(2) 74 | coll.headOption must beSome[DBObject].which{_.get("_id") != null} 75 | } 76 | "insert with oid check" in { 77 | coll must beEmpty 78 | val dbo = coll <<? Map("key" -> 10) 79 | dbo must beSome[DBObject].which {x => 80 | x.get("_id") != null && 81 | x.get("key") == 10 82 | } 83 | coll <<? Map("key" -> 10, "_id" -> dbo.get.get("_id")) must beNone 84 | } 85 | "save" in { 86 | coll must beEmpty 87 | coll += Map("key" -> 10) 88 | coll must haveSize(1) 89 | val dbo: DBObject = Map("key" -> 10) 90 | coll += dbo 91 | coll must haveSize(2) 92 | coll.headOption must beSome[DBObject].which{_.get("_id") != null} 93 | } 94 | "remove" in { 95 | coll must beEmpty 96 | val o: DBObject = Map("key" -> 10) 97 | coll += o 98 | coll must haveSize(1) 99 | coll -= o 100 | coll must beEmpty 101 | } 102 | "remove many elements" in { 103 | coll must beEmpty 104 | for {o <- List(Map("key" -> 10), Map("key" -> 15), Map("key" -> 20))} 105 | coll += o 106 | coll must haveSize(3) 107 | coll -= Map("key" -> Map("$lt" -> 20)) 108 | coll must haveSize(1) 109 | } 110 | "insert many" in { 111 | val N = 20 112 | val objs = for {n <- 1 to N} yield DBO.fromMap(Map("key" -> n)) 113 | ( coll << objs ).getLastError.ok must beTrue 114 | coll must haveSize(N) 115 | coll must haveTheSameElementsAs(objs) 116 | } 117 | "get by oid" in { 118 | coll must beEmpty 119 | val newO: DBObject = Map("key" -> 10) 120 | coll += newO 121 | coll must haveSize(1) 122 | 123 | val oid = newO.get("_id").asInstanceOf[ObjectId] 124 | coll.isDefinedAt(oid) must beTrue 125 | coll(oid) must be_==(newO) 126 | } 127 | "update" in { 128 | val q = Query(Map("key" -> 10)) 129 | 130 | val N = 20 131 | for {n <- 1 to N} coll += Map("key" -> n) 132 | coll must haveSize(N) 133 | 134 | coll.update(q, Map("$inc" -> Map("key" -> N/2)), false) 135 | coll must haveSize(N) 136 | (q in coll).headOption must beNone 137 | (Query(Map("key" -> N)) in coll) must haveSize(2) 138 | } 
139 | "update all" in { 140 | val N = 20 141 | for {n <- 1 to N} coll += Map("key" -> n) 142 | coll must haveSize(N) 143 | 144 | coll.update(Query(), Map("$inc" -> Map("key" -> -N/2)), true) must beTrue 145 | (Query(Map("key" -> Map("$gt" -> 0))) in coll) must haveSize(N/2) 146 | (Query(Map("key" -> Map("$lte" -> 0))) in coll) must haveSize(N/2) 147 | } 148 | "update none" in { 149 | coll must beEmpty 150 | coll.update(Query(), Map("$inc" -> Map("i" -> 1)), true) must beFalse 151 | } 152 | "findAndRemove" in { 153 | val N = 20 154 | val objs = for {n <- 1 to N} yield DBO.fromMap(Map("key" -> n)) 155 | coll << objs 156 | coll must haveSize(N) 157 | 158 | coll.findAndRemove(Query(Map("i" -> "a"))) must beNone 159 | 160 | val q = Query( Map("key" -> Map("$gt" -> (N-1))) ) 161 | coll.findAndRemove(q) must beSome[DBObject].which{dbo => 162 | dbo.get("key") == N 163 | } 164 | coll must haveSize(N-1) 165 | } 166 | "findAndModify" in { 167 | val N = 20 168 | val objs = for {n <- 1 to N} yield DBO.fromMap(Map("key" -> n)) 169 | coll << objs 170 | coll must haveSize(N) 171 | 172 | val q = Query(Map("key" -> Map("$gt" -> (N-1)))) 173 | coll.findAndModify(q, Map("$inc" -> Map("key" -> 2))) must beSome[DBObject].which {dbo => 174 | dbo.get("key") == N 175 | } 176 | coll must haveSize(N) 177 | coll exists {dbo => dbo.get("key") == N+2} must beTrue 178 | } 179 | "findAndModify upsert" in { 180 | val N = 20 181 | val objs = for {n <- 1 to N} yield DBO.fromMap(Map("key" -> n)) 182 | coll << objs 183 | coll must haveSize(N) 184 | 185 | val q = Query(Map("key" -> (N+1))) 186 | val r = coll.findAndModify(q, Map("$inc" -> Map("key" -> 2)), false, true, true) 187 | r must beSome[DBObject].which {dbo => 188 | dbo.get("key") == N+3 189 | } 190 | coll must haveSize(N+1) 191 | } 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/conversions.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.specs._ 19 | import com.mongodb._ 20 | 21 | import com.osinka.mongodb.Config._ 22 | 23 | object conversionsSpec extends Specification("Implicit conversions") { 24 | import wrapper.DBO 25 | 26 | "Map to DBObject" should { 27 | "convert Any values" in { 28 | DBO.fromMap(Map("a" -> 1)) must (containField("a") and verify(_.get("a") == 1)) 29 | DBO.fromMap(Map("a" -> 2.0)) must (containField("a") and verify(_.get("a") == 2.0)) 30 | DBO.fromMap(Map("a" -> "str")) must (containField("a") and verify(_.get("a") == "str")) 31 | } 32 | "convert Option" in { 33 | DBO.fromMap(Map("a" -> None)) must not(containField("a")) 34 | DBO.fromMap(Map("a" -> Some("b"))) must (containField("a") and verify(_.get("a") == "b")) 35 | } 36 | "convert Lists" in { 37 | listExample(DBO.fromMap(Map("a" -> List("a", "b")))) 38 | } 39 | "convert Map of Lists" in { 40 | val dbo = DBO.fromMap(Map("complex" -> Map("a" -> ("a" :: "b" :: Nil) ))) 41 | dbo must containField("complex") 42 | dbo.get("complex") must haveSuperClass[DBObject] 43 | listExample(dbo.get("complex").asInstanceOf[DBObject]) 44 | } 45 | "convert List of Options" in { 46 | val list = List(Some(1), None, Some(2), None, Some(3)) 47 | val dbo = DBO.fromMap(Map("a" -> list)) 48 | dbo must containField("a") 49 | dbo.get("a") must haveSuperClass[DBObject] 50 | 51 | val ldbo = dbo.get("a").asInstanceOf[DBObject] 52 | ldbo must (containField("0") and verify(_.get("0") == 1)) 53 | ldbo must (containField("1") and verify(_.get("1") == null)) 54 | ldbo must (containField("2") and verify(_.get("2") == 2)) 55 | 56 | DBO.toArray(ldbo).map{Option[Any]} must haveTheSameElementsAs(list) 57 | } 58 | "convert Map of Maps" in { 59 | val dbo = DBO.fromMap(Map("a" -> Map("b" -> "value"))) 60 | dbo must containField("a") 61 | dbo.get("a") must haveSuperClass[DBObject] 62 | dbo.get("a").asInstanceOf[DBObject] must (containField("b") and verify{_.get("b") == "value"}) 63 | } 64 | } 65 | 66 | def listExample(dbo: DBObject) { 67 | dbo must containField("a") 68 | dbo.get("a") must haveSuperClass[DBObject] 69 | dbo.get("a").asInstanceOf[DBObject] must (containField("0") and verify(_.get("0") == "a")) 70 | dbo.get("a").asInstanceOf[DBObject] must (containField("1") and verify(_.get("1") == "b")) 71 | } 72 | } 73 | 74 | import org.specs.matcher.Matcher 75 | case class containField(name: String) extends Matcher[DBObject] { 76 | def apply(dbo: => DBObject) = 77 | (dbo.containsField(name), "Contains "+name, "does not contain "+name) 78 | } 79 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/orig/origSpec.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.orig 17 | 18 | import org.specs._ 19 | import org.specs.runner._ 20 | 21 | class origTest extends JUnit4(origSpec) with Console 22 | object origTestRunner extends ConsoleRunner(origSpec) 23 | 24 | object origSpec extends Specification { 25 | "original Java API extra".isSpecifiedBy( plainSpec ) 26 | } 27 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/orig/plain.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.orig 17 | 18 | import org.specs._ 19 | import com.mongodb._ 20 | 21 | import com.osinka.mongodb._ 22 | import Config._ 23 | 24 | object plainSpec extends Specification { 25 | val mongo = new Mongo(Host, Port).getDB(Database) 26 | 27 | doAfter { mongo.dropDatabase } 28 | 29 | "DBObject" should { 30 | "copy pattern over" in { 31 | import java.util.regex.Pattern 32 | 33 | val re = Pattern.compile("^re.*") 34 | val dbo = new BasicDBObject 35 | dbo putAll BasicDBObjectBuilder.start("a", 3).get 36 | dbo putAll BasicDBObjectBuilder.start("a", re).get 37 | dbo.get("a") must (notBeNull and be_==(re)) 38 | } 39 | } 40 | "Plain collection" should { 41 | val coll = mongo.getCollection("test") 42 | 43 | doBefore { coll.drop } 44 | doAfter { coll.drop } 45 | 46 | "have zero size by default" in { 47 | coll.getCount must be_==(0) 48 | } 49 | "accept DBObjects" in { 50 | coll.getCount must be_==(0) 51 | coll insert BasicDBObjectBuilder.start("key", 100).get 52 | coll.getCount must be_==(1) 53 | } 54 | "return same DBObject" in { 55 | val dbo = BasicDBObjectBuilder.start("key", 100).get 56 | coll save dbo 57 | coll.getCount must be_==(1) 58 | 59 | val o = coll.findOne 60 | o must be_==(dbo) 61 | o.get("key") must be_==(100) 62 | } 63 | "remove DBObjects by object" in { 64 | val o = BasicDBObjectBuilder.start("key", 100).get 65 | coll save o 66 | coll.getCount must be_==(1) 67 | 68 | coll.remove(o) 69 | } 70 | "'save' should replace object" in { 71 | coll.getCount must be_==(0) 72 | 73 | val o = BasicDBObjectBuilder.start("key", 100).get 74 | coll save o 75 | coll.getCount must be_==(1) 76 | 77 | o.put("key", 200) 78 | coll save o 79 | coll.getCount must be_==(1) 80 | 81 | val r = coll.findOne 82 | r must notBeNull 83 | r.get("key") must be_==(200) 84 | } 85 | "not insert duplicate id" in { 86 | coll.getCount must be_==(0) 87 | 88 | val o = BasicDBObjectBuilder.start("key", 100).get 89 | coll save o 90 | coll.getCount must be_==(1) 91 | 92 | o.put("key", 200) 93 | coll insert o 94 | mongo.getLastError aka "error on duplicate key insert" must notBeNull 95 | 96 | coll.getCount must be_==(1) 97 | val r = coll.findOne 98 | r must notBeNull 99 | r.get("key") must be_==(100) 100 | } 101 | "group" in { 102 | skip("TODO: group spec") 103 | } 104 | } 105 | // "Index" should { 106 | // skip("TODO: 
indexing spec") 107 | // } 108 | "Query" should { 109 | val coll = mongo.getCollection("test") 110 | 111 | doBefore { coll.drop } 112 | doAfter { coll.drop } 113 | 114 | "count by query" in { 115 | coll save BasicDBObjectBuilder.start("a", "value").get 116 | coll getCount BasicDBObjectBuilder.start("a", "value").get must be_==(1) 117 | 118 | coll save BasicDBObjectBuilder.start( 119 | "a", BasicDBObjectBuilder.start("b", "other").get 120 | ).get 121 | coll getCount BasicDBObjectBuilder.start("a.b", "other").get must be_==(1) 122 | coll getCount BasicDBObjectBuilder.start( 123 | "a", BasicDBObjectBuilder.start("b", "other").get 124 | ).get must be_==(1) 125 | } 126 | "count by query and shape" in { 127 | coll save BasicDBObjectBuilder 128 | .start("a", "value") 129 | .get 130 | coll save BasicDBObjectBuilder 131 | .start("a", "value") 132 | .push("b") 133 | .append("c", "other") 134 | .get 135 | coll.find(BasicDBObjectBuilder.start.push("b.c").append("$exists", true).get).count must be_==(1) 136 | // coll.getCount(BasicDBObjectBuilder.start("a", "value").get, 137 | // BasicDBObjectBuilder.start.push("b").append("c", 1).get 138 | // ) must be_==(1) 139 | } 140 | "regexp" in { 141 | import java.util.regex.Pattern 142 | 143 | val reQuery = BasicDBObjectBuilder.start("a", Pattern.compile(".*es.*")).get 144 | val shape = BasicDBObjectBuilder.start("a", 1).get 145 | 146 | doBefore { coll save BasicDBObjectBuilder.start("a", "test").get } 147 | doAfter {} 148 | 149 | "getCount" in { 150 | coll.getCount(reQuery) must be_==(1) 151 | coll.getCount(reQuery, shape) must be_==(1) 152 | } 153 | "findOne" in { 154 | coll.findOne(reQuery) must notBeNull 155 | coll.findOne(reQuery, shape) must notBeNull 156 | } 157 | "find" in { 158 | coll.find(reQuery).count must be_==(1) 159 | coll.find(reQuery, shape).count must be_==(1) 160 | } 161 | } 162 | "update by query" in { 163 | skip("TODO: update") 164 | } 165 | "lookup by query" in { 166 | skip("TODO: lookup") 167 | } 168 | "remove DBObjects by query" in { 169 | val o = BasicDBObjectBuilder.start("key", 100).get 170 | coll save o 171 | coll.getCount must be_==(1) 172 | 173 | coll.remove(o) 174 | coll.getCount must be_==(0) 175 | } 176 | } 177 | "DBCursor" should { 178 | val coll = mongo.getCollection("test") 179 | 180 | def collection(f: (DBCursor => DBCursor)) = new wrapper.DBObjectIterator(f(coll.find)).toSeq 181 | def count(f: (DBCursor => DBCursor)) = f(coll.find).count 182 | 183 | doFirst { 184 | coll.drop 185 | def gen(n: Int) = Array.tabulate(n) { i => Map("a" -> ("a"+i) ) } 186 | for (o <- gen(5)) coll save o 187 | } 188 | doLast { 189 | coll.drop 190 | } 191 | 192 | "count" in { 193 | coll.getCount must be_==(5) 194 | coll.find.count must be_==(5) 195 | 196 | collection(x => x) must haveSize(5) 197 | count(x => x) must be_==(5) 198 | } 199 | "count regexp" in { 200 | import java.util.regex.Pattern 201 | import Pattern._ 202 | 203 | val q = Map("a" -> Pattern.compile("a3$", CASE_INSENSITIVE)) 204 | coll.find(q).count must be_==(1) 205 | } 206 | "limit" in { 207 | collection(_ limit -1) mustNot haveSize(5) 208 | count(_ limit -1) must be_==(5) 209 | 210 | collection(_ limit 2) must haveSize(2) 211 | count(_ limit 2) must be_==(5) 212 | } 213 | "skip" in { 214 | collection(_ skip 0) must haveSize(5) 215 | count(_ skip 0) must be_==(5) 216 | 217 | collection(_ skip 1) must haveSize(4) 218 | count(_ skip 1) must be_==(5) 219 | 220 | collection(_ skip 1 limit 2) must haveSize(2) 221 | count(_ skip 1 limit 2) must be_==(5) 222 | } 223 | "sort ascending" in { 
224 | coll.find.count must be_==(5) 225 | 226 | val c = coll.find.sort(Map("a" -> 1)).limit(1) 227 | c.hasNext must beTrue 228 | 229 | val o = c.next 230 | o must notBeNull 231 | o.get("a") must be_==("a0") 232 | } 233 | "sort descending" in { 234 | val c = coll.find.sort(Map("a" -> -1)).limit(1) 235 | c.hasNext must beTrue 236 | 237 | val o = c.next 238 | o must notBeNull 239 | o.get("a") must be_==("a4") 240 | } 241 | } 242 | "DBRef" should { 243 | val coll = mongo.getCollection("test") 244 | 245 | setSequential 246 | doFirst { coll.drop } 247 | doLast { coll.drop } 248 | 249 | "store and fetch" in { 250 | val subobj: DBObject = Map("s" -> "other things", "num" -> 100) 251 | coll save subobj 252 | subobj.get("_id") must notBeNull 253 | 254 | val ref = new DBRef(coll.getDB, "test", subobj.get("_id")) 255 | 256 | val obj: DBObject = Map("object" -> "complex", "sub" -> ref) 257 | coll save obj 258 | obj.get("_id") must notBeNull 259 | 260 | obj.get("sub") must haveSuperClass[DBRefBase] 261 | val deref = obj.get("sub").asInstanceOf[DBRefBase] 262 | val deSubObj = deref.fetch 263 | deSubObj must notBeNull 264 | deSubObj.get("_id") must be_==(subobj.get("_id")) 265 | } 266 | } 267 | "Types save/retrieve" should { 268 | val coll = mongo.getCollection("test") 269 | 270 | doBefore { coll.drop } 271 | doAfter { coll.drop } 272 | 273 | "work out Strings" in { 274 | val dbo = BasicDBObjectBuilder.start("a", "val").get 275 | dbo.get("a") must haveClass[String] 276 | 277 | coll save dbo 278 | val res = coll.findOne.get("a") 279 | res must (haveClass[String] and be_==("val")) 280 | } 281 | "work out Ints" in { 282 | val dbo = BasicDBObjectBuilder.start("a", 1).get 283 | dbo.get("a") must haveClass[java.lang.Integer] 284 | 285 | coll.save(dbo) 286 | val res = coll.findOne.get("a") 287 | res must (haveClass[java.lang.Integer] and be_==(1)) 288 | } 289 | "work out large Ints" in { 290 | val dbo = BasicDBObjectBuilder.start("a", 1000000).get 291 | dbo.get("a") must haveClass[java.lang.Integer] 292 | 293 | coll save dbo 294 | val res = coll.findOne.get("a") 295 | res must (haveClass[java.lang.Integer] and be_==(1000000)) 296 | } 297 | "work out Longs" in { 298 | val dbo = BasicDBObjectBuilder.start("a", 1L).get 299 | dbo.get("a") must haveClass[java.lang.Long] 300 | 301 | coll save dbo 302 | val res = coll.findOne.get("a") 303 | res must (haveClass[java.lang.Long] and be_==(1L)) 304 | } 305 | "work out Floats" in { 306 | val dbo = BasicDBObjectBuilder.start("a", 1.0F).get 307 | dbo.get("a") must haveClass[java.lang.Float] 308 | 309 | coll save dbo 310 | val res = coll.findOne.get("a") 311 | res aka "getting Float out from DBColl will return Double" must (haveClass[java.lang.Double] and be_==(1.0D)) 312 | } 313 | "work out Doubles" in { 314 | val dbo = BasicDBObjectBuilder.start("a", 1.0D).get 315 | dbo.get("a") must haveClass[java.lang.Double] 316 | 317 | coll save dbo 318 | val res = coll.findOne.get("a") 319 | res must (haveClass[java.lang.Double] and be_==(1.0D)) 320 | } 321 | } 322 | "DBObject serialization" should { 323 | "create DBO from Map" in { 324 | import scala.collection.JavaConversions._ 325 | val m = scala.collection.mutable.Map[String, Any]() 326 | m += ("a" -> 1, "b" -> 2) 327 | 328 | val juMap: java.util.Map[String,Any] = m 329 | val dbo = BasicDBObjectBuilder.start(juMap).get 330 | dbo.containsField("a") must beTrue 331 | dbo.get("a") must be_==(1) 332 | dbo.containsField("b") must beTrue 333 | dbo.get("b") must be_==(2) 334 | } 335 | "convert Map of Arrays to DBO" in { 336 | 
skip("BasicDBObjectBuilder.start(Map) and BasicDBObject.putAll(Map) do not descend, they assume all values to be scalars") 337 | 338 | import scala.collection.JavaConversions._ 339 | val m = scala.collection.mutable.Map[String, Any]() 340 | val a = Array[String]("v1", "v2") 341 | m += "c" -> a 342 | 343 | val juMap: java.util.Map[String,Any] = m 344 | val dbo = BasicDBObjectBuilder.start(juMap).get 345 | dbo.containsField("c") must beTrue 346 | dbo.get("c") must haveSuperClass[DBObject] 347 | val adbo = dbo.get("c").asInstanceOf[DBObject] 348 | adbo.get("0") must be_==("v1") 349 | adbo.get("1") must be_==("v2") 350 | } 351 | } 352 | } 353 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/query.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb 17 | 18 | import org.specs._ 19 | import com.mongodb._ 20 | 21 | import wrapper.DBO 22 | import Config._ 23 | 24 | object querySpec extends Specification { 25 | val mongo = new Mongo(Host, Port).getDB(Database) 26 | 27 | doAfter { mongo.dropDatabase } 28 | 29 | "Query" should { 30 | "be empty initially" in { 31 | val q = Query() 32 | q.query must be_==(DBO.empty) 33 | q.skip must beNone 34 | q.limit must beNone 35 | } 36 | "store DBObject query" in { 37 | val q = Query(Map("a" -> 1), Some(1), Some(1), None) 38 | q.query.get("a") must (notBeNull and be_==(1)) 39 | q.skip must beSome[Int].which{_ == 1} 40 | q.limit must beSome[Int].which{_ == 1} 41 | } 42 | "skip" in { 43 | val q = Query() 44 | (q drop None).skip must beNone 45 | (q drop 10 drop 2).skip must beSome[Int].which{_ == 2} 46 | (q drop 1 drop None).skip must beNone 47 | } 48 | "limit" in { 49 | val q = Query() 50 | (q take None).limit must beNone 51 | (q take 10 take 2).limit must beSome[Int].which{_ == 2} 52 | (q take 1 take None).limit must beNone 53 | } 54 | "() ++" in { 55 | val q = Query() ++ Map("a" -> 1) 56 | q.query.get("a") must (notBeNull and be_==(1)) 57 | q.skip must beNone 58 | q.limit must beNone 59 | 60 | } 61 | "(a,s,l) ++" in { 62 | val q = Query(Map("a" -> 1), Some(1), Some(1), None) ++ Map("b" -> 1) 63 | q.query.get("a") must (notBeNull and be_==(1)) 64 | q.query.get("b") must (notBeNull and be_==(1)) 65 | q.skip must beSome[Int].which{_ == 1} 66 | q.limit must beSome[Int].which{_ == 1} 67 | } 68 | "++ with same key" in { 69 | val q = Query(Map("a" -> 1), Some(1), Some(1), None) ++ Map("a" -> 10) 70 | q.query.get("a") must (notBeNull and be_==(10)) 71 | q.skip must beSome[Int].which{_ == 1} 72 | q.limit must beSome[Int].which{_ == 1} 73 | } 74 | "*" in { 75 | val q = Query(Map("a" -> 1, "b" -> 2), Some(1), Some(2), Some(Map("a" -> 1))) * 76 | Query(Map("b" -> 10, "c" -> 3), Some(2), Some(5), Some(Map("a" -> 2))) 77 | q.query.get("a") must (notBeNull and be_==(1)) 78 | q.query.get("b") must (notBeNull and 
be_==(10)) 79 | q.query.get("c") must (notBeNull and be_==(3)) 80 | q.skip must beSome[Int].which{_ == 2} 81 | q.limit must beSome[Int].which{_ == 5} 82 | q.sorting must beSome[DBObject].which{_.get("a") == 2} 83 | } 84 | } 85 | 86 | "Query(coll)" should { 87 | val coll = mongo.getCollection("test").asScala 88 | 89 | doFirst { 90 | coll.drop 91 | Helper.fillWith(coll, 5) {i => Map("a" -> i)} 92 | } 93 | doLast { 94 | coll.drop 95 | } 96 | 97 | "support DSL" in { 98 | val q = Query() drop 1 take 2 99 | q must be_==( Query(DBO.empty, Some(1), Some(2), None) ) 100 | q.sort(Map("a" -> 1)).sorting must beSome[DBObject].which{_.get("a") == 1} 101 | (q in coll).query must be_==(q) 102 | } 103 | "apply to DBObjectCollection" in { 104 | val c = Query() in coll 105 | c must haveSuperClass[DBObjectCollection] 106 | (Query() in c) must haveSuperClass[DBObjectCollection] 107 | c must haveSize(5) 108 | c.iterator.toSeq must haveSize(5) 109 | } 110 | "sort ascending" in { 111 | val c = Query() sort Map("a" -> 1) in coll 112 | c.query.sorting must beSome[DBObject].which{_.get("a") == 1} 113 | c.headOption must beSome[DBObject].which{_.get("a") == 0} 114 | } 115 | "sort descending" in { 116 | val c = Query() sort Map("a" -> -1) in coll 117 | c must haveSuperClass[DBObjectCollection] 118 | c.query.sorting must beSome[DBObject].which{_.get("a") == -1} 119 | c.headOption must beSome[DBObject].which{_.get("a") == 4} 120 | } 121 | "support skip" in { 122 | (Query() drop 1 in coll).iterator.toSeq must haveSize(4) 123 | Query() drop 1 in coll must haveSize(4) 124 | (Query() drop 1 in coll).iterator.toSeq must haveSize(4) 125 | Query() drop 1 drop 1 in coll must haveSize(4) 126 | Query() drop 5 in coll must beEmpty 127 | Query() drop 6 in coll must haveSize(0) 128 | (Query() drop 6 in coll).iterator.toSeq must beEmpty 129 | } 130 | "support limit" in { 131 | Query() take 1 in coll must haveSize(1) 132 | Query() drop 1 take 2 in coll must haveSize(2) 133 | Query() take 2 drop 1 in coll must haveSize(2) 134 | (Query() take 2 drop 1 in coll).iterator.toSeq must haveSize(2) 135 | } 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/ArrayModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb.{DB,DBObject} 19 | import com.osinka.mongodb._ 20 | 21 | object ArrayOfInt { 22 | class ArrayModel(val id: Int) { 23 | var messages: List[Int] = Nil 24 | 25 | override def toString = "ArrayModel("+id+", "+messages.mkString("[",",","]")+")" 26 | } 27 | 28 | object ArrayModel extends ObjectShape[ArrayModel] { shape => 29 | lazy val id = Field.scalar("id", _.id) 30 | 31 | lazy val messages = Field.array("messages", _.messages, (x: ArrayModel, l: Seq[Int]) => x.messages = l.toList ) 32 | // 33 | // same as 34 | // 35 | // object messages extends MongoArray[Int] with ScalarContent[Int] with ArrayFieldModifyOp[Int] { 36 | // override val mongoFieldName = "messages" 37 | // override val rep = Represented.by[Seq[Int]](_.messages, Some( (x: ArrayModel, l: Seq[Int]) => x.messages = l.toList )) 38 | // } 39 | 40 | lazy val * = List(id, messages) 41 | override def factory(dbo: DBObject) = for {id(i) <- Some(dbo)} yield new ArrayModel(i) 42 | } 43 | } 44 | 45 | object ArrayOfEmbedded { 46 | class ArrayModel(val id: Int, val users: List[CaseUser]) 47 | 48 | object ArrayModel extends ObjectShape[ArrayModel] { shape => 49 | lazy val id = Field.scalar("id", _.id) 50 | 51 | object users extends ArrayEmbeddedField[CaseUser]("users", _.users, None) with CaseUserIn[ArrayModel] 52 | // 53 | // same as 54 | // 55 | // object users extends MongoArray[CaseUser] with ArrayFieldModifyOp[CaseUser] with EmbeddedContent[CaseUser] with CaseUserIn[ArrayModel] { 56 | // override val mongoFieldName = "users" 57 | // override val rep = shape.Represented.by[Seq[CaseUser]]( _.users, Some( (x: ArrayModel, l: Seq[CaseUser]) => x.users = l.toList )) 58 | // } 59 | 60 | lazy val * = List(id, users) 61 | override def factory(dbo: DBObject) = for {id(_id) <- Some(dbo); users(_users) <- Some(dbo)} yield new ArrayModel(_id, _users.toList) 62 | } 63 | } 64 | 65 | object ArrayOfRef { 66 | class ArrayModel(val id: Int) { 67 | var users: List[CaseUser] = Nil 68 | } 69 | 70 | class ArrayModelShape(val db: DB, val usersCollName: String) extends ObjectShape[ArrayModel] { shape => 71 | lazy val id = Field.scalar("id", _.id) 72 | 73 | lazy val users = Field.arrayRef("users", CaseUser collection db.getCollection(usersCollName), _.users, (x: ArrayModel, l: Seq[CaseUser]) => x.users = l.toList ) 74 | // 75 | // same as 76 | // 77 | // object users extends MongoArray[CaseUser] with RefContent[CaseUser] { 78 | // override val mongoFieldName = "users" 79 | // override lazy val coll: MongoCollection[CaseUser] = CaseUser collection db.getCollection(usersCollName) 80 | // override val rep = shape.Represented.by[Seq[CaseUser]]( _.users, Some( (x: ArrayModel, l: Seq[CaseUser]) => x.users = l.toList )) 81 | // } 82 | 83 | lazy val * = List(id, users) 84 | override def factory(dbo: DBObject) = for {id(i) <- Some(dbo)} yield new ArrayModel(i) 85 | } 86 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/ComplexModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb._ 19 | import com.osinka.mongodb._ 20 | 21 | class ComplexType(val user: CaseUser, val messageCount: Int) extends MongoObject { 22 | override def toString = "ComplexType (" + user + ", " + messageCount + ")" 23 | } 24 | 25 | object ComplexType extends MongoObjectShape[ComplexType] with FunctionalShape[ComplexType] { 26 | object user extends EmbeddedField[CaseUser]("user", _.user, None) with CaseUserIn[ComplexType] 27 | object messageCount extends ScalarField[Int]("msgs", _.messageCount, None) 28 | 29 | override lazy val * = user :: messageCount :: Nil 30 | override def factory(dbo: DBObject): Option[ComplexType] = 31 | for {user(u) <- Some(dbo) 32 | messageCount(x) <- Some(dbo)} 33 | yield new ComplexType(u, x) 34 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/MapModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb.{DB,DBObject} 19 | import com.osinka.mongodb._ 20 | 21 | object MapOfScalar { 22 | class MapModel(val id: Int) { 23 | var counts: Map[String, Int] = Map.empty 24 | 25 | override def toString = "MapModel("+id+", "+counts.mkString("[",",","]")+")" 26 | } 27 | 28 | object MapModel extends ObjectShape[MapModel] { shape => 29 | lazy val id = Field.scalar("id", _.id) 30 | 31 | lazy val counts = Field.map("counts", _.counts, (x: MapModel, l: Map[String,Int]) => x.counts = l) 32 | 33 | lazy val * = List(id, counts) 34 | override def factory(dbo: DBObject) = for {_id <- id from dbo} yield new MapModel(_id) 35 | } 36 | } 37 | 38 | object MapOfEmbedded { 39 | class MapModel(val id: Int, val users: Map[String,CaseUser]) { 40 | override def toString = "MapModel("+id+","+users+")" 41 | } 42 | 43 | object MapModel extends ObjectShape[MapModel] { shape => 44 | lazy val id = Field.scalar("id", _.id) 45 | 46 | object users extends MapEmbeddedField[CaseUser]("users", _.users, None) with CaseUserIn[MapModel] { field => 47 | 48 | /** 49 | * The apply method makes it possible to build queries where a condition is set on a map value, e.g. 
50 | * MapModel.users("strID").exists 51 | * or 52 | * MapModel.users("strID").name is_== "John" 53 | * 54 | * if you do not need such queries, there is no need for "apply" here 55 | */ 56 | def apply(key: String) = new shape.EmbeddedField[CaseUser](key, _.users(key), None) with CaseUserIn[MapModel] { 57 | override def mongoFieldPath = field.mongoFieldPath ::: super.mongoFieldPath 58 | } 59 | } 60 | 61 | lazy val * = List(id, users) 62 | override def factory(dbo: DBObject) = for {_id <- id from dbo; _users <- users from dbo} yield new MapModel(_id, _users) 63 | } 64 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/OptModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb._ 19 | 20 | class OptModel(val id: Int, val description: Option[String]) { 21 | var comment: Option[String] = None 22 | } 23 | 24 | object OptModel extends ObjectShape[OptModel] { 25 | lazy val id = Field.scalar("id", _.id) 26 | 27 | // Hurray! Option[A] field! 28 | lazy val description = Field.optional("description", _.description) 29 | // OR much longer: 30 | 31 | object description3 extends OptionalField[String]("description", _.description, None) 32 | 33 | lazy val comment = Field.optional("comment", _.comment, (obj: OptModel, v: Option[String]) => obj.comment = v) 34 | 35 | override def * = List(id, description, comment) 36 | 37 | override def factory(dbo: DBObject) = 38 | for {id(i) <- Some(dbo)} 39 | yield new OptModel(i, description from dbo) 40 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/RefModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb.{DB,DBObject} 19 | import com.osinka.mongodb._ 20 | 21 | class RefModel(val message: String, val user: CaseUser) 22 | 23 | class RefModelShape(val db: DB, val usersCollName: String) extends ObjectShape[RefModel] { shape => 24 | lazy val message = Field.scalar("message", _.message) 25 | 26 | lazy val user = Field.ref("user", CaseUser collection db.getCollection(usersCollName), _.user) 27 | // 28 | // same as 29 | // 30 | // object user extends MongoScalar[CaseUser] with RefContent[CaseUser] with Functional[CaseUser] { 31 | // override val mongoFieldName = "user" 32 | // override lazy val coll: MongoCollection[CaseUser] = CaseUser collection db.getCollection(usersCollName) 33 | // override val rep = shape.Represented.by(_.user, None) 34 | // } 35 | 36 | lazy val * = List(message, user) 37 | override def factory(dbo: DBObject) = 38 | for {message(m) <- Some(dbo) 39 | user(u) <- Some(dbo)} yield new RefModel(m, u) 40 | } -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/SimpleModel.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import com.mongodb._ 19 | import com.osinka.mongodb._ 20 | 21 | // case class Model with constant field, its extractor and factory method 22 | case class CaseUser(val name: String) extends MongoObject { 23 | override def toString = "CaseUser(name="+name+",oid="+mongoOID+")" 24 | } 25 | 26 | trait CaseUserIn[T] extends ObjectIn[CaseUser, T] { 27 | object name extends ScalarField[String]("name", _.name, None) 28 | override lazy val * = name :: Nil 29 | override def factory(dbo: DBObject): Option[CaseUser] = for {name(n) <- Some(dbo)} yield new CaseUser(n) 30 | } 31 | 32 | object CaseUser extends MongoObjectShape[CaseUser] with CaseUserIn[CaseUser] 33 | 34 | // ordinary class model with variable and updatable field 35 | class OrdUser extends MongoObject { 36 | var name: String = _ 37 | override def toString = "OrdUser(name="+name+",oid="+mongoOID+")" 38 | } 39 | object OrdUser extends MongoObjectShape[OrdUser] { 40 | override def factory(dbo: DBObject) = Some(new OrdUser) 41 | 42 | lazy val name = Field.scalar("name", 43 | (u: OrdUser) => u.name, 44 | (u: OrdUser, n: String) => u.name = n) 45 | 46 | override lazy val * = name :: Nil 47 | } 48 | 49 | // object holder for serializer tests 50 | case class Holder[T](var value: T) 51 | 52 | class TSerializer[T](val f: () => Holder[T]) extends ObjectShape[Holder[T]] with FunctionalShape[Holder[T]] { 53 | lazy val i = Field.scalar("i", (x: Holder[T]) => x.value, (x: Holder[T], v: T) => x.value = v) 54 | 55 | override lazy val * = List(i) 56 | override def factory(dbo: DBObject): Option[Holder[T]] = Some(f()) 57 | } 58 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/collection.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import org.specs._ 19 | import org.bson.types.ObjectId 20 | import com.mongodb._ 21 | 22 | import org.bson.types.ObjectId 23 | import com.osinka.mongodb._ 24 | import Config._ 25 | 26 | object collectionSpec extends Specification("Shape collection") { 27 | val CollName = "test" 28 | val Const = "John Doe" 29 | 30 | val mongo = new Mongo(Host, Port).getDB(Database) 31 | val dbColl = mongo.getCollection(CollName) 32 | 33 | doAfter { mongo.dropDatabase } 34 | 35 | "Collection of class" should { 36 | doBefore { dbColl.drop } 37 | doAfter { dbColl.drop } 38 | 39 | "retrieve" in { 40 | dbColl save Map("name" -> Const) 41 | val coll = dbColl.of(OrdUser) 42 | coll must haveSuperClass[ShapedCollection[OrdUser]] 43 | coll.headOption must beSome[OrdUser].which{x => x.name == Const && x.mongoOID != None} 44 | } 45 | "store" in { 46 | val coll = dbColl.of(OrdUser) 47 | val u = new OrdUser 48 | u.name = Const 49 | 50 | coll += u 51 | u.mongoOID must beSome[ObjectId] 52 | 53 | coll.headOption must beSome[OrdUser].which{x => 54 | x.name == Const && 55 | x.mongoOID != None && 56 | x.mongoOID == u.mongoOID 57 | } 58 | } 59 | } 60 | "Collection of case class" should { 61 | doBefore { dbColl.drop } 62 | doAfter { dbColl.drop } 63 | 64 | "retrieve" in { 65 | dbColl save Map("name" -> Const) 66 | val coll = dbColl.of(CaseUser) 67 | coll must haveSuperClass[ShapedCollection[CaseUser]] 68 | coll.headOption must beSome[CaseUser].which{x => x.name == Const && x.mongoOID != None} 69 | } 70 | "store" in { 71 | val coll = dbColl.of(CaseUser) 72 | coll += CaseUser(Const) 73 | coll.headOption must beSome[CaseUser].which{x => x.name == Const && x.mongoOID != None} 74 | } 75 | "insert many" in { 76 | val coll = dbColl.of(CaseUser) 77 | ( coll << ((1 to 10) map {x => CaseUser(Const+x)}) ).getLastError.ok must beTrue 78 | coll must haveSize(10) 79 | coll foreach { _.mongoOID must beSome[ObjectId] } 80 | } 81 | "findAndRemove" in { 82 | val coll = dbColl.of(CaseUser) 83 | coll << ((1 to 10) map {x => CaseUser(Const+x)}) 84 | coll must haveSize(10) 85 | coll.findAndRemove(CaseUser.name is_== "NoUser") must beNone 86 | coll.findAndRemove(CaseUser.name is_~ "9$".r) must beSome[CaseUser].which{x => 87 | x.name == Const+"9" 88 | } 89 | coll must haveSize(9) 90 | } 91 | "findAndModify" in { 92 | val coll = dbColl.of(CaseUser) 93 | coll << ((1 to 10) map {x => CaseUser(Const+x)}) 94 | coll must haveSize(10) 95 | coll.findAndModify(CaseUser.name is_~ "9$".r, CaseUser.name set "U9") must beSome[CaseUser].which{x => 96 | x.name == Const+"9" 97 | } 98 | CaseUser where {CaseUser.name is_== Const+"9"} in coll must beEmpty 99 | CaseUser where {CaseUser.name is_== "U9"} in coll must haveSize(1) 100 | } 101 | "findAndModify w/ sort" in { 102 | val coll = dbColl.of(CaseUser) 103 | coll << ((1 to 10) map {x => CaseUser(Const+x)}) 104 | coll must haveSize(10) 105 | val r = coll.findAndModify(CaseUser sortBy CaseUser.name.descending, CaseUser.name set "U10") 106 | r must beSome[CaseUser].which{x => 107 | x.name == Const+"9" 108 | } 109 | CaseUser where {CaseUser.name is_== Const+"9"} in coll must beEmpty 110 | CaseUser where {CaseUser.name is_== "U10"} in coll must haveSize(1) 111 | } 112 | } 113 | "Collection of complex" should { 114 | doBefore { dbColl.drop } 115 | doAfter { dbColl.drop } 116 | 117 | "store/retrieve" in { 118 | val coll = dbColl.of(ComplexType) 119 | val c = new ComplexType(CaseUser(Const), 1) 120 | 121 | coll += c 122 | c.mongoOID must beSome[ObjectId] 123 | 124 | 
coll.headOption must beSome[ComplexType].which{x => 125 | x.user == CaseUser(Const) && 126 | x.messageCount == 1 && 127 | x.mongoOID == c.mongoOID 128 | } 129 | } 130 | } 131 | "Collection of Optional" should { 132 | val N = 10 133 | 134 | doBefore { dbColl.drop } 135 | doAfter { dbColl.drop } 136 | 137 | val coll = dbColl of OptModel 138 | 139 | "store" in { 140 | Helper.fillWith(coll, N) {i => 141 | val c = new OptModel(i, if (i % 3 == 0) Some("d"+i) else None) 142 | if (i % 4 == 0) c.comment = Some("comment"+i) 143 | c 144 | } 145 | coll must haveSize(N) 146 | coll.headOption must beSome[OptModel] 147 | } 148 | } 149 | "Collection of ref" should { 150 | object RefModel extends RefModelShape(mongo, "users") 151 | 152 | val users = mongo.getCollection("users") of CaseUser 153 | val posts = mongo.getCollection("posts") of RefModel 154 | 155 | var user: CaseUser = CaseUser(Const) 156 | doBefore { 157 | users.drop; posts.drop 158 | users << user 159 | posts += new RefModel("text", user) 160 | } 161 | doAfter { users.drop; posts.drop } 162 | 163 | "user has oid" in { 164 | user.mongoOID must beSome[ObjectId] 165 | } 166 | "save post with user ref" in { 167 | val dbo = mongo.getCollection("posts").asScala.headOption 168 | dbo must beSome[DBObject] 169 | dbo.get.get("user") must (notBeNull and haveSuperClass[DBObject]) 170 | 171 | val userDbo = dbo.get.get("user").asInstanceOf[DBObject] 172 | Option(userDbo.get("_ref")) must be_==(Some("users")) 173 | Option(userDbo.get("_id")) must be_==(user.mongoOID) 174 | } 175 | "retrieve user from ref" in { 176 | posts.headOption must beSome[RefModel].which{_.user == user} 177 | } 178 | } 179 | "Collection with ArrayInt" should { 180 | import ArrayOfInt._ 181 | 182 | val objs = mongo.getCollection("objs") of ArrayModel 183 | 184 | doBefore { objs.drop } 185 | doAfter { objs.drop } 186 | "store empty" in { 187 | objs << new ArrayModel(1) 188 | objs must haveSize(1) 189 | objs.headOption must beSome[ArrayModel].which{ x => 190 | x.id == 1 && x.messages.isEmpty 191 | } 192 | } 193 | "store non-empty" in { 194 | val o = new ArrayModel(1) 195 | o.messages = List(1,2,3) 196 | objs << o 197 | objs must haveSize(1) 198 | objs.headOption must beSome[ArrayModel].which{ x => 199 | x.id == 1 && x.messages == List(1,2,3) 200 | } 201 | } 202 | } 203 | "Collection with ArrayEmbedded" should { 204 | import ArrayOfEmbedded._ 205 | 206 | val objs = mongo.getCollection("objs") of ArrayModel 207 | 208 | doBefore { objs.drop } 209 | doAfter { objs.drop } 210 | 211 | "store empty" in { 212 | val o = new ArrayModel(1, Nil) 213 | objs << o 214 | objs must haveSize(1) 215 | objs.headOption must beSome[ArrayModel].which{ x => 216 | x.id == 1 && x.users.isEmpty 217 | } 218 | } 219 | "store non-empty" in { 220 | val o = new ArrayModel(1, CaseUser(Const) :: Nil) 221 | objs << o 222 | 223 | objs must haveSize(1) 224 | objs.headOption must beSome[ArrayModel].which { x => 225 | x.id == 1 && x.users == List(CaseUser(Const)) 226 | } 227 | 228 | objs.underlying.asScala.headOption must beLike { 229 | case Some(dbo) => 230 | dbo match { 231 | case ArrayModel.users(_users) if _users == CaseUser(Const) :: Nil => true 232 | case _ => false 233 | } 234 | } 235 | } 236 | "query sub-document" in { 237 | val o = new ArrayModel(1, CaseUser(Const) :: Nil) 238 | objs << o 239 | 240 | ArrayModel where {(ArrayModel.id is_== 1) and 241 | ArrayModel.users.where(CaseUser.name is_== Const)} in objs must haveSize(1) 242 | } 243 | } 244 | "Collection of ArrayRef" should { 245 | import ArrayOfRef._ 246 | 
object ArrayModel extends ArrayModelShape(mongo, "users") 247 | 248 | val objs = mongo.getCollection("objs") of ArrayModel 249 | val users = mongo.getCollection("users") of CaseUser 250 | 251 | doBefore { objs.drop; users.drop } 252 | doAfter { objs.drop } 253 | "store empty" in { 254 | val o = new ArrayModel(1) 255 | objs << o 256 | objs must haveSize(1) 257 | objs.headOption must beSome[ArrayModel].which{ x => 258 | x.id == 1 && x.users.isEmpty 259 | } 260 | } 261 | "store non-empty" in { 262 | val user = CaseUser(Const) 263 | users += user 264 | user.mongoOID must beSome[ObjectId] 265 | 266 | val o = new ArrayModel(1) 267 | o.users = List(user) 268 | objs += o 269 | objs must haveSize(1) 270 | objs.headOption must beSome[ArrayModel].which { x => 271 | x.id == 1 && x.users == List(user) && x.users(0).mongoOID == user.mongoOID 272 | } 273 | } 274 | } 275 | "Collection of MapScalar" should { 276 | import MapOfScalar._ 277 | 278 | val objs = mongo.getCollection("objs") of MapModel 279 | 280 | doBefore { objs.drop } 281 | doAfter { objs.drop } 282 | "store empty" in { 283 | objs << new MapModel(1) 284 | objs must haveSize(1) 285 | objs.headOption must beSome[MapModel].which{ x => 286 | x.id == 1 && x.counts.isEmpty 287 | } 288 | } 289 | "store non-empty" in { 290 | val o = new MapModel(1) 291 | o.counts = Map("one" -> 1, "two" -> 2) 292 | objs << o 293 | objs must haveSize(1) 294 | objs.headOption must beSome[MapModel].which{ x => 295 | x.id == 1 && x.counts == Map("one" -> 1, "two" -> 2) 296 | } 297 | } 298 | } 299 | "Collection with MapEmbedded" should { 300 | import MapOfEmbedded._ 301 | 302 | val objs = mongo.getCollection("objs") of MapModel 303 | 304 | doBefore { objs.drop } 305 | doAfter { objs.drop } 306 | 307 | "store empty" in { 308 | val o = new MapModel(1, Map.empty) 309 | objs << o 310 | objs must haveSize(1) 311 | objs.headOption must beSome[MapModel].which{ x => 312 | x.id == 1 && x.users.isEmpty 313 | } 314 | } 315 | "store non-empty" in { 316 | def testMap = Map("one" -> CaseUser(Const)) 317 | val o = new MapModel(1, testMap) 318 | objs << o 319 | 320 | objs must haveSize(1) 321 | objs.headOption must beSome[MapModel].which { x => 322 | x.id == 1 && x.users == testMap 323 | } 324 | 325 | objs.underlying.asScala.headOption must beLike { 326 | case Some(dbo) => 327 | dbo match { 328 | case MapModel.users(_users) if _users == testMap => true 329 | case _ => false 330 | } 331 | } 332 | } 333 | } 334 | } 335 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/fields.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import org.specs._ 19 | import com.mongodb._ 20 | 21 | import com.osinka.mongodb._ 22 | import Config._ 23 | 24 | object fieldsSpec extends Specification("Shape fields") { 25 | val CollName = "test" 26 | val Const = "John Doe" 27 | 28 | val mongo = new Mongo(Host, Port).getDB(Database) 29 | 30 | doAfter { mongo.dropDatabase } 31 | 32 | "Case Class" should { 33 | "declare fields" in { 34 | CaseUser.fieldList must haveSize(2) 35 | CaseUser.fieldList must contain(CaseUser.name) 36 | true must beTrue 37 | } 38 | "have proper parentFields" in { 39 | CaseUser.containerPath must beEmpty 40 | CaseUser.name.mongoFieldPath must haveTheSameElementsAs("name" :: Nil) 41 | } 42 | } 43 | "Class Shape" should { 44 | "declare fields" in { 45 | OrdUser.fieldList must haveSize(2) 46 | OrdUser.fieldList must contain(OrdUser.name) 47 | true must beTrue 48 | } 49 | } 50 | "Complex Shape" should { 51 | "declare fields" in { 52 | ComplexType.user must notBeNull 53 | ComplexType.fieldList must haveSize(3) 54 | ComplexType.fieldList must contain(ComplexType.user) 55 | true must beTrue 56 | } 57 | "have proper parentFields" in { 58 | ComplexType.containerPath must beEmpty 59 | ComplexType.user.containerPath must haveTheSameElementsAs("user" :: Nil) 60 | ComplexType.user.name.mongoFieldPath must haveTheSameElementsAs("name" :: "user" :: Nil) 61 | } 62 | "have constraint" in { 63 | ComplexType.user.mongoFieldName must be_==("user") 64 | ComplexType.user.containerPath must haveTheSameElementsAs(List("user")) 65 | ComplexType.constraints.m must havePair("user.name" -> Map("$exists" -> true)) 66 | } 67 | "have proper shape for embedded object" in { 68 | val nameField = ComplexType.user.name 69 | nameField must haveSuperClass[ObjectField] 70 | nameField.mongoConstraints.m must havePair("user.name" -> Map("$exists" -> true)) 71 | } 72 | } 73 | "Ref field" should { 74 | object RefModel extends RefModelShape(mongo, "users") 75 | "have constraint" in { 76 | RefModel.user.mongoFieldName must be_==("user") 77 | RefModel.user.mongoFieldPath must haveTheSameElementsAs(List("user")) 78 | RefModel.constraints.m must havePair("user" -> Map("$exists" -> true)) 79 | } 80 | } 81 | "ArrayOfInt field" should { 82 | import ArrayOfInt._ 83 | "have constraint" in { 84 | ArrayModel.messages.mongoFieldName must be_==("messages") 85 | ArrayModel.constraints.m must havePair("messages" -> Map("$exists" -> true)) 86 | } 87 | } 88 | "ArrayOfEmbedded field" should { 89 | import ArrayOfEmbedded._ 90 | "have constraint" in { 91 | // we cannot ask for "users.name" because the array can be empty 92 | ArrayModel.constraints.m must notHaveKey("users.name") 93 | ArrayModel.constraints.m must havePair("users" -> Map("$exists" -> true)) 94 | } 95 | } 96 | "ArrayOfRef field" should { 97 | import ArrayOfRef._ 98 | object ArrayModel extends ArrayModelShape(mongo, "users") 99 | "have constraint" in { 100 | ArrayModel.constraints.m must havePair("users" -> Map("$exists" -> true)) 101 | } 102 | } 103 | "MapOfScalar field" should { 104 | import MapOfScalar._ 105 | "have constraint" in { 106 | MapModel.counts.mongoFieldName must be_==("counts") 107 | MapModel.constraints.m must havePair("counts" -> Map("$exists" -> true)) 108 | } 109 | } 110 | "Field equality" should { 111 | "the same field" in { 112 | import ComplexType._ 113 | user must be(user) 114 | user must be_==(user) 115 | } 116 | "in the same shape" in { 117 | import ComplexType._ 118 | user must be_!=(messageCount) 119 | } 120 | "between the shapes" 
in { 121 | ArrayOfInt.ArrayModel.id.longFieldName must be_==(ArrayOfEmbedded.ArrayModel.id.longFieldName) 122 | ArrayOfInt.ArrayModel.id must be_==(ArrayOfEmbedded.ArrayModel.id) 123 | ArrayOfInt.ArrayModel.id.hashCode must be_==(ArrayOfEmbedded.ArrayModel.id.hashCode) 124 | } 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/query.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import org.specs._ 19 | import org.bson.types.ObjectId 20 | import com.mongodb._ 21 | 22 | import org.bson.types.ObjectId 23 | import com.osinka.mongodb._ 24 | import Config._ 25 | 26 | object querySpec extends Specification("Query on Shapes and Fields") { 27 | val CollName = "test" 28 | val Const = "John Doe" 29 | 30 | val mongo = new Mongo(Host, Port).getDB(Database) 31 | 32 | doAfter { mongo.dropDatabase } 33 | 34 | "Field conditions" should { 35 | "on scalar fields" in { 36 | import java.util.regex.Pattern 37 | import Pattern._ 38 | 39 | val scalaR = "^test$".r 40 | val javaR = Pattern.compile("^test$") 41 | 42 | CaseUser.name eq_? 
Const must be_==( QueryTerm( Map("name" -> Const) ) ) 43 | CaseUser.name is_< Const must be_==( QueryTerm( Map("name" -> Map("$lt" -> Const))) ) 44 | CaseUser.name is_<= Const must be_==( QueryTerm( Map("name" -> Map("$lte" -> Const))) ) 45 | CaseUser.name is_> Const must be_==( QueryTerm( Map("name" -> Map("$gt" -> Const))) ) 46 | CaseUser.name is_>= Const must be_==( QueryTerm( Map("name" -> Map("$gte" -> Const))) ) 47 | CaseUser.name in List(Const) must be_==( QueryTerm( Map("name" -> Map("$in" -> List(Const)))) ) 48 | CaseUser.name.exists must be_==( QueryTerm( Map("name" -> Map("$exists" -> true))) ) 49 | (CaseUser.name is_~ scalaR).m.get("name") must beLike { 50 | case Some(p: Pattern) => p.pattern == javaR.pattern 51 | } 52 | } 53 | "on embedded map fields" in { 54 | import MapOfEmbedded._ 55 | 56 | MapModel.users("a").exists must be_==( QueryTerm( Map("users.a" -> Map("$exists" -> true)) ) ) 57 | MapModel.users("a").name is_== Const must be_==( QueryTerm( Map("users.a.name" -> Const) ) ) 58 | } 59 | "on array of embedded documents" in { 60 | import ArrayOfEmbedded._ 61 | 62 | ArrayModel.users.where(CaseUser.name is_== Const) must be_==( QueryTerm( Map("users" -> Map("$elemMatch" -> Map("name" -> Const)) ) )) 63 | } 64 | } 65 | "Shape query" should { 66 | "have DSL" in { 67 | val qt = CaseUser.name is_== Const 68 | qt must haveSuperClass[QueryTerm[CaseUser]] 69 | qt must be_==( QueryTerm[CaseUser]( Map("name" -> Const)) ) 70 | 71 | val q = CaseUser where {CaseUser.name is_< Const} drop 10 take 10 sortBy CaseUser.name.ascending 72 | q must haveSuperClass[ObjectShape[CaseUser]#ShapeQuery] 73 | q.query must be_==( Query(Map("name" -> Map("$lt" -> Const)), Some(10), Some(10), Some(Map("name" -> 1))) ) 74 | 75 | (CaseUser sortBy CaseUser.name.descending).query.sorting must beSome[DBObject].which{_.get("name") == -1} 76 | } 77 | "produce right DBO for regex query" in { 78 | import java.util.regex.Pattern 79 | val qt = CaseUser.name is_~ "^User3$".r 80 | val dboRE = CaseUser.where(qt).query.query.get("name") 81 | dboRE must (notBeNull and beLike { 82 | case p: Pattern => p.pattern == "^User3$" 83 | }) 84 | } 85 | } 86 | "Query" should { 87 | val dbColl = mongo.getCollection(CollName) 88 | val coll = dbColl of CaseUser 89 | val N = 50 90 | 91 | doBefore { 92 | dbColl.drop 93 | Helper.fillWith(coll, N) {x => CaseUser("User"+x)} 94 | } 95 | doAfter { 96 | dbColl.drop 97 | } 98 | 99 | "retain coll type" in { 100 | coll applied Query() must haveSuperClass[ShapedCollection[CaseUser]] 101 | } 102 | "support skip/limit" in { 103 | coll must haveSize(N) 104 | coll applied (Query() take 1) must haveSize(1) 105 | coll applied (Query() drop 10 take 5) must haveSize(5) 106 | coll applied (Query() drop N-5 take 10) must haveSize(5) 107 | } 108 | "ignore different shape" in { 109 | val cmplxColl = dbColl of ComplexType 110 | cmplxColl must beEmpty 111 | cmplxColl.iterator.toSeq must beEmpty 112 | } 113 | "do find" in { 114 | val r = coll applied Query(Map(CaseUser.name.mongoFieldName -> "User2")) 115 | r must haveSize(1) 116 | r must contain( CaseUser("User2") ) 117 | } 118 | "do headOption" in { 119 | val r = coll applied Query(Map(CaseUser.name.mongoFieldName -> "User2")) 120 | r must haveSize(1) 121 | r.headOption must beSome[CaseUser].which{_.name == "User2"} 122 | 123 | (coll applied Query(Map("a" -> 1))).headOption must beNone 124 | } 125 | "apply ==" in { 126 | val c = CaseUser where {CaseUser.name is "User3"} take 1 in coll 127 | c must haveSize(1) 128 | } 129 | "apply <" in { 130 | CaseUser 
where {CaseUser.name is_< "User3"} in coll must haveSize(23) 131 | } 132 | "apply ~" in { 133 | import java.util.regex.Pattern 134 | import Pattern._ 135 | 136 | CaseUser.name is_~ Pattern.compile("user3$", CASE_INSENSITIVE) in coll must haveSize(1) 137 | CaseUser.name like "^User3$".r in coll must haveSize(1) 138 | } 139 | "remove" in { 140 | coll -= (CaseUser where {CaseUser.name is_~ "^User3.$".r} ) 141 | coll must haveSize(N-10) 142 | } 143 | "sort ascending" in { 144 | val c = CaseUser sortBy CaseUser.name.ascending take 1 in coll 145 | c must haveSize(1) 146 | c.headOption must beSome[CaseUser].which{_.name == "User0"} 147 | } 148 | "sort descending" in { 149 | val c = CaseUser sortBy CaseUser.name.descending take 1 in coll 150 | c must haveSize(1) 151 | c.headOption must beSome[CaseUser].which{_.name == "User9"} 152 | } 153 | "sort by two fields" in { 154 | skip("not implemented") 155 | } 156 | } 157 | "Query embedded" should { 158 | val dbColl = mongo.getCollection(CollName) 159 | val coll = dbColl of ComplexType 160 | val N = 50 161 | 162 | doFirst { 163 | dbColl.drop 164 | Helper.fillWith(coll, N) {x => new ComplexType(CaseUser("User"+x), x*10)} 165 | } 166 | doLast { 167 | dbColl.drop 168 | } 169 | 170 | "apply ==" in { 171 | val c = ComplexType where {ComplexType.user.name eq_? "User3"} take 1 in coll 172 | c must haveSize(1) 173 | 174 | val s = c.toSeq 175 | s(0).user must be_==( CaseUser("User3") ) 176 | s(0).messageCount must be_==(30) 177 | } 178 | "apply <" in { 179 | ComplexType where {ComplexType.user.name is_< "User3"} in coll must haveSize(23) 180 | } 181 | "apply in" in { 182 | ComplexType where { (ComplexType.messageCount is_>= 0) and (ComplexType.messageCount is_< 250)} in coll must haveSize(N/2) 183 | } 184 | "apply ~" in { 185 | import java.util.regex.Pattern 186 | import Pattern._ 187 | 188 | ComplexType.user.name is_~ Pattern.compile("user3$", CASE_INSENSITIVE) in coll must haveSize(1) 189 | ComplexType.user.name like "^User3$".r in coll must haveSize(1) 190 | } 191 | } 192 | "Query optional" should { 193 | val dbColl = mongo.getCollection(CollName) 194 | val coll = dbColl of OptModel 195 | val N = 10 196 | 197 | doBefore { 198 | dbColl.drop 199 | Helper.fillWith(coll, N) {i => 200 | val c = new OptModel(i, if (i < 5) Some("d"+i) else None) 201 | if (i % 2 == 0) c.comment = Some("comment"+i) 202 | c 203 | } 204 | } 205 | doAfter { dbColl.drop } 206 | 207 | "have correct size" in { 208 | coll must haveSize(N) 209 | } 210 | "getCount by shape" in { 211 | OptModel where {OptModel.description.exists} in coll must haveSize(5) 212 | OptModel where {OptModel.comment.exists} in coll must haveSize(5) 213 | OptModel where {OptModel.comment.notExists} in coll must haveSize(5) 214 | } 215 | "find by shape" in { 216 | val c = OptModel where {OptModel.comment is "comment2"} in coll 217 | c must haveSize(1) 218 | c.headOption must beSome[OptModel].which{x => 219 | x.id == 2 && x.description == Some("d2") 220 | } 221 | } 222 | } 223 | "Query mixed collection" should { 224 | val dbColl = mongo.getCollection(CollName) 225 | val N = 10 226 | 227 | doFirst { 228 | dbColl.drop 229 | Helper.fillWith (dbColl of CaseUser, N) {x => CaseUser("User"+x)} 230 | Helper.fillWith (dbColl of ComplexType, N) {x => new ComplexType(CaseUser("User"+x), x*10)} 231 | } 232 | doLast { 233 | dbColl.drop 234 | } 235 | 236 | "have correct total size" in { 237 | dbColl.getCount must be_==(N*2) 238 | } 239 | "getCount by shape" in { 240 | dbColl of CaseUser must haveSize(N) 241 | dbColl of ComplexType must 
haveSize(N) 242 | } 243 | "findOne by shape" in { 244 | dbColl.of(CaseUser).headOption must beSome[CaseUser].which{_ == CaseUser("User0")} 245 | dbColl.of(ComplexType).headOption must beSome[ComplexType].which{_.user == CaseUser("User0")} 246 | } 247 | "find by shape" in { 248 | CaseUser where {CaseUser.name is_< "User3"} in dbColl.of(CaseUser) must haveSize(3) 249 | ComplexType where {ComplexType.user.name is_< "User3"} in dbColl.of(ComplexType) must haveSize(3) 250 | } 251 | } 252 | "Query collection of ref" should { 253 | object RefModel extends RefModelShape(mongo, "users") 254 | 255 | val users = mongo.getCollection("users") of CaseUser 256 | val posts = mongo.getCollection("posts") of RefModel 257 | 258 | doBefore { users.drop; posts.drop } 259 | doAfter { users.drop; posts.drop } 260 | 261 | "find by ref" in { 262 | var user: CaseUser = CaseUser(Const) 263 | val noOidUser = CaseUser("EmptyOID") 264 | 265 | users << user 266 | posts += new RefModel("text", user) 267 | 268 | user.mongoOID must beSome[ObjectId] 269 | noOidUser.mongoOID must beNone 270 | RefModel where {RefModel.user is_== user} in posts must haveSize(1) 271 | RefModel where {RefModel.user is_== noOidUser} in posts must beEmpty 272 | RefModel where {RefModel.user isNot user} in posts must beEmpty 273 | RefModel where {RefModel.user isNot noOidUser} in posts must haveSize(1) 274 | RefModel where {RefModel.user isIn List(user)} in posts must haveSize(1) 275 | RefModel where {RefModel.user isIn List(noOidUser)} in posts must beEmpty 276 | RefModel where {RefModel.user notIn List(user)} in posts must beEmpty 277 | RefModel where {RefModel.user notIn List(noOidUser)} in posts must haveSize(1) 278 | } 279 | } 280 | "Query collection with arrays" should { 281 | import ArrayOfInt._ 282 | 283 | val N = 10 284 | val objs = mongo.getCollection("objs") of ArrayModel 285 | 286 | doBefore { 287 | objs.drop 288 | Helper.fillWith(objs, N) {x => 289 | val o = new ArrayModel(x) 290 | o.messages = List.tabulate(x%2+1)(y => y+x) 291 | o 292 | } 293 | } 294 | doAfter { objs.drop } 295 | 296 | "have correct total size" in { 297 | objs must haveSize(N) 298 | } 299 | "find by array contents" in { 300 | ArrayModel where {ArrayModel.messages is_== 2} in objs must haveSize(2) 301 | ArrayModel where {ArrayModel.messages hasAll List(5,6)} in objs must haveSize(1) 302 | } 303 | "find by array size" in { 304 | ArrayModel where {ArrayModel.messages hasSize 2} in objs must haveSize(5) 305 | } 306 | } 307 | "Query collection with maps" should { 308 | import MapOfScalar._ 309 | 310 | val N = 10 311 | val objs = mongo.getCollection("objs") of MapModel 312 | 313 | doBefore { 314 | objs.drop 315 | Helper.fillWith(objs, N) {x => 316 | val o = new MapModel(x) 317 | o.counts = Map[String,Int]( List.tabulate(x%2+1)(y => y+x) map {x => x.toString -> x} :_* ) 318 | o 319 | } 320 | } 321 | doAfter { objs.drop } 322 | 323 | "have correct total size" in { 324 | objs must haveSize(N) 325 | } 326 | "find by map contents" in { 327 | MapModel where {MapModel.counts("6").exists} in objs must haveSize(2) 328 | MapModel where {MapModel.counts("5").exists and MapModel.counts("6").exists} in objs must haveSize(1) 329 | MapModel where {MapModel.counts("5") is_== 5} in objs must haveSize(1) 330 | } 331 | } 332 | } 333 | -------------------------------------------------------------------------------- /src/test/scala/com/osinka/mongodb/shape/serializer.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (C) 2009 Osinka 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.osinka.mongodb.shape 17 | 18 | import org.specs._ 19 | import java.util.Date 20 | import org.bson.types.ObjectId 21 | import com.mongodb.{DBObject, BasicDBObjectBuilder} 22 | 23 | import com.osinka.mongodb._ 24 | import wrapper.DBO 25 | 26 | object serializerSpec extends Specification { 27 | val Const = "John Doe" 28 | val jd = DBO.fromMap( Map("name" -> Const) ) 29 | 30 | "Shape scalars" should { 31 | object IntS extends TSerializer[Int]( () => Holder[Int](99) ) 32 | object StringS extends TSerializer[String]( () => Holder[String]("init") ) 33 | object DateS extends TSerializer[Date]( () => Holder[Date](new Date)) 34 | 35 | "serialize AnyVals" in { 36 | IntS.i.mongoReadFrom(Holder[Int](1)) must be_==( Some(1) ) 37 | 38 | val h = Holder[Int](10) 39 | IntS.i.mongoWriteTo(h, Some(1)) 40 | h.value must be_==(1) 41 | } 42 | "serialize Ints" in { 43 | BasicDBObjectBuilder.start("i", 1).get must beLike { 44 | case IntS(o) => o.value == 1 45 | } 46 | 47 | val dbo2 = IntS( Holder[Int](1) ) 48 | dbo2.get("i") must (notBeNull and be_==(1)) 49 | } 50 | "serialize Strings" in { 51 | BasicDBObjectBuilder.start("i", "test").get must beLike { 52 | case StringS(o) => o.value == "test" 53 | } 54 | 55 | val dbo2 = StringS( Holder[String]("test") ) 56 | dbo2.get("i") must (notBeNull and be_==("test")) 57 | } 58 | "serialize Dates" in { 59 | BasicDBObjectBuilder.start("i", new Date(1)).get must beLike { 60 | case DateS(o) => o.value == new Date(1) 61 | } 62 | 63 | val dbo2 = DateS( Holder[Date](new Date(1)) ) 64 | dbo2.get("i") must (notBeNull and be_==(new Date(1))) 65 | } 66 | "serialize Maps" in { 67 | skip("not implemented") 68 | } 69 | "serialize Arrays" in { 70 | skip("not implemented") 71 | } 72 | } 73 | "Case class Shape" should { 74 | val jd = DBO.fromMap( Map("name" -> Const) ) 75 | 76 | "serialize to DBObject" in { 77 | val dbo = CaseUser in new CaseUser(Const) 78 | dbo must notBeNull 79 | dbo.get("name") must be_==(Const) 80 | } 81 | "serialize from DBObject" in { 82 | CaseUser.out(jd) must beSome[CaseUser].which{_.name == Const} 83 | } 84 | "not include _id and _ns into DBO" in { 85 | val shape = CaseUser.constraints 86 | shape must haveSuperClass[QueryTerm[CaseUser]] 87 | shape.m.get("name") must beSome[Any].which{_ == Map("$exists" -> true)} 88 | shape.m.get("_id") must beNone 89 | shape.m.get("_ns") must beNone 90 | } 91 | "mirror mongo fields back to object" in { 92 | import org.bson.types.ObjectId 93 | 94 | val dbo = DBO.empty 95 | dbo.putAll(jd) 96 | 97 | val u = CaseUser out dbo 98 | u must beSome[CaseUser] 99 | 100 | val user = u.get 101 | u.get must verify { user => user.name == Const && user.mongoOID == None} 102 | 103 | dbo.put("_id", ObjectId.get) 104 | CaseUser.mirror(user)(dbo) 105 | user.mongoOID must beSome[ObjectId].which{dbo.get("_id") ==} 106 | } 107 | } 108 | "Ordinary class Shape" should { 109 | "serialize to DBObject" in { 110 | val u = new OrdUser 111 | u.name = 
Const 112 | val dbo = OrdUser in u 113 | dbo.get("name") must be_==(Const) 114 | } 115 | "deserialize from DBObject" in { 116 | OrdUser.out(jd) must beSome[OrdUser].which{_.name == Const} 117 | } 118 | } 119 | "Class with Embedded object Shape" should { 120 | "serialize to DBObject" in { 121 | val dbo = ComplexType in new ComplexType(CaseUser(Const), 1) 122 | dbo.get("user") must haveSuperClass[DBObject] 123 | dbo.get("user").asInstanceOf[DBObject].get("name") must be_==(Const) 124 | dbo.get("msgs") must haveClass[java.lang.Integer] 125 | dbo.get("msgs") must be_==(1) 126 | } 127 | "deserialize from DBObject" in { 128 | DBO.fromMap( Map("user" -> jd, "msgs" -> 1) ) match { 129 | case ComplexType(c) => 130 | c.user must notBeNull 131 | c.user.name must be_==(Const) 132 | c.messageCount must (notBeNull and be_==(1)) 133 | case _ => 134 | fail("had to extract ComplexType out from DBO") 135 | } 136 | } 137 | } 138 | "Optional field" should { 139 | "have empty constraints" in { 140 | OptModel.description.mongoConstraints.m must beEmpty 141 | OptModel.description3.mongoConstraints.m must beEmpty 142 | OptModel.comment.mongoConstraints.m must beEmpty 143 | } 144 | "serialize to DBObject" in { 145 | val some = new OptModel(1, Some(Const)) 146 | val none = new OptModel(1, None) 147 | OptModel.description.mongoReadFrom(none) must beNone 148 | OptModel.description.mongoReadFrom(some) must be_==(Some(Const)) 149 | OptModel.description3.mongoReadFrom(none) must beNone 150 | OptModel.description3.mongoReadFrom(some) must be_==(Some(Const)) 151 | } 152 | "deserialize from DBObject" in { 153 | val t = new OptModel(1, None) 154 | OptModel.comment.mongoWriteTo(t, Some("aa")) 155 | t.comment must be_==(Some("aa")) 156 | 157 | OptModel.comment.mongoWriteTo(t, None) 158 | t.comment must beNone 159 | } 160 | } 161 | "Query" should { 162 | "serialize two conditions per field" in { 163 | val q = (ComplexType.messageCount is_< 2) and (ComplexType.messageCount is_> 3) 164 | q.query.query must be_==( DBO.fromMap( 165 | Map(ComplexType.messageCount.longFieldName -> Map("$lt" -> 2, "$gt" -> 3) ) 166 | ) ) 167 | } 168 | } 169 | "Modifiers" should { 170 | "serialize $set" in { 171 | (ComplexType.user.name set "User2").query.query must be_==( 172 | DBO.fromMap( 173 | Map("$set" -> Map( 174 | ComplexType.user.name.longFieldName -> "User2" 175 | ) ) 176 | ) 177 | ) 178 | } 179 | "serialize $set embedded" in { 180 | (ComplexType.user set CaseUser("User0")).query.query must be_==( 181 | DBO.fromMap( 182 | Map("$set" -> Map( 183 | ComplexType.user.longFieldName -> Map(CaseUser.name.longFieldName -> "User0") 184 | ) ) 185 | ) 186 | ) 187 | } 188 | "serialize $push" in { 189 | import ArrayOfInt._ 190 | (ArrayModel.messages push 10).query.query must be_==( 191 | DBO.fromMap( 192 | Map("$push" -> Map(ArrayModel.messages.longFieldName -> 10)) 193 | ) 194 | ) 195 | } 196 | "serialize join" in { 197 | ((ComplexType.messageCount inc 10) and (ComplexType.user.name set "User1")).query.query must be_==( 198 | DBO.fromMap( 199 | Map("$set" -> Map( ComplexType.user.name.longFieldName -> "User1" ), 200 | "$inc" -> Map( ComplexType.messageCount.longFieldName -> 10 ) ) 201 | ) 202 | ) 203 | } 204 | "serialize two field set" in { 205 | ((ComplexType.messageCount set 10) and (ComplexType.user.name set "User1")).query.query must be_==( 206 | DBO.fromMap( 207 | Map("$set" -> Map( ComplexType.user.name.longFieldName -> "User1", 208 | ComplexType.messageCount.longFieldName -> 10 ) ) 209 | ) 210 | ) 211 | } 212 | } 213 | } 214 | 
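// A minimal sketch of the full round trip that the cases above exercise piecewise, assuming the
// CaseUser shape defined in SimpleModel.scala and the in/out conversions used in this spec;
// the object name and the sample value below are arbitrary, for illustration only.
object serializerRoundTripSketch {
  val dbo  = CaseUser in CaseUser("John Doe")   // object -> DBObject through the shape
  val back = CaseUser out dbo                   // DBObject -> Option[CaseUser]
  require(back.exists(_.name == "John Doe"))    // the scalar field survives the round trip
}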
--------------------------------------------------------------------------------
/src/test/scala/com/osinka/mongodb/shape/shapeSpec.scala:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright (C) 2009 Osinka
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  */
16 | package com.osinka.mongodb.shape
17 |
18 | import org.specs._
19 | import org.specs.runner._
20 |
21 | class shapesTest extends JUnit4(shapesSpec) with Console
22 | object shapesTestRunner extends ConsoleRunner(shapesSpec)
23 |
24 | object shapesSpec extends Specification {
25 |     "Shapes".areSpecifiedBy(serializerSpec, fieldsSpec, collectionSpec, querySpec, updateSpec)
26 | }
--------------------------------------------------------------------------------
/src/test/scala/com/osinka/mongodb/shape/update.scala:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright (C) 2009 Osinka
3 |  *
4 |  * Licensed under the Apache License, Version 2.0 (the "License");
5 |  * you may not use this file except in compliance with the License.
6 |  * You may obtain a copy of the License at
7 |  *
8 |  * http://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS,
12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 |  * See the License for the specific language governing permissions and
14 |  * limitations under the License.
15 |  */
16 | package com.osinka.mongodb.shape
17 |
18 | import org.specs._
19 | import com.mongodb._
20 |
21 | import com.osinka.mongodb._
22 | import Config._
23 |
24 | object updateSpec extends Specification("Update") {
25 |     val CollName = "test"
26 |     val Const = "John Doe"
27 |
28 |     val mongo = new Mongo(Host, Port).getDB(Database)
29 |
30 |     doAfter { mongo.dropDatabase }
31 |
32 |     "Update scalar" should {
33 |         val dbColl = mongo.getCollection(CollName)
34 |         val coll = dbColl of ComplexType
35 |         val N = 50
36 |
37 |         doBefore {
38 |             dbColl.drop
39 |             Helper.fillWith(coll, N) {x => new ComplexType(CaseUser("User"+x), x*10)}
40 |         }
41 |         doAfter {
42 |             dbColl.drop
43 |         }
44 |
45 |         "$set" in {
46 |             (coll(ComplexType.user.name is_== "User1") = ComplexType.messageCount set 1) must beTrue
47 |             (ComplexType.user.name is_== "User1" in coll).headOption must beSome[ComplexType].which{_.messageCount == 1}
48 |         }
49 |         "$set in embedded" in {
50 |             (coll(ComplexType.user.name is_== "User1") = ComplexType.user.name set "User2") must beTrue
51 |             (ComplexType.user.name is_== "User1" in coll) must beEmpty
52 |             (ComplexType.user.name is_== "User2" in coll) must haveSize(2)
53 |         }
54 |         "$set embedded" in {
55 |             (coll(ComplexType.messageCount is_== 10) = ComplexType.user set CaseUser("User2")) must beTrue
56 |             (ComplexType.user.name is_== "User1" in coll) must beEmpty
57 |             (ComplexType.user.name is_== "User2" in coll) must haveSize(2)
58 |         }
59 |         "$unset" in {
60 |             coll(ComplexType.user.name is_== "User1") = ComplexType.messageCount.unset
61 |             (ComplexType.messageCount.exists in coll) must haveSize(N-1)
62 |         }
63 |         "$inc" in {
64 |             (coll(ComplexType.user.name is_== "User1") = (ComplexType.messageCount inc 10)) must beTrue
65 |             // System.err.println("==>" + (ComplexType.user.name is_== "User1" in coll).mkString(","))
66 |             (ComplexType.messageCount is_== 10 in coll) must beEmpty
67 |             (ComplexType.messageCount is_== 20 in coll) must haveSize(2)
68 |         }
69 |         "do two modifiers for all" in {
70 |             (coll(ComplexType.any) = (ComplexType.messageCount inc -100) and (ComplexType.user.name set "User2") ) must beTrue
71 |             coll must haveSize(N)
72 |             (ComplexType.user.name is_== "User1" in coll) must beEmpty
73 |             (ComplexType.user.name is_== "User2" in coll) must haveSize(N)
74 |             (ComplexType.messageCount is_< 0 in coll) must haveSize(10)
75 |         }
76 |         "set two fields at once" in {
77 |             (coll(ComplexType.any) = (ComplexType.messageCount set 200) and (ComplexType.user.name set "User200") ) must beTrue
78 |             val r = ComplexType.messageCount is_== 200 in coll
79 |             r must haveSize(N)
80 |
81 |             val head = r.headOption
82 |             head must beSome[ComplexType]
83 |             head.get.messageCount must be_==(200)
84 |             head.get.user.name must be_==("User200")
85 |         }
86 |     }
87 |     "Update array of scalars" should {
88 |         import ArrayOfInt._
89 |
90 |         val N = 10
91 |         val objs = mongo.getCollection(CollName) of ArrayModel
92 |
93 |         doBefore {
94 |             Helper.fillWith(objs, N) {x =>
95 |                 val o = new ArrayModel(x)
96 |                 o.messages = List.tabulate(x%2+1)(y => y+x)
97 |                 o
98 |             }
99 |         }
100 |         doAfter { objs.drop }
101 |
102 |         "$set Seq[T]" in {
103 |             objs( ArrayModel.id is_== 0 ) = ArrayModel.messages set List(10)
104 |             (ArrayModel.id is_== 0 in objs).headOption must beSome[ArrayModel].which{_.messages == List(10)}
105 |         }
106 |         "$unset" in {
107 |             objs( ArrayModel.id is_== 0 ) = ArrayModel.messages.unset
108 |             (ArrayModel.id is_== 0 in objs).headOption must beNone
109 |         }
110 |         "$push" in {
111 |             objs map {_.messages.size} reduceLeft {_ max _} must be_==(2)
112 |             (objs(ArrayModel.any) = ArrayModel.messages push 500) must beTrue
113 |             objs map {_.messages.size} reduceLeft {_ max _} must be_==(3)
114 |             (ArrayModel.messages hasSize 3 in objs) must haveSize(5)
115 |         }
116 |         "$pushAll" in {
117 |             objs map {_.messages.size} reduceLeft {_ max _} must be_==(2)
118 |             (objs(ArrayModel.any) = ArrayModel.messages pushAll List(50,60)) must beTrue
119 |             objs map {_.messages.size} reduceLeft {_ max _} must be_==(4)
120 |             (ArrayModel.messages hasSize 3 in objs) must haveSize(5)
121 |         }
122 |         "$popHead" in {
123 |             val q = ArrayModel.id is_== 1
124 |             objs.update(q, ArrayModel.messages.popHead) must beTrue
125 |             (q in objs).headOption must beSome[ArrayModel].which{_.messages == List(2)}
126 |         }
127 |         "$popTail" in {
128 |             val q = ArrayModel.id is_== 1
129 |             objs.update(q, ArrayModel.messages.popTail) must beTrue
130 |             (q in objs).headOption must beSome[ArrayModel].which{_.messages == List(1)}
131 |         }
132 |         "$pull" in {
133 |             (objs(ArrayModel.id in List(5,6)) = ArrayModel.messages pull 6) must beTrue
134 |             (ArrayModel.id is_== 6 in objs).headOption must beSome[ArrayModel].which{_.messages == Nil}
135 |         }
136 |         "$pullAll" in {
137 |             (objs(ArrayModel.id in List(5,6)) = ArrayModel.messages pullAll List(5,6)) must beTrue
138 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == Nil}
139 |         }
140 |         "$addToSet" in {
141 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == List(5, 6)}
142 |             objs(ArrayModel.id in List(5, 6)) = ArrayModel.messages addToSet 5
143 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == List(5, 6)}
144 |             objs(ArrayModel.id in List(5, 6)) = ArrayModel.messages addToSet 7
145 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == List(5, 6, 7)}
146 |         }
147 |         "$addToSet with $each" in {
148 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == List(5, 6)}
149 |             objs(ArrayModel.id in List(5, 6)) = ArrayModel.messages addToSet List(6, 7)
150 |             (ArrayModel.id is_== 5 in objs).headOption must beSome[ArrayModel].which{_.messages == List(5, 6, 7)}
151 |         }
152 |     }
153 | }
154 |
--------------------------------------------------------------------------------