├── README.md ├── appendixC ├── canyon-thumb.jpg ├── canyon.jpg ├── gridfs.rb └── single.rb ├── appendixE ├── appendix E.1.js ├── appendix E.2.js ├── appendix E.3.js ├── appendix E.4.js ├── appendix E.5.js └── appendix E.6.js ├── chapter1 └── 1.x_example.js ├── chapter11 └── commands_10.x.txt ├── chapter12 ├── load.rb ├── names.rb └── setup_sharded_cluster.sh ├── chapter13 ├── dump_traffic.sh ├── generate_keys.sh └── run_with_ssl.sh ├── chapter2 ├── chp2.1_code.js ├── chp2.2-2.4_code.js └── chp2.x_examples.js ├── chapter3 ├── Gemfile ├── Gemfile.lock ├── archiver.rb ├── config.rb ├── connect.rb ├── connect_w_irb.rb ├── update.rb ├── viewer.rb └── views │ └── tweets.erb ├── chapter4 └── listing4_6.rb ├── chapter5 └── chp5.js ├── chapter6 ├── chp6.2_code.js ├── chp6.3_code.js ├── chp6.4_code.js ├── chp6.5_code.js ├── chp6.6_code.js └── data │ ├── export.js │ ├── garden.categories.json │ ├── garden.orders.json │ ├── garden.products.json │ ├── garden.reviews.json │ ├── garden.users.json │ └── import.js ├── chapter7 ├── code_7.1.js ├── code_7.2.js ├── code_7.3.js ├── code_7.4.js ├── connect.rb └── inventory_fetcher.rb ├── chapter8 └── code_8.x.js ├── chapter9 ├── 9.2_import_data.txt ├── catalog.books.json ├── code_9.1_search_intro.js ├── code_9.3_indexes.js ├── code_9.4_find.js ├── code_9.5_aggr.js └── code_9.6_language.js └── chapter_storage ├── config.txt.conf ├── configs ├── mmapv1.conf ├── wiredtiger-snappy.conf ├── wiredtiger-uncompressed.conf └── wiredtiger-zlib.conf ├── insert-results.txt ├── insert.js ├── migrate.txt ├── read.js ├── read.sh ├── run.sh ├── timings-mmapv1.conf.txt ├── timings-wiredtiger-snappy.conf.txt ├── timings-wiredtiger-uncompressed.conf.txt ├── timings-wiredtiger-zlib.conf.txt ├── timings.R ├── timings.csv └── timings.png /README.md: -------------------------------------------------------------------------------- 1 | # mongo-in-action-code 2 | This is the code for the 2nd edition of the MongoDB In Action book. 3 | 4 | You can find more about the book at: 5 | https://www.manning.com/books/mongodb-in-action-second-edition 6 | -------------------------------------------------------------------------------- /appendixC/canyon-thumb.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakks/mongo-in-action-code/1fa0e0b151e64c94f70ba4d6c847e42871486273/appendixC/canyon-thumb.jpg -------------------------------------------------------------------------------- /appendixC/canyon.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakks/mongo-in-action-code/1fa0e0b151e64c94f70ba4d6c847e42871486273/appendixC/canyon.jpg -------------------------------------------------------------------------------- /appendixC/gridfs.rb: -------------------------------------------------------------------------------- 1 | # Storing an image as binary data in gridfs 2 | 3 | require 'rubygems' 4 | require 'mongo' 5 | 6 | # Writing GridFS 7 | @con = Mongo::MongoClient.new 8 | @db = @con["images"] 9 | @grid = Mongo::Grid.new(@db) 10 | filename = File.join(File.dirname(__FILE__), "canyon.jpg") 11 | file = File.open(filename, "r") 12 | file_id = @grid.put(file, :filename => "canyon.jpg") 13 | 14 | # Reading GridFS 15 | image_io = @grid.get(file_id) 16 | copy_filename = File.join(File.dirname(__FILE__), "canyon-copy.jpg") 17 | copy = File.open(copy_filename, "w") 18 | while !image_io.eof? 
do 19 | copy.write(image_io.read(256 * 1024)) 20 | end 21 | copy.close 22 | -------------------------------------------------------------------------------- /appendixC/single.rb: -------------------------------------------------------------------------------- 1 | # Storing an image as binary data in a single document 2 | 3 | require 'rubygems' 4 | require 'mongo' 5 | 6 | # Prepare the binary data object for insert 7 | image_filename = File.join(File.dirname(__FILE__), "canyon-thumb.jpg") 8 | image_data = File.open(image_filename).read 9 | bson_image_data = BSON::Binary.new(image_data) 10 | doc = {"name" => "monument-thumb.jpg", 11 | "data" => bson_image_data } 12 | 13 | # Connect and insert the document 14 | @con = Mongo::MongoClient.new 15 | @thumbnails = @con['images']['thumbnails'] 16 | @image_id = @thumbnails.insert(doc) 17 | 18 | # Find the document and make sure it matches the original 19 | doc = @thumbnails.find_one({"_id" => @image_id}) 20 | if image_data == doc["data"].to_s 21 | puts "Stored image is equal to the original file!" 22 | end 23 | -------------------------------------------------------------------------------- /appendixE/appendix E.1.js: -------------------------------------------------------------------------------- 1 | // F.1 String Functions 2 | 3 | // $substr converts values to strings 4 | db.orders.aggregate([ 5 | {$unwind: '$line_items'}, 6 | {$project: { 7 | 'line_items.name': 1, 8 | orderQuantity: 9 | {$substr:['$line_items.quantity',0,10]}}}, 10 | {$project: { 11 | orderSummary: 12 | {$concat: ['$orderQuantity', ' ', '$line_items.name']}}} 13 | ]) 14 | 15 | // /* expected results 16 | // 17 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 18 | // "orderSummary" : "1 Extra Large Wheel Barrow" } 19 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 20 | // "orderSummary" : "2 Rubberized Work Glove, Black" } 21 | // 22 | // */ 23 | 24 | // strcmp example 25 | db.users.aggregate([ 26 | {$match: {username: 'kbanker'}}, 27 | {$project: 28 | {firstStringLT: {$strcasecmp:['ABANKER','$username']}, 29 | firstStringEQ: {$strcasecmp:['kbanker','$username']}, 30 | firstStringGT: {$strcasecmp:['ZBANKER','$username']} 31 | } 32 | } 33 | ]) 34 | 35 | // /* Expected results 36 | // 37 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 38 | // "firstStringLT" : -1, 39 | // "firstStringEQ" : 0, 40 | // "firstStringGT" : 1 } 41 | // 42 | // */ 43 | 44 | 45 | db.users.aggregate([ 46 | {$match: {username: 'kbanker'}}, 47 | {$project: 48 | {firstStringLT: {$cmp:['ABANKER','$username']}, 49 | firstStringEQ: {$cmp:['kbanker','$username']}, 50 | firstStringGT: {$cmp:['ZBANKER','$username']} 51 | } 52 | } 53 | ]) 54 | 55 | // /* Expected results 56 | // 57 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 58 | // "firstStringLT" : -1, 59 | // "firstStringEQ" : 0, 60 | // "firstStringGT" : -1 61 | // } 62 | // 63 | // */ -------------------------------------------------------------------------------- /appendixE/appendix E.2.js: -------------------------------------------------------------------------------- 1 | 2 | // F.2 Arithmetic 3 | 4 | kbankerOrders = {user_id: ObjectId('4c4b1476238d3b4dd5000001')}; 5 | 6 | selectedFields = 7 | {product: '$line_items.name', // 1 8 | orderQuantity: '$line_items.quantity', // 2 9 | retailPrice: '$line_items.pricing.retail', 10 | salePrice: '$line_items.pricing.sale', 11 | savings: { // 3 12 | $subtract: ['$line_items.pricing.retail', 13 | '$line_items.pricing.sale']}}; 14 | 15 | db.orders.aggregate([ 16 | {$match: kbankerOrders}, 17 | 
{$unwind: '$line_items'}, 18 | {$project: selectedFields} 19 | ]) 20 | 21 | // /* expected results 22 | // 23 | // 24 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 25 | // "product" : "Extra Large Wheel Barrow", "orderQuantity" : 1, 26 | // "retailPrice" : 5897, "salePrice" : 4897, "savings" : 1000 } 27 | // 28 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 29 | // "product" : "Rubberized Work Glove, Black", "orderQuantity" : 2, 30 | // "retailPrice" : 1499, "salePrice" : 1299, "savings" : 200 } 31 | // 32 | // 33 | // */ 34 | 35 | // Additional example 36 | 37 | selectedFieldsPart2 = 38 | {product: 1, orderQuantity: 1, // 1 39 | retailPrice: 1, salePrice: 1, savings: 1, 40 | totalSavings: {$multiply:['$orderQuantity','$savings']}, // 2 41 | percentSavings: {$multiply: 42 | [100, {$divide: ['$savings', '$retailPrice']}]} //3 43 | }; 44 | 45 | db.orders.aggregate([ 46 | {$match: kbankerOrders}, 47 | {$unwind: '$line_items'}, // 1 48 | {$project: selectedFields}, 49 | {$project: selectedFieldsPart2} 50 | ]) 51 | 52 | 53 | // /* expected results 54 | // 55 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 56 | // "product" : "Extra Large Wheel Barrow", "orderQuantity" : 1, 57 | // "retailPrice" : 5897, "salePrice" : 4897, 58 | // "savings" : 1000, "totalSavings" : 1000, 59 | // "percentSavings" : 16.957775139901646 } 60 | // 61 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 62 | // "product" : "Rubberized Work Glove, Black", "orderQuantity" : 2, 63 | // "retailPrice" : 1499, "salePrice" : 1299, 64 | // "savings" : 200, "totalSavings" : 400, 65 | // "percentSavings" : 13.342228152101402 } 66 | // 67 | // 68 | // */ 69 | -------------------------------------------------------------------------------- /appendixE/appendix E.3.js: -------------------------------------------------------------------------------- 1 | 2 | // F3.Date Functions 3 | 4 | db.orders.aggregate([ 5 | {$match: {purchase_data: {$gte: new Date(2010, 0, 1)}}}, 6 | {$group: { 7 | _id: {year : {$year :'$purchase_data'}, // 1 8 | month: {$month :'$purchase_data'}}, // 2 9 | count: {$sum:1}, 10 | total: {$sum:'$sub_total'}}}, 11 | {$sort: {_id:-1}} 12 | ]); 13 | 14 | // /* Expected results 15 | // // NOTE: we've modified the "month" value to skip some of the blank months 16 | // 17 | // { "_id" : { "year" : 2014, "month" : 11 }, 18 | // "count" : 1, "total" : 4897 } 19 | // { "_id" : { "year" : 2014, "month" : 10 }, 20 | // "count" : 2, "total" : 11093 } 21 | // { "_id" : { "year" : 2014, "month" : 9 }, 22 | // "count" : 1, "total" : 4897 } 23 | // 24 | // */ -------------------------------------------------------------------------------- /appendixE/appendix E.4.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | // Logical functions 4 | 5 | // use of $ifNull 6 | 7 | db.orders.aggregate([ 8 | {$project: { 9 | orderTotal: { 10 | $add:['$sub_total','$tax']} 11 | } 12 | } 13 | ]) 14 | 15 | // /* expected results 16 | // 17 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 18 | // "orderTotal" : null } // 1 19 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000049"), 20 | // "orderTotal" : null } 21 | // 22 | // */ 23 | 24 | db.orders.aggregate([ 25 | {$project: { 26 | orderTotal: { 27 | $add:['$sub_total', 28 | {$ifNull: ['$tax',0]}]} //1 29 | } 30 | } 31 | ]) 32 | 33 | // /* expected results 34 | // 35 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 36 | // "orderTotal" : 6796 } 37 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000049"), 38 | // "orderTotal" : 4897 } 39 
| // 40 | // */ 41 | 42 | 43 | // Same as before but use $cond 44 | db.orders.aggregate([ 45 | {$project: { 46 | orderTotal: { 47 | $add:['$sub_total', 48 | {$cond: {if: '$tax', 49 | then: '$tax', 50 | else: 0}} 51 | ]} 52 | } 53 | } 54 | ]) 55 | 56 | 57 | db.orders.aggregate([ 58 | {$project: { 59 | orderTotal: { 60 | $add:['$sub_total', 61 | {$cond: ['$tax','$tax', 0] } 62 | ]} 63 | } 64 | } 65 | ]) 66 | 67 | -------------------------------------------------------------------------------- /appendixE/appendix E.5.js: -------------------------------------------------------------------------------- 1 | 2 | // F.5 - Set functions 3 | 4 | testSet1 = ['gardening'] 5 | db.products.aggregate([ 6 | {$project: 7 | {productName: '$name', 8 | tags:1, 9 | setEquals: {$setEquals:['$tags',testSet1]}, 10 | setIntersection: {$setIntersection:['$tags',testSet1]}, 11 | setDifference: {$setDifference:['$tags',testSet1]}, 12 | setUnion: {$setUnion:['$tags',testSet1]}, 13 | setIsSubset: {$setIsSubset:['$tags',testSet1]}, 14 | setIntersection: {$setIntersection:['$tags',testSet1]} 15 | } 16 | } 17 | ]) 18 | 19 | // /* Expected results of 20 | // > db.setOperatorsTest.find().pretty() 21 | // 22 | // { 23 | // "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 24 | // "productName" : "Extra Large Wheel Barrow", 25 | // "tags" : ["tools", "gardening", "soil"], 26 | // "setEquals" : false, 27 | // "setIntersection" : ["gardening"], 28 | // "setDifference" : ["tools","soil"], 29 | // "setUnion" : ["gardening","tools","soil"], 30 | // "setIsSubset" : false 31 | // } 32 | // 33 | // { 34 | // "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 35 | // "productName" : "Rubberized Work Glove, Black", 36 | // "tags" : ["gardening"], 37 | // "setEquals" : true, 38 | // "setIntersection" : ["gardening"], 39 | // "setDifference" : [ ], 40 | // "setUnion" : ["gardening"], 41 | // "setIsSubset" : true 42 | // } 43 | // 44 | // */ -------------------------------------------------------------------------------- /appendixE/appendix E.6.js: -------------------------------------------------------------------------------- 1 | 2 | // F.6 Misc 3 | 4 | // $size 5 | db.products.aggregate([ 6 | {$project: 7 | {_id: 0, 8 | productName: '$name', 9 | tags:1, 10 | tagCount: {$size:'$tags'}} 11 | } 12 | ]) 13 | 14 | // /* Expected results 15 | // 16 | // { "productName" : "Extra Large Wheel Barrow", 17 | // "tags" : [ "tools", "gardening", "soil" ], 18 | // "tagCount" : 3 } 19 | // 20 | // { "productName" : "Rubberized Work Glove, Black", 21 | // "tags" : [ "gardening" ], 22 | // "tagCount" : 1 } 23 | // 24 | // */ 25 | 26 | // $literal 27 | // suppose we want to create field called useCount and set this to 0 28 | db.categories.aggregate([ 29 | {$project: {categoryName: '$name', 30 | useCount: 0 31 | } 32 | }, 33 | {$out: 'categoryUseCount'} 34 | ]) 35 | 36 | 37 | 38 | 39 | // /* results: 40 | // 41 | // assert: command failed: { 42 | // "errmsg" : "exception: The top-level _id field is the only field currently supported for exclusion", 43 | // "code" : 16406, 44 | // "ok" : 0 45 | // } : aggregate failed 46 | // 47 | // Huh? What's up with that? 48 | // 49 | // What's up is that MongoDB thinks that we are trying to exclude a field named 'useCount', 50 | // and is telling us that we are only allowed to exclude the field '_id'. 51 | // 52 | // $literal to the rescue. 
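// (In short: a $project stage reads the constants 0/false as "exclude this
// field", any other number or true as "include this field", and a string
// beginning with '$' as a field path. Wrapping such a constant in $literal,
// as below, tells the pipeline to emit the value itself; the later examples
// in this file walk through each of these cases.)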
53 | // 54 | // */ 55 | 56 | db.categories.aggregate([ 57 | {$project: 58 | {_id: 0, 59 | categoryName: '$name', 60 | useCount: {$literal: 0}} //1 61 | }, 62 | {$out: 'categoryUseCount'} 63 | ]) 64 | 65 | // /* expected results 66 | // 67 | // { "categoryName" : "Gardening Tools", "useCount" : 0 } 68 | // 69 | // 70 | // /* what if 71 | // 72 | // db.categories.aggregate([ 73 | // {$project: 74 | // {categoryName: '$name', 75 | // useCount: -2 } //1 76 | // }, 77 | // {$out: 'categoryUseCount'} 78 | // 79 | // ]) 80 | // 81 | // db.categories.aggregate([ 82 | // {$project: 83 | // {categoryName: '$name', 84 | // name: 0 } //1 85 | // }, 86 | // {$out: 'categoryUseCount'} 87 | // ]) 88 | // 89 | // 90 | // */ 91 | 92 | db.categories.aggregate([ 93 | {$project: { 94 | someNumber: 3} 95 | } 96 | ]); 97 | 98 | db.categories.aggregate([ 99 | {$project: { 100 | someNumber: {$literal: 3}} 101 | } 102 | ]); 103 | 104 | // error 105 | 106 | db.categories.aggregate([ 107 | {$project: { 108 | dollarSign: '$'} 109 | } 110 | ]); 111 | 112 | // correct 113 | db.categories.aggregate([ 114 | {$project: { 115 | dollarSign: {$literal: '$'}} 116 | } 117 | ]); 118 | 119 | // /* results 120 | // 121 | // assert: command failed: { 122 | // "errmsg" : "exception: '$' by itself is not a valid FieldPath", 123 | // "code" : 16872, 124 | // "ok" : 0 125 | // } : aggregate failed 126 | // 127 | // */ 128 | 129 | db.categories.aggregate([ 130 | {$project: 131 | {catgyName: '$name', 132 | someNumber: {$literal: 3}, 133 | dollarSign: {$literal:'$'}} 134 | } 135 | ]); 136 | 137 | // /* 138 | // { "catgyName" : "Gardening Tools", "userCount" : 0, "numberOne" : 1, "dollarSign" : "$" } 139 | // 140 | // trying NumberOne: 1 is especially confusing. It won't cause an error, but the field won't appear. 141 | // Similarly, having a $something will not return an error. 142 | // 143 | // */ 144 | 145 | db.categories.aggregate([ 146 | {$project: {_id: 0, 147 | catgyName: '$name', 148 | numberOne: 1, 149 | dollarSign: '$something' 150 | } 151 | } 152 | ]); 153 | 154 | // /* returns - missing fields for numberOne and dollarSign 155 | // 156 | // { "catgyName" : "Gardening Tools" } 157 | // 158 | // 159 | // */ 160 | 161 | // ******************* $map 162 | 163 | // /* 164 | // 165 | // { skews: [ 1, 1, 2, 3, 5, 8 ] } 166 | // 167 | // And the following $project statement: 168 | // 169 | // { $project: { adjustments: { $map: { input: "$skews", 170 | // as: "adj", 171 | // in: { $add: [ "$$adj", 12 ] } } } } } 172 | // 173 | // The $map would transform the input document into the following output document: 174 | // 175 | // { adjustments: [ 13, 13, 14, 15, 17, 20 ] } 176 | // 177 | // you've seen how to convert the create a line item summary from the order quantity and item name 178 | // in the descriptions for $let and $substring, let's see one more version. In this case we're going to 179 | // retrieve a single order and then convert the array of order line items to a summary array without using the $unwind. 
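// Assuming the same sample orders used for the expected results in
// appendix E.1, the first $map pipeline below should return one document per
// order with the line-item summaries gathered into a single array, roughly:
//
// { "_id" : ObjectId("6a5b1476238d3b4dd5000048"),
//   "orderSummary" : [ "1 Extra Large Wheel Barrow",
//                      "2 Rubberized Work Glove, Black" ] }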
180 | // 181 | // 182 | // 183 | // */ 184 | 185 | 186 | db.orders.aggregate([ 187 | {$project: { 188 | orderSummary: { 189 | $map: { 190 | input: '$line_items', 191 | as: 'item', 192 | in: { 193 | $concat: [ 194 | {$substr:['$$item.quantity',0,10]}, 195 | ' ', '$$item.name'] 196 | } 197 | } 198 | }} 199 | } 200 | ]).pretty() 201 | 202 | db.orders.aggregate([ 203 | {$project: { 204 | orderSummary: { 205 | $map: { 206 | input: '$line_items', 207 | as: 'item', 208 | in: { 209 | descr: {$concat: [ 210 | {$substr:['$$item.quantity',0,10]}, 211 | ' ', '$$item.name']}, 212 | price: '$$item.pricing.sale' 213 | } 214 | } 215 | }} 216 | } 217 | ]).pretty() 218 | 219 | 220 | // ******************* $let 221 | 222 | // /* Earlier, in the section String Function - $substr, 223 | // we showed a pipeline that converted a number to a string. 224 | // We broke this up into two $project operators to make the example 225 | // more understandable. However we can so something similar, 226 | // define complex intermediate results, using the $let function. 227 | // 228 | // 229 | // */ 230 | 231 | db.orders.aggregate([ 232 | {$unwind: '$line_items'}, 233 | {$project: { 234 | orderSummary: { 235 | $let: { 236 | vars: { 237 | orderQuantity: //1 238 | {$substr:['$line_items.quantity',0,10]} 239 | }, 240 | in: { //2 241 | $concat: ['$$orderQuantity', ' ', 242 | '$line_items.name'] 243 | } 244 | } 245 | } 246 | }} 247 | ]) 248 | 249 | 250 | // /* expected results 251 | // 252 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 253 | // "orderSummary" : "1 Extra Large Wheel Barrow" } 254 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 255 | // "orderSummary" : "2 Rubberized Work Glove, Black" } 256 | // 257 | // */ 258 | -------------------------------------------------------------------------------- /chapter1/1.x_example.js: -------------------------------------------------------------------------------- 1 | // verify syntax - won't find anything 2 | db.posts.find({'tags': 'politics', 'vote_count': {'$gt': 10}}); 3 | 4 | // 1.3.2 examples 5 | use my_database 6 | db.users.insert({name: "Kyle"}) 7 | db.users.find() 8 | 9 | -------------------------------------------------------------------------------- /chapter11/commands_10.x.txt: -------------------------------------------------------------------------------- 1 | //***************** 10.2.1 *********************** 2 | 3 | ## create three data directories in your home directory 4 | mkdir ~/node1 5 | mkdir ~/node2 6 | mkdir ~/arbiter 7 | 8 | ## Best to run each command in separate terminal window! 9 | mongod --replSet myapp --dbpath ~/node1 --port 40000 10 | mongod --replSet myapp --dbpath ~/node2 --port 40001 11 | mongod --replSet myapp --dbpath ~/arbiter --port 40002 12 | 13 | ## connect to mongod then enter commands shown below! 
14 | mongo --port 40000 15 | 16 | // enter below into mongo console just started with the "mongo --port 40000" command 17 | // start first replica (using the mongod I'm connected to on port 40000) 18 | rs.initiate() 19 | 20 | // add additional replica 21 | // NOTE: change the value "iron.local:40000" 22 | // to the name given in startup response 23 | // "me" : "{yourServerName}:40000", 24 | 25 | rs.add("iron.local:40001") 26 | // my machine specific comand: rs.add("D830J:40001") 27 | 28 | // add arbiter 29 | rs.add("iron.local:40002", {arbiterOnly: true}) 30 | // my machine specific command: rs.add("D830J:40002", {arbiterOnly: true}) 31 | 32 | // Check status 33 | db.isMaster() 34 | 35 | 36 | // for more details 37 | rs.status() 38 | 39 | // ASSUMING you're still in the shell, 40 | // and it should be the master 41 | // try the following test insert 42 | use bookstore 43 | db.books.insert({title: "Oliver Twist"}) 44 | show dbs 45 | 46 | 47 | ## OPEN ANOTHER TERMINAL WINDOW (should be the 5th now) 48 | ## enter following command line to get into secondary mongod 49 | mongo --port 40001 50 | 51 | // now from the console 52 | show dbs 53 | use bookstore 54 | rs.slaveOk() 55 | db.books.find() 56 | 57 | ## NOW - shut down the PRIMARY server... 58 | ## Go to console window that shows the primary mongod, should show something like: 59 | mongo --port 40000 60 | 61 | // should see PRIMARY in the prompt! 62 | myapp:PRIMARY> 63 | 64 | // enter this: 65 | use admin 66 | db.shutdownServer() 67 | 68 | # swap to terminal running the former "SECONDARY" 69 | # or enter command below in a new terminal window 70 | mongo --port 40001 71 | 72 | // should now be the primary 73 | rs.status() 74 | 75 | //************************* 10.2.2 ******************************** 76 | 77 | ## OPTIONAL? Restart mongod on port 40000 78 | ## In a new terminal window (or the one previously used for mongod --port 40000 79 | mongod --replSet myapp --dbpath ~/node1 --port 40000 80 | 81 | ## in the mongo console, if not connected to one, use below 82 | ## assuming 40001 is STILL the primary. 
If not, use 40000
83 | mongo --port 40001
84 | 
85 | // now in console
86 | // make sure you see the prompt below
87 | 
88 | myapp:PRIMARY>
89 | 
90 | // then enter
91 | use local
92 | 
93 | show collections
94 | 
95 | db.oplog.rs.findOne({op: "i"})
96 | 
97 | // NOTE: replace the Timestamp value with the one returned from the previous command
98 | // IF you want to see any results
99 | db.oplog.rs.findOne({ts: Timestamp(1383844267, 1)})
100 | 
101 | // Add a few more books
102 | use bookstore
103 | db.books.insert({title: "A Tale of Two Cities"})
104 | db.books.insert({title: "Great Expectations"})
105 | 
106 | db.books.update({}, {$set: {author: "Dickens"}}, {multi:true})
107 | 
108 | // now look at oplog
109 | use local
110 | db.oplog.rs.find({op: "u"})
111 | 
112 | // get repl info
113 | db.getReplicationInfo()
114 | 
115 | // get the last oplog document
116 | db.oplog.rs.find().sort({$natural: -1}).limit(1)
117 | 
118 | 
--------------------------------------------------------------------------------
/chapter12/load.rb:
--------------------------------------------------------------------------------
1 | require 'rubygems'
2 | require 'mongo'
3 | 
4 | # We are loading 'names' from the same directory that this script is in, so add
5 | # that directory to the search path before trying to load the file, so the
6 | # script can find it
7 | $LOAD_PATH.unshift(File.expand_path(File.dirname(__FILE__))) \
8 |   unless $LOAD_PATH.include?(File.expand_path(File.dirname(__FILE__)))
9 | require 'names'
10 | 
11 | # Open a connection to MongoDB
12 | @con = Mongo::MongoClient.new("localhost", 40000)
13 | 
14 | # Create a collection object for the "cloud-docs.spreadsheets" collection
15 | @col = @con['cloud-docs']['spreadsheets']
16 | 
17 | # Fake spreadsheet data
18 | @data = "abcde" * 1000
19 | 
20 | # Write the requested number of documents. "name_count" is the number of
21 | # different names to use when saving spreadsheets, and "iterations" is the
22 | # number of documents to save for each person
23 | def write_user_docs(iterations=0, name_count=200)
24 |   iterations.times do |iteration|
25 |     name_count.times do |name_number|
26 |       doc = { :filename => "sheet-#{iteration}",
27 |               :updated_at => Time.now.utc,
28 |               :username => Names::LIST[name_number],
29 |               :data => @data
30 |             }
31 |       @col.insert(doc)
32 |     end
33 |   end
34 | end
35 | 
36 | # Get command line arguments and run the script
37 | if ARGV.empty? 
|| !(ARGV[0] =~ /^\d+$/) 38 | puts "Usage: load.rb [iterations] [name_count]" 39 | else 40 | iterations = ARGV[0].to_i 41 | 42 | if ARGV[1] && ARGV[1] =~ /^\d+$/ 43 | name_count = ARGV[1].to_i 44 | else 45 | name_count = 200 46 | end 47 | 48 | write_user_docs(iterations, name_count) 49 | end 50 | -------------------------------------------------------------------------------- /chapter12/names.rb: -------------------------------------------------------------------------------- 1 | module Names 2 | LIST = 3 | [ 'Cerny', 4 | 'Hajduk', 5 | 'Stump', 6 | 'Thistle', 7 | 'Mallen', 8 | 'Ascher', 9 | 'Abbott', 10 | 'Line', 11 | 'Chickering', 12 | 'Auxier', 13 | 'Schlabach', 14 | 'Laber', 15 | 'Spelman', 16 | 'Hage', 17 | 'Axel', 18 | 'Mcvea', 19 | 'Wymer', 20 | 'Abdul', 21 | 'Payton', 22 | 'Fitzhenry', 23 | 'Minjares', 24 | 'Wallace', 25 | 'Dayton', 26 | 'Islas', 27 | 'Rardin', 28 | 'Isreal', 29 | 'Seth', 30 | 'Redmon', 31 | 'Heidrick', 32 | 'Sarabia', 33 | 'Bilby', 34 | 'Cheadle', 35 | 'Griffey', 36 | 'Desiderio', 37 | 'Kram', 38 | 'Jehle', 39 | 'Santillanes', 40 | 'Caskey', 41 | 'Ezell', 42 | 'Pilson', 43 | 'Fuselier', 44 | 'Iversen', 45 | 'Whitford', 46 | 'Branner', 47 | 'Emily', 48 | 'Bundick', 49 | 'Plant', 50 | 'Krebs', 51 | 'Scranton', 52 | 'Rosso', 53 | 'Tester', 54 | 'Flatley', 55 | 'Greenburg', 56 | 'Vasconcellos', 57 | 'Straub', 58 | 'Beer', 59 | 'Leach', 60 | 'Feely', 61 | 'Mallett', 62 | 'Raleigh', 63 | 'Ruther', 64 | 'Womble', 65 | 'Sullen', 66 | 'Serio', 67 | 'Richert', 68 | 'Kleiman', 69 | 'Haines', 70 | 'Ruelas', 71 | 'Basile', 72 | 'Iniguez', 73 | 'Charles', 74 | 'Pieper', 75 | 'Thornberry', 76 | 'Hawthorn', 77 | 'Silcox', 78 | 'Blackman', 79 | 'Gildea', 80 | 'Person', 81 | 'Uresti', 82 | 'Michalak', 83 | 'Cabrales', 84 | 'Bucklew', 85 | 'Ruffin', 86 | 'Hice', 87 | 'Mccarroll', 88 | 'Donovan', 89 | 'Heppner', 90 | 'Velasques', 91 | 'Vena', 92 | 'Statler', 93 | 'Higginbottom', 94 | 'Royal', 95 | 'Stone', 96 | 'Kissel', 97 | 'Edgley', 98 | 'Pickford', 99 | 'Maglio', 100 | 'Maiorano', 101 | 'Kunz', 102 | 'Cass', 103 | 'Wilmes', 104 | 'Allgood', 105 | 'Singley', 106 | 'Mouton', 107 | 'Bella', 108 | 'Schor', 109 | 'Sano', 110 | 'Owen', 111 | 'Cordoba', 112 | 'Pagano', 113 | 'Whelpley', 114 | 'Digman', 115 | 'Loden', 116 | 'Harms', 117 | 'Mcconnaughey', 118 | 'Newburn', 119 | 'Giffin', 120 | 'Holgate', 121 | 'Villalon', 122 | 'Dimeo', 123 | 'Heras', 124 | 'Monahan', 125 | 'Tugwell', 126 | 'Mok', 127 | 'Bridge', 128 | 'Hund', 129 | 'Dray', 130 | 'Buettner', 131 | 'Lenard', 132 | 'Morena', 133 | 'Rizzo', 134 | 'Presler', 135 | 'Pilling', 136 | 'Aho', 137 | 'Louviere', 138 | 'Strothers', 139 | 'Lafond', 140 | 'Mori', 141 | 'Albury', 142 | 'Session', 143 | 'Ybarbo', 144 | 'Rideout', 145 | 'Kellerman', 146 | 'Sanon', 147 | 'Busse', 148 | 'Sealy', 149 | 'Ansley', 150 | 'Netherland', 151 | 'Eberly', 152 | 'Ye', 153 | 'Tessman', 154 | 'Berwick', 155 | 'Deveau', 156 | 'Jayne', 157 | 'Keniston', 158 | 'Snavely', 159 | 'An', 160 | 'Livsey', 161 | 'Ung', 162 | 'Chan', 163 | 'Donnelly', 164 | 'Ferretti', 165 | 'Keels', 166 | 'Mccluney', 167 | 'Molitor', 168 | 'Wollard', 169 | 'Mendoza', 170 | 'Maclin', 171 | 'Whitesell', 172 | 'Weidemann', 173 | 'Holliday', 174 | 'Presley', 175 | 'Segovia', 176 | 'Lorence', 177 | 'Minner', 178 | 'Stradford', 179 | 'Neubauer', 180 | 'Mannella', 181 | 'Overman', 182 | 'Heal', 183 | 'Grimmett', 184 | 'Homan', 185 | 'Pears', 186 | 'Fernando', 187 | 'Doucette', 188 | 'Judon', 189 | 'Ohlson', 190 | 'Schor', 191 | 'Papas', 192 | 'Ziegler', 193 | 'Maddix', 194 | 'Blan', 195 
| 'Hash', 196 | 'Edgerton', 197 | 'Buskey', 198 | 'Templeman', 199 | 'Honore', 200 | 'Dragoo', 201 | 'Steffensen', 202 | 'Sparling' 203 | ] 204 | end 205 | -------------------------------------------------------------------------------- /chapter12/setup_sharded_cluster.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | echo 5 | echo "##################################################" 6 | echo "# MongoDB In Action Sharded Cluster Setup Script #" 7 | echo "##################################################" 8 | echo 9 | echo This script sets up the example sharded cluster from Chapter 11 of MongoDB in 10 | echo Action Second Edition. It only runs on unix systems, and assumes there is 11 | echo a directory called \"data\" in the current directory, so before running 12 | echo this script run: 13 | echo 14 | echo mkdir ./data 15 | echo 16 | echo You can kill all the processes when you are done by running: 17 | echo 18 | echo killall mongod mongos 19 | echo 20 | echo To cleanup the data being used, just delete the "data" directory 21 | echo 22 | 23 | # NOTE: All commands running using the "--eval" option to the mongo shell are 24 | # wrapped in "printjson". This is not necessary when using the shell in 25 | # interactive mode, since return values of commands get printed automatically. 26 | 27 | # The hostname of the local machine that we are running our cluster on. If this 28 | # doesn't work, try changing this to "localhost" 29 | if [ -z $HOSTNAME ] 30 | then 31 | HOSTNAME=`hostname` 32 | fi 33 | 34 | 35 | 36 | echo 37 | echo "#####################" 38 | echo "# Setting up Shards #" 39 | echo "#####################" 40 | echo 41 | 42 | 43 | 44 | echo Creating all the data directories for the nodes in the Shard A replica set 45 | mkdir ./data/rs-a-1 46 | mkdir ./data/rs-a-2 47 | mkdir ./data/rs-a-3 48 | 49 | echo Creating all the data directories for the nodes in the Shard B replica set 50 | mkdir ./data/rs-b-1 51 | mkdir ./data/rs-b-2 52 | mkdir ./data/rs-b-3 53 | 54 | 55 | 56 | echo Starting all the mongod nodes in the Shard A replica set 57 | mongod --shardsvr --replSet shard-a --dbpath ./data/rs-a-1 --port 30000 \ 58 | --logpath ./data/rs-a-1.log --fork 59 | mongod --shardsvr --replSet shard-a --dbpath ./data/rs-a-2 --port 30001 \ 60 | --logpath ./data/rs-a-2.log --fork 61 | mongod --shardsvr --replSet shard-a --dbpath ./data/rs-a-3 --port 30002 \ 62 | --logpath ./data/rs-a-3.log --fork 63 | 64 | echo Starting all the mongod nodes in the Shard B replica set 65 | mongod --shardsvr --replSet shard-b --dbpath ./data/rs-b-1 --port 30100 \ 66 | --logpath ./data/rs-b-1.log --fork 67 | mongod --shardsvr --replSet shard-b --dbpath ./data/rs-b-2 --port 30101 \ 68 | --logpath ./data/rs-b-2.log --fork 69 | mongod --shardsvr --replSet shard-b --dbpath ./data/rs-b-3 --port 30102 \ 70 | --logpath ./data/rs-b-3.log --fork 71 | 72 | 73 | 74 | echo Initializing the Shard A replica set 75 | mongo $HOSTNAME:30000 --eval "printjson(rs.initiate())" 76 | echo Waiting for the initialization to complete 77 | sleep 60 78 | echo Adding data node to replica set 79 | mongo $HOSTNAME:30000 --eval "printjson(rs.add(\"$HOSTNAME:30001\"))" 80 | echo Adding arbiter to replica set 81 | mongo $HOSTNAME:30000 --eval "printjson(rs.addArb(\"$HOSTNAME:30002\"))" 82 | 83 | echo Initializing the Shard B replica set 84 | mongo $HOSTNAME:30100 --eval "printjson(rs.initiate())" 85 | echo Waiting for the initialization to complete 86 | sleep 60 87 | echo Adding data 
node to replica set 88 | mongo $HOSTNAME:30100 --eval "printjson(rs.add(\"$HOSTNAME:30101\"))" 89 | echo Adding arbiter to replica set 90 | mongo $HOSTNAME:30100 --eval "printjson(rs.addArb(\"$HOSTNAME:30102\"))" 91 | 92 | 93 | 94 | echo 95 | echo "#############################" 96 | echo "# Setting up Config Servers #" 97 | echo "#############################" 98 | echo 99 | 100 | 101 | 102 | echo Creating all the data directories for the config server nodes 103 | mkdir ./data/config-1 104 | mkdir ./data/config-2 105 | mkdir ./data/config-3 106 | 107 | 108 | 109 | echo Starting all the mongod config server nodes 110 | mongod --configsvr --dbpath ./data/config-1 --port 27019 \ 111 | --logpath ./data/config-1.log --fork 112 | mongod --configsvr --dbpath ./data/config-2 --port 27020 \ 113 | --logpath ./data/config-2.log --fork 114 | mongod --configsvr --dbpath ./data/config-3 --port 27021 \ 115 | --logpath ./data/config-3.log --fork 116 | echo Waiting for config servers to finish starting up 117 | sleep 60 118 | 119 | 120 | 121 | echo 122 | echo "############################" 123 | echo "# Setting up Mongos Router #" 124 | echo "############################" 125 | echo 126 | 127 | 128 | 129 | echo Starting mongos router process 130 | mongos --configdb $HOSTNAME:27019,$HOSTNAME:27020,$HOSTNAME:27021 \ 131 | --logpath ./data/mongos.log --fork --port 40000 132 | 133 | 134 | 135 | echo 136 | echo "############################" 137 | echo "# Initializing the cluster #" 138 | echo "############################" 139 | echo 140 | 141 | 142 | 143 | echo Adding Shard A to the cluster 144 | mongo $HOSTNAME:40000 \ 145 | --eval "printjson(sh.addShard(\"shard-a/$HOSTNAME:30000,$HOSTNAME:30001\"))" 146 | 147 | echo Adding Shard B to the cluster 148 | mongo $HOSTNAME:40000 \ 149 | --eval "printjson(sh.addShard(\"shard-b/$HOSTNAME:30100,$HOSTNAME:30101\"))" 150 | -------------------------------------------------------------------------------- /chapter13/dump_traffic.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script shows you how to dump traffic frum our local machine right back to our local machine. 4 | # 5 | # First, you need to get the interface you want to listen on for traffic. Since we are just 6 | # listening for local traffic, we need the LOOPBACK interface. Running ifconfig on my machine 7 | # yields: 8 | # 9 | # > ifconfig 10 | # 11 | # lo: flags=73 mtu 65536 12 | # inet 127.0.0.1 netmask 255.0.0.0 13 | # ... 14 | # ... 
15 | # 16 | # This means that the following command will dump all local traffic to the terminal: 17 | # (Note that on my machine I needed root permissions to do this) 18 | sudo tcpdump -i lo -X 19 | 20 | # If you can't find the local interface, or want to listen on all interfaces, try: 21 | #sudo tcpdump -i any -X 22 | -------------------------------------------------------------------------------- /chapter13/generate_keys.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Helper script to generate a PEM file for use in basic SSL encryption in MongoDB 4 | 5 | # Generate a PEM file 6 | openssl req -newkey rsa:2048 -new -x509 -days 365 -nodes -out mongodb-cert.crt -keyout mongodb-cert.key 7 | cat mongodb-cert.key mongodb-cert.crt > mongodb.pem 8 | 9 | # Display information about the certificate we just generated 10 | openssl x509 -text -noout -in mongodb.pem 11 | -------------------------------------------------------------------------------- /chapter13/run_with_ssl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Examples of how to run with SSL encryption in 2.6 4 | 5 | # Server 6 | mongod --sslMode requireSSL --sslPEMKeyFile mongodb.pem 7 | 8 | # Client 9 | mongo --ssl 10 | -------------------------------------------------------------------------------- /chapter2/chp2.1_code.js: -------------------------------------------------------------------------------- 1 | // example 2.1.2 2 | use tutorial 3 | 4 | // ****************** 2.1.3 inserts and queries ****************************** 5 | db.users.insert({username: "smith"}) 6 | 7 | db.users.find() 8 | 9 | /* results like: (_id will vary) 10 | 11 | { _id : ObjectId("4bf9bec50e32f82523389314"), username : "smith" } 12 | 13 | */ 14 | 15 | db.users.insert({username: "jones"}) 16 | 17 | db.users.count() 18 | 19 | db.users.find() 20 | 21 | /* results like: (_id will vary) 22 | 23 | { _id : ObjectId("4bf9bec50e32f82523389314"), username : "smith" } 24 | { _id : ObjectId("4bf9bec90e32f82523389315"), username : "jones" } 25 | 26 | */ 27 | 28 | db.users.find({username: "jones"}) 29 | 30 | /* results like: 31 | { _id : ObjectId("4bf9bec90e32f82523389315"), username : "jones" } 32 | */ 33 | 34 | 35 | // $or find 36 | db.users.find({ $or: [ 37 | { username: "smith" }, 38 | { username: "jones" } 39 | ]}) 40 | 41 | /* results like: 42 | { _id : ObjectId("4bf9bec50e32f82523389314"), username : "smith" } 43 | { _id : ObjectId("4bf9bec90e32f82523389315"), username : "jones" } 44 | */ 45 | 46 | 47 | // *********************** 2.1.4 updates ************************************** 48 | db.users.find({username: "smith"}) 49 | /* result like: 50 | { 51 | "_id" : ObjectId("4bf9ec440e32f82523389316"), 52 | "username" : "smith" 53 | } 54 | 55 | */ 56 | 57 | // update 58 | db.users.update({username: "smith"}, {$set: {country: "Canada"}}) 59 | 60 | db.users.find({username: "smith"}) 61 | 62 | /* results like: 63 | { 64 | "_id" : ObjectId("4bf9ec440e32f82523389316"), 65 | "country" : "Canada", 66 | "username" : "smith" 67 | } 68 | 69 | */ 70 | 71 | // update that removes username - mistaken way to do it 72 | db.users.update({username: "smith"}, {country: "Canada"}) 73 | 74 | // now have wiped out username 75 | db.users.find({country: "Canada"}) 76 | 77 | /* results like 78 | { 79 | "_id" : ObjectId("4bf9ec440e32f82523389316"), 80 | "country" : "Canada" 81 | } 82 | 83 | */ 84 | 85 | // set username back to smith 86 | db.users.update({country: "Canada"}, 
{username: "smith", country: "Canada"}) 87 | 88 | // remove country via $unset 89 | db.users.update({username: "smith"}, {$unset: {country: 1}}) 90 | 91 | db.users.find({username: "smith"}) 92 | 93 | 94 | // set favorites for smith 95 | db.users.update( {username: "smith"}, 96 | { 97 | $set: { 98 | favorites: { 99 | cities: ["Chicago", "Cheyenne"], 100 | movies: ["Casablanca", "For a Few Dollars More", "The Sting"] 101 | } 102 | } 103 | }) 104 | 105 | // set favorites for jones 106 | db.users.update( {username: "jones"}, 107 | { 108 | $set: { 109 | favorites: { 110 | movies: ["Casablanca", "Rocky"] 111 | } 112 | } 113 | }) 114 | 115 | // verify update results 116 | db.users.find().pretty() // used pretty to make more readable 117 | 118 | //find users with favorite movie of "Casablanca" 119 | db.users.find({"favorites.movies": "Casablanca"}).pretty() 120 | 121 | // $addToSet 122 | db.users.update( {"favorites.movies": "Casablanca"}, 123 | {$addToSet: {"favorites.movies": "The Maltese Falcon"} }, 124 | false, // insert if not found? 125 | true ) // update all found? (if false, updates just first it finds) 126 | 127 | // ****** 2.1.5 deleting data ******************** 128 | db.foo.remove({}) 129 | 130 | // remove users with favorite city of Cheyenne 131 | db.users.remove({"favorites.cities": "Cheyenne"}) 132 | 133 | 134 | // get rid of users table 135 | db.users.drop() 136 | 137 | 138 | // *************** 2.1.6 - other commands ***************************** 139 | 140 | // MongoDB console help 141 | help 142 | 143 | // BEFORE running MongoDB console 144 | // Help on running mongo command 145 | mongo --help 146 | 147 | 148 | // create large collection 149 | 150 | for(i = 0; i < 200000; i++) { 151 | db.numbers.save({num: i}); 152 | } 153 | 154 | // verify results 155 | db.numbers.count() 156 | 157 | db.numbers.find() 158 | 159 | db.numbers.find({num: 500}) 160 | 161 | // range query 162 | db.numbers.find( {num: {"$gt": 199995 }} ) 163 | 164 | /* results like 165 | 166 | { "_id" : ObjectId("4bfbf1dedba1aa7c30afcade"), "num" : 199996 } 167 | { "_id" : ObjectId("4bfbf1dedba1aa7c30afcadf"), "num" : 199997 } 168 | { "_id" : ObjectId("4bfbf1dedba1aa7c30afcae0"), "num" : 199998 } 169 | 170 | */ 171 | 172 | // upper and lower bounds 173 | db.numbers.find( {num: {"$gt": 20, "$lt": 25 }} ) 174 | 175 | /* results like: 176 | { "_id" : ObjectId("4bfbf132dba1aa7c30ac831f"), "num" : 21 } 177 | { "_id" : ObjectId("4bfbf132dba1aa7c30ac8320"), "num" : 22 } 178 | { "_id" : ObjectId("4bfbf132dba1aa7c30ac8321"), "num" : 23 } 179 | { "_id" : ObjectId("4bfbf132dba1aa7c30ac8322"), "num" : 24 } 180 | 181 | */ 182 | -------------------------------------------------------------------------------- /chapter2/chp2.2-2.4_code.js: -------------------------------------------------------------------------------- 1 | 2 | // *********** 2.2.2 Indexing and Explain ****************************8 3 | db.numbers.find({num: {"$gt": 199995}}).explain() 4 | 5 | /* results like: 6 | { 7 | "cursor" : "BasicCursor", 8 | "isMultiKey" : false, 9 | "n" : 4, // result documents returned 10 | "nscannedObjects" : 200000, 11 | "nscanned" : 200000, 12 | "nscannedObjectsAllPlans" : 200000, 13 | "nscannedAllPlans" : 200000, 14 | "scanAndOrder" : false, 15 | "indexOnly" : false, 16 | "nYields" : 0, 17 | "nChunkSkips" : 0, 18 | "millis" : 171, // time the query took 19 | "indexBounds" : { }, 20 | "server" : "hostname:27017" 21 | } 22 | */ 23 | 24 | // create index 25 | db.numbers.ensureIndex({num: 1}) 26 | 27 | // verify index 28 | 
db.numbers.getIndexes() 29 | 30 | // explain for indexed collection 31 | db.numbers.find({num: {"$gt": 199995 }}).explain() 32 | 33 | /* results like: 34 | 35 | { 36 | "cursor" : "BtreeCursor num_1", #A// using the num_1 index 37 | "isMultiKey" : false, 38 | "n" : 4, #B// 4 documents returned 39 | "nscannedObjects" : 4, 40 | "nscanned" : 4, #C// only 4 documents scanned 41 | "nscannedObjectsAllPlans" : 4, 42 | "nscannedAllPlans" : 4, 43 | "scanAndOrder" : false, 44 | "indexOnly" : false, 45 | "nYields" : 0, 46 | "nChunkSkips" : 0, 47 | "millis" : 0, #D// much faster! 48 | "indexBounds" : { 49 | "num" : [ 50 | [ 51 | 199995, 52 | 1.7976931348623157e+308 53 | ] 54 | ] 55 | }, 56 | "server" : "hostname:27017" 57 | } 58 | 59 | */ 60 | 61 | // ***************** 2.3.1 getting db info ***************************** 62 | 63 | // show databases 64 | show dbs 65 | 66 | // show collections in current database 67 | show collections 68 | 69 | // db details 70 | db.stats() 71 | 72 | /* results like: 73 | { 74 | "db" : "tutorial", 75 | "collections" : 4, 76 | "objects" : 200010, 77 | "avgObjSize" : 36.00065996700165, 78 | "dataSize" : 7200492, 79 | "storageSize" : 11268096, 80 | "numExtents" : 10, 81 | "indexes" : 3, 82 | "indexSize" : 11560864, 83 | "fileSize" : 201326592, 84 | "nsSizeMB" : 16, 85 | "dataFileVersion" : { 86 | "major" : 4, 87 | "minor" : 5 88 | }, 89 | "ok" : 1 90 | } 91 | 92 | */ 93 | 94 | // stats for one collection 95 | db.numbers.stats() 96 | 97 | /* results like 98 | { 99 | "ns" : "tutorial.numbers", 100 | "count" : 200000, 101 | "size" : 7200036, 102 | "avgObjSize" : 36.00018, 103 | "storageSize" : 11255808, 104 | "numExtents" : 7, 105 | "nindexes" : 2, 106 | "lastExtentSize" : 5664768, 107 | "paddingFactor" : 1, 108 | "systemFlags" : 1, 109 | "userFlags" : 0, 110 | "totalIndexSize" : 11552688, 111 | "indexSizes" : { 112 | "_id_" : 6508096, 113 | "num_1" : 5044592 114 | }, 115 | "ok" : 1 116 | } 117 | 118 | */ 119 | 120 | // ******************* 2.3.2 how commands work ****************************** 121 | 122 | 123 | // db.stats() is a helper for 124 | db.runCommand( {dbstats: 1} ) 125 | 126 | 127 | // db.numbers.stats() is a helper for 128 | db.runCommand( {collstats: "numbers"} ) 129 | 130 | // see definition for a function, such as "runCommand" - type without () 131 | db.runCommand 132 | 133 | // which leads to this version of the command for db.numbers.stats() 134 | db.$cmd.findOne( {collstats: "numbers"} ); 135 | 136 | 137 | // **************** 2.4 getting help ********************** 138 | 139 | // what does this save command do? 
140 | db.numbers.save({num: 123123123}) 141 | 142 | // see save function code 143 | db.numbers.save -------------------------------------------------------------------------------- /chapter2/chp2.x_examples.js: -------------------------------------------------------------------------------- 1 | 2 | // *********** 2.2.2 Indexing and Explain ****************************8 3 | db.numbers.find({num: {"$gt": 199995}}).explain() 4 | 5 | /* results like: 6 | { 7 | "cursor" : "BasicCursor", 8 | "isMultiKey" : false, 9 | "n" : 4, // result documents returned 10 | "nscannedObjects" : 200000, 11 | "nscanned" : 200000, 12 | "nscannedObjectsAllPlans" : 200000, 13 | "nscannedAllPlans" : 200000, 14 | "scanAndOrder" : false, 15 | "indexOnly" : false, 16 | "nYields" : 0, 17 | "nChunkSkips" : 0, 18 | "millis" : 171, // time the query took 19 | "indexBounds" : { }, 20 | "server" : "hostname:27017" 21 | } 22 | */ 23 | 24 | // create index 25 | db.numbers.ensureIndex({num: 1}) 26 | 27 | // verify index 28 | db.numbers.getIndexes() 29 | 30 | // explain for indexed collection 31 | db.numbers.find({num: {"$gt": 199995 }}).explain() 32 | 33 | /* results like: 34 | 35 | { 36 | "cursor" : "BtreeCursor num_1", #A// using the num_1 index 37 | "isMultiKey" : false, 38 | "n" : 4, #B// 4 documents returned 39 | "nscannedObjects" : 4, 40 | "nscanned" : 4, #C// only 4 documents scanned 41 | "nscannedObjectsAllPlans" : 4, 42 | "nscannedAllPlans" : 4, 43 | "scanAndOrder" : false, 44 | "indexOnly" : false, 45 | "nYields" : 0, 46 | "nChunkSkips" : 0, 47 | "millis" : 0, #D// much faster! 48 | "indexBounds" : { 49 | "num" : [ 50 | [ 51 | 199995, 52 | 1.7976931348623157e+308 53 | ] 54 | ] 55 | }, 56 | "server" : "hostname:27017" 57 | } 58 | 59 | */ 60 | 61 | // ***************** 2.3.1 getting db info ***************************** 62 | 63 | // show databases 64 | show dbs 65 | 66 | // show collections in current database 67 | show collections 68 | 69 | // db details 70 | db.stats() 71 | 72 | /* results like: 73 | { 74 | "db" : "tutorial", 75 | "collections" : 4, 76 | "objects" : 200010, 77 | "avgObjSize" : 36.00065996700165, 78 | "dataSize" : 7200492, 79 | "storageSize" : 11268096, 80 | "numExtents" : 10, 81 | "indexes" : 3, 82 | "indexSize" : 11560864, 83 | "fileSize" : 201326592, 84 | "nsSizeMB" : 16, 85 | "dataFileVersion" : { 86 | "major" : 4, 87 | "minor" : 5 88 | }, 89 | "ok" : 1 90 | } 91 | 92 | */ 93 | 94 | // stats for one collection 95 | db.numbers.stats() 96 | 97 | /* results like 98 | { 99 | "ns" : "tutorial.numbers", 100 | "count" : 200000, 101 | "size" : 7200036, 102 | "avgObjSize" : 36.00018, 103 | "storageSize" : 11255808, 104 | "numExtents" : 7, 105 | "nindexes" : 2, 106 | "lastExtentSize" : 5664768, 107 | "paddingFactor" : 1, 108 | "systemFlags" : 1, 109 | "userFlags" : 0, 110 | "totalIndexSize" : 11552688, 111 | "indexSizes" : { 112 | "_id_" : 6508096, 113 | "num_1" : 5044592 114 | }, 115 | "ok" : 1 116 | } 117 | 118 | */ 119 | 120 | // ******************* 2.3.2 how commands work ****************************** 121 | 122 | 123 | // db.stats() is a helper for 124 | db.runCommand( {dbstats: 1} ) 125 | 126 | 127 | // db.numbers.stats() is a helper for 128 | db.runCommand( {collstats: "numbers"} ) 129 | 130 | // see definition for a function, such as "runCommand" - type without () 131 | db.runCommand 132 | 133 | // which leads to this version of the command for db.numbers.stats() 134 | db.$cmd.findOne( {collstats: "numbers"} ); 135 | 136 | 137 | // **************** 2.4 getting help ********************** 138 | 139 | 
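// Other built-in help commands worth trying here (standard mongo shell
// helpers; the exact listings vary by shell version):
db.help()            // lists database-level methods
db.numbers.help()    // lists collection-level methods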
// what does this save command do? 140 | db.numbers.save({num: 123123123}) 141 | 142 | // see save function code 143 | db.numbers.save -------------------------------------------------------------------------------- /chapter3/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gem 'mongo', '1.9.2' 4 | gem 'bson_ext', '1.9.2' 5 | gem 'twitter', '5.8.0' 6 | gem 'sinatra', '1.4.4' 7 | 8 | -------------------------------------------------------------------------------- /chapter3/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | addressable (2.3.6) 5 | atomic (1.1.16) 6 | bson (1.9.2) 7 | bson_ext (1.9.2) 8 | bson (~> 1.9.2) 9 | buftok (0.2.0) 10 | equalizer (0.0.9) 11 | faraday (0.9.0) 12 | multipart-post (>= 1.2, < 3) 13 | http (0.5.0) 14 | http_parser.rb 15 | http_parser.rb (0.6.0) 16 | json (1.8.1) 17 | memoizable (0.4.2) 18 | thread_safe (~> 0.3, >= 0.3.1) 19 | mongo (1.9.2) 20 | bson (~> 1.9.2) 21 | multipart-post (2.0.0) 22 | naught (1.0.0) 23 | rack (1.5.2) 24 | rack-protection (1.5.2) 25 | rack 26 | simple_oauth (0.2.0) 27 | sinatra (1.4.4) 28 | rack (~> 1.4) 29 | rack-protection (~> 1.4) 30 | tilt (~> 1.3, >= 1.3.4) 31 | thread_safe (0.3.1) 32 | atomic (>= 1.1.7, < 2) 33 | tilt (1.4.1) 34 | twitter (5.8.0) 35 | addressable (~> 2.3) 36 | buftok (~> 0.2.0) 37 | equalizer (~> 0.0.9) 38 | faraday (~> 0.9.0) 39 | http (~> 0.5.0) 40 | http_parser.rb (~> 0.6.0) 41 | json (~> 1.8) 42 | memoizable (~> 0.4.0) 43 | naught (~> 1.0) 44 | simple_oauth (~> 0.2.0) 45 | 46 | PLATFORMS 47 | ruby 48 | 49 | DEPENDENCIES 50 | bson_ext (= 1.9.2) 51 | mongo (= 1.9.2) 52 | sinatra (= 1.4.4) 53 | twitter (= 5.8.0) 54 | -------------------------------------------------------------------------------- /chapter3/archiver.rb: -------------------------------------------------------------------------------- 1 | $LOAD_PATH << File.dirname(__FILE__) 2 | require 'rubygems' 3 | require 'mongo' 4 | require 'twitter' 5 | require 'config' 6 | 7 | class TweetArchiver 8 | 9 | # Create a new instance of TweetArchiver 10 | def initialize(tag) 11 | print "#{DATABASE_HOST}:#{DATABASE_PORT}\n" 12 | connection = Mongo::Client.new("mongodb://#{DATABASE_HOST}:#{DATABASE_PORT}/#{DATABASE_NAME}") 13 | @tweets = connection[COLLECTION_NAME] 14 | @tweets.indexes.create_one({ tags: 1, id: -1 }) 15 | @tag = tag 16 | @tweets_found = 0 17 | 18 | # Configure the twitter client using the values found in config.rb 19 | @client = Twitter::REST::Client.new do |config| 20 | config.consumer_key = API_KEY 21 | config.consumer_secret = API_SECRET 22 | config.access_token = ACCESS_TOKEN 23 | config.access_token_secret = ACCESS_TOKEN_SECRET 24 | end 25 | end 26 | 27 | # Notify the user all the save_save_tweets_for method 28 | def update 29 | puts "Starting Twitter search for '#{@tag}'..." 
30 | save_tweets_for(@tag) 31 | print "#{@tweets_found} tweets saved.\n\n" 32 | end 33 | 34 | private 35 | 36 | # Search with the twitter client and save the results to Mongo 37 | def save_tweets_for(term) 38 | @client.search(term).each do |tweet| 39 | @tweets_found += 1 40 | tweet_doc = tweet.to_h 41 | tweet_doc[:tags] = term 42 | tweet_doc[:_id] = tweet_doc[:id] 43 | @tweets.insert_one(tweet_doc) 44 | end 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /chapter3/config.rb: -------------------------------------------------------------------------------- 1 | DATABASE_HOST = 'localhost' 2 | DATABASE_PORT = 27018 3 | DATABASE_NAME = "twitter-archive" 4 | COLLECTION_NAME = "tweets" 5 | TAGS = ["#MongoDB", "#Mongo"] 6 | 7 | API_KEY = "PrRlTUCxXUV7cUz8vCQ9cQ" 8 | API_SECRET = "ZBWEf7NtYrPzshwmyI2yx0YVUjtosXV1ujMUaPusTE" 9 | ACCESS_TOKEN = "420612447-tuKce4clrt3X1Q6rykxBrRFftA2jqMyHwa2nfNWc" 10 | ACCESS_TOKEN_SECRET = "TCnPydNadJcoNkaujQed3kyZVsKkq1DfHkCkQ80C2lx6X" 11 | 12 | 13 | -------------------------------------------------------------------------------- /chapter3/connect.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'mongo' 3 | $con = Mongo::Connection.new 4 | $db = $con['tutorial'] 5 | $users = $db['users'] 6 | puts 'connected!' 7 | -------------------------------------------------------------------------------- /chapter3/connect_w_irb.rb: -------------------------------------------------------------------------------- 1 | # 2 | # ******** commands from 3.1 which can be entered in irb 3 | # 4 | 5 | # if run from file - need this 6 | 7 | require 'rubygems' 8 | require 'mongo' 9 | $con = Mongo::Connection.new 10 | $db = $con['tutorial'] 11 | $users = $db['users'] 12 | puts 'connected!' 
13 | 14 | # to remove ALL users - in case you do run this from a file more than once 15 | $users.drop() 16 | 17 | # 18 | # ************ Below can be run from interactive ruby (irb) 19 | # after using: 20 | # 21 | # > irb -r ./connect.rb 22 | # 23 | 24 | # part 1 - insert knuth 25 | 26 | id = $users.save({"lastname" => "knuth"}) 27 | $users.find_one({"_id" => id}) 28 | 29 | 30 | #part 2 - insert smith and jones 31 | 32 | smith = {"last_name" => "smith", "age" => 30} 33 | jones = {"last_name" => "jones", "age" => 40} 34 | 35 | smith_id = $users.insert(smith) 36 | jones_id = $users.insert(jones) 37 | 38 | $users.find_one({"_id" => smith_id}) 39 | $users.find_one({"_id" => jones_id}) 40 | 41 | # if run as a file - this is the only result you'll see 42 | p $users.find_one({"_id" => smith_id}) 43 | 44 | # example finds 45 | $users.find({"last_name" => "smith"}) 46 | $users.find({"age" => {"$gt" => 30}}) 47 | 48 | 49 | # use a cursor 50 | cursor = $users.find({"age" => {"$gt" => 30}}) 51 | cursor.each do |doc| 52 | puts doc["last_name"] 53 | end 54 | 55 | # alternative - more language neutral 56 | cursor = $users.find({"age" => {"$gt" => 30}}) 57 | while doc = cursor.next 58 | puts doc["last_name"] 59 | end 60 | 61 | # 62 | # ************* 3.1.4 Updates and deletes 63 | # 64 | 65 | $users.update({"last_name" => "smith"}, {"$set" => {"city" => "Chicago"}}) 66 | 67 | # see change (note: need '.next' at the end to actually show first result from cursor 68 | $users.find({"last_name" => "smith"}).next 69 | 70 | # using multi of true for mutliple updates 71 | $users.update({"last_name" => "smith"}, 72 | {"$set" => {"city" => "New York"}}, {:multi => true}) 73 | 74 | 75 | # remove selected users 76 | $users.remove({"age" => {"$gte" => 40}}) 77 | 78 | # remove all users 79 | $users.remove 80 | 81 | 82 | # 83 | # ***************** 3.1.5 Database Commands 84 | # 85 | 86 | # list databases 87 | $admin_db = $con['admin'] 88 | $admin_db.command({"listDatabases" => 1}) 89 | 90 | # drop users collections - removes indexes, etc. 
91 | db = $con['tutorial'] 92 | db.drop_collection('users') 93 | 94 | # or - another way to drop a collection 95 | db.command({"drop" => "users"}) 96 | 97 | # 98 | # ******************** 3.2.1 Object ID generation 99 | # 100 | 101 | require 'mongo' 102 | id = BSON::ObjectId.from_string('4c291856238d3b19b2000001') 103 | id.generation_time 104 | 105 | jun_id = BSON::ObjectId.from_time(Time.utc(2013, 6, 1)) 106 | jul_id = BSON::ObjectId.from_time(Time.utc(2013, 7, 1)) 107 | $users.find({'_id' => {'$gte' => jun_id, '$lt' => jul_id}}) 108 | 109 | 110 | # 111 | # ******************* 3.2.3 Over the network 112 | # 113 | 114 | $users.insert({"last_name" => "james"}, :w => 1) 115 | -------------------------------------------------------------------------------- /chapter3/update.rb: -------------------------------------------------------------------------------- 1 | $LOAD_PATH << File.dirname(__FILE__) 2 | require 'config' 3 | require 'archiver' 4 | 5 | TAGS.each do |tag| 6 | archive = TweetArchiver.new(tag) 7 | archive.update 8 | end 9 | -------------------------------------------------------------------------------- /chapter3/viewer.rb: -------------------------------------------------------------------------------- 1 | $LOAD_PATH << File.dirname(__FILE__) 2 | require 'rubygems' 3 | require 'mongo' 4 | require 'sinatra' 5 | require 'config' 6 | require 'open-uri' 7 | 8 | configure do 9 | conn = Mongo::Client.new("mongodb://#{DATABASE_HOST}:#{DATABASE_PORT}/#{DATABASE_NAME}") 10 | TWEETS = conn[COLLECTION_NAME] 11 | end 12 | 13 | get '/' do 14 | if params['tag'] 15 | selector = {:tags => params['tag']} 16 | else 17 | selector = {} 18 | end 19 | 20 | @tweets = TWEETS.find(selector).sort(["id", -1]) 21 | erb :tweets 22 | end 23 | -------------------------------------------------------------------------------- /chapter3/views/tweets.erb: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 19 | 20 | 21 |

Tweet Archive
22 | <% TAGS.each do |tag| %>
23 |   <%= tag %>
24 | <% end %>
25 | <% @tweets.each do |tweet| %>
26 |   <%= tweet['text'] %>
27 | 
28 | 
29 |   <%= tweet['user']['screen_name'] %>
30 | 
31 |   on <%= tweet['created_at'] %>
32 | 
33 | 34 | <% end %> 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /chapter4/listing4_6.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'mongo' 3 | 4 | VIEW_PRODUCT = 0 # action type constants 5 | ADD_TO_CART = 1 6 | CHECKOUT = 2 7 | PURCHASE = 3 8 | 9 | con = Mongo::Connection.new 10 | db = con['garden'] 11 | db.drop_collection("user.actions") 12 | db.create_collection("user.actions", :capped => true, :size => 16384) 13 | 14 | actions = db['user.actions'] # refers to the garden.user.actions collection 15 | 16 | 500.times do |n| # loop 500 times, using n as the iterator 17 | doc = { 18 | :username => "kbanker", 19 | :action_code => rand(4), # random value between 0 and 3, inclusive 20 | :time => Time.now.utc, 21 | :n => n 22 | } 23 | actions.insert(doc) 24 | end 25 | -------------------------------------------------------------------------------- /chapter5/chp5.js: -------------------------------------------------------------------------------- 1 | 2 | // 5.1.1 3 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 4 | db.categories.findOne({'_id': product['main_cat_id']}) 5 | db.reviews.find({'product_id': product['_id']}) 6 | 7 | db.products.find({'slug': 'wheel-barrow-9092'}).limit(1) 8 | 9 | db.reviews.find({'product_id': product['_id']}).skip(0).limit(12) 10 | 11 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 12 | db.reviews.find({'product_id': product['_id']}). 13 | sort({'helpful_votes': -1}). 14 | limit(12) 15 | 16 | page_number = 1 17 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 18 | category = db.categories.findOne({'_id': product['main_cat_id']}) 19 | reviews_count = db.reviews.count({'product_id': product['_id']}) 20 | reviews = db.reviews.find({'product_id': product['_id']}). 21 | skip((page_number - 1) * 12). 22 | limit(12). 
23 | sort({'helpful_votes': -1}) 24 | 25 | page_number = 1 26 | category = db.categories.findOne({'slug': 'outdoors'}) 27 | siblings = db.categories.find({'parent_id': category['_id']}) 28 | products = db.products.find({'category_id': category['_id']}) 29 | .skip((page_number - 1) * 12) 30 | .limit(12) 31 | .sort({'helpful_votes': -1}) 32 | 33 | categories = db.categories.find({'parent_id': null}) 34 | 35 | // 5.1.2 36 | db.users.findOne({ 37 | 'username': 'kbanker', 38 | 'hashed_password': 'bd1cfa194c3a603e7186780824b04419'}) 39 | 40 | db.users.findOne({ 41 | 'username': 'kbanker', 42 | 'hashed_password': 'bd1cfa194c3a603e7186780824b04419'}, 43 | {'_id': 1}) 44 | 45 | db.users.find({'last_name': 'Banker'}) 46 | 47 | 48 | db.users.find({'last_name': /^Ba/}) 49 | 50 | db.users.find({'addresses.zip': {'$gt': 10019, '$lt': 10040}}) 51 | 52 | // 5.2.1 53 | db.users.find({'last_name': "Banker"}) 54 | db.users.find({'first_name': "Smith", birth_year: 1975}) 55 | 56 | // below will not work - though no errors are retured 57 | db.users.find({'birth_year': {'$gte': 1985}, 'birth_year': {'$lte': 2015}}) 58 | 59 | // fixes previous query 60 | db.users.find({'birth_year': {'$gte': 1985, '$lte': 2015}}) 61 | 62 | // insert some test documents 63 | db.items.insert({ "_id" : ObjectId("4caf82011b0978483ea29ada"), "value" : 97 }) 64 | db.items.insert({ "_id" : ObjectId("4caf82031b0978483ea29adb"), "value" : 98 }) 65 | db.items.insert({ "_id" : ObjectId("4caf82051b0978483ea29adc"), "value" : 99 }) 66 | db.items.insert({ "_id" : ObjectId("4caf820d1b0978483ea29ade"), "value" : "a" }) 67 | db.items.insert({ "_id" : ObjectId("4caf820f1b0978483ea29adf"), "value" : "b" }) 68 | db.items.insert({ "_id" : ObjectId("4caf82101b0978483ea29ae0"), "value" : "c" }) 69 | 70 | db.items.find({'value': {'$gte': 97}}) 71 | 72 | db.items.find({'value': {'$gte': "a"}}) 73 | 74 | db.products.find({ 75 | 'main_cat_id': { 76 | '$in': [ 77 | ObjectId("6a5b1476238d3b4dd5000048"), 78 | ObjectId("6a5b1476238d3b4dd5000051"), 79 | ObjectId("6a5b1476238d3b4dd5000057") 80 | ] 81 | } 82 | }) 83 | 84 | db.products.find({ 85 | '$or': [ 86 | {'details.color': 'blue'}, 87 | {'details.manufacturer': 'ACME'} 88 | ] 89 | }) 90 | 91 | db.products.find({ 92 | $and: [ 93 | { 94 | tags: {$in: ['gift', 'holiday']} 95 | }, 96 | { 97 | tags: {$in: ['gardening', 'landscaping']} 98 | } 99 | ] 100 | }) 101 | 102 | 103 | db.products.find({'details.color': {$exists: false}}) 104 | 105 | db.products.find({'details.color': {$exists: true}}) 106 | 107 | db.products.find({tags: "soil"}) 108 | 109 | db.products.ensureIndex({tags: 1}) 110 | db.products.find({tags: "soil"}).explain() 111 | 112 | db.products.find({'tags.0': "soil"}) 113 | 114 | db.users.find({'addresses.0.state': "NY"}) 115 | 116 | db.users.find({'addresses.0.state': "NY"}) 117 | 118 | db.users.ensureIndex({'addresses.state': 1}) 119 | 120 | db.users.find({'addresses.name': 'home', 'addresses.state': 'NY'}) 121 | 122 | db.users.find({ 123 | 'addresses': { 124 | '$elemMatch': { 125 | 'name': 'home', 126 | 'state': 'NY' 127 | } 128 | } 129 | }) 130 | 131 | 132 | db.users.find({'addresses': {$size: 3}}) 133 | 134 | db.reviews.find({ 135 | '$where': "function() { return this.helpful_votes > 3; }" 136 | }) 137 | 138 | db.reviews.find({'$where': "this.helpful_votes > 3"}) 139 | 140 | db.reviews.find({ 141 | 'user_id': ObjectId("4c4b1476238d3b4dd5000001"), 142 | '$where': "(this.rating * .92) > 3" 143 | }) 144 | 145 | db.reviews.find({ 146 | 'user_id': ObjectId("4c4b1476238d3b4dd5000001"), 147 | 'text': 
/best|worst/i 148 | }) 149 | 150 | db.reviews.find({ 151 | 'user_id': ObjectId("4c4b1476238d3b4dd5000001"), 152 | 'text': { 153 | '$regex': "best|worst", 154 | '$options': "i"} 155 | }) 156 | 157 | db.orders.find({subtotal: {$mod: [3, 0]}}) 158 | 159 | // 5.2.2 160 | db.users.find({}, {'addresses': 0, 'payment_methods': 0}) 161 | 162 | db.products.find({}, {'reviews': {$slice: 12}}) 163 | db.products.find({}, {'reviews': {$slice: -5}}) 164 | 165 | db.products.find({}, {'reviews': {$slice: [24, 12]}}) 166 | 167 | db.products.find({}, {'reviews': {'$slice': [24, 12]}, 'reviews.rating': 1}) 168 | 169 | db.reviews.find({}).sort({'rating': -1}) 170 | 171 | db.reviews.find({}).sort({'helpful_votes':-1, 'rating': -1}) 172 | 173 | db.docs.find({}).skip(500000).limit(10).sort({date: -1}) 174 | 175 | previous_page_date = new Date(2013, 05, 05) 176 | db.docs.find({'date': {'$gt': previous_page_date}}).limit(10).sort({'date': -1}) 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | -------------------------------------------------------------------------------- /chapter6/chp6.2_code.js: -------------------------------------------------------------------------------- 1 | 2 | // 6.2.1 Products, categories, reviews 3 | 4 | 5 | // From chapter 5 - version 2 - reduced amount of code 6 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 7 | reviews_count = db.reviews.count({'product_id': product['_id']}) 8 | 9 | // start with summary for all products 10 | ratingSummary = db.reviews.aggregate([ 11 | {$group : { _id:'$product_id', 12 | count:{$sum:1} }} 13 | ]).next(); 14 | 15 | /* Result 16 | 17 | > ratingSummary = db.reviews.aggregate([ 18 | ... {$group : { _id:'$product_id', 19 | ... count:{$sum:1} }} 20 | ... ]); 21 | { "_id" : ObjectId("4c4b1476238d3b4dd5003982"), "count" : 2 } 22 | { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), "count" : 3 } 23 | 24 | */ 25 | 26 | 27 | // rating summary - for selected product 28 | // look up product first 29 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 30 | 31 | ratingSummary = db.reviews.aggregate([ 32 | {$match : { product_id: product['_id']} }, 33 | {$group : { _id:'$product_id', 34 | count:{$sum:1} }} 35 | ]).next(); 36 | 37 | ///* Results 38 | // 39 | // { 40 | // "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 41 | // "average" : 4.333333333333333 42 | // } 43 | // 44 | // */ 45 | 46 | // Adding average review 47 | 48 | ratingSummary = db.reviews.aggregate([ 49 | {$match : {'product_id': product['_id']}}, 50 | {$group : { _id:'$product_id', 51 | average:{$avg:'$rating'}, 52 | count: {$sum:1}}} 53 | ]).next(); 54 | 55 | ///* Results 56 | // 57 | // { 58 | // "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 59 | // "average" : 4.333333333333333, 60 | // "count" : 3 61 | // } 62 | // 63 | //THIS WAS MODIFIED TO FIT GRAPHIC 64 | // 65 | // { 66 | // "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 67 | // "average" : 5, 68 | // "count" : 8 69 | // } 70 | // 71 | // */ 72 | 73 | // With group first 74 | 75 | // below was done BEFORE book example 76 | 77 | // below verifies that in fact, the first command will use an index, second will not 78 | 79 | db.reviews.ensureIndex( { product_id: 1 } ) 80 | 81 | countsByRating = db.reviews.aggregate([ 82 | {$match : {'product_id': product['_id']}}, 83 | {$group : { _id:'$rating', 84 | count:{$sum:1}}} 85 | ],{explain:true}) 86 | 87 | countsByRating = db.reviews.aggregate([ 88 | {$group : { _id:{'product_id': '$product_id', rating:'$rating'}, 89 | count:{$sum:1}}}, 90 | {$match 
: {'_id.product_id': product['_id']}} 91 | ],{explain:true}) 92 | 93 | // BOOK Example: ***TODO: change this in chapter 6 document - page 9 94 | 95 | ratingSummary = db.reviews.aggregate([ 96 | {$group : { _id:'$product_id', 97 | average:{$avg:'$rating'}, 98 | count: {$sum:1}}}, 99 | {$match : {'_id': product['_id']}} 100 | ]).next(); 101 | 102 | 103 | // Counting Reviews by Rating 104 | countsByRating = db.reviews.aggregate([ 105 | {$match : {'product_id': product['_id']}}, 106 | {$group : { _id:'$rating', 107 | count:{$sum:1}}} 108 | ]).toArray(); 109 | 110 | ///* Results 111 | // 112 | // [ { "_id" : 5, "count" : 1 }, { "_id" : 4, "count" : 2 } ] 113 | // 114 | //THIS WAS CHANGED TO MATCH GRAPHIC 115 | // 116 | // [ { "_id" : 5, "count" : 5 }, 117 | // { "_id" : 4, "count" : 2 }, 118 | // { "_id" : 3, "count" : 1 } ] 119 | // 120 | // 121 | // */ 122 | 123 | ///* SQL example not tested 124 | // 125 | // SELECT RATING, COUNT(*) AS COUNT 126 | // FROM REVIEWS 127 | // WHERE PRODUCT_ID = '4c4b1476238d3b4dd5003981' 128 | // GROUP BY RATING 129 | // 130 | // 131 | // */ 132 | 133 | 134 | // Joining collections 135 | db.products.aggregate([ 136 | {$group : { _id:'$main_cat_id', 137 | count:{$sum:1}}} 138 | ]); 139 | 140 | ///* expected results 141 | // 142 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), "count" : 2 } 143 | // 144 | // */ 145 | 146 | // "join" main category summary with categories 147 | db.mainCategorySummary.remove({}); 148 | 149 | db.products.aggregate([ 150 | {$group : { _id:'$main_cat_id', 151 | count:{$sum:1}}} 152 | ]).forEach(function(doc){ 153 | var category = db.categories.findOne({_id:doc._id}); 154 | if (category !== null) { 155 | doc.category_name = category.name; 156 | } 157 | else { 158 | doc.category_name = 'not found'; 159 | } 160 | db.mainCategorySummary.insert(doc); 161 | }) 162 | 163 | // findOne on mainCategorySummary 164 | 165 | db.mainCategorySummary.findOne() 166 | 167 | ///* Expected results 168 | // 169 | // { 170 | // "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 171 | // "count" : 2, 172 | // "category_name" : "Gardening Tools" 173 | // } 174 | // 175 | // */ 176 | 177 | // Faster Joins - $unwind 178 | 179 | // FASTER JOIN - $UNWIND 180 | db.products.aggregate([ 181 | {$project : {category_ids:1}}, 182 | {$unwind : '$category_ids'}, 183 | {$group : { _id:'$category_ids', 184 | count:{$sum:1}}}, 185 | {$out : 'countsByCategory'} 186 | ]); 187 | 188 | // related findOne() - Using $out to create new collections 189 | db.countsByCategory.findOne() 190 | 191 | // expected results 192 | // > db.countsByCategory.findOne() 193 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000049"), "count" : 2 } 194 | 195 | // 196 | 197 | // $out and $project section 198 | 199 | db.products.aggregate([ 200 | {$group : { _id:'$main_cat_id', 201 | count:{$sum:1}}}, 202 | {$out : 'mainCategorySummary'} 203 | ]); 204 | 205 | 206 | db.products.aggregate([ 207 | {$project : {category_ids:1}} 208 | ]); 209 | 210 | /* Expected output 211 | 212 | > db.products.aggregate([ 213 | ... {$project : {category_ids:1}} 214 | ... 
]); 215 | { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 216 | "category_ids" : [ ObjectId("6a5b1476238d3b4dd5000048"), 217 | ObjectId("6a5b1476238d3b4dd5000049") ] } 218 | { "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 219 | "category_ids" : [ ObjectId("6a5b1476238d3b4dd5000048"), 220 | ObjectId("6a5b1476238d3b4dd5000049") ] } 221 | 222 | */ 223 | // 6.2.2 User and Order 224 | db.reviews.aggregate([ 225 | {$group : 226 | {_id : '$user_id', 227 | count : {$sum : 1}, 228 | avg_helpful : {$avg : '$helpful_votes'}} 229 | } 230 | ]) 231 | 232 | ///* Expected result 233 | // 234 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000003"), 235 | // "count" : 1, "avg_helpful" : 10 } 236 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000002"), 237 | // "count" : 2, "avg_helpful" : 4 } 238 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 239 | // "count" : 2, "avg_helpful" : 5 } 240 | // 241 | // */ 242 | 243 | // summarizing sales by year and month 244 | db.orders.aggregate([ 245 | {"$match": {"purchase_data": 246 | {"$gte" : new Date(2010, 0, 1)}}}, 247 | {"$group": { 248 | "_id": {"year" : {"$year" :"$purchase_data"}, 249 | "month" : {"$month" : "$purchase_data"}}, 250 | "count": {"$sum":1}, 251 | "total": {"$sum":"$sub_total"}}}, 252 | {"$sort": {"_id":-1}} 253 | ]); 254 | 255 | // /* expected results 256 | // 257 | // { "_id" : { "year" : 2014, "month" : 11 }, "count" : 1, "total" : 4897 } 258 | // { "_id" : { "year" : 2014, "month" : 8 }, "count" : 2, "total" : 11093 } 259 | // { "_id" : { "year" : 2014, "month" : 4 }, "count" : 1, "total" : 4897 } 260 | // 261 | // */ 262 | 263 | // Finding best manhattan customers 264 | upperManhattanOrders = {'shipping_address.zip': {$gte: 10019, $lt: 10040}}; 265 | 266 | sumByUserId = {_id: '$user_id', 267 | total: {$sum:'$sub_total'}}; 268 | 269 | orderTotalLarge = {total: {$gt:10000}}; 270 | 271 | sortTotalDesc = {total: -1}; 272 | 273 | db.orders.aggregate([ 274 | {$match: upperManhattanOrders}, 275 | {$group: sumByUserId}, 276 | {$match: orderTotalLarge}, 277 | {$sort: sortTotalDesc} 278 | ]); 279 | 280 | db.orders.aggregate([ 281 | {$group: sumByUserId}, 282 | {$match: orderTotalLarge}, 283 | {$limit: 10} 284 | ]); 285 | 286 | // /* results 287 | // 288 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000002"), "total" : 19588 } 289 | // 290 | // */ 291 | 292 | // easier to modify - example - add count 293 | sumByUserId = {_id: '$user_id', 294 | total: {$sum:'$sub_total'}, 295 | count: {$sum: 1}}; 296 | 297 | // rerun previous 298 | db.orders.aggregate([ 299 | {$group: sumByUserId}, 300 | {$match: orderTotalLarge}, 301 | {$limit: 10} 302 | ]); 303 | 304 | // /* results 305 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000002"), 306 | // "total" : 19588, "count" : 4 } 307 | // 308 | // */ 309 | 310 | db.orders.aggregate([ 311 | {$match: upperManhattanOrders}, 312 | {$group: sumByUserId}, 313 | {$match: orderTotalLarge}, 314 | {$sort: sortTotalDesc}, 315 | {$out: 'targetedCustomers'} 316 | ]); 317 | 318 | // fixed: added order with upper manhattan shipping address 319 | upperManhattanOrders = {'shipping_address.zip': {$gte: 10019, $lt: 11216}}; 320 | 321 | db.orders.aggregate([ 322 | {$match: upperManhattanOrders}, 323 | {$group: sumByUserId}, 324 | {$match: orderTotalLarge}, 325 | {$sort: sortTotalDesc}, 326 | {$out: 'targetedCustomers'} 327 | ]); 328 | 329 | db.targetedCustomers.findOne(); 330 | 331 | // /* expected results: 332 | // 333 | // > db.targetedCustomers.findOne(); 334 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000002"), "total" : 19588 } 335 | // 336 | 
// 337 | // */ -------------------------------------------------------------------------------- /chapter6/chp6.3_code.js: -------------------------------------------------------------------------------- 1 | 2 | // Section 6.3 3 | 4 | // for 6.3.1 $project 5 | db.users.findOne( 6 | {username: 'kbanker', // 1 7 | hashed_password: 'bd1cfa194c3a603e7186780824b04419'}, 8 | {_id: 1} // 2 9 | ) 10 | 11 | // /* should return 12 | // 13 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001") } 14 | // 15 | // */ 16 | 17 | 18 | db.users.findOne( 19 | {username: 'kbanker', 20 | hashed_password: 'bd1cfa194c3a603e7186780824b04419'}, 21 | {first_name:1, last_name:1} // 1 22 | ) 23 | 24 | // /* should return 25 | // 26 | // { 27 | // "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 28 | // "first_name" : "Kyle", 29 | // "last_name" : "Banker" 30 | // } 31 | // 32 | // */ 33 | 34 | db.users.aggregate([ 35 | {$match: {username: 'kbanker'}}, 36 | {$project: {name: {first:'$first_name', 37 | last:'$last_name'}} 38 | } 39 | ]) 40 | 41 | // /* Should return 42 | // 43 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), "name" : { "first" : "Kyle", "last" : "Banker" } } 44 | // 45 | // */ 46 | 47 | // 6.3.2 $group 48 | db.orders.aggregate([ 49 | {$project: {user_id:1, line_items:1}}, 50 | {$unwind: '$line_items'}, 51 | {$group: {_id: {user_id:'$user_id'}, purchasedItems: {$push: '$line_items'}}} 52 | ]).toArray(); 53 | 54 | /* expected results 55 | [ 56 | { 57 | "_id" : { 58 | "user_id" : ObjectId("4c4b1476238d3b4dd5000002") 59 | }, 60 | "purchasedItems" : [ 61 | { 62 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 63 | "sku" : "9092", 64 | "name" : "Extra Large Wheel Barrow", 65 | "quantity" : 1, 66 | "pricing" : { 67 | "retail" : 5897, 68 | "sale" : 4897 69 | } 70 | }, 71 | { 72 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 73 | "sku" : "9092", 74 | "name" : "Extra Large Wheel Barrow", 75 | "quantity" : 1, 76 | "pricing" : { 77 | "retail" : 5897, 78 | "sale" : 4897 79 | } 80 | }, 81 | { 82 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 83 | "sku" : "9092", 84 | "name" : "Extra Large Wheel Barrow", 85 | "quantity" : 1, 86 | "pricing" : { 87 | "retail" : 5897, 88 | "sale" : 4897 89 | } 90 | }, 91 | { 92 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 93 | "sku" : "9092", 94 | "name" : "Extra Large Wheel Barrow", 95 | "quantity" : 1, 96 | "pricing" : { 97 | "retail" : 5897, 98 | "sale" : 4897 99 | } 100 | } 101 | ] 102 | }, 103 | { 104 | "_id" : { 105 | "user_id" : ObjectId("4c4b1476238d3b4dd5000001") 106 | }, 107 | "purchasedItems" : [ 108 | { 109 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 110 | "sku" : "9092", 111 | "name" : "Extra Large Wheel Barrow", 112 | "quantity" : 1, 113 | "pricing" : { 114 | "retail" : 5897, 115 | "sale" : 4897 116 | } 117 | }, 118 | { 119 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 120 | "sku" : "10027", 121 | "name" : "Rubberized Work Glove, Black", 122 | "quantity" : 2, 123 | "pricing" : { 124 | "retail" : 1499, 125 | "sale" : 1299 126 | } 127 | } 128 | ] 129 | } 130 | ] 131 | 132 | */ 133 | 134 | db.orders.aggregate([ 135 | {"$match": {"purchase_data": 136 | {"$gte" : new Date(2010, 0, 1)}}}, 137 | {"$group": { 138 | "_id": {"year" : {"$year" :"$purchase_data"}, 139 | "month" : {"$month" : "$purchase_data"}}, 140 | "count": {"$sum":1}, 141 | "total": {"$sum":"$sub_total"}}}, 142 | {"$sort": {"_id":-1}} 143 | ]); 144 | 145 | // /* expected results 146 | // 147 | // { "_id" : { "year" : 2014, "month" : 11 }, "count" : 1, "total" : 4897 } 148 | // { "_id" : { "year" : 2014, 
"month" : 8 }, "count" : 2, "total" : 11093 } 149 | // { "_id" : { "year" : 2014, "month" : 4 }, "count" : 1, "total" : 4897 } 150 | // 151 | // */ 152 | 153 | // 6.3.3 $match, $sort, $skip, $limit 154 | 155 | // based on the example from from chapter 5.1.1 156 | // PAGINATING YOUR PRODUCT REVIEWS WITH SKIP, LIMIT AND SORT 157 | page_number = 1 158 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 159 | 160 | reviews = db.reviews.find({'product_id': product['_id']}). 161 | skip((page_number - 1) * 12). 162 | limit(12). 163 | sort({'helpful_votes': -1}) 164 | 165 | /* expected output 166 | 167 | > reviews = db.reviews.find({'product_id': product['_id']}). 168 | ... skip((page_number - 1) * 12). 169 | ... limit(12). 170 | ... sort({'helpful_votes': -1}) 171 | { "_id" : ObjectId("4c4b1476238d3b4dd5000045"), "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), "user_id" : ObjectId("4c4b1476238d3b4dd5000003"), "rating" : 4, "helpful_votes" : 10 } 172 | { "_id" : ObjectId("4c4b1476238d3b4dd5000043"), "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), "user_id" : ObjectId("4c4b1476238d3b4dd5000002"), "rating" : 5, "helpful_votes" : 7 } 173 | { "_id" : ObjectId("4c4b1476238d3b4dd5000041"), "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), "date" : ISODate("2010-06-07T07:00:00Z"), "title" : "Amazing", "text" : "Has a squeaky wheel, but still a darn good wheel barrow.", "rating" : 4, "user_id" : ObjectId("4c4b1476238d3b4dd5000001"), "username" : "dgreenthumb", "helpful_votes" : 3, "voter_ids" : [ ObjectId("4c4b1476238d3b4dd5000041"), ObjectId("7a4f0376238d3b4dd5000003"), ObjectId("92c21476238d3b4dd5000032") ] } 174 | 175 | */ 176 | 177 | 178 | // same thing in aggregation framework 179 | 180 | reviews2 = db.reviews.aggregate([ 181 | {$match: {'product_id': product['_id']}}, 182 | {$skip : (page_number - 1) * 12}, 183 | {$limit: 12}, 184 | {$sort: {'helpful_votes': -1}} 185 | ]).toArray(); 186 | 187 | /* expected output 188 | 189 | > reviews2 = db.reviews.aggregate([ 190 | ... {$match: {'product_id': product['_id']}}, 191 | ... {$skip : (page_number - 1) * 12}, 192 | ... {$limit: 12}, 193 | ... {$sort: {'helpful_votes': -1}} 194 | ... 
]).toArray(); 195 | [ 196 | { 197 | "_id" : ObjectId("4c4b1476238d3b4dd5000045"), 198 | "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), 199 | "user_id" : ObjectId("4c4b1476238d3b4dd5000003"), 200 | "rating" : 4, 201 | "helpful_votes" : 10 202 | }, 203 | { 204 | "_id" : ObjectId("4c4b1476238d3b4dd5000043"), 205 | "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), 206 | "user_id" : ObjectId("4c4b1476238d3b4dd5000002"), 207 | "rating" : 5, 208 | "helpful_votes" : 7 209 | }, 210 | { 211 | "_id" : ObjectId("4c4b1476238d3b4dd5000041"), 212 | "product_id" : ObjectId("4c4b1476238d3b4dd5003981"), 213 | "date" : ISODate("2010-06-07T07:00:00Z"), 214 | "title" : "Amazing", 215 | "text" : "Has a squeaky wheel, but still a darn good wheel barrow.", 216 | "rating" : 4, 217 | "user_id" : ObjectId("4c4b1476238d3b4dd5000001"), 218 | "username" : "dgreenthumb", 219 | "helpful_votes" : 3, 220 | "voter_ids" : [ 221 | ObjectId("4c4b1476238d3b4dd5000041"), 222 | ObjectId("7a4f0376238d3b4dd5000003"), 223 | ObjectId("92c21476238d3b4dd5000032") 224 | ] 225 | } 226 | ] 227 | 228 | */ 229 | 230 | 231 | 232 | // also need these, but not shown in text 233 | upperManhattanOrders = {'shipping_address.zip': {$gte: 10019, $lt: 10040}}; 234 | 235 | sumByUserId = {_id: '$user_id', 236 | total: {$sum:'$sub_total'}}; 237 | 238 | orderTotalLarge = {total: {$gt:10000}}; 239 | 240 | // shown in text 241 | sortTotalDesc = {total: -1}; 242 | 243 | db.orders.aggregate([ 244 | {$match: upperManhattanOrders}, 245 | {$group: sumByUserId}, 246 | {$match: orderTotalLarge}, 247 | {$sort: sortTotalDesc}, 248 | {$out: 'targetedCustomers'} 249 | ]); 250 | 251 | // rerun previous also shown in text 252 | db.orders.aggregate([ 253 | {$group: sumByUserId}, 254 | {$match: orderTotalLarge}, 255 | {$limit: 10} 256 | ]); 257 | 258 | // /* Expected results 259 | // 260 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000002"), "total" : 19588 } 261 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), "total" : 6196 } 262 | // 263 | // */ 264 | 265 | // 6.3.4 $unwind 266 | db.products.aggregate([ 267 | {$project : {category_ids:1}}, 268 | {$unwind : '$category_ids'}, 269 | {$limit: 2} 270 | ]); 271 | 272 | // /* expected results 273 | // 274 | // { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), "category_ids" : ObjectId("6a5b1476238d3b4dd5000048") } 275 | // { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), "category_ids" : ObjectId("6a5b1476238d3b4dd5000049") } 276 | // 277 | // */ 278 | 279 | 280 | // 6.3.5 $out 281 | 282 | // these may be needed before running query 283 | 284 | upperManhattanOrders = {'shipping_address.zip': {$gte: 10019, $lt: 10040}}; 285 | 286 | sumByUserId = {_id: '$user_id', 287 | total: {$sum:'$sub_total'}}; 288 | 289 | orderTotalLarge = {total: {$gt:10000}}; 290 | 291 | sortTotalDesc = {total: -1}; 292 | 293 | 294 | // shown in text 295 | 296 | db.orders.aggregate([ 297 | {$match: upperManhattanOrders}, 298 | {$group: sumByUserId}, 299 | {$match: orderTotalLarge}, 300 | {$sort: sortTotalDesc}, 301 | {$out: 'targetedCustomers'} 302 | ]); 303 | 304 | -------------------------------------------------------------------------------- /chapter6/chp6.4_code.js: -------------------------------------------------------------------------------- 1 | 2 | // 6.4 reshaping documents 3 | db.users.aggregate([ 4 | {$match: {username: 'kbanker'}}, 5 | {$project: {name: {first:'$first_name', 6 | last:'$last_name'}} 7 | } 8 | ]); 9 | 10 | // /* should return 11 | // 12 | // 13 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 14 | // "name" : { 
"first" : "Kyle", "last" : "Banker" } } 15 | // 16 | // */ 17 | 18 | 19 | // 6.4.1 String functions 20 | db.users.aggregate([ 21 | {$match: {username: 'kbanker'}}, 22 | {$project: 23 | {name: {$concat:['$first_name', ' ', '$last_name']}, // 1 24 | firstInitial: {$substr: ['$first_name',0,1]}, // 2 25 | usernameUpperCase: {$toUpper: '$username'} // 3 26 | } 27 | } 28 | ]); 29 | 30 | // /* Expected results 31 | // 32 | // { "_id" : ObjectId("4c4b1476238d3b4dd5000001"), 33 | // "name" : "Kyle Banker", 34 | // "firstInitial" : "K", 35 | // "usernameUpperCase" : "KBANKER" } 36 | // 37 | // 38 | // */ 39 | 40 | // additional example for 6.4.1 using $substr function 41 | 42 | db.orders.aggregate([ 43 | {$unwind: '$line_items'}, 44 | {$project: { 45 | 'line_items.name': 1, 46 | orderQuantity: 47 | {$substr:['$line_items.quantity',0,10]}}}, // A 48 | {$project: { 49 | orderSummary: 50 | {$concat: ['$orderQuantity', ' ', '$line_items.name']}}} // B 51 | ]) 52 | // #A convert ‘line_items.quantity’ to a string. 53 | // #B concatenate string version of order quantity with the line item name 54 | 55 | /* expected results 56 | 57 | > db.orders.aggregate([ 58 | ... {$unwind: '$line_items'}, 59 | ... {$project: { 60 | ... 'line_items.name': 1, 61 | ... orderQuantity: 62 | ... {$substr:['$line_items.quantity',0,10]}}}, // A 63 | ... {$project: { 64 | ... orderSummary: 65 | ... {$concat: ['$orderQuantity', ' ', '$line_items.name']}}} // B 66 | ... ]) 67 | { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), "orderSummary" : "1 Extra Large Wheel Barrow" } 68 | { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), "orderSummary" : "2 Rubberized Work Glove, Black" } 69 | { "_id" : ObjectId("6a5b1476238d3b4dd5000049"), "orderSummary" : "1 Extra Large Wheel Barrow" } 70 | { "_id" : ObjectId("6a5b1476238d3b4dd5000050"), "orderSummary" : "1 Extra Large Wheel Barrow" } 71 | { "_id" : ObjectId("6a5b1476238d3b4dd5000051"), "orderSummary" : "1 Extra Large Wheel Barrow" } 72 | { "_id" : ObjectId("6a5b1476238d3b4dd5000052"), "orderSummary" : "1 Extra Large Wheel Barrow" } 73 | 74 | */ 75 | 76 | 77 | // 6.4.2 Arithmetic functions 78 | // na 79 | 80 | // 6.4.3 date functions 81 | // na 82 | 83 | // 6.4.4 Logical functions 84 | // na 85 | 86 | // 6.4.5 Set functions 87 | // given products: 88 | { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 89 | "productName" : "Extra Large Wheel Barrow", 90 | "tags" : [ "tools", "gardening", "soil" ]} 91 | 92 | { "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 93 | "productName" : "Rubberized Work Glove, Black", 94 | "tags" : [ "gardening" ]} 95 | 96 | // and define tag 97 | testSet1 = ['tools'] 98 | 99 | // a setUnion example as such 100 | db.products.aggregate([ 101 | {$project: 102 | {productName: '$name', 103 | tags:1, 104 | setUnion: {$setUnion:['$tags',testSet1]}, 105 | } 106 | } 107 | ]) 108 | 109 | // would produce 110 | { "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 111 | "productName" : "Extra Large Wheel Barrow", 112 | "tags" : ["tools", "gardening", "soil"], 113 | "setUnion" : ["gardening","tools","soil"] 114 | } 115 | 116 | { "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 117 | "productName" : "Rubberized Work Glove, Black", 118 | "tags" : ["gardening"], 119 | "setUnion" : ["tools", "gardening"] 120 | } 121 | 122 | // 6.4.6 Misc. 
functions 123 | db.orders.aggregate([ 124 | {$project: { 125 | orderSummary: { 126 | $map: { 127 | input: '$line_items', 128 | as: 'item', 129 | in: { 130 | descr: {$concat: [ 131 | {$substr:['$$item.quantity',0,10]}, 132 | ' ', '$$item.name']}, 133 | price: '$$item.pricing.sale' 134 | } 135 | } 136 | }} 137 | }, 138 | {$limit: 2} 139 | ]).pretty() 140 | -------------------------------------------------------------------------------- /chapter6/chp6.5_code.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | // 6.5 Performance considerations and limits 4 | // listing 6.3 5 | 6 | db.numbers.find({num: {"$gt": 199995 }}).explain() 7 | 8 | // /* should return 9 | // 10 | // > db.numbers.find({num: {"$gt": 199995 }}).explain() 11 | // { 12 | // "cursor" : "BasicCursor", 13 | // "isMultiKey" : false, 14 | // "n" : 0, 15 | // "nscannedObjects" : 0, 16 | // "nscanned" : 0, 17 | // "nscannedObjectsAllPlans" : 0, 18 | // "nscannedAllPlans" : 0, 19 | // "scanAndOrder" : false, 20 | // "indexOnly" : false, 21 | // "nYields" : 0, 22 | // "nChunkSkips" : 0, 23 | // "millis" : 0, 24 | // "server" : "D830J:27017" 25 | // } 26 | // 27 | // */ 28 | 29 | // listing 6.4 30 | 31 | // make sure we have the index 32 | db.reviews.ensureIndex( { product_id: 1 } ) 33 | 34 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 35 | 36 | countsByRating = db.reviews.aggregate([ 37 | {$match : {'product_id': product['_id']}}, 38 | {$group : { _id:'$rating', 39 | count:{$sum:1}}} 40 | ],{explain:true}) 41 | 42 | // /* should return 43 | // > countsByRating = db.reviews.aggregate([ 44 | // ... {$match : {'product_id': product['_id']}}, 45 | // ... {$group : { _id:'$rating', 46 | // ... count:{$sum:1}}} 47 | // ... ],{explain:true}) 48 | // { 49 | // "stages" : [ 50 | // { 51 | // "$cursor" : { 52 | // "query" : { 53 | // "product_id" : ObjectId("4c4b1476238d3b4dd5003981") 54 | // }, 55 | // "fields" : { 56 | // "rating" : 1, 57 | // "_id" : 0 58 | // }, 59 | // "plan" : { 60 | // "cursor" : "BtreeCursor ", 61 | // "isMultiKey" : false, 62 | // "scanAndOrder" : false, 63 | // "indexBounds" : { 64 | // "product_id" : [ 65 | // [ 66 | // ObjectId("4c4b1476238d3b4dd5003981"), 67 | // ObjectId("4c4b1476238d3b4dd5003981") 68 | // ] 69 | // ] 70 | // }, 71 | // "allPlans" : [ 72 | // ... 
73 | // ] 74 | // } 75 | // } 76 | // }, 77 | // { 78 | // "$group" : { 79 | // "_id" : "$rating", 80 | // "count" : { 81 | // "$sum" : { 82 | // "$const" : 1 83 | // } 84 | // } 85 | // } 86 | // } 87 | // ], 88 | // "ok" : 1 89 | // } 90 | // 91 | // */ 92 | 93 | // allowDiskUse option example 94 | db.orders.aggregate([ 95 | {$match: {purchase_data: {$gte: new Date(2010, 0, 1)}}}, 96 | {$group: { 97 | _id: {year : {$year :'$purchase_data'}, 98 | month: {$month :'$purchase_data'}}, 99 | count: {$sum:1}, 100 | total: {$sum:'$sub_total'}}}, 101 | {$sort: {_id:-1}} 102 | ], {allowDiskUse:true}); 103 | 104 | // aggregate cursor option example 105 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 106 | countsByRating = db.reviews.aggregate([ 107 | {$match : {'product_id': product['_id']}}, 108 | {$group : { _id:'$rating', 109 | count:{$sum:1}}} 110 | ],{cursor:{}}) 111 | 112 | 113 | 114 | -------------------------------------------------------------------------------- /chapter6/chp6.6_code.js: -------------------------------------------------------------------------------- 1 | // 6.6.1 count() and distinct() 2 | 3 | product = db.products.findOne({'slug': 'wheel-barrow-9092'}) 4 | reviews_count = db.reviews.count({'product_id': product['_id']}) 5 | 6 | db.orders.distinct('shipping_address.zip') 7 | 8 | // 6.6.2 Map-reduce 9 | db.orders.aggregate([ 10 | {"$match": {"purchase_data": 11 | {"$gte" : new Date(2010, 0, 1)}}}, 12 | {"$group": { 13 | "_id": {"year" : {"$year" :"$purchase_data"}, 14 | "month" : {"$month" : "$purchase_data"}}, 15 | "count": {"$sum":1}, 16 | "total": {"$sum":"$sub_total"}}}, 17 | {"$sort": {"_id":-1}} 18 | ]); 19 | 20 | // NOTE: corrected map reduce example from first edition. 21 | 22 | map = function() { 23 | var shipping_month = (this.purchase_data.getMonth()+1) + 24 | '-' + this.purchase_data.getFullYear(); 25 | 26 | var tmpItems = 0; 27 | this.line_items.forEach(function(item) { 28 | tmpItems += item.quantity; 29 | }); 30 | 31 | emit(shipping_month, {order_total: this.sub_total, items_total: tmpItems}); 32 | }; 33 | 34 | reduce = function(key, values) { 35 | var result = { order_total: 0, items_total: 0 }; 36 | values.forEach(function(value){ 37 | result.order_total += value.order_total; 38 | result.items_total += value.items_total; 39 | }); 40 | return ( result ); 41 | }; 42 | 43 | filter = {purchase_data: {$gte: new Date(2010, 0, 1)}}; 44 | db.orders.mapReduce(map, reduce, {query: filter, out: 'totals'}); 45 | 46 | db.totals.find(); 47 | 48 | // /* FIRST EDITION Stated expected results - NOT FOR THIS DATA 49 | // 50 | // { _id: "1-2011", value: { total: 32002300, items: 59 }} 51 | // { _id: "2-2011", value: { total: 45439500, items: 71 }} 52 | // { _id: "3-2011", value: { total: 54322300, items: 98 }} 53 | // { _id: "4-2011", value: { total: 75534200, items: 115 }} 54 | // { _id: "5-2011", value: { total: 81232100, items: 121 }} 55 | // */ 56 | 57 | // /* ACTUAL Expected results 58 | // To see what data is available: 59 | // 60 | // db.orders.find({purchase_data: {$gte: new Date(2010, 0, 1)}}, 61 | // {purchase_data:1, sub_total:1, 'line_items.quantity':1 }) 62 | // 63 | // which returns: 64 | // 65 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000048"), "purchase_data" : ISODate("2014-08-01T07:00:00Z"), 66 | // "line_items" : [ { "quantity" : 1 }, { "quantity" : 2 } ], "sub_total" : 6196 } 67 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000049"), "purchase_data" : ISODate("2014-04-15T07:00:00Z"), 68 | // "line_items" : [ { "quantity" : 1 } ], "sub_total" : 
4897 } 69 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000051"), "purchase_data" : ISODate("2014-08-03T07:00:00Z"), 70 | // "line_items" : [ { "quantity" : 1 } ], "sub_total" : 4897 } 71 | // { "_id" : ObjectId("6a5b1476238d3b4dd5000052"), "purchase_data" : ISODate("2014-11-03T08:00:00Z"), 72 | // "line_items" : [ { "quantity" : 1 } ], "sub_total" : 4897 } 73 | // 74 | // so the db.totals.find() will return: 75 | // 76 | // { "_id" : "11-2014", "value" : { "order_total" : 4897, "items_total" : 1 } } 77 | // { "_id" : "4-2014", "value" : { "order_total" : 4897, "items_total" : 1 } } 78 | // { "_id" : "8-2014", "value" : { "order_total" : 11093, "items_total" : 4 } } 79 | // 80 | // */ 81 | -------------------------------------------------------------------------------- /chapter6/data/export.js: -------------------------------------------------------------------------------- 1 | // export data via mongoexport 2 | // Only works for collections with simple data that fits the limitations of JSON 3 | mongoexport --db garden --collection orders --out garden.orders.json 4 | mongoexport --db garden --collection products --out garden.products.json 5 | mongoexport --db garden --collection categories --out garden.categories.json 6 | mongoexport --db garden --collection reviews --out garden.reviews.json 7 | mongoexport --db garden --collection users --out garden.users.json -------------------------------------------------------------------------------- /chapter6/data/garden.categories.json: -------------------------------------------------------------------------------- 1 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000048" }, "slug" : "gardening-tools", "ancestors" : [ { "name" : "Home", "_id" : { "$oid" : "8b87fb1476238d3b4dd50003" }, "slug" : "home" }, { "name" : "Outdoors", "_id" : { "$oid" : "9a9fb1476238d3b4dd500001" }, "slug" : "outdoors" } ], "parent_id" : { "$oid" : "9a9fb1476238d3b4dd500001" }, "name" : "Gardening Tools", "description" : "Gardening gadgets galore!" 
} 2 | -------------------------------------------------------------------------------- /chapter6/data/garden.orders.json: -------------------------------------------------------------------------------- 1 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000048" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000001" }, "purchase_data" : { "$date" : "2014-08-01T00:00:00.000-0700" }, "state" : "CART", "line_items" : [ { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "sku" : "9092", "name" : "Extra Large Wheel Barrow", "quantity" : 1, "pricing" : { "retail" : 5897, "sale" : 4897 } }, { "_id" : { "$oid" : "4c4b1476238d3b4dd5003982" }, "sku" : "10027", "name" : "Rubberized Work Glove, Black", "quantity" : 1, "pricing" : { "retail" : 1499, "sale" : 1299 } } ], "shipping_address" : { "street" : "588 5th Street", "city" : "Brooklyn", "state" : "NY", "zip" : 11215 }, "sub_total" : 6196, "tax" : 600 } 2 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000049" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "purchase_data" : { "$date" : "2014-04-15T00:00:00.000-0700" }, "line_items" : [ { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "sku" : "9092", "name" : "Extra Large Wheel Barrow", "quantity" : 1, "pricing" : { "retail" : 5897, "sale" : 4897 } } ], "shipping_address" : { "street" : "509 Amsterdam Av", "city" : "New York", "state" : "NY", "zip" : 10024 }, "sub_total" : 4897 } 3 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000050" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "purchase_data" : { "$date" : "2009-08-01T00:00:00.000-0700" }, "line_items" : [ { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "sku" : "9092", "name" : "Extra Large Wheel Barrow", "quantity" : 1, "pricing" : { "retail" : 5897, "sale" : 4897 } } ], "shipping_address" : { "street" : "509 Amsterdam Av", "city" : "New York", "state" : "NY", "zip" : 10024 }, "sub_total" : 4897 } 4 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000051" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "purchase_data" : { "$date" : "2014-08-03T00:00:00.000-0700" }, "line_items" : [ { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "sku" : "9092", "name" : "Extra Large Wheel Barrow", "quantity" : 1, "pricing" : { "retail" : 5897, "sale" : 4897 } } ], "shipping_address" : { "street" : "509 Amsterdam Av", "city" : "New York", "state" : "NY", "zip" : 10024 }, "sub_total" : 4897 } 5 | { "_id" : { "$oid" : "6a5b1476238d3b4dd5000052" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "purchase_data" : { "$date" : "2014-11-03T00:00:00.000-0800" }, "line_items" : [ { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "sku" : "9092", "name" : "Extra Large Wheel Barrow", "quantity" : 1, "pricing" : { "retail" : 5897, "sale" : 4897 } } ], "shipping_address" : { "street" : "509 Amsterdam Av", "city" : "New York", "state" : "NY", "zip" : 10024 }, "sub_total" : 4897 } 6 | -------------------------------------------------------------------------------- /chapter6/data/garden.products.json: -------------------------------------------------------------------------------- 1 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "slug" : "wheel-barrow-9092", "sku" : "9092", "name" : "Extra Large Wheel Barrow", "description" : "Heavy duty wheel barrow...", "details" : { "weight" : 47, "weight_units" : "lbs", "model_num" : 4039283402, "manufacturer" : "Acme", "color" : "Green" }, "total_reviews" : 4, "average_review" : 4.5, "pricing" : { "retail" : 5897, "sale" : 4897 }, "price_history" : [ { "retail" : 5297, "sale" : 4297, "start" : { "$date" : 
"2010-05-01T00:00:00.000-0700" }, "end" : { "$date" : "2010-05-08T00:00:00.000-0700" } }, { "retail" : 5297, "sale" : 5297, "start" : { "$date" : "2010-05-09T00:00:00.000-0700" }, "end" : { "$date" : "2010-05-16T00:00:00.000-0700" } } ], "category_ids" : [ { "$oid" : "6a5b1476238d3b4dd5000048" }, { "$oid" : "6a5b1476238d3b4dd5000049" } ], "main_cat_id" : { "$oid" : "6a5b1476238d3b4dd5000048" }, "tags" : [ "tools", "gardening", "soil" ] } 2 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5003982" }, "slug" : "rubberized-work-gloves-black-10027", "sku" : "10027", "name" : "Rubberized Work Glove, Black", "description" : "Black Rubberized Work Gloves...", "details" : { "weight" : 5, "weight_units" : "oz", "model_num" : 4039, "manufacturer" : "GloveCo", "color" : "black" }, "total_reviews" : 4, "average_review" : 4.5, "pricing" : { "retail" : 1499, "sale" : 1299 }, "price_history" : [ { "retail" : 1399, "sale" : 1199, "start" : { "$date" : "2010-05-01T00:00:00.000-0700" }, "end" : { "$date" : "2010-05-08T00:00:00.000-0700" } } ], "category_ids" : [ { "$oid" : "6a5b1476238d3b4dd5000048" }, { "$oid" : "6a5b1476238d3b4dd5000049" } ], "main_cat_id" : { "$oid" : "6a5b1476238d3b4dd5000048" }, "tags" : [ "gardening" ] } 3 | -------------------------------------------------------------------------------- /chapter6/data/garden.reviews.json: -------------------------------------------------------------------------------- 1 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000041" }, "product_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "date" : { "$date" : "2010-06-07T00:00:00.000-0700" }, "title" : "Amazing", "text" : "Has a squeaky wheel, but still a darn good wheel barrow.", "rating" : 4, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000001" }, "username" : "dgreenthumb", "helpful_votes" : 3, "voter_ids" : [ { "$oid" : "4c4b1476238d3b4dd5000041" }, { "$oid" : "7a4f0376238d3b4dd5000003" }, { "$oid" : "92c21476238d3b4dd5000032" } ] } 2 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000042" }, "product_id" : { "$oid" : "4c4b1476238d3b4dd5003982" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000001" }, "rating" : 3, "helpful_votes" : 7 } 3 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000043" }, "product_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "rating" : 5, "helpful_votes" : 7 } 4 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000044" }, "product_id" : { "$oid" : "4c4b1476238d3b4dd5003982" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "rating" : 4, "helpful_votes" : 1 } 5 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000045" }, "product_id" : { "$oid" : "4c4b1476238d3b4dd5003981" }, "user_id" : { "$oid" : "4c4b1476238d3b4dd5000003" }, "rating" : 4, "helpful_votes" : 10 } 6 | -------------------------------------------------------------------------------- /chapter6/data/garden.users.json: -------------------------------------------------------------------------------- 1 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000001" }, "username" : "kbanker", "email" : "kylebanker@gmail.com", "first_name" : "Kyle", "last_name" : "Banker", "hashed_password" : "bd1cfa194c3a603e7186780824b04419", "addresses" : [ { "name" : "home", "street" : "588 5th Street", "city" : "Brooklyn", "state" : "NY", "zip" : 11215 }, { "name" : "work", "street" : "1 E. 
23rd Street", "city" : "New York", "state" : "NY", "zip" : 10010 } ], "payment_methods" : [ { "name" : "VISA", "last_four" : 2127, "crypted_number" : "43f6ba1dfda6b8106dc7", "expiration_date" : { "$date" : "2016-05-01T00:00:00.000-0700" } } ] } 2 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000002" }, "username" : "pbakkum", "first_name" : "Peter", "last_name" : "Bakkum" } 3 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000003" }, "username" : "thawkins", "first_name" : "Tim", "last_name" : "Hawkins" } 4 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000004" }, "username" : "sverch", "first_name" : "Shaun", "last_name" : "Verch" } 5 | { "_id" : { "$oid" : "4c4b1476238d3b4dd5000005" }, "username" : "dgarrett", "first_name" : "Doug", "last_name" : "Garrett" } 6 | -------------------------------------------------------------------------------- /chapter6/data/import.js: -------------------------------------------------------------------------------- 1 | // import data 2 | mongoimport --db garden --collection orders --drop --file garden.orders.json 3 | mongoimport --db garden --collection products --drop --file garden.products.json 4 | mongoimport --db garden --collection categories --drop --file garden.categories.json 5 | mongoimport --db garden --collection reviews --drop --file garden.reviews.json 6 | mongoimport --db garden --collection users --drop --file garden.users.json 7 | -------------------------------------------------------------------------------- /chapter7/code_7.1.js: -------------------------------------------------------------------------------- 1 | // 7.1 2 | 3 | // Modify with replacement 4 | 5 | // ************** insert data 6 | db.users.insert( 7 | { _id: ObjectId("4c4b1476238d3b4dd5003981"), 8 | username: "kbanker", 9 | email: "kylebanker@gmail.com", 10 | first_name: "Kyle", 11 | last_name: "Banker", 12 | hashed_password: "bd1cfa194c3a603e7186780824b04419", 13 | addresses: [ 14 | { 15 | name: "work", 16 | street: "1 E. 23rd Street", 17 | city: "New York", 18 | state: "NY", 19 | zip: 10010 20 | } 21 | ] 22 | } 23 | ) 24 | 25 | // ******* Update email address 26 | 27 | user_id = ObjectId("4c4b1476238d3b4dd5003981") 28 | doc = db.users.findOne({_id: user_id}) 29 | doc['email'] = 'mongodb-user@mongodb.com' 30 | print('updating ' + user_id) 31 | db.users.update({_id: user_id}, doc) 32 | 33 | /* Result 34 | WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 35 | */ 36 | 37 | // check update 38 | db.users.findOne({_id: ObjectId("4c4b1476238d3b4dd5003981")}) 39 | 40 | /* result 41 | 42 | > db.users.findOne({_id: ObjectId("4c4b1476238d3b4dd5003981")}) 43 | { 44 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 45 | "username" : "kbanker", 46 | "email" : "mongodb-user@mongodb.com", 47 | "first_name" : "Kyle", 48 | "last_name" : "Banker", 49 | "hashed_password" : "bd1cfa194c3a603e7186780824b04419", 50 | "addresses" : [ 51 | { 52 | "name" : "work", 53 | "street" : "1 E. 
23rd Street", 54 | "city" : "New York", 55 | "state" : "NY", 56 | "zip" : 10010 57 | } 58 | ] 59 | } 60 | 61 | */ 62 | 63 | // ******************** update by operator 64 | user_id = ObjectId("4c4b1476238d3b4dd5003981") 65 | db.users.update({_id: user_id}, 66 | {$set: {email: 'mongodb-user2@mongodb.com'}}) 67 | 68 | /* result 69 | WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 70 | */ 71 | 72 | // check update 73 | db.users.findOne({_id: ObjectId("4c4b1476238d3b4dd5003981")},{email:1}) 74 | 75 | /* result 76 | { 77 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 78 | "email" : "mongodb-user2@mongodb.com" 79 | } 80 | */ 81 | 82 | // ************ from sidebar on syntax: updates vs. queries 83 | 84 | db.products.update({}, {$addToSet: {tags: 'green'}}) 85 | 86 | db.products.update({price: {$lte: 10}}, 87 | {$addToSet: {tags: 'cheap'}}) 88 | 89 | 90 | // ******* both methods compared 91 | product_id = ObjectId("4c4b1476238d3b4dd5003982") 92 | doc = db.products.findOne({_id: product_id}) 93 | doc['total_reviews'] += 1 // add 1 to the value in total_reviews 94 | db.products.update({_id: product_id}, doc) 95 | 96 | /* result 97 | WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 98 | */ 99 | 100 | // check results 101 | db.products.findOne({_id: product_id},{total_reviews:1}) 102 | 103 | // now use increment instead of program update 104 | product_id = ObjectId("4c4b1476238d3b4dd5003982") 105 | db.products.update({_id: product_id}, {$inc: {total_reviews: 1}}) 106 | 107 | // result: WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 108 | 109 | // check results 110 | db.products.findOne({_id: product_id},{total_reviews:1}) 111 | -------------------------------------------------------------------------------- /chapter7/code_7.2.js: -------------------------------------------------------------------------------- 1 | 2 | // ********************* 7.2.1 Products and categories 3 | 4 | // update review ratings 5 | 6 | product_id = ObjectId("4c4b1476238d3b4dd5003982") 7 | count = 0 8 | total = 0 9 | db.reviews.find({product_id: product_id}, {rating: 1}).forEach( 10 | function(review) { 11 | total += review.rating 12 | count++ 13 | }) 14 | average = total / count 15 | db.products.update({_id: product_id}, 16 | {$set: {total_reviews: count, average_review: average}}) 17 | 18 | // result: WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 19 | 20 | // check result 21 | db.products.findOne({_id: product_id},{total_reviews:1,average_review:1}) 22 | 23 | /* result 24 | { 25 | "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 26 | "total_reviews" : 2, 27 | "average_review" : 3.5 28 | } 29 | */ 30 | 31 | product_id = ObjectId("4c4b1476238d3b4dd5003982") 32 | average = 3.5 33 | total = 7 34 | 35 | db.products.update({_id: product_id}, 36 | { 37 | $set: { 38 | average_review: average, 39 | ratings_total: total 40 | }, 41 | $inc: { 42 | total_reviews: 1 43 | } 44 | }) 45 | 46 | // result: WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 47 | 48 | // read result 49 | db.products.findOne({_id: product_id},{total_reviews:1,ratings_total:1,average_review:1}) 50 | 51 | /* Results (total_reviews value may vary) 52 | { 53 | "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 54 | "total_reviews" : 3, 55 | "average_review" : 3.5, 56 | "ratings_total" : 7 57 | } 58 | */ 59 | 60 | // category hierarchy 61 | var generate_ancestors = function(_id, parent_id) { 62 | ancestor_list = [] 63 | var cursor = db.categories.find({_id: parent_id}) 64 | 
while(cursor.size() > 0) { 65 | parent = cursor.next() 66 | ancestor_list.push(parent) 67 | parent_id = parent.parent_id 68 | cursor = db.categories.find({_id: parent_id}) 69 | } 70 | db.categories.update({_id: _id}, {$set: {ancestors: ancestor_list}}) 71 | } 72 | 73 | // below will not run without defining a parent_id 74 | // need other data not currently in db to support this 75 | category = { 76 | parent_id: parent_id, 77 | slug: "gardening", 78 | name: "Gardening", 79 | description: "All gardening implements, tools, seeds, and soil." 80 | } 81 | db.categories.save(category) 82 | generate_ancestors(category._id, parent_id) 83 | 84 | // again, need an outdoors_id and more data 85 | db.categories.update({_id: outdoors_id}, {$set: {parent_id: gardening_id}}) 86 | 87 | db.categories.find({'ancestors.id': outdoors_id}).forEach( 88 | function(category) { 89 | generate_ancestors(category._id, outdoors_id) 90 | }) 91 | 92 | // ***** multi update option - use a fake outdoors_id to test syntax 93 | outdoors_id = ObjectId("4c4b1476238d3b4dd5003982") 94 | doc = db.categories.findOne({_id: outdoors_id}) 95 | doc.name = "The Great Outdoors" 96 | db.categories.update({_id: outdoors_id}, doc) 97 | db.categories.update( 98 | {'ancestors._id': outdoors_id}, 99 | {$set: {'ancestors.$': doc}}, 100 | {multi: true}) 101 | 102 | 103 | // *** another update using the '$' operator 104 | db.users.update({ 105 | _id: ObjectId("4c4b1476238d3b4dd5003981"), 106 | 'addresses.name': 'work'}, 107 | {$set: {'addresses.$.street': '155 E 31st St.'}}) 108 | 109 | // results: WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 110 | 111 | // ***************** 7.2.2 Reviews 112 | 113 | db.reviews.update({_id: ObjectId("4c4b1476238d3b4dd5000041")}, { 114 | $push: { 115 | voter_ids: ObjectId("4c4b1476238d3b4dd5000001") 116 | }, 117 | $inc: { 118 | helpful_votes: 1 119 | } 120 | }) 121 | 122 | // results: WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 }) 123 | 124 | // **** better way to do update 125 | query_selector = { 126 | _id: ObjectId("4c4b1476238d3b4dd5000041"), 127 | voter_ids: { 128 | $ne: ObjectId("4c4b1476238d3b4dd5000001") 129 | } 130 | } 131 | 132 | db.reviews.update(query_selector, { 133 | $push: { 134 | voter_ids: ObjectId("4c4b1476238d3b4dd5000001") 135 | }, 136 | $inc : { 137 | helpful_votes: 1 138 | } 139 | }) 140 | 141 | // results: WriteResult({ "nMatched" : 0, "nUpserted" : 0, "nModified" : 0 }) 142 | // (because we did already add the vote) 143 | 144 | // ***************** 7.2.3 Orders 145 | 146 | cart_item = { 147 | _id: ObjectId("4c4b1476238d3b4dd5003981"), 148 | slug: "wheel-barrow-9092", 149 | sku: "9092", 150 | name: "Extra Large Wheel Barrow", 151 | pricing: { 152 | retail: 5897, 153 | sale: 4897 154 | } 155 | } 156 | 157 | selector = { 158 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 159 | state: 'CART' 160 | } 161 | 162 | update = { 163 | $inc: { 164 | sub_total: cart_item['pricing']['sale'] 165 | } 166 | } 167 | 168 | db.orders.update(selector, update, {upsert: true}) 169 | 170 | // find the order - use find(...) 
to verify there is only one 171 | db.orders.find(selector).pretty() 172 | 173 | 174 | // add item if it doesn't exist 175 | selector = {user_id: ObjectId("4c4b1476238d3b4dd5000001"), 176 | state: 'CART', 177 | 'line_items._id': 178 | {'$ne': cart_item._id} 179 | } 180 | 181 | update = {'$push': {'line_items': cart_item}} 182 | db.orders.update(selector, update) 183 | 184 | // increment the line item count (already updated order sub_total 185 | selector = { 186 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 187 | state: 'CART', 188 | 'line_items._id': ObjectId("4c4b1476238d3b4dd5003981") 189 | } 190 | update = { 191 | $inc: { 192 | 'line_items.$.quantity': 1 193 | } 194 | } 195 | db.orders.update(selector, update) 196 | 197 | // find the order again 198 | selector = { 199 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 200 | state: 'CART' 201 | } 202 | db.orders.find(selector).pretty() 203 | 204 | 205 | -------------------------------------------------------------------------------- /chapter7/code_7.3.js: -------------------------------------------------------------------------------- 1 | 2 | // **************************** 7.3.1 *******************8 3 | 4 | newDoc = db.orders.findAndModify({ 5 | query: { 6 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 7 | state: 'CART' 8 | }, 9 | update: { 10 | $set: { 11 | state: 'PRE-AUTHORIZE' 12 | } 13 | }, 14 | 'new': true 15 | }) 16 | 17 | /* returns: 18 | 19 | { 20 | "_id" : ObjectId("6a5b1476238d3b4dd5000048"), 21 | "user_id" : ObjectId("4c4b1476238d3b4dd5000001"), 22 | "purchase_data" : ISODate("2014-08-01T07:00:00Z"), 23 | "state" : "PRE-AUTHORIZE", 24 | "line_items" : [ 25 | { 26 | "_id" : ObjectId("4c4b1476238d3b4dd5003981"), 27 | "sku" : "9092", 28 | "name" : "Extra Large Wheel Barrow", 29 | "quantity" : 2, 30 | "pricing" : { 31 | "retail" : 5897, 32 | "sale" : 4897 33 | } 34 | }, 35 | { 36 | "_id" : ObjectId("4c4b1476238d3b4dd5003982"), 37 | "sku" : "10027", 38 | "name" : "Rubberized Work Glove, Black", 39 | "quantity" : 1, 40 | "pricing" : { 41 | "retail" : 1499, 42 | "sale" : 1299 43 | } 44 | } 45 | ], 46 | "shipping_address" : { 47 | "street" : "588 5th Street", 48 | "city" : "Brooklyn", 49 | "state" : "NY", 50 | "zip" : 11215 51 | }, 52 | "sub_total" : 6196, 53 | "tax" : 600 54 | } 55 | */ 56 | 57 | // mimic calculating order total and setting order total to 99000 58 | // not shown in book 59 | query = { 60 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 61 | state: "PRE-AUTHORIZE" 62 | } 63 | update = {"$set" : {total: 99000}} 64 | db.orders.update(query,update) 65 | 66 | // now set state to AUTHORIZING only if the order total has not changed 67 | oldDoc = db.orders.findAndModify({ 68 | query: { 69 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 70 | total: 99000, 71 | state: "PRE-AUTHORIZE" 72 | }, 73 | update: { 74 | '$set': { 75 | state: "AUTHORIZING" 76 | } 77 | } 78 | }) 79 | 80 | // and finally pre-shipping 81 | auth_doc = { 82 | ts: new Date(), 83 | cc: 3432003948293040, 84 | id: 2923838291029384483949348, 85 | gateway: "Authorize.net" 86 | } 87 | db.orders.findAndModify({ 88 | query: { 89 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 90 | state: "AUTHORIZING" 91 | }, 92 | update: { 93 | $set: { 94 | state: "PRE-SHIPPING", 95 | authorization: auth_doc 96 | } 97 | } 98 | }) 99 | 100 | // check the results - get the new order 101 | query = { 102 | user_id: ObjectId("4c4b1476238d3b4dd5000001"), 103 | state: "PRE-SHIPPING" 104 | } 105 | 106 | db.orders.find(query).pretty() 107 | 108 | 109 | 110 | 111 | 
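
// Minimal illustrative sketch (values invented, not part of the chapter data): with the
// options used above, findAndModify returns null when no document matches its query, so a
// transition attempted against a stale total or state can be detected and retried.
staleDoc = db.orders.findAndModify({
    query: {
        user_id: ObjectId("4c4b1476238d3b4dd5000001"),
        total: 88000,                // invented stale total, matches no order
        state: "PRE-AUTHORIZE"
    },
    update: {$set: {state: "AUTHORIZING"}}
})

if (staleDoc === null) {
    print("no order matched in that state, so retry from the PRE-AUTHORIZE step")
}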
-------------------------------------------------------------------------------- /chapter7/code_7.4.js: -------------------------------------------------------------------------------- 1 | 2 | // ************* 7.4.1 3 | 4 | // below SHOULD fail 5 | db.products.update({}, {name: "Pitchfork", "$addToSet": {tags: 'cheap'}}) 6 | 7 | // correct version - though you'd never want to do this! 8 | db.products.update({}, 9 | {$set: {name: "Pitchfork"}, $addToSet: {tags: 'cheap'}}) 10 | 11 | // multiple document updates 12 | 13 | db.products.update({}, {$addToSet: {tags: 'cheap'}}, {multi: true}) 14 | 15 | // to see results: 16 | db.products.find().pretty() 17 | 18 | // upserts 19 | 20 | db.products.update({slug: 'hammer'}, {$addToSet: {tags: 'cheap'}}, {upsert: true}) 21 | 22 | // to see results: 23 | db.products.find().pretty() 24 | 25 | 26 | // **************** 7.4.2 update parameters 27 | 28 | // $inc 29 | db.products.update({slug: "shovel"}, {$inc: {review_count: 1}}) 30 | db.users.update({username: "moe"}, {$inc: {password_retires: -1}}) 31 | 32 | db.readings.update({_id: 324}, {$inc: {temp: 2.7435}}) 33 | 34 | db.readings.update({_id: 324}, {$inc: {temp: 2.7435}}, {upsert: true}) 35 | 36 | // see result 37 | db.readings.findOne({_id: 324}) 38 | 39 | // $set and $unset 40 | db.readings.update({_id: 324}, {$set: {temp: 97.6}}) 41 | db.readings.update({_id: 325}, {$set: {temp: {f: 212, c: 100}}}) 42 | db.readings.update({_id: 326}, {$set: {temps: [97.6, 98.4, 99.1]}}) 43 | 44 | db.readings.update({_id: 324}, {$unset: {temp: 1}}) 45 | 46 | // ***************** insert some test data ************** 47 | // rerun this to "reset" test data - useful between some calls 48 | db.readings.remove({_id: 324}) 49 | db.readings.remove({_id: 325}) 50 | db.readings.remove({_id: 326}) 51 | db.readings.insert([{_id: 324, 'temp': 2.7435}, 52 | {_id: 325, 'temp': {f: 212, c: 100}}, 53 | {_id: 326, temps: [97.6, 98.4, 99.1]}]) 54 | 55 | // see results 56 | db.readings.find().pretty() 57 | 58 | // examples of $unset and $pop 59 | db.readings.update({_id: 325}, {$unset: {'temp.f': 1}}) 60 | db.readings.update({_id: 326}, {$pop: {temps: -1}}) 61 | 62 | // using $unset with arrays - sidebar 63 | db.readings.update({_id: 325}, {$unset: {'temp.f': 1}}) 64 | db.readings.update({_id: 326}, {$unset: {'temps.0': 1}}) 65 | 66 | // $rename 67 | db.readings.update({_id: 324}, {$rename: {'temp': 'temperature'}}) 68 | db.readings.update({_id: 325}, {$rename: {'temp.f': 'temp.fahrenheit'}}) 69 | 70 | // $setOnInsert 71 | db.products.remove({slug: 'hammer'}) 72 | db.products.update({slug: 'hammer'}, { 73 | $inc: { 74 | qty: 1 75 | }, 76 | $setOnInsert: { 77 | state: 'AVAILABLE' 78 | } 79 | }, {upsert: true}) 80 | 81 | // see result 82 | db.products.findOne({slug: 'hammer'}) 83 | 84 | 85 | // ******* Array Update Parameters 86 | 87 | // $push, $pushAll, and $each 88 | db.products.insert({slug: 'shovel'}) 89 | db.products.update({slug: 'shovel'}, {$push: {tags: 'tools'}}) 90 | 91 | db.products.find({slug: 'shovel'}).pretty() 92 | 93 | db.products.update({slug: 'shovel'}, 94 | {$push: {tags: {$each: ['tools', 'dirt', 'garden']}}}) 95 | 96 | db.products.remove({slug: 'shovel'}) 97 | db.products.insert({slug: 'shovel'}) 98 | 99 | db.products.update({slug: 'shovel'}, 100 | {$pushAll: {'tags': ['tools', 'dirt', 'garden']}}) 101 | 102 | // $slice 103 | // Create document Document 104 | db.temps.remove({}) 105 | db.temps.insert( 106 | { 107 | _id: 326, 108 | temps: [92, 93, 94] 109 | }) 110 | 111 | db.temps.find().pretty() 112 | 113 | 
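// The update below appends 95 and 96 via $each and then applies $slice: -4, so only the
// last four readings survive: temps goes from [92, 93, 94] to [93, 94, 95, 96], keeping a
// fixed-size window of the most recent values entirely on the server.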
db.temps.update({_id: 326}, { 114 | $push: { 115 | temps: { 116 | $each: [95, 96], 117 | $slice: -4 118 | } 119 | } 120 | }) 121 | 122 | // get results 123 | db.temps.find({_id: 326}) 124 | 125 | /* should look like this 126 | { 127 | _id: 326, 128 | temps: [93, 94, 95, 96] 129 | } 130 | 131 | */ 132 | 133 | // $sort 134 | db.temps.remove({_id: 300}) 135 | db.temps.insert( 136 | { 137 | _id: 300, 138 | temps: [ 139 | { day: 6, temp: 90 }, 140 | { day: 5, temp: 95 } 141 | ] 142 | } 143 | ) 144 | 145 | db.temps.update({_id: 300}, { 146 | $push: { 147 | temps: { 148 | $each: [ 149 | { day: 7, temp: 92 } 150 | ], 151 | $slice: -2, 152 | $sort: { 153 | day: 1 154 | } 155 | } 156 | } 157 | }) 158 | 159 | // get results 160 | db.temps.find({_id: 300}) 161 | 162 | /* should look like this 163 | { 164 | _id: 300, 165 | temps: [ 166 | { day: 6, temp: 90 }, 167 | { day: 7, temp: 92 } 168 | ] 169 | } 170 | */ 171 | 172 | // $addToSet and $each 173 | db.products.update({slug: 'shovel'}, {$addToSet: {'tags': 'tools'}}) 174 | 175 | db.products.update({slug: 'shovel'}, 176 | {$addToSet: {tags: {$each: ['tools', 'dirt', 'steel']}}}) 177 | 178 | // $pop 179 | db.products.update({slug: 'shovel'}, {$pop: {'tags': 1}}) 180 | 181 | db.products.update({slug: 'shovel'}, {$pop: {'tags': -1}}) 182 | 183 | db.products.find({slug: 'shovel'}).pretty() 184 | 185 | // $bit 186 | // add test example 187 | db.permissions.remove({_id: 16,}) 188 | db.permissions.insert({ 189 | _id: 16, 190 | permissions: NumberInt(4) 191 | }) 192 | 193 | db.permissions.update({_id: 16}, {$bit: {permissions: {or: NumberInt(2)}}}) 194 | 195 | // get results 196 | db.permissions.find({_id: 16}) 197 | 198 | /* should get: 199 | { 200 | _id: 16, 201 | permissions: 6 202 | } 203 | */ 204 | 205 | // $pull and $pullAll 206 | db.products.update({slug: 'shovel'}, 207 | {$pullAll: {'tags': ['dirt', 'garden']}}) 208 | 209 | // example data 210 | db.readings.remove({_id: 326}) 211 | db.readings.insert({_id: 326, temps: [97.6, 98.4, 100.5, 99.1, 101.2]}) 212 | 213 | db.readings.update({_id: 326}, {$pull: {temps: {$gt: 100}}}) 214 | 215 | // get result 216 | db.readings.find({_id: 326}) 217 | 218 | // should get {_id: 326, temps: [97.6, 98.4, 99.1]} 219 | 220 | // ************ positional updates 221 | 222 | // insert example 223 | db.orders.remove({_id: ObjectId("6a5b1476238d3b4dd5000048")}) 224 | db.orders.insert( 225 | { 226 | _id: ObjectId("6a5b1476238d3b4dd5000048"), 227 | line_items: [ 228 | { 229 | _id: ObjectId("4c4b1476238d3b4dd5003981"), 230 | sku: "9092", 231 | name: "Extra Large Wheel Barrow", 232 | quantity: 1, 233 | pricing: { 234 | retail: 5897, 235 | sale: 4897 236 | } 237 | }, 238 | { 239 | _id: ObjectId("4c4b1476238d3b4dd5003981"), 240 | sku: "10027", 241 | name: "Rubberized Work Glove, Black", 242 | quantity: 2, 243 | pricing: { 244 | retail: 1499, 245 | sale: 1299 246 | } 247 | } 248 | ] 249 | } 250 | ) 251 | 252 | // find it 253 | db.orders.find(query).pretty() 254 | 255 | // example 256 | query = { 257 | _id: ObjectId("6a5b1476238d3b4dd5000048"), 258 | 'line_items.sku': "10027" 259 | } 260 | update = { 261 | $set: { 262 | 'line_items.$.quantity': 5 263 | } 264 | } 265 | db.orders.update(query, update) 266 | 267 | // get result 268 | db.orders.find({_id: ObjectId("6a5b1476238d3b4dd5000048")}).pretty() 269 | 270 | // ********************* 7.4.3 Find and Modify 271 | 272 | doc = db.orders.findAndModify({ 273 | query: { 274 | user_id: ObjectId("6a5b1476238d3b4dd5000048"), 275 | }, 276 | update: { 277 | $set: { 278 | state: "AUTHORIZING" 
279 | } 280 | } 281 | }) 282 | 283 | // ************** 7.4.4 Deletes 284 | 285 | db.reviews.remove({user_id: ObjectId('4c4b1476238d3b4dd5000001')}) 286 | 287 | db.reviews.remove({}) 288 | 289 | 290 | // ************************* 7.4.5 concurrency and isolation 291 | 292 | db.reviews.remove({user_id: ObjectId('4c4b1476238d3b4dd5000001'), 293 | $isolated: true}) 294 | 295 | db.reviews.update({$isolated: true}, {$set: {rating: 0}}, {multi: true}) 296 | 297 | 298 | 299 | // *********************** 7.4.6 Update Performance 300 | db.tweets.stats() 301 | 302 | // or more realistic 303 | db.orders.stats() 304 | 305 | 306 | 307 | 308 | 309 | 310 | -------------------------------------------------------------------------------- /chapter7/connect.rb: -------------------------------------------------------------------------------- 1 | require 'rubygems' 2 | require 'mongo' 3 | require './inventory_fetcher.rb' 4 | 5 | $client = Mongo::Client.new([ '127.0.0.1:27017' ], :database => 'tutorial') 6 | Mongo::Logger.logger.level = ::Logger::ERROR 7 | 8 | $users = $client[:users] 9 | $inventory = $client[:inventory] 10 | $orders = $client[:orders] 11 | 12 | AVAILABLE = 0 13 | IN_CART = 1 14 | PRE_ORDER = 2 15 | PURCHASED = 3 16 | 17 | puts 'connected!' 18 | -------------------------------------------------------------------------------- /chapter7/inventory_fetcher.rb: -------------------------------------------------------------------------------- 1 | class InventoryFetcher 2 | 3 | def initialize(arg) 4 | @inventory = arg[:inventory] 5 | @orders = arg[:orders] 6 | 7 | unless @inventory && @orders 8 | raise 'must initialize with :inventory and :orders arguments' 9 | end 10 | end 11 | 12 | def add_to_cart(order_id, *items) 13 | item_selectors = [] 14 | items.each do |item| 15 | item[:quantity].times do 16 | item_selectors << {:sku => item[:sku]} 17 | end 18 | end 19 | 20 | transition_state(order_id, item_selectors, {:from => AVAILABLE, :to => IN_CART}) 21 | end 22 | 23 | def transition_state(order_id, selectors, opts={}) 24 | items_transitioned = [] 25 | begin # use a begin/end block so we can do error recovery 26 | 27 | for selector in selectors do 28 | query = selector.merge({:state => opts[:from]}) 29 | physical_item = @inventory.find_and_modify({ 30 | :query => query, 31 | :update => { 32 | '$set' => { 33 | :state => opts[:to], # target state 34 | :ts => Time.now.utc # get the current client time 35 | } 36 | } 37 | }) 38 | 39 | if physical_item.nil?
40 | raise InventoryFetchFailure 41 | end 42 | 43 | items_transitioned << physical_item['_id'] # push item into array 44 | @orders.update({:_id => order_id}, { 45 | '$push' => { 46 | :item_ids => physical_item['_id'] 47 | } 48 | }) 49 | end # of for loop 50 | 51 | rescue Mongo::OperationFailure, InventoryFetchFailure 52 | rollback(order_id, items_transitioned, opts[:from], opts[:to]) 53 | raise InventoryFetchFailure, "Failed to add #{selector[:sku]}" 54 | end 55 | 56 | return items_transitioned.size 57 | end 58 | 59 | def rollback(order_id, item_ids, old_state, new_state) 60 | @orders.update({"_id" => order_id}, 61 | {"$pullAll" => {:item_ids => item_ids}}) 62 | 63 | item_ids.each do |id| 64 | @inventory.find_and_modify({ 65 | :query => { 66 | "_id" => id, 67 | :state => new_state 68 | } 69 | }, 70 | { 71 | :update => { 72 | "$set" => { 73 | :state => old_state, 74 | :ts => Time.now.utc 75 | } 76 | } 77 | }) 78 | end 79 | end 80 | 81 | end 82 | -------------------------------------------------------------------------------- /chapter8/code_8.x.js: -------------------------------------------------------------------------------- 1 | 2 | //***************************** 8.1.2 Core Indexing Concepts 3 | 4 | db.products.find({ 5 | 'details.manufacturer': 'Acme', 6 | 'pricing.sale': { 7 | $lt: 7500 8 | } 9 | }) 10 | 11 | //***************************** 8.2.1 Index Types 12 | 13 | //****** add index for unique on user name 14 | db.users.ensureIndex({username: 1}, {unique: true}) 15 | 16 | // test index 17 | db.users.insert({username: "kbanker"}) 18 | 19 | //****** drop dups example index 20 | 21 | // first drop previous index 22 | db.users.dropIndex({username: 1}) 23 | 24 | // can also use following to get index name and drop by name 25 | db.users.getIndexes() 26 | db.users.dropIndex("username_1") 27 | 28 | // add a duplicate username and show the two users with username "kbanker" 29 | db.users.insert({username: "kbanker"}) 30 | db.users.find({username: "kbanker"}).pretty() 31 | 32 | // now add the drop dups example 33 | db.users.ensureIndex({username: 1}, {unique: true, dropDups: true}) 34 | 35 | // see which duplicate user was dropped 36 | db.users.find({username: "kbanker"}).pretty() 37 | 38 | // drop the index 39 | db.users.dropIndex("username_1") 40 | 41 | //******* sparse indexes 42 | // create two products with only a name 43 | db.products.insert([{name: "test 1"},{name: "test 2"}]) 44 | 45 | // find products with null category (should be only the two just inserted) 46 | db.products.find({category_ids: null}) 47 | 48 | // try adding an index on sku WITHOUT making it sparse - should fail 49 | db.products.ensureIndex({sku: 1}, {unique: true}) 50 | 51 | // now add the sparse index - should work 52 | db.products.ensureIndex({sku: 1}, {unique: true, sparse: true}) 53 | 54 | // drop the index 55 | db.products.dropIndex("sku_1") 56 | 57 | // also remove the two products just added 58 | db.products.remove({sku: null}) 59 | 60 | // user_id in review example. 
61 | // NOTE: do NOT want unique, or else a user would only be able to write one review in total 62 | db.reviews.ensureIndex({user_id: 1}, {sparse: true, unique: false}) 63 | 64 | // another helpful way to find ALL indexes for a database 65 | db.system.indexes.find() 66 | 67 | // now drop the one just created 68 | db.reviews.dropIndex("user_id_1") 69 | 70 | //******* hashed indexes 71 | db.products.findOne({name: "Extra Large Wheel Barrow"}) 72 | 73 | db.recipes.ensureIndex({recipe_name: 'hashed'}) 74 | 75 | 76 | //***************************** 8.2.2 Index Administration 77 | 78 | 79 | // *** create the index on green.users 80 | use green 81 | spec = {ns: "green.users", key: {'addresses.zip': 1}, name: 'zip'} 82 | db.system.indexes.insert(spec, true) 83 | 84 | // *** delete an index using the runCommand 85 | db.runCommand({deleteIndexes: "users", index: "zip"}) 86 | 87 | // *** Now create one 88 | db.users.ensureIndex({zip: 1}) 89 | 90 | // *** and see the results 91 | db.users.getIndexes() 92 | 93 | // *** drop the index (still in the green database) 94 | db.users.dropIndex("zip_1") 95 | 96 | // *********************** building indexes 97 | db.values.ensureIndex({open: 1, close: 1}) 98 | 99 | // *** won't show anything since we're not running a long running index build 100 | db.currentOp() 101 | 102 | // ********************** background indexing 103 | // drop the previous version of the index first 104 | db.values.dropIndex({open:1,close:1}) 105 | 106 | // now the background index 107 | db.values.ensureIndex({open: 1, close: 1}, {background: true}) 108 | 109 | // *********************** defragmenting 110 | db.values.reIndex(); 111 | 112 | 113 | //***************************** 8.3.1 Identifying Slow Queries 114 | 115 | // download from http://mng.bz/ii49 116 | mongorestore -d stocks dump/stocks 117 | 118 | /* might see at the start... 119 | 2015-01-02T11:41:54.221-0800 dump/stocks/values.bson 120 | 2015-01-02T11:41:54.221-0800 going into namespace [stocks.values] 121 | 2015-01-02T11:41:54.222-0800 dump/stocks/values.metadata.json not found. Skipping. 122 | */ 123 | // followed by ... 124 | /* .. at the end 125 | 4308303 objects found 126 | 2015-01-02T11:44:47.328-0800 dump/stocks/system.indexes.bson 127 | 2015-01-02T11:44:47.329-0800 going into namespace [stocks.system.indexes] 128 | Restoring to stocks.system.indexes without dropping. Restored data will be inserted without raising errors; check your server log 129 | 2015-01-02T11:44:47.992-0800 Creating index: { name: "_id_", ns: "stocks.values", key: { _id: 1 } } 130 | 1 objects found 131 | */ 132 | 133 | //**** Long-running query (slow enough to show up in the log) 134 | use stocks 135 | db.values.find({"stock_symbol": "GOOG"}).sort({date: -1}).limit(1) 136 | 137 | // log location may vary.
On Ubuntu Server: /var/log/mongodb/mongod.log 138 | grep -E '([0-9])+ms' mongod.log 139 | grep -E '[0-9]+ms' mongod.log 140 | 141 | //*** Using Profiler 142 | db.setProfilingLevel(2) 143 | 144 | db.setProfilingLevel(1, 50) 145 | 146 | db.values.find({}).sort({close: -1}).limit(1) 147 | 148 | //**** Profiling Results 149 | 150 | db.system.profile.find({millis: {$gt: 150}}) 151 | 152 | db.system.profile.find().sort({$natural: -1}).limit(5).pretty() 153 | 154 | //**** Using and understanding Explain 155 | 156 | db.values.find({}).sort({close: -1}).limit(1).explain() 157 | 158 | db.values.count() 159 | 160 | //**** Adding an index 161 | db.values.ensureIndex({close: 1}) 162 | 163 | db.values.find({}).sort({close: -1}).limit(1).explain() 164 | 165 | /* **** AFTER index 166 | { 167 | "cursor" : "BtreeCursor close_1 reverse", 168 | "isMultiKey" : false, 169 | "n" : 1, 170 | "nscannedObjects" : 1, 171 | "nscanned" : 1, 172 | "nscannedObjectsAllPlans" : 1, 173 | "nscannedAllPlans" : 1, 174 | "scanAndOrder" : false, 175 | "indexOnly" : false, 176 | "nYields" : 0, 177 | "nChunkSkips" : 0, 178 | "millis" : 18, 179 | "indexBounds" : { 180 | "close" : [ 181 | [ 182 | { 183 | "$maxElement" : 1 184 | }, 185 | { 186 | "$minElement" : 1 187 | } 188 | ] 189 | ] 190 | }, 191 | "server" : "localhost:27017", 192 | "filterSet" : false 193 | } 194 | */ 195 | 196 | //**** Using an indexed key 197 | db.values.find({close: {$gt: 500}}).explain() 198 | 199 | //**** MongoDB query optimizer and hint() 200 | db.values.find({stock_symbol: "GOOG", close: {$gt: 200}}) 201 | db.values.find({stock_symbol: "GOOG", close: {$gt: 200}}).explain() 202 | 203 | // now add index 204 | db.values.ensureIndex({stock_symbol: 1, close: 1}) 205 | db.values.find({stock_symbol: "GOOG", close: {$gt: 200}}).explain() 206 | 207 | db.values.getIndexKeys() 208 | 209 | // showing query plans 210 | db.values.dropIndex("stock_symbol_1_close_1") 211 | db.values.ensureIndex({stock_symbol: 1}) 212 | db.values.ensureIndex({close: 1}) 213 | 214 | db.values.find({stock_symbol: "GOOG", close: {$gt: 200}}).explain(true) 215 | 216 | query = {stock_symbol: "GOOG", close: {$gt: 200}} 217 | db.values.find(query).hint({close: 1}).explain() 218 | 219 | 220 | //***************************** 8.3.3 Query Patterna 221 | 222 | //**** Single key indexes 223 | db.values.find({close: 100}) 224 | db.values.find({}).sort({close: 1}) 225 | db.values.find({close: {$gte: 100}}) 226 | 227 | db.values.find({close: {$gte: 100}}).sort({close: 1}).explain() 228 | 229 | //**** multikey index 230 | 231 | // to be sure, you can create a new index and drop others 232 | db.values.ensureIndex({close: 1, open: 1, date: 1}) 233 | 234 | db.values.dropIndex("date_1") 235 | db.values.dropIndex("stock_symbol_1") 236 | db.values.dropIndex("close_1") 237 | db.values.getIndexes() 238 | 239 | 240 | db.values.find({}).sort({close: 1}) 241 | db.values.find({close: {$gt: 1}}) 242 | db.values.find({close: 100}).sort({open: 1}) 243 | db.values.find({close: 100, open: {$gt: 1}}) 244 | db.values.find({close: 1, open: 1.01, date: {$gt: "2005-01-01"}}) 245 | db.values.find({close: 1, open: 1.01}).sort({date: 1}) 246 | 247 | // or add explain to verify index is used - 248 | // WARNING - may take a while to run some of these 249 | db.values.find({}).sort({close: 1}).explain() 250 | db.values.find({close: {$gt: 1}}).explain() 251 | db.values.find({close: 100}).sort({open: 1}).explain() 252 | db.values.find({close: 100, open: {$gt: 1}}).explain() 253 | db.values.find({close: 1, open: 1.01, date: {$gt: 
"2005-01-01"}}).explain() 254 | db.values.find({close: 1, open: 1.01}).sort({date: 1}).explain() 255 | 256 | //**** Covering indexes 257 | db.ensureIndex({close: 1, open:1, date: 1}) 258 | db.values.find({close: 1}, {open: 1, close: 1, date: 1, _id: 0}).explain() 259 | 260 | -------------------------------------------------------------------------------- /chapter9/9.2_import_data.txt: -------------------------------------------------------------------------------- 1 | 2 | mongoimport --db catalog --collection books --type json --drop --file catalog.books.json -------------------------------------------------------------------------------- /chapter9/code_9.1_search_intro.js: -------------------------------------------------------------------------------- 1 | // first the index 2 | db.products.ensureIndex( 3 | {name: 'text', 4 | description: 'text', 5 | tags: 'text'} 6 | ); 7 | 8 | db.products.find({$text: {$search: 'gardens'}},{_id:0, name:1,description:1,tags:1}).pretty() 9 | 10 | // result 11 | /* 12 | > db.products 13 | .find({$text: {$search: 'gardens'}},{_id:0, name:1,description:1,tags:1}) 14 | .pretty() 15 | 16 | { 17 | "name" : "Rubberized Work Glove, Black", 18 | "description" : "Black Rubberized Work Gloves...", 19 | "tags" : [ 20 | "gardening" 21 | ] 22 | } 23 | { 24 | "name" : "Extra Large Wheel Barrow", 25 | "description" : "Heavy duty wheel barrow...", 26 | "tags" : [ 27 | "tools", 28 | "gardening", 29 | "soil" 30 | ] 31 | } 32 | */ 33 | 34 | -------------------------------------------------------------------------------- /chapter9/code_9.3_indexes.js: -------------------------------------------------------------------------------- 1 | // just one field 2 | 3 | // SEE ALSO: 7.2.2 on creating and deleting indexes. 4 | 5 | 6 | // if NO name 7 | db.books.ensureIndex( 8 | {title: 'text', 9 | shortDescription: 'text', 10 | longDescription: 'text', 11 | authors: 'text', 12 | categories: 'text'}, 13 | 14 | {weights: 15 | {title: 10, 16 | shortDescription: 1, 17 | longDescription:1, 18 | authors: 1, 19 | categories: 5} 20 | } 21 | ); 22 | 23 | // take a look at the index 24 | // 1. amount of space 25 | // 2. long index name 26 | 27 | db.books.stats() 28 | 29 | // /* listing 9.2 30 | // 31 | // > db.books.stats() 32 | // { 33 | // "ns" : "catalog.books", 34 | // "count" : 431, 35 | // "size" : 772368, 36 | // "avgObjSize" : 1792, 37 | // "storageSize" : 2793472, 38 | // "numExtents" : 5, 39 | // "nindexes" : 2, 40 | // "lastExtentSize" : 2097152, 41 | // "paddingFactor" : 1, 42 | // "systemFlags" : 0, 43 | // "userFlags" : 1, 44 | // "totalIndexSize" : 858480, 45 | // "indexSizes" : { 46 | // "_id_" : 24528, 47 | // "title_text_shortDescription_text_longDescription_text_authors_text_categories_text" : 833952 48 | // }, 49 | // "ok" : 1 50 | // } 51 | // */ 52 | 53 | 54 | // THEN builds name based on fields indexed 55 | // in this case: 56 | 57 | // '$**': 'text' 58 | 59 | db.books.dropIndex('title_text_shortDescription_text_longDescription_text_authors_text_categories_text') 60 | 61 | // WARNING: names is getting pretty long: 82 characters, max is 128 (including db name, etc.) 
62 | // adding everything (database + collection name) it is 96 characters: 63 | // catalog.books.title_text_shortDescription_text_longDescription_text_authors_text_categories_text 64 | // 65 | // Good practice: assign a name (see next example) 66 | 67 | // use wildcard specification instead 68 | // as well as specify an index name 69 | db.books.ensureIndex( 70 | {'$**': 'text'}, 71 | 72 | {weights: 73 | {title: 10, 74 | categories: 5}, 75 | 76 | name : 'books_text_index' 77 | } 78 | ); 79 | 80 | // ***** NOTES: 81 | // 1. will get weighting of categories and titles just based on few words in those fields 82 | // 2. can not change weights once index built - have to drop it and then redefine it 83 | // ************** 84 | 85 | // /* Results: 86 | // 87 | // > db.books.ensureIndex( 88 | // ... {'$**': 'text'}, 89 | // ... 90 | // ... {weights: 91 | // ... {title: 10, 92 | // ... categories: 5}, 93 | // ... 94 | // ... name : 'books_text_index' 95 | // ... } 96 | // ... ); 97 | // { 98 | // "createdCollectionAutomatically" : false, 99 | // "numIndexesBefore" : 1, 100 | // "numIndexesAfter" : 2, 101 | // "ok" : 1 102 | // } 103 | // 104 | // */ 105 | 106 | // ***************************************************** 107 | // Use below to compare with indexes in chapter 8 108 | // ***************************************************** 109 | 110 | // to show indexes, including the name: 111 | db.books.getIndexes() 112 | 113 | // /* Results 114 | // > db.books.getIndexes() 115 | // [ 116 | // { 117 | // "v" : 1, 118 | // "key" : { 119 | // "_id" : 1 120 | // }, 121 | // "name" : "_id_", 122 | // "ns" : "catalog.books" 123 | // }, 124 | // { 125 | // "v" : 1, 126 | // "key" : { 127 | // "_fts" : "text", 128 | // "_ftsx" : 1 129 | // }, 130 | // "name" : "books_text_index", 131 | // "ns" : "catalog.books", 132 | // "weights" : { 133 | // "$**" : 1, 134 | // "categories" : 5, 135 | // "title" : 10 136 | // }, 137 | // "default_language" : "english", 138 | // "language_override" : "language", 139 | // "textIndexVersion" : 2 140 | // } 141 | // ] 142 | // 143 | // */ 144 | 145 | // find name of index, etc. 146 | db.books.stats() 147 | 148 | // Explain default_language, language_override 149 | // /* See how large the text index is: 150 | // > db.books.stats() 151 | // { 152 | // "ns" : "catalog.books", 153 | // "count" : 431, 154 | // "size" : 769552, 155 | // "avgObjSize" : 1785, 156 | // "storageSize" : 2793472, 157 | // "numExtents" : 5, 158 | // "nindexes" : 2, 159 | // "lastExtentSize" : 2097152, 160 | // "paddingFactor" : 1, 161 | // "systemFlags" : 0, 162 | // "userFlags" : 1, 163 | // "totalIndexSize" : 1005648, 164 | // "indexSizes" : { 165 | // "_id_" : 24528, 166 | // "books_text_index" : 981120 167 | // }, 168 | // "ok" : 1 169 | // } 170 | // */ 171 | 172 | 173 | //****************************************** 174 | // NOTE: that the index is even larger than the data 175 | // - "books_text_index" : 981120 vs "size" : 769552, 176 | // 177 | // Understandable if indexing all of your text 178 | // - it has to create an index entry for each word, 179 | // - less the stop words and 180 | // - less stemming? 
(which can be shorter - script vs scripting) 181 | // 182 | 183 | -------------------------------------------------------------------------------- /chapter9/code_9.4_find.js: -------------------------------------------------------------------------------- 1 | // super simple search - case insensitive, searches all text fields (v2) 2 | db.books.find({$text: {$search: 'actions'}},{title:1}) 3 | 4 | { "_id" : 755, "title" : "MongoDB in Action, Second Edition" } 5 | { "_id" : 17, "title" : "MongoDB in Action" } 6 | 7 | 8 | 9 | // OR flavor search - plus stop words 10 | db.books.find({$text: {$search: 'MongoDB in Action'}},{title:1}) 11 | 12 | // results: 13 | 14 | // { "_id" : 256, "title" : "Machine Learning in Action" } 15 | // { "_id" : 146, "title" : "Distributed Agile in Action" } 16 | // { "_id" : 233, "title" : "PostGIS in Action" } 17 | // { "_id" : 17, "title" : "MongoDB in Action" } 18 | 19 | 20 | //********************************************************************** 21 | // 9.4.1 - complex searches - same as later section but without text scores 22 | //********************************************************************** 23 | 24 | 25 | db.books. 26 | find({$text: {$search: ' "mongodb" in action'}}, //A 27 | {_id:0, title:1}) 28 | 29 | /* RESULTS 30 | 31 | { "title" : "MongoDB in Action" } 32 | { "title" : "MongoDB in Action, Second Edition" } 33 | 34 | */ 35 | 36 | 37 | db.books. 38 | find({$text: {$search: ' "mongodb" "second edition" '}}, //A 39 | {_id:0, title:1}) 40 | 41 | 42 | db.books. 43 | find({$text: {$search: ' books '}}). 44 | count() 45 | 46 | db.books. 47 | find({$text: {$search: ' "books" '}}). 48 | count() 49 | 50 | 51 | db.books. 52 | find({$text: {$search: ' "book" '}}). 53 | count() 54 | 55 | // ****************** book alternative - developer 56 | 57 | 58 | db.books. 59 | find({$text: {$search: ' developers '}}). 60 | count() 61 | 62 | db.books. 63 | find({$text: {$search: ' "developers" '}}). 64 | count() 65 | 66 | db.books. 67 | find({$text: {$search: ' "developer" '}}). 68 | count() 69 | 70 | 71 | db.books. 72 | find({$text: {$search: ' developing '}}). 73 | count() 74 | 75 | 76 | // ******************************************************** 77 | 78 | db.books. 79 | find({$text: {$search: ' mongodb -second '}}, //A 80 | {_id:0, title:1}) 81 | 82 | db.books. 83 | find({$text: {$search: ' mongodb -"second edition" '}}, //A 84 | {_id:0, title:1}) 85 | 86 | db.books. 87 | find({$text: {$search: ' mongodb '}, status: 'MEAP' }, //A 88 | {_id:0, title:1, status:1}) 89 | 90 | // showing text search score for two different 91 | // search strings which are equivalent 92 | 93 | db.books. 94 | find({$text: {$search: 'mongodb in action'}}, //A 95 | {_id:0, title:1, score: { $meta: "textScore" }}). //B 96 | limit(4); 97 | 98 | 99 | 100 | db.books. 101 | find({$text: {$search: 'the mongodb and actions in it'}}, //D 102 | {_id:0, title:1, score: { $meta: "textScore" }}). 103 | limit(4); 104 | 105 | /******************************** RESULTS 106 | 107 | > db.books. 108 | ... find({$text: {$search: 'mongodb in action'}}, #A 109 | ... {_id:0, title:1, score: { $meta: "textScore" }}). #B 110 | ... limit(4); 111 | { "title" : "Machine Learning in Action", "score" : 16.83933933933934 } 112 | { "title" : "Distributed Agile in Action", "score" : 19.371088861076345 } 113 | { "title" : "PostGIS in Action", "score" : 17.67825896762905 } #C 114 | { "title" : "MongoDB in Action", "score" : 49.48653394500073 } 115 | > 116 | > 117 | > db.books. 118 | ... 
find({$text: {$search: 'the mongodb and actions in it'}}, #D 119 | ... {_id:0, title:1, score: { $meta: "textScore" }}). 120 | ... limit(4); 121 | { "title" : "Machine Learning in Action", "score" : 16.83933933933934 } 122 | { "title" : "Distributed Agile in Action", "score" : 19.371088861076345 } 123 | { "title" : "PostGIS in Action", "score" : 17.67825896762905 } #E 124 | { "title" : "MongoDB in Action", "score" : 49.48653394500073 } 125 | 126 | ******************************************* */ 127 | 128 | // sorting results by relevancy 129 | db.books. 130 | find({$text: {$search: 'mongodb in action'}}, //A 131 | {title:1, score: { $meta: "textScore" }}). //B 132 | sort({ score: { $meta: "textScore" } }) //C 133 | 134 | /* RESULTS 135 | 136 | { "_id" : 17, "title" : "MongoDB in Action", "score" : 49.48653394500073 } 137 | { "_id" : 186, "title" : "Hadoop in Action", "score" : 24.99910329985653 } 138 | { "_id" : 560, "title" : "HTML5 in Action", "score" : 23.02156177156177 } 139 | 140 | */ 141 | 142 | 143 | //********************************************************************** 144 | // - complex searches WITH text scores 145 | //********************************************************************** 146 | 147 | 148 | db.books. 149 | find({$text: {$search: ' "mongodb" in action'}}, //A 150 | {_id:0, title:1, score: { $meta: "textScore" }}) 151 | 152 | /* RESULTS 153 | 154 | { "title" : "MongoDB in Action", "score" : 49.48653394500073 } 155 | { "title" : "MongoDB in Action, Second Edition", "score" : 12.5 } 156 | 157 | */ 158 | 159 | 160 | // ADVANCED (not in book) 161 | // will search for all books with EITHER mongodb or books 162 | // then look at each document to see if it can find the word "mongodb" 163 | // Not always efficient - for example if I have the word 'book' in my search 164 | // string, it will search almost all of the books in my collection, since almost 165 | // all books have that word. 166 | // It will then find almost all of the books via the index, 167 | // then look for the word "mongo" in each book. 168 | // Seems like this could be made more efficient, so possibly change in future releases? 169 | 170 | 171 | db.books. 172 | find({$text: {$search: ' "mongodb" book'}}, //A 173 | {_id:0, title:1, score: { $meta: "textScore" }}).explain() 174 | 175 | 176 | /* RESULTS 177 | 178 | > db.books.count() 179 | 431 180 | > db.books. 181 | ... find({$text: {$search: ' "mongodb" book'}}, //A 182 | ... {_id:0, title:1, score: { $meta: "textScore" }}).explain() 183 | { 184 | "cursor" : "TextCursor", 185 | "n" : 2, 186 | "nscannedObjects" : 414, 187 | "nscanned" : 416, 188 | "nscannedObjectsAllPlans" : 414, 189 | "nscannedAllPlans" : 416, 190 | "scanAndOrder" : false, 191 | "nYields" : 6, 192 | "nChunkSkips" : 0, 193 | "millis" : 15, 194 | "server" : "D830J:27017", 195 | "filterSet" : false 196 | } 197 | 198 | */ 199 | 200 | 201 | db.books. 202 | find({$text: {$search: ' "mongodb" "second edition" '}}, //A 203 | {_id:0, title:1, score: { $meta: "textScore" }}) 204 | 205 | 206 | db.books. 207 | find({$text: {$search: ' books '}}). 208 | count() 209 | 210 | 211 | db.books. 212 | find({$text: {$search: ' "books" '}}). 213 | count() 214 | 215 | 216 | db.books. 217 | find({$text: {$search: ' "book" '}}). 218 | count() 219 | 220 | 221 | db.books. 222 | find({$text: {$search: ' mongodb -second '}}, //A 223 | {_id:0, title:1, score: { $meta: "textScore" }}) 224 | 225 | db.books. 
226 | find({$text: {$search: ' mongodb -"second edition" '}}, //A 227 | {_id:0, title:1, score: { $meta: "textScore" }}) 228 | 229 | db.books. 230 | find({$text: {$search: ' mongodb '}, status: 'MEAP' }, //A 231 | {_id:0, title:1, status:1, score: { $meta: "textScore" }}) -------------------------------------------------------------------------------- /chapter9/code_9.5_aggr.js: -------------------------------------------------------------------------------- 1 | /* Limits 2 | 3 | - The $match stage that includes a $text must be the first stage in the pipeline. 4 | - A text operator can only occur once in the stage. 5 | - The text operator expression cannot appear in $or or $not expressions. 6 | 7 | */ 8 | 9 | // previous example was 10 | db.books. 11 | find({$text: {$search: 'mongodb in action'}}, //A 12 | {title:1, score: { $meta: "textScore" }}). //B 13 | sort({ score: { $meta: "textScore" } }) //C 14 | 15 | 16 | // ****** SORT BY SCORE 17 | db.books.aggregate( 18 | [ 19 | { $match: { $text: { $search: 'mongodb in action' } } }, //A 20 | { $sort: { score: { $meta: 'textScore' } } }, //B 21 | { $project: { title: 1, score: { $meta: 'textScore' } } } //C 22 | ] 23 | ) 24 | 25 | /* Results 26 | { "_id" : 17, "title" : "MongoDB in Action", "score" : 49.48653394500073 } 27 | { "_id" : 186, "title" : "Hadoop in Action", "score" : 24.99910329985653 } 28 | { "_id" : 560, "title" : "HTML5 in Action", "score" : 23.02156177156177 } 29 | { "_id" : 197, "title" : "Erlang and OTP in Action", "score" : 22.069632021922096 } 30 | */ 31 | 32 | 33 | 34 | db.books.aggregate( 35 | [ 36 | { $match: { $text: { $search: 'mongodb in action' } } }, 37 | { $project: { title: 1, score: { $meta: 'textScore' } } }, 38 | { $sort: { score: -1 } } //C 39 | ] 40 | ) 41 | 42 | 43 | db.books.aggregate( 44 | [ 45 | { $match: { $text: { $search: ' "mongodb" in action ' } } }, 46 | { $project: {_id:0, title: 1, score: { $meta: 'textScore' } } } 47 | ] 48 | ) 49 | 50 | /* 51 | > db.books.aggregate( 52 | ... [ 53 | ... { $match: { $text: { $search: ' "mongodb" in action ' } } }, 54 | ... { $project: {_id:0, title: 1, score: { $meta: 'textScore' } } } 55 | ... ] 56 | ... ) 57 | { "title" : "MongoDB in Action", "score" : 49.48653394500073 } 58 | { "title" : "MongoDB in Action, Second Edition", "score" : 12.5 } 59 | 60 | */ 61 | 62 | db.books.findOne({"title" : "MongoDB in Action, Second Edition"}) 63 | 64 | db.books.aggregate( 65 | [ 66 | { $match: { $text: { $search: 'mongodb in action' } } }, 67 | 68 | { $project: { 69 | title: 1, 70 | score: { $meta: 'textScore' }, 71 | multiplier: { $cond: [ '$longDescription',1.0,3.0] } } //A 72 | }, 73 | 74 | { $project: { 75 | _id:0, title: 1, score: 1, multiplier: 1, 76 | adjScore: {$multiply: ['$score','$multiplier']}} //B 77 | }, 78 | 79 | { $sort: {adjScore: -1}} 80 | ] 81 | ); 82 | 83 | 84 | /* 85 | 86 | > db.books.aggregate( 87 | ... [ 88 | ... { $match: { $text: { $search: 'mongodb in action' } } }, 89 | ... 90 | ... { $project: { 91 | ... title: 1, 92 | ... score: { $meta: 'textScore' }, 93 | ... multiplier: { $cond: [ '$longDescription',1.0,3.0] } } //A 94 | ... }, 95 | ... 96 | ... { $project: { 97 | ... _id:0, title: 1, score: 1, multiplier: 1, 98 | ... adjScore: {$multiply: ['$score','$multiplier']}} //B 99 | ... }, 100 | ... 101 | ... { $sort: {adjScore: -1}} 102 | ... ] 103 | ... 
); 104 | 105 | { "title" : "MongoDB in Action", "score" : 49.48653394500073, "multiplier" : 1, "adjScore" : 49.48653394500073 } 106 | { "title" : "MongoDB in Action, Second Edition", "score" : 12.5, "multiplier" : 3, "adjScore" : 37.5 } //C 107 | { "title" : "Spring Batch in Action", "score" : 11.666666666666666, "multiplier" : 3, "adjScore" : 35 } 108 | { "title" : "Hadoop in Action", "score" : 24.99910329985653, "multiplier" : 1, "adjScore" : 24.99910329985653 } 109 | { "title" : "HTML5 in Action", "score" : 23.02156177156177, "multiplier" : 1, "adjScore" : 23.02156177156177 } 110 | 111 | 112 | */ -------------------------------------------------------------------------------- /chapter9/code_9.6_language.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakks/mongo-in-action-code/1fa0e0b151e64c94f70ba4d6c847e42871486273/chapter9/code_9.6_language.js -------------------------------------------------------------------------------- /chapter_storage/config.txt.conf: -------------------------------------------------------------------------------- 1 | storage: 2 | dbPath: "/data/db" 3 | journal: 4 | enabled: true 5 | engine: "wiredTiger" 6 | wiredTiger: 7 | engineConfig: 8 | cacheSizeGB: 8 9 | journalCompressor: none 10 | collectionConfig: 11 | blockCompressor: none 12 | indexConfig: 13 | prefixCompression: false 14 | 15 | 16 | -------------------------------------------------------------------------------- /chapter_storage/configs/mmapv1.conf: -------------------------------------------------------------------------------- 1 | storage: 2 | dbPath: "./data-mmapv1" 3 | directoryPerDB: true 4 | journal: 5 | enabled: true 6 | systemLog: 7 | destination: file 8 | path: "./mongodb-server.log" 9 | logAppend: true 10 | timeStampFormat: iso8601-utc 11 | net: 12 | bindIp: 127.0.0.1 13 | port: 27017 14 | unixDomainSocket: 15 | enabled : true 16 | -------------------------------------------------------------------------------- /chapter_storage/configs/wiredtiger-snappy.conf: -------------------------------------------------------------------------------- 1 | storage: 2 | dbPath: "./data-wt-snappy" 3 | directoryPerDB: true 4 | journal: 5 | enabled: true 6 | engine: "wiredTiger" 7 | wiredTiger: 8 | engineConfig: 9 | cacheSizeGB: 8 10 | journalCompressor: none 11 | collectionConfig: 12 | blockCompressor: snappy 13 | indexConfig: 14 | prefixCompression: true 15 | systemLog: 16 | destination: file 17 | path: "./mongodb-server.log" 18 | logAppend: true 19 | timeStampFormat: iso8601-utc 20 | net: 21 | bindIp: 127.0.0.1 22 | port: 27017 23 | unixDomainSocket: 24 | enabled : true 25 | -------------------------------------------------------------------------------- /chapter_storage/configs/wiredtiger-uncompressed.conf: -------------------------------------------------------------------------------- 1 | storage: 2 | dbPath: "./data-wt-uncompressed" 3 | directoryPerDB: true 4 | journal: 5 | enabled: true 6 | engine: "wiredTiger" 7 | wiredTiger: 8 | engineConfig: 9 | cacheSizeGB: 8 10 | journalCompressor: none 11 | collectionConfig: 12 | blockCompressor: none 13 | indexConfig: 14 | prefixCompression: false 15 | systemLog: 16 | destination: file 17 | path: "./mongodb-server.log" 18 | logAppend: true 19 | timeStampFormat: iso8601-utc 20 | net: 21 | bindIp: 127.0.0.1 22 | port: 27017 23 | unixDomainSocket: 24 | enabled : true 25 | -------------------------------------------------------------------------------- 
/chapter_storage/configs/wiredtiger-zlib.conf: -------------------------------------------------------------------------------- 1 | storage: 2 | dbPath: "./data-wt-zlib" 3 | directoryPerDB: true 4 | journal: 5 | enabled: true 6 | engine: "wiredTiger" 7 | wiredTiger: 8 | engineConfig: 9 | cacheSizeGB: 8 10 | journalCompressor: none 11 | collectionConfig: 12 | blockCompressor: zlib 13 | indexConfig: 14 | prefixCompression: true 15 | systemLog: 16 | destination: file 17 | path: "./mongodb-server.log" 18 | logAppend: true 19 | timeStampFormat: iso8601-utc 20 | net: 21 | bindIp: 127.0.0.1 22 | port: 27017 23 | unixDomainSocket: 24 | enabled : true 25 | -------------------------------------------------------------------------------- /chapter_storage/insert-results.txt: -------------------------------------------------------------------------------- 1 | ===== RUNNING mmapv1.conf ===== 2 | Cleaning up data directory 3 | Starting up mongod... took 102 seconds 4 | Running import loop 16 5 | Insert performance for mmapv1.conf: 105 seconds 6 | Shutting down server... took 1 seconds 7 | Disk usage for mmapv1.conf: 4128.04MB 8 | ===== RUNNING wiredtiger-uncompressed.conf ===== 9 | Cleaning up data directory 10 | Starting up mongod... took 2 seconds 11 | Running import loop 16 12 | Insert performance for wiredtiger-uncompressed.conf: 92 seconds 13 | Shutting down server... took 3 seconds 14 | Disk usage for wiredtiger-uncompressed.conf: 560.56MB 15 | ===== RUNNING wiredtiger-snappy.conf ===== 16 | Cleaning up data directory 17 | Starting up mongod... took 1 seconds 18 | Running import loop 16 19 | Insert performance for wiredtiger-snappy.conf: 93 seconds 20 | Shutting down server... took 2 seconds 21 | Disk usage for wiredtiger-snappy.conf: 380.27MB 22 | ===== RUNNING wiredtiger-zlib.conf ===== 23 | Cleaning up data directory 24 | Starting up mongod... took 2 seconds 25 | Running import loop 16 26 | Insert performance for wiredtiger-zlib.conf: 104 seconds 27 | Shutting down server... 
took 3 seconds 28 | Disk usage for wiredtiger-zlib.conf: 326.67MB 29 | 30 | -------------------------------------------------------------------------------- /chapter_storage/insert.js: -------------------------------------------------------------------------------- 1 | for (var j = 0; j < 10000; j++) { 2 | var r1 = Math.random(); 3 | 4 | // A nice date around year 2000 5 | var dateFld = new Date(1.5e12 * r1); 6 | var intFld = Math.floor(1e8 * r1); 7 | // A nicely randomized string of about 40 characters 8 | var stringFld = Math.floor(1e64 * r1).toString(36); 9 | var boolFld = intFld % 2; 10 | 11 | doc = { 12 | random_date: dateFld, 13 | random_int: intFld, 14 | random_string: stringFld, 15 | random_bool: boolFld 16 | } 17 | 18 | doc.arr = []; 19 | 20 | for (var i = 0; i < 16; i++) { 21 | var r2 = Math.random(); 22 | 23 | // A nice date around year 2000 24 | var dateFld = new Date(1.5e12 * r2); 25 | var intFld = Math.floor(1e8 * r2); 26 | var stringFld = Math.floor(1e64 * r2).toString(36); 27 | var boolFld = intFld % 2; 28 | 29 | if (i < 8) { 30 | doc.arr.push({ 31 | date_field: dateFld, 32 | int_field: intFld, 33 | string_field: stringFld, 34 | bool_field: boolFld 35 | }); 36 | } else { 37 | doc["sub" + i] = { 38 | date_field: dateFld, 39 | int_field: intFld, 40 | string_field: stringFld, 41 | bool_field: boolFld 42 | }; 43 | } 44 | } 45 | 46 | db.benchmark.insert(doc); 47 | } 48 | 49 | -------------------------------------------------------------------------------- /chapter_storage/migrate.txt: -------------------------------------------------------------------------------- 1 | mkdir ~/mongo-migration 2 | cd ~/mongo-migration 3 | 4 | mongodump 5 | 6 | sudo service mongod stop 7 | ps ax | grep mongo 8 | 9 | sudo mv /data/db /data/db-mmapv1 10 | 11 | sudo mkdir /data/db 12 | sudo chown mongodb.mongodb /data/db 13 | sudo chmod 755 /data/db 14 | 15 | sudo service mongod start 16 | 17 | cd ~/mongo-migration 18 | mongorestore dump 19 | 20 | -------------------------------------------------------------------------------- /chapter_storage/read.js: -------------------------------------------------------------------------------- 1 | c = db.benchmark.find(); 2 | while(c.hasNext()) c.next(); 3 | -------------------------------------------------------------------------------- /chapter_storage/read.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export MONGO_DIR=/storage/mongodb 4 | export NUM_LOOPS=16 5 | 6 | configs=( 7 | mmapv1.conf 8 | wiredtiger-uncompressed.conf 9 | wiredtiger-snappy.conf 10 | wiredtiger-zlib.conf 11 | ) 12 | 13 | sudo echo "Acquired root permissions" 14 | 15 | cd $MONGO_DIR 16 | for config in "${configs[@]}"; do 17 | echo "===== RUNNING $config =====" 18 | echo "Clearing memory caches" 19 | sync 20 | echo 3 | sudo tee /proc/sys/vm/drop_caches 21 | 22 | echo -ne "Starting up mongod... " 23 | T="$(date +%s)" 24 | ./bin/mongod --config configs/$config & 25 | 26 | # wait for mongo to start 27 | while [ 1 ]; do 28 | ./bin/mongostat -n 1 > /dev/null 2>&1 29 | if [ "$?"
-eq 0 ]; then 30 | break 31 | fi 32 | sleep 2 33 | done 34 | T="$(($(date +%s)-T))" 35 | echo "took $T seconds" 36 | 37 | rm -f timings-${config}.txt 38 | T="$(date +%s)" 39 | for l in $(seq 1 $NUM_LOOPS); do 40 | echo -ne "\rRunning read loop $l" 41 | /usr/bin/time -f "%e" -o timings-${config}.txt -a --quiet ./bin/mongo benchmark --quiet read.js >/dev/null 2>&1 42 | done 43 | T="$(($(date +%s)-T))" 44 | 45 | echo 46 | echo "Read performance for $config: $T seconds" 47 | 48 | echo -ne "Shutting down server... " 49 | T="$(date +%s)" 50 | ./bin/mongo admin --quiet --eval "db.shutdownServer({force: true})" >/dev/null 2>&1 51 | 52 | while [ 1 ]; do 53 | pgrep -U $USER mongod > /dev/null 2>&1 54 | if [ "$?" -eq 1 ]; then 55 | break 56 | fi 57 | sleep 1 58 | done 59 | T="$(($(date +%s)-T))" 60 | echo "took $T seconds" 61 | done 62 | -------------------------------------------------------------------------------- /chapter_storage/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | export MONGO_DIR=/storage/mongodb 4 | export NUM_LOOPS=16 5 | 6 | configs=( 7 | mmapv1.conf 8 | wiredtiger-uncompressed.conf 9 | wiredtiger-snappy.conf 10 | wiredtiger-zlib.conf 11 | ) 12 | 13 | cd $MONGO_DIR 14 | for config in "${configs[@]}"; do 15 | echo "===== RUNNING $config =====" 16 | echo "Cleaning up data directory" 17 | DATA_DIR=$(grep dbPath configs/$config | awk -F\" '{ print $2 }') 18 | rm -rf $MONGO_DIR/$DATA_DIR/* 19 | 20 | echo -ne "Starting up mongod... " 21 | T="$(date +%s)" 22 | ./bin/mongod --config configs/$config & 23 | 24 | # wait for mongo to start 25 | while [ 1 ]; do 26 | ./bin/mongostat -n 1 > /dev/null 2>&1 27 | if [ "$?" -eq 0 ]; then 28 | break 29 | fi 30 | sleep 2 31 | done 32 | T="$(($(date +%s)-T))" 33 | echo "took $T seconds" 34 | 35 | T="$(date +%s)" 36 | for l in $(seq 1 $NUM_LOOPS); do 37 | echo -ne "\rRunning import loop $l" 38 | ./bin/mongo benchmark --quiet --eval 'load("./insert.js")' >/dev/null 2>&1 39 | done 40 | T="$(($(date +%s)-T))" 41 | 42 | echo 43 | echo "Insert performance for $config: $T seconds" 44 | 45 | echo -ne "Shutting down server... " 46 | T="$(date +%s)" 47 | ./bin/mongo admin --quiet --eval "db.shutdownServer({force: true})" >/dev/null 2>&1 48 | 49 | while [ 1 ]; do 50 | pgrep -U $USER mongod > /dev/null 2>&1 51 | if [ "$?" 
-eq 1 ]; then 52 | break 53 | fi 54 | sleep 1 55 | done 56 | T="$(($(date +%s)-T))" 57 | echo "took $T seconds" 58 | 59 | SIZE=$(du -s --block-size=1 $MONGO_DIR/$DATA_DIR | cut -f1) 60 | SIZE_MB=$(echo "scale=2; $SIZE/(1024*1024)" | bc) 61 | echo "Disk usage for $config: ${SIZE_MB}MB" 62 | done 63 | 64 | 65 | -------------------------------------------------------------------------------- /chapter_storage/timings-mmapv1.conf.txt: -------------------------------------------------------------------------------- 1 | 17.88 2 | 1.01 3 | 1.00 4 | 1.03 5 | 0.99 6 | 1.03 7 | 0.96 8 | 0.99 9 | 0.97 10 | 0.96 11 | 0.99 12 | 1.01 13 | 0.98 14 | 1.01 15 | 1.08 16 | 1.02 17 | -------------------------------------------------------------------------------- /chapter_storage/timings-wiredtiger-snappy.conf.txt: -------------------------------------------------------------------------------- 1 | 5.67 2 | 1.05 3 | 1.02 4 | 1.05 5 | 1.04 6 | 1.05 7 | 0.99 8 | 1.01 9 | 1.02 10 | 1.03 11 | 1.06 12 | 1.06 13 | 1.08 14 | 1.03 15 | 1.05 16 | 1.04 17 | -------------------------------------------------------------------------------- /chapter_storage/timings-wiredtiger-uncompressed.conf.txt: -------------------------------------------------------------------------------- 1 | 7.37 2 | 1.10 3 | 1.05 4 | 1.03 5 | 1.07 6 | 1.03 7 | 1.06 8 | 1.08 9 | 1.03 10 | 1.03 11 | 1.03 12 | 1.07 13 | 1.04 14 | 1.08 15 | 1.05 16 | 1.06 17 | -------------------------------------------------------------------------------- /chapter_storage/timings-wiredtiger-zlib.conf.txt: -------------------------------------------------------------------------------- 1 | 5.17 2 | 1.05 3 | 1.08 4 | 1.08 5 | 1.08 6 | 1.08 7 | 1.07 8 | 1.06 9 | 1.03 10 | 1.03 11 | 1.06 12 | 1.07 13 | 1.06 14 | 1.07 15 | 1.05 16 | 1.06 17 | -------------------------------------------------------------------------------- /chapter_storage/timings.R: -------------------------------------------------------------------------------- 1 | library(zoo) 2 | 3 | timings <- as.zoo(read.table("timings-mmapv1.conf.txt")) 4 | timings <- merge.zoo(timings, as.zoo(read.table("timings-wiredtiger-uncompressed.conf.txt"))) 5 | timings <- merge.zoo(timings, as.zoo(read.table("timings-wiredtiger-snappy.conf.txt"))) 6 | timings <- merge.zoo(timings, as.zoo(read.table("timings-wiredtiger-zlib.conf.txt"))) 7 | colnames(timings) <- c("MMAPV1", "WT", "WT-snappy", "WT-zlib") 8 | png("timings.png", width=7, height=5, res=300, units="in") 9 | barplot(timings, beside=TRUE, col=c(2, 3, 4, 5)) 10 | legend("topright", legend=colnames(timings), fill = c(2, 3, 4, 5), bty="n") 11 | title("Read performance", xlab="iteration", ylab="duration (s)") 12 | dev.off() 13 | 14 | write.csv(timings, file="timings.csv") 15 | -------------------------------------------------------------------------------- /chapter_storage/timings.csv: -------------------------------------------------------------------------------- 1 | "","MMAPV1","WT","WT-snappy","WT-zlib" 2 | "1",17.88,7.37,5.67,5.17 3 | "2",1.01,1.1,1.05,1.05 4 | "3",1,1.05,1.02,1.08 5 | "4",1.03,1.03,1.05,1.08 6 | "5",0.99,1.07,1.04,1.08 7 | "6",1.03,1.03,1.05,1.08 8 | "7",0.96,1.06,0.99,1.07 9 | "8",0.99,1.08,1.01,1.06 10 | "9",0.97,1.03,1.02,1.03 11 | "10",0.96,1.03,1.03,1.03 12 | "11",0.99,1.03,1.06,1.06 13 | "12",1.01,1.07,1.06,1.07 14 | "13",0.98,1.04,1.08,1.06 15 | "14",1.01,1.08,1.03,1.07 16 | "15",1.08,1.05,1.05,1.05 17 | "16",1.02,1.06,1.04,1.06 18 | -------------------------------------------------------------------------------- /chapter_storage/timings.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/bakks/mongo-in-action-code/1fa0e0b151e64c94f70ba4d6c847e42871486273/chapter_storage/timings.png --------------------------------------------------------------------------------
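Not part of the repository: when re-running the chapter_storage benchmarks above, it can be worth confirming from the mongo shell which storage engine and block compressor the server was actually started with before comparing timings. A minimal sketch, assuming the server was launched with one of the config files above and that the database/collection names match insert.js (db.serverStatus() and db.collection.stats() are standard shell helpers):

// reports "wiredTiger" or "mmapv1", depending on which config file was used
db.serverStatus().storageEngine.name

// for WiredTiger, the collection stats record the block compressor in use
use benchmark
db.benchmark.stats().wiredTiger.creationString   // look for block_compressor=snappy, zlib, or none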