├── .gitignore
├── .npmignore
├── .travis.yml
├── HISTORY
├── Makefile
├── Readme.md
├── TODO
├── dev
├── benchmark
│ ├── bson_benchmark.js
│ ├── bson_buffalo_benchmark.js
│ ├── bson_native_benchmark.js
│ ├── emit_benchmark.js
│ ├── grid_fs_write_benchmark.js
│ ├── gridfs_benchmark.js
│ ├── hammer.js
│ ├── hammer_replicaset.js
│ └── streaming_benchmark.js
├── harness
│ └── memory_leak_harness.js
└── tools
│ ├── build-docs.js
│ ├── connection_proxy_emulator.js
│ ├── doc-templates
│ ├── changelog.ejs
│ ├── class.ejs
│ ├── function.ejs
│ ├── github.ejs
│ ├── index.ejs
│ └── index_no_header.ejs
│ ├── docs.js
│ ├── gleak.js
│ ├── test_all.js
│ └── test_set_runner.js
├── docs
├── README.md
├── articles
│ ├── NodeKOArticle1.md
│ └── NodeKOArticle2.md
├── collections.md
├── database.md
├── gridfs.md
├── indexes.md
├── insert.md
├── queries.md
├── replicaset.md
└── sphinx-docs
│ ├── .gitignore
│ ├── Makefile
│ ├── conf.py
│ ├── npm_dependent_packages.json
│ ├── source
│ ├── content
│ │ ├── awesomeappsvideo.rst
│ │ ├── nodejsvideo.rst
│ │ └── tutorials.rst
│ ├── contents.rst
│ ├── index.rst
│ └── static
│ │ ├── .mongodb
│ │ ├── active.png
│ │ ├── logo-mongodb.png
│ │ └── nonactive.png
│ ├── templates
│ └── .mongodb
│ └── themes
│ └── mongodb
│ ├── globaltoc.html
│ ├── layout.html
│ ├── localtoc.html
│ ├── relations.html
│ ├── static
│ └── mongodb-docs.css_t
│ └── theme.conf
├── examples
├── admin.js
├── blog.js
├── capped.js
├── cursor.js
├── gridfs.js
├── index.js
├── info.js
├── oplog.js
├── queries.js
├── replSetServersQueries.js
├── replSetServersSimple.js
├── simple.js
├── strict.js
├── types.js
└── url.js
├── external-libs
└── bson
│ ├── .gitignore
│ ├── Makefile
│ ├── bson.cc
│ ├── bson.h
│ ├── index.js
│ ├── test
│ ├── test_bson.js
│ ├── test_full_bson.js
│ └── test_stackless_bson.js
│ └── wscript
├── index.js
├── install.js
├── lib
└── mongodb
│ ├── admin.js
│ ├── collection.js
│ ├── commands
│ ├── base_command.js
│ ├── db_command.js
│ ├── delete_command.js
│ ├── get_more_command.js
│ ├── insert_command.js
│ ├── kill_cursor_command.js
│ ├── query_command.js
│ └── update_command.js
│ ├── connection
│ ├── connection.js
│ ├── connection_pool.js
│ ├── connection_utils.js
│ ├── repl_set.js
│ ├── server.js
│ └── strategies
│ │ ├── ping_strategy.js
│ │ └── statistics_strategy.js
│ ├── cursor.js
│ ├── cursorstream.js
│ ├── db.js
│ ├── gridfs
│ ├── chunk.js
│ ├── grid.js
│ ├── gridstore.js
│ └── readstream.js
│ ├── index.js
│ ├── responses
│ └── mongo_reply.js
│ └── utils.js
├── package.json
└── test
├── admin_test.js
├── aggregation_framework_test.js
├── authentication_test.js
├── auxilliary
├── authentication_test.js
├── repl_set_ssl_test.js
├── replicaset_auth_test.js
├── single_server_kill_reconnect.js
└── ssl_test.js
├── certificates
└── mycert.pem
├── collection_test.js
├── commands_test.js
├── connect_test.js
├── connection
├── connection_pool_test.js
└── message_parser_test.js
├── connection_test.js
├── cursor_test.js
├── cursorstream_test.js
├── custom_pk_test.js
├── db_test.js
├── error_test.js
├── exception_handling_test.js
├── find_test.js
├── geo_search_test.js
├── gridstore
├── grid_store_file_test.js
├── grid_store_stream_test.js
├── grid_store_test.js
├── grid_test.js
├── iya_logo_final_bw.jpg
├── readstream_test.js
├── test_gs_weird_bug.png
└── test_gs_working_field_read.pdf
├── index_test.js
├── insert_test.js
├── logging_test.js
├── manual_tests
├── grids_fs_compatibility_test.js
├── hanging_queries_test.js
├── issue_replicaset_test.js
├── large_file_gridstore_stream.js
├── manual_larger_queries.js
├── manual_lock.js
├── motherload_pullback_test.js
├── replicaset_manual_kill_test.js
├── replicaset_test.js
├── server_load.js
├── simple_test.js
├── single_test.js
└── test.js
├── map_reduce_test.js
├── multiple_dbs_on_connection_pool_test.js
├── objectid_test.js
├── raw_test.js
├── reaper_test.js
├── regexp_test.js
├── remove_test.js
├── replicaset
├── connect_test.js
├── count_test.js
├── insert_and_query_on_dead_primary_test.js
├── insert_test.js
├── map_reduce_test.js
├── query_secondaries_test.js
├── read_preference_replicaset_test.js
├── read_preferences_single_test.js
├── tags_test.js
└── two_server_tests.js
├── streaming_test.js
├── tools
├── keyfile.txt
├── replica_set_manager.js
├── server_manager.js
└── sharding_manager.js
└── unicode_test.js
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | *.sw*
3 | *.seed
4 | *.tmp
5 | .project
6 | .settings
7 | data
8 | node_modules/
9 | output
10 | build
11 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | .npmignore
2 | .gitignore
3 | .buildinfo
4 | .mongodb
5 | .DS_Store
6 | *.tmp
7 |
8 | HISTORY
9 | Readme.md
10 | TODO
11 |
12 | docs/
13 | docs/sphinx-docs
14 | data/
15 | dev/
16 | examples/
17 | test/
18 |
19 | external-libs/bson/build/
20 | external-libs/bson/build/.wafpickle-7
21 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - 0.4
4 | - 0.6
5 | - 0.7 # development version of 0.8, may be unstable
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | NODE = node
2 | NPM = npm
3 | NODEUNIT = node_modules/nodeunit/bin/nodeunit
4 | DOX = node_modules/dox/bin/dox
5 | name = all
6 |
7 | total: build_native
8 |
9 | build_native:
10 | # $(MAKE) -C ./external-libs/bson all
11 |
12 | build_native_debug:
13 | $(MAKE) -C ./external-libs/bson all_debug
14 |
15 | build_native_clang:
16 | $(MAKE) -C ./external-libs/bson clang
17 |
18 | build_native_clang_debug:
19 | $(MAKE) -C ./external-libs/bson clang_debug
20 |
21 | clean_native:
22 | $(MAKE) -C ./external-libs/bson clean
23 |
24 | test: build_native
25 | @echo "\n == Run All tests minus replicaset tests=="
26 | $(NODE) dev/tools/test_all.js --noreplicaset --boot
27 |
28 | test_pure: build_native
29 | @echo "\n == Run All tests minus replicaset tests=="
30 | $(NODE) dev/tools/test_all.js --noreplicaset --boot --noactive
31 |
32 | test_junit: build_native
33 | @echo "\n == Run All tests minus replicaset tests=="
34 | $(NODE) dev/tools/test_all.js --junit --noreplicaset
35 |
36 | test_nodeunit_pure:
37 | @echo "\n == Execute Test Suite using Pure JS BSON Parser == "
38 | @$(NODEUNIT) test/ test/gridstore test/bson
39 |
40 | test_js:
41 | @$(NODEUNIT) $(TESTS)
42 |
43 | test_nodeunit_replicaset_pure:
44 | @echo "\n == Execute Test Suite using Pure JS BSON Parser == "
45 | @$(NODEUNIT) test/replicaset
46 |
47 | test_nodeunit_native:
48 | @echo "\n == Execute Test Suite using Native BSON Parser == "
49 | @TEST_NATIVE=TRUE $(NODEUNIT) test/ test/gridstore test/bson
50 |
51 | test_nodeunit_replicaset_native:
52 | @echo "\n == Execute Test Suite using Native BSON Parser == "
53 | @TEST_NATIVE=TRUE $(NODEUNIT) test/replicaset
54 |
55 | test_all: build_native
56 | @echo "\n == Run All tests =="
57 | $(NODE) dev/tools/test_all.js --boot
58 |
59 | test_all_junit: build_native
60 | @echo "\n == Run All tests =="
61 | $(NODE) dev/tools/test_all.js --junit --boot
62 |
63 | clean:
64 | rm ./external-libs/bson/bson.node
65 | rm -r ./external-libs/bson/build
66 |
67 | generate_docs:
68 | $(NODE) dev/tools/build-docs.js
69 | make --directory=./docs/sphinx-docs --file=Makefile html
70 |
71 | .PHONY: total
72 |
--------------------------------------------------------------------------------
/TODO:
--------------------------------------------------------------------------------
1 | TODO for jan 4 - 2011
2 | - Chase down potential memory leak in findAndModify
3 | - Check compatibility for gridFS between python and js
4 | - Ensure Gridfs speed is comparable to other solutions
5 | - Map out python replicaset driver functionality
6 |
7 | ACCOMPLISHED jan 4 - 2011
8 | + Chase down potential memory leak in findAndModify
9 | + Check compatibility for gridFS between python and js
10 | + Ensure Gridfs speed is comparable to other solutions
11 |
12 | 0.9.7.4-dev
13 | - Amortize documents (only deserialize when accessed)
14 | - Mongo tests to handle special Mongos situations
15 | - If a secondary server disappears don't kill the driver connection
16 | - Check for new servers coming online (a new secondary server etc)
17 | - http://api.mongodb.org/python/current/api/pymongo/index.html#pymongo.ReadPreference
18 |
19 | * Allow secondary read with no master
20 | * Add lowest response time selection of read server for replicaset using a selectable strategy. First one being using the ping command response time
21 | * Allow execution of multiple commands against the same server connection when having a connection pool
22 | * Implement tag support for replicasets
23 | * Change bson c++ parser to use js objects instead of native c++ objects
24 | * Whole stack benchmark with profiling to locate where the driver spends time
25 | * Change bson c++ parser to be stackless to look at performance difference
--------------------------------------------------------------------------------
/dev/benchmark/bson_benchmark.js:
--------------------------------------------------------------------------------
1 | var BSON = require('../../lib/mongodb').BSONNative.BSON,
2 |   ObjectID = require('../../lib/mongodb').BSONNative.ObjectID,
3 |   Code = require('../../lib/mongodb').BSONNative.Code,
4 |   Long = require('../../lib/mongodb').BSONNative.Long,
5 |   Binary = require('../../lib/mongodb').BSONNative.Binary,
6 |   debug = require('util').debug,
7 |   inspect = require('util').inspect,
8 |
9 |   Long = require('../../lib/mongodb').Long, // NOTE(review): lines 9-18 rebind Long/ObjectID/Binary/Code from the top-level exports,
10 |   ObjectID = require('../../lib/mongodb').ObjectID, // shadowing the BSONNative bindings declared on lines 2-5 (same var statement, last assignment wins)
11 |   Binary = require('../../lib/mongodb').Binary,
12 |   Code = require('../../lib/mongodb').Code,
13 |   DBRef = require('../../lib/mongodb').DBRef,
14 |   Symbol = require('../../lib/mongodb').Symbol,
15 |   Double = require('../../lib/mongodb').Double,
16 |   MaxKey = require('../../lib/mongodb').MaxKey,
17 |   MinKey = require('../../lib/mongodb').MinKey,
18 |   Timestamp = require('../../lib/mongodb').Timestamp;
19 |
20 |
21 | // var BSON = require('../../lib/mongodb').BSONPure.BSON,
22 | // ObjectID = require('../../lib/mongodb').BSONPure.ObjectID,
23 | // Code = require('../../lib/mongodb').BSONPure.Code,
24 | // Long = require('../../lib/mongodb').BSONPure.Long,
25 | // Binary = require('../../lib/mongodb').BSONPure.Binary;
26 |
27 | var COUNT = 1000; // superseded by the redeclaration on the next line (redeclared var: last assignment wins)
28 | var COUNT = 100; // effective value; note COUNT is only read by the commented-out code below — the active loops use the literals 1 and 100000
29 |
30 | var object = {
31 | string: "Strings are great",
32 | decimal: 3.14159265,
33 | bool: true,
34 | integer: 5,
35 | date: new Date(),
36 | double: new Double(1.4),
37 | id: new ObjectID(),
38 | min: new MinKey(),
39 | max: new MaxKey(),
40 | symbol: new Symbol('hello'),
41 | long: Long.fromNumber(100),
42 | bin: new Binary(new Buffer(100)),
43 |
44 | subObject: {
45 | moreText: "Bacon ipsum dolor sit amet cow pork belly rump ribeye pastrami andouille. Tail hamburger pork belly, drumstick flank salami t-bone sirloin pork chop ribeye ham chuck pork loin shankle. Ham fatback pork swine, sirloin shankle short loin andouille shank sausage meatloaf drumstick. Pig chicken cow bresaola, pork loin jerky meatball tenderloin brisket strip steak jowl spare ribs. Biltong sirloin pork belly boudin, bacon pastrami rump chicken. Jowl rump fatback, biltong bacon t-bone turkey. Turkey pork loin boudin, tenderloin jerky beef ribs pastrami spare ribs biltong pork chop beef.",
46 | longKeylongKeylongKeylongKeylongKeylongKey: "Pork belly boudin shoulder ribeye pork chop brisket biltong short ribs. Salami beef pork belly, t-bone sirloin meatloaf tail jowl spare ribs. Sirloin biltong bresaola cow turkey. Biltong fatback meatball, bresaola tail shankle turkey pancetta ham ribeye flank bacon jerky pork chop. Boudin sirloin shoulder, salami swine flank jerky t-bone pork chop pork beef tongue. Bresaola ribeye jerky andouille. Ribeye ground round sausage biltong beef ribs chuck, shank hamburger chicken short ribs spare ribs tenderloin meatloaf pork loin."
47 | },
48 |
49 | subArray: [1,2,3,4,5,6,7,8,9,10],
50 | anotherString: "another string",
51 | code: new Code("function() {}", {i:1})
52 | }
53 |
54 | // Number of objects (used by the commented-out deserializeStream experiment below)
55 | var numberOfObjects = 100;
56 | var bson = new BSON([Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey]);
57 | console.log("---------------------- 1")
58 | // Serialize once to produce the test buffer (objectBSON is an implicit global)
59 | for(var i = 0; i < 1; i++) {
60 | objectBSON = bson.serialize(object, null, true)
61 | }
62 |
63 | // Deserialize benchmark: 100k iterations over the same buffer
64 | for(var i = 0; i < 100000; i++) {
65 | bson.deserialize(objectBSON);
66 | }
67 |
68 | // // Buffer With copies of the objectBSON
69 | // var data = new Buffer(objectBSON.length * numberOfObjects);
70 | // var index = 0;
71 | //
72 | // // Copy the buffer 1000 times to create a strea m of objects
73 | // for(var i = 0; i < numberOfObjects; i++) {
74 | // // Copy data
75 | // objectBSON.copy(data, index);
76 | // // Adjust index
77 | // index = index + objectBSON.length;
78 | // }
79 | //
80 | // // console.log("-----------------------------------------------------------------------------------")
81 | // // console.dir(objectBSON)
82 | //
83 | // var x, start, end, j
84 | // var objectBSON, objectJSON
85 | //
86 | // // Allocate the return array (avoid concatinating everything)
87 | // var results = new Array(numberOfObjects);
88 | //
89 | // console.log(COUNT + "x (objectBSON = BSON.serialize(object))")
90 | // start = new Date
91 | //
92 | // // var objects = BSON.deserializeStream(data, 0, numberOfObjects);
93 | // // console.log("----------------------------------------------------------------------------------- 0")
94 | // // var objects = BSON.deserialize(data);
95 | // // console.log("----------------------------------------------------------------------------------- 1")
96 | // // console.dir(objects)
97 | //
98 | // for (j=COUNT; --j>=0; ) {
99 | // var nextIndex = BSON.deserializeStream(data, 0, numberOfObjects, results, 0);
100 | // }
101 | //
102 | // end = new Date
103 | // var opsprsecond = COUNT / ((end - start)/1000);
104 | // console.log("bson size (bytes): ", objectBSON.length);
105 | // console.log("time = ", end - start, "ms -", COUNT / ((end - start)/1000), " ops/sec");
106 | // console.log("MB/s = " + ((opsprsecond*objectBSON.length)/1024));
107 | //
108 | // // console.dir(nextIndex)
109 | // // console.dir(results)
110 |
111 |
112 |
--------------------------------------------------------------------------------
/dev/benchmark/emit_benchmark.js:
--------------------------------------------------------------------------------
1 | var debug = require('util').debug,
2 | inspect = require('util').inspect,
3 | inherits = require('util').inherits,
4 | net = require('net'),
5 | EventEmitter = require("events").EventEmitter;
6 |
7 | var COUNT = 1000000;
8 |
9 | var Emitter = function() {
10 | }
11 |
12 | inherits(Emitter, EventEmitter);
13 |
14 | Emitter.prototype.start = function() {
15 | for(var i = 0; i < COUNT; i++) {
16 | this.emit("data", "============================================== data")
17 | }
18 | }
19 |
20 | Emitter.prototype.start2 = function(callback) {
21 | for(var i = 0; i < COUNT; i++) {
22 | callback(null, "============================================== data")
23 | }
24 | }
25 |
26 | // Create test object
27 | var emitObj = new Emitter();
28 | emitObj.on("data", function(data) {
29 | })
30 |
31 | console.log(COUNT + "x (objectBSON = BSON.serialize(object))")
32 | start = new Date
33 |
34 | emitObj.start();
35 |
36 | end = new Date
37 | console.log("time = ", end - start, "ms -", COUNT * 1000 / (end - start), " ops/sec")
38 |
39 |
40 | console.log(COUNT + "x (objectBSON = BSON.serialize(object))")
41 | start = new Date
42 |
43 | emitObj.start2(function(err, data) {
44 | // debug(data)
45 | });
46 |
47 | end = new Date
48 | console.log("time = ", end - start, "ms -", COUNT * 1000 / (end - start), " ops/sec")
49 |
50 |
--------------------------------------------------------------------------------
/dev/benchmark/grid_fs_write_benchmark.js:
--------------------------------------------------------------------------------
1 | var Db = require('../../lib/mongodb').Db,
2 | Server = require('../../lib/mongodb').Server,
3 | ObjectID = require('../../lib/mongodb').ObjectID,
4 | GridStore = require('../../lib/mongodb').GridStore;
5 |
6 | var simulated_buffer = new Buffer(1024*1000*10).toString();
7 |
8 | new Db('grid_fs_write_benchmark', new Server("127.0.0.1", 27017, {auto_reconnect: true}), {}).open(function(err, new_client) {
9 | new_client.dropDatabase(function(err, result) {
10 | new_client.close();
11 |
12 | for(var i = 0; i < 1; i++) {
13 | new Db('grid_fs_write_benchmark', new Server("127.0.0.1", 27017, {auto_reconnect: true}), {}).open(function(err, client) {
14 | var gridStore = new GridStore(client, "foobar" + i, "w");
15 | gridStore.open(function(err, gridStore) {
16 | gridStore.write(simulated_buffer.toString(), function(err, gridStore) {
17 | gridStore.close(function(err, result) {
18 | client.close();
19 | });
20 | });
21 | });
22 | });
23 | }
24 | })
25 | });
26 |
--------------------------------------------------------------------------------
/dev/benchmark/gridfs_benchmark.js:
--------------------------------------------------------------------------------
1 | var Db = require('../../lib/mongodb').Db,
2 | Server = require('../../lib/mongodb').Server,
3 | ObjectID = require('../../lib/mongodb').ObjectID,
4 | GridStore = require('../../lib/mongodb').GridStore;
5 |
6 | var Mongolian = require('mongolian');
7 | var COUNT = 1000;
8 | var currentWritingIndex = 0;
9 | var server = new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize:1, native_parser:true});
10 | var server2 = new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize:1, native_parser:true});
11 |
12 | // Read in the test file
13 | var fileData = require('fs').readFileSync("../../test/gridstore/iya_logo_final_bw.jpg");
14 |
15 | // ------------------------------------------------------------------------------
16 | // TEST MONGODB NATIVE
17 | // ------------------------------------------------------------------------------
18 | // Open a db for the file
19 | new Db('gridfs_benchmark', server, {}).open(function(err, new_client) {
20 | new_client.dropDatabase(function(err, result) {
21 | new_client.close();
22 |
23 | new Db('gridfs_benchmark', server2, {}).open(function(err, client) {
24 | // Start Time
25 | var startTime = new Date().getTime();
26 |
27 | // Iterate COUNT times writing file to gridfs
28 | for(var i = 0; i < COUNT; i++) {
29 | var gridStore = new GridStore(client, "foobar" + i, "w");
30 | gridStore.open(function(err, gridStore) {
31 | gridStore.write(fileData, true, function(err, gridStore) {
32 | // Update current write index
33 | currentWritingIndex = currentWritingIndex + 1;
34 |
35 | // finish up
36 | if(currentWritingIndex >= COUNT) {
37 | // Start Time
38 | var endTime = new Date().getTime();
39 | var totalTime = (endTime - startTime);
40 | var msPerOperation = totalTime/COUNT;
41 | var operationsPrSecond = 1000/msPerOperation;
42 | var bytesPrSecond = Math.floor(fileData.length * operationsPrSecond);
43 | var mbsPrSecond = (bytesPrSecond/1024)/1024 ;
44 |
45 | console.log("-------------------------------------------------- DONE NATIVE")
46 | console.log("total time ms :: " + totalTime);
47 | console.log("ms pr operation :: " + msPerOperation);
48 | console.log("operations pr second :: " + operationsPrSecond);
49 | console.log("bytes pr second :: " + bytesPrSecond);
50 | console.log("MB pr second :: " + mbsPrSecond);
51 | // Close db
52 | client.close();
53 | // Execute mongolian test
54 | executeMongolianTest();
55 | }
56 | })
57 | });
58 | }
59 | });
60 | })
61 | });
62 |
63 | // ------------------------------------------------------------------------------
64 | // TEST MONGOLIAN (third-party driver, run for comparison after the native test)
65 | // ------------------------------------------------------------------------------
66 | var executeMongolianTest = function() {
67 | var db = new Mongolian('mongo://localhost/mongolian_test', { log:false })
68 | var gridfs = db.gridfs('testfs')
69 |
70 | // Number of executed operations
71 | var currentWritingIndexM = 0;
72 | // Start Time
73 | var startTime = new Date().getTime();
74 |
75 | // Execute Mongolian Count times writing data
76 | for(var i = 0; i < COUNT; i++) {
77 | var stream = gridfs.create('foo' + i).writeStream();
78 | stream.on('close', function() {
79 | currentWritingIndexM = currentWritingIndexM + 1;
80 |
81 | if(currentWritingIndexM >= COUNT) {
82 | // End Time: all COUNT writes have completed
83 | var endTime = new Date().getTime();
84 | var totalTime = (endTime - startTime);
85 | var msPerOperation = totalTime/COUNT;
86 | var operationsPrSecond = 1000/msPerOperation;
87 | var bytesPrSecond = Math.floor(fileData.length * operationsPrSecond);
88 | var mbsPrSecond = (bytesPrSecond/1024)/1024 ;
89 |
90 | console.log("-------------------------------------------------- DONE MONGOLIAN")
91 | console.log("total time ms :: " + totalTime);
92 | console.log("ms pr operation :: " + msPerOperation);
93 | console.log("operations pr second :: " + operationsPrSecond);
94 | console.log("bytes pr second :: " + bytesPrSecond);
95 | console.log("MB pr second :: " + mbsPrSecond);
96 |
97 | // Close connection
98 | db.server.close()
99 | }
100 | });
101 |
102 | // Write file
103 | stream.end(fileData);
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/dev/benchmark/streaming_benchmark.js:
--------------------------------------------------------------------------------
1 | var Db = require('../lib/mongodb').Db,
2 | Server = require('../lib/mongodb').Server,
3 | Cursor = require('../lib/mongodb').Cursor,
4 | Collection = require('../lib/mongodb').Collection,
5 | sys = require('util'),
6 | debug = require('util').debug,
7 | inspect = require('util').inspect;
8 |
9 | var parser = require('../lib/mongodb').BSONPure;
10 | var objectID = require('../lib/mongodb').ObjectID;
11 |
12 | var db = new Db('streaming_benchmark', new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize:4}), {})
13 | // Open the db
14 | db.open(function(err, client) {
15 | client.collection('streaming_benchmark', function(err, collection) {
16 | collection.remove({}, function(err, result) {
17 | // Benchmark
18 | var started_at = new Date().getTime();
19 | // Add documents
20 | for(var i = 0; i < 100000; i++) {
21 | // for(var i = 0; i < 10000; i++) {
22 | collection.save({'i':i, 'a':i, 'c':i, 'd':{'i':i}}, function(err, result){});
23 | }
24 | sys.puts("save recs: " + ((new Date().getTime() - started_at)/1000) + "seconds");
25 |
26 | // Benchmark
27 | var started_at = new Date().getTime();
28 | var count = 0;
29 | collection.find(function(err, cursor) {
30 | var stream = cursor.streamRecords(function(er,item) {});
31 | stream.addListener('end', function() {
32 | client.close();
33 | });
34 | stream.addListener('data',function(data){
35 | if(count == 0) started_at = new Date().getTime();
36 | count++;
37 | if ((count%10000)==0) sys.puts("recs:" + count + " :: " +
38 | ((new Date().getTime() - started_at)/10000) + "seconds");
39 | });
40 | });
41 | })
42 | })
43 | });
--------------------------------------------------------------------------------
/dev/harness/memory_leak_harness.js:
--------------------------------------------------------------------------------
1 | var http = require('http'),
2 | Server = require('../lib/mongodb').Server,
3 | ObjectID = require('../lib/mongodb').ObjectID,
4 | Db = require('../lib/mongodb').Db;
5 |
6 | // Set up the mongodb instance
7 | var db = new Db('memory_leak_harness', new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4}), {native_parser:false});
8 |
9 | // Set up http server
10 | var server = http.createServer();
11 | server.on('request', function(request, response) {
12 | // Fetch the url
13 | var url = request.url;
14 |
15 | // Switch on the url
16 | if(url === "/findAndModify") {
17 | findAndModifyCommand(request, response);
18 | } else {
19 | response.end('Command not supported');
20 | }
21 | })
22 |
23 | // Open the db connection
24 | db.open(function(err, db) {
25 | server.listen(8080, '127.0.0.1');
26 | });
27 |
28 | // Find And Modify Command
29 | var findAndModifyCommand = function(request, response) {
30 | // Perform an insert and the modify that one
31 | var objectId = new ObjectID();
32 | // Fetch collection and insert document then modify it
33 | db.createCollection('findAndModify', function(err, collection) {
34 | collection.insert({_id:objectId, a:1, b:true, date:new Date()}, {safe:true}, function(err, result) {
35 | if(err != null) {
36 | response.end("findAndModifyCommand ERROR :: " + err.toString());
37 | return;
38 | }
39 |
40 | // Perform the modifyAndModify
41 | collection.findAndModify({_id:objectId}, [['_id', 1]], {'$set':{'a':2}}, {'new':true, safe:true}, function(err, updated_doc) {
42 | response.end("findAndModifyCommand SUCCESS :: " + JSON.stringify(updated_doc));
43 | });
44 | })
45 | });
46 | }
--------------------------------------------------------------------------------
/dev/tools/connection_proxy_emulator.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Parameters for the proxy
3 | **/
4 | var inputHost = 'localhost';
5 | var inputPort = 27017;
6 | var outputHost = 'localhost';
7 | var outputPort = 27018;
8 | var webServerPort = 8080;
9 |
10 | /**
11 | * Proxy handling
12 | **/
13 | var net = require('net'),
14 | http = require('http'),
15 | format = require('util').format;
16 | var connectionNumber = 0,
17 | incomingConnections = {},
18 | outgoingConnections = {};
19 |
20 | // Server instance
21 | var server = net.createServer(function(connection) {
22 | console.log("=============================================== server connected");
23 | // console.dir(connection)
24 | // Set the id
25 | connection.id = connectionNumber++;
26 | // Outgoing connection
27 | var outgoingConnection = net.createConnection(outputPort, outputHost);
28 | outgoingConnection.id = connection.id;
29 | // Create a connection
30 | outgoingConnections[connection.id] = outgoingConnection;
31 | incomingConnections[connection.id] = connection;
32 | // Listen to incoming data
33 | connection.on("data", function(data) {
34 | outgoingConnections[this.id].write(data);
35 | });
36 |
37 | connection.on("close", function() {
38 | console.log("===================================== closing incoming connection :: " + this.id)
39 | if(outgoingConnections[this.id]) outgoingConnections[this.id].destroy();
40 | delete outgoingConnections[this.id];
41 | })
42 |
43 | outgoingConnections[connection.id].on("data", function(data) {
44 | incomingConnections[this.id].write(data);
45 | });
46 |
47 | outgoingConnections[connection.id].on("close", function(data) {
48 | console.log("===================================== closing outgoing connection :: " + this.id)
49 | if(incomingConnections[this.id]) incomingConnections[this.id].destroy();
50 | delete incomingConnections[this.id];
51 | });
52 | });
53 |
54 | // Boot up server letting you control the connection
55 | var webserver = http.createServer(function(request, response) {
56 | console.log("----------------------------------------------------------- 8080")
57 | // console.dir(request.url.)
58 | if(request.url == '/sockets') {
59 | renderSocketList(incomingConnections, response);
60 | } else if(request.url.indexOf('/sockets/close') != -1) {
61 | // Get the id and kill it
62 | var id = request.url.split("/").pop();
63 | id = id != null ? parseInt(id) : null;
64 | if(id != null && incomingConnections[id] != null) {
65 | }
66 | // Render the socket list
67 | renderSocketList(incomingConnections, response);
68 | } else if(request.url.indexOf('/rest/kill_random_socket')) {
69 | // Grab all the connection ids
70 | var keys = Object.keys(incomingConnections);
71 | // Grab a random one in the space
72 | var id = keys[Math.floor(Math.random(keys.length))];
73 | // Terminate the connection
74 |
75 | } else {
76 | // Write 401 error out
77 | response.writeHead(401, { 'Content-Type': 'text/plain'});
78 | response.write("No such page found");
79 | response.end();
80 | }
81 | });
82 | // Listen
83 | webserver.listen(webServerPort);
84 |
85 | var renderSocketList = function(_incomingConnections, _response) {
86 | // Write out the list of available sockets we can kill if we wish
87 | _response.writeHead(200, { 'Content-Type': 'text/html'});
88 | // Map the array
89 | var socketids = Object.keys(_incomingConnections).map(function(item) {
90 | return format("
Socket %s [Close] ", item, item);
91 | });
92 | // Write out the data
93 | _response.write(format("", socketids.join("")))
94 | _response.end();
95 | }
96 |
97 | var terminateConnection = function(id) {
98 | // Get the connections
99 | var incomingConnection = incomingConnections[id];
100 | var outgoingConnection = outgoingConnections[id];
101 | // Remove from the list
102 | delete incomingConnections[id];
103 | delete outgoingConnections[id];
104 | // Kill them
105 | incomingConnection.destroy();
106 | outgoingConnection.destroy();
107 | }
108 |
109 | // Listen to port
110 | server.listen(inputPort, inputHost, function() {
111 | console.log("server bound")
112 | });
--------------------------------------------------------------------------------
/dev/tools/doc-templates/changelog.ejs:
--------------------------------------------------------------------------------
1 | =========
2 | Changelog
3 | =========
4 |
5 | <%= content %>
--------------------------------------------------------------------------------
/dev/tools/doc-templates/function.ejs:
--------------------------------------------------------------------------------
1 | Function
--------------------------------------------------------------------------------
/dev/tools/doc-templates/github.ejs:
--------------------------------------------------------------------------------
1 | ==============================================
2 | Github libraries and projects using the driver
3 | ==============================================
4 |
5 | <%
6 | var addLine = function(char, length) {
7 | var chars = [];
8 | for(var i = 0; i < length; i++) chars[i] = char;
9 | return chars.join('');
10 | }
11 |
12 | var keys = Object.keys(objectByTags);
13 | for(var j = 0; j < keys.length; j++) {
14 | var key = keys[j];
15 | var entries = objectByTags[key];
16 | var title = tags[key] != null ? tags[key] : key;
17 |
18 | %><%= format("%s\n%s\n\n", title, addLine('-', title.length)) %><%
19 |
20 | // Iterate over all the objects
21 | for(var i = 0; i < entries.length; i++) {
22 | // for(var i = 0; i < 1; i++) {
23 | var entry = entries[i];
24 | var content = entry.content;
25 |
26 | if(content != null) {
27 | // Parse the pushed at date
28 | var t = Date.parse(content.pushed_at );
29 | var d = new Date();
30 | d.setTime(t);
31 |
32 | // If there is no push newer than four months (31 days * 4, per the constant below) mark the project inactive
33 | var fourMonths = 1000*60*60*24*31*4;
34 | var currentDate = (new Date()).getTime() - fourMonths;
35 |
36 | var activityArrow = ".. image:: ../static/active.png";
37 | if(t < currentDate) {
38 | var activityArrow = ".. image:: ../static/nonactive.png";
39 | }
40 |
41 | // var minutes = 1000 * 60;
42 | // var hours = minutes * 60;
43 | // var days = hours * 24;
44 | // var years = days * 365;
45 | // var y = t / years;
46 |
47 | // document.write("It's been: " + y + " years from Jan 1, 1970");
48 | // document.write(" to Jul 8, 2005.");
49 |
50 | %><%- format(".. topic:: %s\n\n\ %s\n", content.name, content.description) %><%
51 | %>
52 | .. list-table::
53 | :widths: 25 85
54 | :header-rows: 0
55 |
56 | * - **Activity Level**
57 | - <%- activityArrow %>
58 | * - **Last push**
59 | - <%= format("%s:%s:%s on %s/%s/%s", d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(), d.getUTCDate(), d.getUTCMonth(), d.getUTCFullYear()) %>
60 | * - **Homepage**
61 | - <%= format("http://%s", content.homepage) %>
62 | * - **Url**
63 | - <%= content.html_url %>
64 | * - **Clone Url**
65 | - <%= content.clone_url %>
66 | * - **Forks**
67 | - <%= content.forks %>
68 | * - **Watchers**
69 | - <%= content.watchers %>
70 |
71 | <%
72 | }
73 | }
74 | }
75 |
76 | %>
--------------------------------------------------------------------------------
/dev/tools/doc-templates/index.ejs:
--------------------------------------------------------------------------------
1 | ==================
2 | <%- title %>
3 | ==================
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | <%
9 | // Emit one indented toctree entry per generated document name; the
10 | // scriptlet itself produces no output, only the format() call does.
11 | for(var i = 0; i < entries.length; i++) {
12 | // Classname
13 | var name = entries[i];
14 | // Write out the name
15 | %><%= format("  %s\n", name) %><%
16 | }
17 | %>
--------------------------------------------------------------------------------
/dev/tools/doc-templates/index_no_header.ejs:
--------------------------------------------------------------------------------
1 | .. toctree::
2 | :maxdepth: 1
3 |
4 | <%
5 | // Same as index.ejs but without the page title header: emit one
6 | // indented toctree entry per generated document name.
7 | for(var i = 0; i < entries.length; i++) {
8 | // Classname
9 | var name = entries[i];
10 | // Write out the name
11 | %><%= format("  %s\n", name) %><%
12 | }
13 | %>
--------------------------------------------------------------------------------
/dev/tools/gleak.js:
--------------------------------------------------------------------------------
1 |
2 | var gleak = require('gleak')();
3 | gleak.ignore('AssertionError');
4 | gleak.ignore('testFullSpec_param_found');
5 | gleak.ignore('events');
6 |
7 | module.exports = gleak;
8 |
--------------------------------------------------------------------------------
/dev/tools/test_set_runner.js:
--------------------------------------------------------------------------------
1 | var nodeunit = require('nodeunit');
2 |
3 | // Let's parse the argv (ensure we have all the number of parameters)
4 | if(process.argv.length === 4) {
5 | // Pop the arguments off
6 | var options = JSON.parse(process.argv.pop());
7 | var files = JSON.parse(process.argv.pop());
8 |
9 | // Basic default test runner
10 | var runner = options['junit'] == true ? nodeunit.reporters.junit : nodeunit.reporters.default;
11 | var nativeExecution = options['native'] == null ? false : options['native'];
12 | // Remove junit tag if it exists
13 | delete options['junit'];
14 | delete options['native'];
15 |
16 | // Set native process
17 | if(nativeExecution) {
18 | process.env['TEST_NATIVE'] = 'TRUE';
19 | }
20 |
21 | // Let's set up nodeunit to run
22 | runner.run(files, options, function() {
23 | process.exit(0);
24 | });
25 | } else {
26 | console.error("Must pass in a list of files and options object");
27 | }
28 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | node-mongodb-native
2 | ===================
3 |
4 | Select a topic of interest for detailed description:
5 |
6 | * [Database](https://github.com/christkv/node-mongodb-native/tree/master/docs/database.md)
7 | * [Collections](https://github.com/christkv/node-mongodb-native/tree/master/docs/collections.md)
8 | * [Querying documents](https://github.com/christkv/node-mongodb-native/tree/master/docs/queries.md)
9 | * [Inserting/updating documents](https://github.com/christkv/node-mongodb-native/tree/master/docs/insert.md)
10 | * [GridStore](https://github.com/christkv/node-mongodb-native/tree/master/docs/gridfs.md)
11 | * [Indexes](https://github.com/christkv/node-mongodb-native/tree/master/docs/indexes.md)
12 | * [Replicasets](https://github.com/christkv/node-mongodb-native/tree/master/docs/replicaset.md)
13 |
14 | This documentation is incomplete, the best source for documentation on all possible methods is [the source for node-mongodb-native](https://github.com/christkv/node-mongodb-native) and [the MongoDB manual](http://www.mongodb.org/display/DOCS/Manual).
--------------------------------------------------------------------------------
/docs/collections.md:
--------------------------------------------------------------------------------
1 | Collections
2 | ===========
3 |
4 | See also:
5 |
6 | * [Database](database.md)
7 | * [Queries](queries.md)
8 |
9 | ## Collection objects
10 |
11 | Collection object is a pointer to a specific collection in the [database](database.md). If you want to [insert](insert.md) new records or
12 | [query](queries.md) existing ones then you need to have a valid collection object.
13 |
14 | **NB** Collection names can't start or end with a period nor contain a dollar sign! (`.tes$t` is not allowed)
15 |
16 | ## Creating collections
17 |
18 | Collections can be created with `createCollection`
19 |
20 | db.createCollection([[name[, options]], callback)
21 |
22 | where `name` is the name of the collection, options a set of configuration parameters and `callback` is a callback function. `db` is the database object.
23 |
24 | The first parameter for the callback is the error object (null if no error) and the second one is the pointer to the newly created collection. If strict mode is on and the table exists, the operation yields an error. With strict mode off (default) the function simply returns the pointer to the existing collection and does not truncate it.
25 |
26 | db.createCollection("test", function(err, collection){
27 | collection.insert({"test":"value"});
28 | });
29 |
30 | ## Creating collections options
31 | Several options can be passed to the `createCollection` function with `options` parameter.
32 |
33 | * `raw` - driver returns documents as bson binary Buffer objects, `default:false`
34 |
35 | ### Collection properties
36 |
37 | * `collectionName` is the name of the collection (not including the database name as a prefix)
38 | * `db` is the pointer to the corresponding database object
39 |
40 | Example of usage:
41 |
42 | console.log("Collection name: "+collection.collectionName)
43 |
44 | ## List existing collections
45 |
46 | ### List names
47 |
48 | Collections can be listed with `collectionNames`
49 |
50 | db.collectionNames(callback);
51 |
52 | `callback` gets two parameters - an error object (if an error occurred) and an array of collection names as strings.
53 |
54 | Collection names also include database name, so a collection named `posts` in a database `blog` will be listed as `blog.posts`.
55 |
56 | Additionally there are system collections which should not be altered without knowing exactly what you are doing, these collections can be identified with `system` prefix. For example `posts.system.indexes`.
57 |
58 | Example:
59 |
60 | var mongodb = require("mongodb"),
61 | mongoserver = new mongodb.Server("localhost"),
62 | db_connector = new mongodb.Db("blog", mongoserver);
63 |
64 | db_connector.open(function(err, db){
65 | db.collectionNames(function(err, collections){
66 | console.log(collections); // ["blog.posts", "blog.system.indexes"]
67 | });
68 | });
69 |
70 | ## List collections
71 |
72 | Collection objects can be listed with database method `collections`
73 |
74 | db.collections(callback)
75 |
76 | Where `callback` gets two parameters - an error object (if an error occured) and an array of collection objects.
77 |
78 | ## Selecting collections
79 |
80 | Existing collections can be opened with `collection`
81 |
82 | db.collection([[name[, options]], callback);
83 |
84 | If strict mode is off, then a new collection is created if not already present.
85 |
86 | ## Selecting collections options
87 | Several options can be passed to the `collection` function with `options` parameter.
88 |
89 | * `raw` - driver returns documents as bson binary Buffer objects, `default:false`
90 |
91 | ## Renaming collections
92 |
93 | A collection can be renamed with collection method `rename`
94 |
95 | collection.rename(new_name, callback);
96 |
97 | ## Removing records from collections
98 |
99 | Records can be erased from a collection with `remove`
100 |
101 | collection.remove([[query[, options]], callback]);
102 |
103 | Where
104 |
105 | * `query` is the query that records to be removed need to match. If not set all records will be removed
106 | * `options` indicate advanced options. For example use `{safe: true}` when using callbacks
107 | * `callback` callback function that gets two parameters - an error object (if an error occured) and the count of removed records
108 |
109 | ## Removing collections
110 |
111 | A collection can be dropped with `drop`
112 |
113 | collection.drop(callback);
114 |
115 | or with `dropCollection`
116 |
117 | db.dropCollection(collection_name, callback)
--------------------------------------------------------------------------------
/docs/gridfs.md:
--------------------------------------------------------------------------------
1 | GridStore
2 | ======
3 |
4 | GridFS is a scalable MongoDB *filesystem* for storing and retrieving large files. The default limit for a MongoDB record is 16MB, so to store data that is larger than this limit, GridFS can be used. GridFS shards the data into smaller chunks automatically. See [MongoDB documentation](http://www.mongodb.org/display/DOCS/GridFS+Specification) for details.
5 |
6 | GridStore is a single file inside GridFS that can be managed by the script.
7 |
8 | ## Open GridStore
9 |
10 | Opening a GridStore (a single file in GridFS) is a bit similar to opening a database. At first you need to create a GridStore object and then `open` it.
11 |
12 | var gs = new mongodb.GridStore(db, filename, mode[, options])
13 |
14 | Where
15 |
16 | * `db` is the database object
17 | * `filename` is the name of the file in GridFS that needs to be accessed/created
18 | * `mode` indicates the operation, can be one of:
19 | * "r" (Read): Looks for the file information in fs.files collection, or creates a new id for this object.
20 | * "w" (Write): Erases all chunks if the file already exist.
21 | * "w+" (Append): Finds the last chunk, and keeps writing after it.
22 | * `options` can be used to specify some metadata for the file, for example `content_type`, `metadata` and `chunk_size`
23 |
24 | Example:
25 |
26 | var gs = new mongodb.GridStore(db, "test.png", "w", {
27 | "content_type": "image/png",
28 | "metadata":{
29 | "author": "Daniel"
30 | },
31 | "chunk_size": 1024*4
32 | });
33 |
34 | When GridStore object is created, it needs to be opened.
35 |
36 | gs.open(callback);
37 |
38 | `callback` gets two parameters - an error object (if an error occurred) and the GridStore object.
39 |
40 | Opened GridStore object has a set of useful properties
41 |
42 | * `gs.length` - length of the file in bytes
43 | * `gs.contentType` - the content type for the file
44 | * `gs.uploadDate` - when the file was uploaded
45 | * `gs.metadata` - metadata that was saved with the file
46 | * `gs.chunkSize` - chunk size
47 |
48 | Example
49 |
50 | gs.open(function(err, gs){
51 | console.log("this file was uploaded at "+gs.uploadDate);
52 | });
53 |
54 | ## Writing to GridStore
55 |
56 | Writing can be done with `write`
57 |
58 | gs.write(data, callback)
59 |
60 | where `data` is a `Buffer` or a string, callback gets two parameters - an error object (if error occured) and result value which indicates if the write was successful or not.
61 |
62 | While the GridStore is not closed, every write is appended to the opened GridStore.
63 |
64 | ## Writing a file to GridStore
65 |
66 | This functions opens the gridstore, streams the contents of the file into gridstore, and closes the gridstore.
67 |
68 | gs.writeFile( file, callback )
69 |
70 | where
71 |
72 | * `file` is a file descriptor, or a string file path
73 | * `callback` is a function with two parameters - error object (if error occured) and the GridStore object.
74 |
75 |
76 | ## Reading from GridStore
77 |
78 | Reading from GridStore can be done with `read`
79 |
80 | gs.read([size], callback)
81 |
82 | where
83 |
84 | * `size` is the length of the data to be read
85 | * `callback` is a callback function with two parameters - error object (if an error occured) and data (binary string)
86 |
87 | ## Streaming from GridStore
88 |
89 | You can stream data as it comes from the database using `stream`
90 |
91 | gs.stream([autoclose=false])
92 |
93 | where
94 |
95 | * `autoclose` If true current GridStore will be closed when EOF and 'close' event will be fired
96 |
97 | The function returns [read stream](http://nodejs.org/docs/v0.4.12/api/streams.html#readable_Stream) based on this GridStore file. It supports the events 'read', 'error', 'close' and 'end'.
98 |
99 | ## Delete a GridStore
100 |
101 | GridStore files can be unlinked with `unlink`
102 |
103 | mongodb.GridStore.unlink(db, name, callback)
104 |
105 | Where
106 |
107 | * `db` is the database object
108 | * `name` is either the name of a GridStore object or an array of GridStore object names
109 | * `callback` is the callback function
110 |
111 | ## Closing the GridStore
112 |
113 | GridStore needs to be closed after usage. This can be done with `close`
114 |
115 | gs.close(callback)
116 |
117 | ## Check the existence of a GridStore file
118 |
119 | Checking if a file exists in GridFS can be done with `exist`
120 |
121 | mongodb.GridStore.exist(db, filename, callback)
122 |
123 | Where
124 |
125 | * `db` is the database object
126 | * `filename` is the name of the file to be checked or a regular expression
127 | * `callback` is a callback function with two parameters - an error object (if an error occured) and a boolean value indicating if the file exists or not
128 |
129 | ## Seeking in a GridStore
130 |
131 | Seeking can be done with `seek`
132 |
133 | gs.seek(position);
134 |
135 | This function moves the internal pointer to the specified position.
136 |
--------------------------------------------------------------------------------
/docs/indexes.md:
--------------------------------------------------------------------------------
1 | Indexes
2 | =======
3 |
4 | Indexes are needed to make queries faster. For example if you need to find records by a field named *username* and the field has a related index set, then the query will be a lot faster compared to if the index was not present.
5 |
6 | See [MongoDB documentation](http://www.mongodb.org/display/DOCS/Indexes) for details.
7 |
8 | ## Create indexes with createIndex()
9 |
10 | `createIndex` adds a new index to a collection. For checking if the index was already set, use `ensureIndex` instead.
11 |
12 | collection.createIndex(index[, options], callback)
13 |
14 | or
15 |
16 | db.createIndex(collectionname, index[, options], callback)
17 |
18 | where
19 |
20 | * `index` is the field or fields to be indexed. See *index field*
21 | * `options` are options, for example `{sparse: true}` to include only records that have indexed field set or `{unique: true}` for unique indexes. If the `options` is a boolean value, then it indicates if it's an unique index or not.
22 | * `callback` gets two parameters - an error object (if an error occured) and the name for the newly created index
23 |
24 | ## Ensure indexes with ensureIndex()
25 |
26 | Same as `createIndex` with the difference that the index is checked for existence before adding to avoid duplicate indexes.
27 |
28 | ## Index field
29 |
30 | Index field can be a simple string like `"username"` to index certain field (in this case, a field named as *username*).
31 |
32 | collection.ensureIndex("username",callback)
33 |
34 | It is possible to index fields inside nested objects, for example `"user.firstname"` to index field named *firstname* inside a document named *user*.
35 |
36 | collection.ensureIndex("user.firstname",callback)
37 |
38 | It is also possible to create mixed indexes to include several fields at once.
39 |
40 | collection.ensureIndex({firstname:1, lastname:1}, callback)
41 |
42 | or with tuples
43 |
44 | collection.ensureIndex([["firstname", 1], ["lastname", 1]], callback)
45 |
46 | The number value indicates direction - if it's 1, then it is an ascending value, if it's -1 then it's descending. For example if you have documents with a field *date* and you want to sort these records in descending order then you might want to add corresponding index
47 |
48 | collection.ensureIndex({date:-1}, callback)
49 |
50 | ## Remove indexes with dropIndex()
51 |
52 | All indexes can be dropped at once with `dropIndexes`
53 |
54 | collection.dropIndexes(callback)
55 |
56 | `callback` gets two parameters - an error object (if an error occured) and a boolean value true if operation succeeded.
57 |
58 | ## Get index information with indexInformation()
59 |
60 | `indexInformation` can be used to fetch some useful information about collection indexes.
61 |
62 | collection.indexInformation(callback)
63 |
64 | Where `callback` gets two parameters - an error object (if an error occured) and an index information object.
65 |
66 | The keys in the index object are the index names and the values are tuples of included fields.
67 |
68 | For example if a collection has two indexes - as a default an ascending index for the `_id` field and an additional descending index for `"username"` field, then the index information object would look like the following
69 |
70 | {
71 | "_id":[["_id", 1]],
72 | "username_-1":[["username", -1]]
73 | }
--------------------------------------------------------------------------------
/docs/replicaset.md:
--------------------------------------------------------------------------------
1 | Replicasets
2 | ===========
3 |
4 | ## Introduction
5 |
6 | Replica sets is the asynchronous master/slave replication added to Mongodb that takes care of all the failover and recovery for the member nodes. According to the mongodb documentation a replicaset is
7 |
8 | * Two or more nodes that are copies of each other
9 | * Automatic assignment of a primary(master) node if none is available
10 | * Drivers that automatically detect the new master and send writes to it
11 |
12 | More information at [Replicasets](http://www.mongodb.org/display/DOCS/Replica+Sets)
13 |
14 | ## Driver usage
15 |
16 | To create a new replicaset follow the instructions on the mongodb site to setup the config and the replicaset instances. Then using the driver.
17 |
18 | var replSet = new ReplSetServers( [
19 | new Server( 127.0.0.1, 30000, { auto_reconnect: true } ),
20 | new Server( 127.0.0.1, 30001, { auto_reconnect: true } ),
21 | new Server( 127.0.0.1, 30002, { auto_reconnect: true } )
22 | ],
23 | {rs_name:RS.name}
24 | );
25 |
26 | var db = new Db('integration_test_', replSet);
27 | db.open(function(err, p_db) {
28 | // Do your app stuff :)
29 | })
30 |
31 | The ReplSetServers object has the following parameters
32 |
33 | var replSet = new ReplSetServers(servers, options)
34 |
35 | Where
36 |
37 | * `servers` is an array of `Server` objects
38 | * `options` can contain the following options
39 |
40 | ## Replicaset options
41 | Several options can be passed to the `Replicaset` constructor with `options` parameter.
42 |
43 | * `rs_name` is the name of the replicaset you configured when you started the server, you can have multiple replicasets running on your servers.
44 | * `read_secondary` sets the driver to read from secondary servers (slaves) instead of only from the primary(master) server.
45 | * `socketOptions` - a collection of pr socket settings
46 |
47 | ## Socket options
48 | Several options can be set for the `socketOptions`.
49 |
50 | * `timeout` = set seconds before connection times out `default:0`
51 | * `noDelay` = Disables the Nagle algorithm `default:true`
52 | * `keepAlive` = Set if keepAlive is used `default:0`, which means no keepAlive, set higher than 0 for keepAlive
53 | * `encoding` = 'ascii'|'utf8'|'base64' `default:null`
--------------------------------------------------------------------------------
/docs/sphinx-docs/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | source/api-generated
3 | source/api-bson-generated
4 | source/api-articles
5 | source/markdown-docs
6 | source/changelog
7 | source/github
8 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS = -c ./
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
14 |
15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
16 |
17 | help:
18 | @echo "Please use \`make <target>' where <target> is one of"
19 | @echo " html to make standalone HTML files"
20 | @echo " dirhtml to make HTML files named index.html in directories"
21 | @echo " singlehtml to make a single large HTML file"
22 | @echo " pickle to make pickle files"
23 | @echo " json to make JSON files"
24 | @echo " htmlhelp to make HTML files and a HTML help project"
25 | @echo " qthelp to make HTML files and a qthelp project"
26 | @echo " devhelp to make HTML files and a Devhelp project"
27 | @echo " epub to make an epub"
28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
29 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
30 | @echo " text to make text files"
31 | @echo " man to make manual pages"
32 | @echo " changes to make an overview of all changed/added/deprecated items"
33 | @echo " linkcheck to check all external links for integrity"
34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
35 |
36 | clean:
37 | -rm -rf $(BUILDDIR)/*
38 |
39 | html:
40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
41 | @echo
42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
43 |
44 | dirhtml:
45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
48 |
49 | singlehtml:
50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
51 | @echo
52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
53 |
54 | pickle:
55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
56 | @echo
57 | @echo "Build finished; now you can process the pickle files."
58 |
59 | json:
60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
61 | @echo
62 | @echo "Build finished; now you can process the JSON files."
63 |
64 | htmlhelp:
65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
66 | @echo
67 | @echo "Build finished; now you can run HTML Help Workshop with the" \
68 | ".hhp project file in $(BUILDDIR)/htmlhelp."
69 |
70 | qthelp:
71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
72 | @echo
73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MongoDB.qhcp"
76 | @echo "To view the help file:"
77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MongoDB.qhc"
78 |
79 | devhelp:
80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
81 | @echo
82 | @echo "Build finished."
83 | @echo "To view the help file:"
84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/MongoDB"
85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MongoDB"
86 | @echo "# devhelp"
87 |
88 | epub:
89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
90 | @echo
91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
92 |
93 | latex:
94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
95 | @echo
96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
98 | "(use \`make latexpdf' here to do that automatically)."
99 |
100 | latexpdf:
101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
102 | @echo "Running LaTeX files through pdflatex..."
103 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
105 |
106 | text:
107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
108 | @echo
109 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
110 |
111 | man:
112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
113 | @echo
114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
115 |
116 | changes:
117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
118 | @echo
119 | @echo "The overview file is in $(BUILDDIR)/changes."
120 |
121 | linkcheck:
122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
123 | @echo
124 | @echo "Link check complete; look for any errors in the above output " \
125 | "or in $(BUILDDIR)/linkcheck/output.txt."
126 |
127 | doctest:
128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
129 | @echo "Testing of doctests in the sources finished, look at the " \
130 | "results in $(BUILDDIR)/doctest/output.txt."
131 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/content/awesomeappsvideo.rst:
--------------------------------------------------------------------------------
1 | ======================================
2 | Awesome Node.js + MongoDB Applications
3 | ======================================
4 |
5 | Node.js blackboard - socket.io
6 | ------------------------------
7 | Nodejs test using express, jade, stylus, socket.io and Mongodb
8 | for persist the data
9 | Source: http://github.com/gotik/nodejs-blackboard
10 | http://node.kamikazepanda.com
11 |
12 | .. raw:: html
13 |
14 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/content/tutorials.rst:
--------------------------------------------------------------------------------
1 | ===================================
2 | Tutorials using Node.JS and MongoDB
3 | ===================================
4 |
5 | Node.js and MongoDB Tutorial - Using CoffeeScript and Mongoose (ODM)
6 | --------------------------------------------------------------------
7 | This is a brief tutorial on how to use Node.js (using CoffeeScript) to connect to a MongoDB (using Mongoose) for a small project I am working on with a group of awesome students.
8 |
9 | .. raw:: html
10 |
11 |
12 |
13 | Part 1/6 of the tutorial
14 | Part 2/6 of the tutorial
15 |
16 |
17 |
18 |
19 |
20 | Part 3/6 of the tutorial
21 | Part 4/6 of the tutorial
22 |
23 |
24 |
25 |
26 |
27 | Part 5/6 of the tutorial
28 | Part 6/6 of the tutorial
29 |
30 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/contents.rst:
--------------------------------------------------------------------------------
1 | ======================================
2 | Node.JS MongoDB Driver Manual Contents
3 | ======================================
4 |
5 | .. toctree::
6 | :maxdepth: 3
7 |
8 | api-generated/db
9 | api-generated/collection
10 | api-generated/admin
11 | api-generated/cursor
12 | api-generated/cursorstream
13 | api-generated/grid
14 | api-generated/gridstore
15 | api-generated/readstream
16 | api-bson-generated/bson
17 | api-bson-generated/objectid
18 | api-bson-generated/binary
19 | api-bson-generated/code
20 | api-bson-generated/double
21 | api-bson-generated/long
22 | api-bson-generated/timestamp
23 | api-bson-generated/maxkey
24 | api-bson-generated/symbol
25 |
26 | - :ref:`genindex`
27 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/index.rst:
--------------------------------------------------------------------------------
1 | =================================
2 | The Node.JS MongoDB Driver Manual
3 | =================================
4 |
5 | Usage
6 | -----
7 |
8 | .. toctree::
9 | :maxdepth: 1
10 |
11 | markdown-docs/index
12 |
13 | Tutorials
14 | ---------
15 |
16 | .. toctree::
17 | :maxdepth: 3
18 |
19 | api-articles/index
20 |
21 | API Documentation
22 | -----------------
23 |
24 | .. toctree::
25 | :maxdepth: 2
26 |
27 | api-generated/index
28 | api-bson-generated/index
29 |
30 |
31 | External Tutorials
32 | ------------------
33 |
34 | * http://howtonode.org/express-mongodb, Blog rolling with mongoDB, express and Node.js
35 | * http://nodetuts.com/tutorials/18-mongodb-and-mongoose.html, Node Tuts - episode 18 - Node.js, MongoDB and Mongoose from Pedro Teixeira on Vimeo.
36 | * http://howtonode.org/node-js-and-mongodb-getting-started-with-mongojs, Node.js and MongoDB - Getting started with MongoJS
37 |
38 | Video Presentations about different topics around the MongoDB and Node.js
39 | -------------------------------------------------------------------------
40 |
41 | .. toctree::
42 | :maxdepth: 3
43 |
44 | content/nodejsvideo
45 | content/awesomeappsvideo
46 | content/tutorials
47 |
48 | Projects and Libraries using the driver
49 | ---------------------------------------
50 |
51 | .. toctree::
52 | :maxdepth: 1
53 |
54 | github/github
55 |
56 | MongoDB in General
57 | ------------------
58 | There are two main places to learn about MongoDB itself. I've included links below.
59 |
60 | * http://www.mongodb.org, all the documentation for the database
61 | * http://www.10gen.com/presentations, lots of presentations and slides about using the database.
62 |
63 | There are also some very good books out these days
64 |
65 | * `MongoDB: The Definitive Guide `_
66 | * `MongoDB in Action `_
67 | * `The Definitive Guide to MongoDB: The NoSQL Database for Cloud and Desktop Computing `_
68 | * `50 Tips and Tricks for MongoDB Developers `_
69 | * `Scaling MongoDB `_
70 | * `MongoDB and Python: Patterns and processes for the popular document-oriented database `_
71 | * `MongoDB and PHP `_
72 | * `PHP and MongoDB Web Development Beginner's Guide `_
73 | * `Node Web Development `_
74 | * `Document Design for MongoDB `_
75 |
76 | Changelog
77 | ---------
78 |
79 | .. toctree::
80 | :maxdepth: 1
81 |
82 | changelog/changelog
83 |
84 | Indices
85 | -------
86 |
87 | - :ref:`genindex`
88 | - :ref:`search`
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/static/.mongodb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/docs/sphinx-docs/source/static/.mongodb
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/static/active.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/docs/sphinx-docs/source/static/active.png
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/static/logo-mongodb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/docs/sphinx-docs/source/static/logo-mongodb.png
--------------------------------------------------------------------------------
/docs/sphinx-docs/source/static/nonactive.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/docs/sphinx-docs/source/static/nonactive.png
--------------------------------------------------------------------------------
/docs/sphinx-docs/templates/.mongodb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/docs/sphinx-docs/templates/.mongodb
--------------------------------------------------------------------------------
/docs/sphinx-docs/themes/mongodb/globaltoc.html:
--------------------------------------------------------------------------------
1 | {#
2 | basic/globaltoc.html
3 | ~~~~~~~~~~~~~~~~~~~~
4 |
5 | Sphinx sidebar template: global table of contents.
6 |
7 | :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
8 | :license: BSD, see LICENSE for details.
9 | #}
10 |
11 |
12 | {{ toctree(maxdepth=3) }}
13 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/themes/mongodb/localtoc.html:
--------------------------------------------------------------------------------
1 | {#
2 | basic/localtoc.html
3 | ~~~~~~~~~~~~~~~~~~~
4 |
5 | Sphinx sidebar template: local table of contents.
6 |
7 | :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
8 | :license: BSD, see LICENSE for details.
9 | #}
10 | {{ _('Page Contents') }}
11 | {{ toc }}
12 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/themes/mongodb/relations.html:
--------------------------------------------------------------------------------
1 | {#
2 | basic/relations.html
3 | ~~~~~~~~~~~~~~~~~~~~
4 |
5 | Sphinx sidebar template: relation links.
6 |
7 | :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
8 | :license: BSD, see LICENSE for details.
9 | #}
10 | {%- if prev %}
11 | {{ _('Previous page') }}
12 | {{ prev.title }}
14 | {%- endif %}
15 | {%- if next %}
16 | {{ _('Next page') }}
17 | {{ next.title }}
19 | {%- endif %}
20 |
--------------------------------------------------------------------------------
/docs/sphinx-docs/themes/mongodb/theme.conf:
--------------------------------------------------------------------------------
1 | [theme]
2 | inherit = basic
3 | stylesheet = mongodb-docs.css
4 | pygments_style = tango
5 |
--------------------------------------------------------------------------------
/examples/admin.js:
--------------------------------------------------------------------------------
// Example: administrative commands via the Admin interface — get/set the
// profiling level, read profiling info, and validate a collection.
var Db = require('../lib/mongodb').Db,
  Connection = require('../lib/mongodb').Connection,
  Server = require('../lib/mongodb').Server;

// Host/port come from the environment when set, otherwise defaults.
var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.dropDatabase(function(err, result){
    db.dropCollection('test', function(err, result) {
      db.createCollection('test', function(err, collection) {

        // Erase all records in collection
        collection.remove({}, function(err, r) {
          db.admin(function(err, admin) {

            // Profiling level set/get
            // NOTE(review): this call and the setProfilingLevel below are
            // issued concurrently — their completion order is not guaranteed.
            admin.profilingLevel(function(err, profilingLevel) {
              console.log("Profiling level: " + profilingLevel);
            });

            // Start profiling everything
            admin.setProfilingLevel('all', function(err, level) {
              console.log("Profiling level: " + level);

              // Read records, creating a profiling event
              collection.find(function(err, cursor) {
                cursor.toArray(function(err, items) {
                  // Stop profiling
                  admin.setProfilingLevel('off', function(err, level) {
                    // Print all profiling info
                    admin.profilingInfo(function(err, info) {
                      console.dir(info);

                      // Validate returns a hash if all is well or return an error hash if there is a
                      // problem.
                      admin.validateCollection(collection.collectionName, function(err, result) {
                        console.dir(result);
                        db.close();
                      });
                    });
                  });
                });
              });
            });
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/capped.js:
--------------------------------------------------------------------------------
// Demonstrates capped collections: fixed-size collections that silently
// evict the oldest documents once the size or document-count limit is hit.
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server;

// Connection settings from the environment, with local defaults.
var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.dropCollection('test', function(err, result) {
    // A capped collection keeps at most 'size' bytes / 'max' documents;
    // inserting past the limit pushes the oldest entries out.
    db.createCollection('test', {'capped':true, 'size':1024, 'max':12}, function(err, collection) {
      // Insert far more documents than the cap allows
      for(var n = 0; n < 100; n++) {
        collection.insert({'a':n});
      }

      // Only the 12 most recent documents survive
      collection.find(function(err, cursor) {
        cursor.toArray(function(err, docs) {
          console.log("The number of records: " + docs.length);
          db.close();
        })
      })
    });
  });
});
--------------------------------------------------------------------------------
/examples/cursor.js:
--------------------------------------------------------------------------------
// Demonstrates the different ways to consume a query cursor:
// each(), toArray(), and nextObject().
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.collection('test', function(err, collection) {
    // Start from an empty collection
    collection.remove(function(err, result) {

      // Seed three documents
      for(var n = 0; n < 3; n++) {
        collection.insert({'a':n});
      }

      // A cursor is lazy: no query is sent until data is actually requested.

      // 1) Iterate with each()
      collection.find(function(err, cursor) {
        cursor.each(function(err, doc) {
          if(doc != null) console.dir(doc);
        });
      });

      // 2) Materialize the whole result into an array
      collection.find(function(err, cursor) {
        cursor.toArray(function(err, docs) {
          console.log("count: " + docs.length);
        });
      });

      // 3) each() still works after toArray() — the cursor iterates over
      //    the copy of the array it caches internally.
      collection.find(function(err, cursor) {
        cursor.toArray(function(err, docs) {
          cursor.each(function(err, doc) {
            if(doc != null) console.dir(doc);
          });
        });
      });

      // 4) Pull a single document with nextObject()
      collection.find(function(err, cursor) {
        cursor.nextObject(function(err, doc) {
          if(doc != null) console.dir(doc);
        });
      });

      // 5) nextObject() yields null once the cursor is exhausted
      collection.find(function(err, cursor) {
        cursor.nextObject(function(err, doc) {
          cursor.nextObject(function(err, doc) {
            cursor.nextObject(function(err, doc) {
              cursor.nextObject(function(err, doc) {
                console.log("nextObject returned: ");
                console.dir(doc);
                db.close();
              });
            });
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/index.js:
--------------------------------------------------------------------------------
// Index management walkthrough: insert random documents, create a compound
// index, inspect it, drop it, and inspect again.
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server,
    mongo = require('../lib/mongodb');

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log(">> Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  console.log(">> Dropping collection test");
  db.dropCollection('test', function(err, result) {
    console.log("dropped: ");
    console.dir(result);
  });

  console.log(">> Creating collection test");
  db.collection('test', function(err, collection) {
    console.log("created: ");
    console.dir(collection);

    // Build a batch of random test documents
    var objectCount = 100;
    var docs = [];
    var messages = ["hola", "hello", "aloha", "ciao"];
    console.log(">> Generate test data");
    for(var n = 0; n < objectCount; n++) {
      docs.push({'number':n, 'rndm':((5*Math.random()) + 1), 'msg':messages[parseInt(4*Math.random())]})
    }
    console.log("generated");

    console.log(">> Inserting data (" + docs.length + ")");
    collection.insert(docs);
    console.log("inserted");

    console.log(">> Creating index")
    collection.createIndex([['all'], ['_id', 1], ['number', 1], ['rndm', 1], ['msg', 1]], function(err, indexName) {
      console.log("created index: " + indexName);

      console.log(">> Gathering index information");
      collection.indexInformation(function(err, doc) {
        console.log("indexInformation: ");
        console.dir(doc);

        console.log(">> Dropping index");
        collection.dropIndex('all_1__id_1_number_1_rndm_1_msg_1', function(err, result) {
          console.log("dropped: ");
          console.dir(result);

          console.log(">> Gathering index information");
          collection.indexInformation(function(err, doc) {
            console.log("indexInformation: ");
            console.dir(doc);
            console.log(">> Closing connection");
            db.close();
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/info.js:
--------------------------------------------------------------------------------
// Example: database/collection introspection — collection names, extended
// collection info, and index information.
var Db = require('../lib/mongodb').Db,
  Connection = require('../lib/mongodb').Connection,
  Server = require('../lib/mongodb').Server;

// Host/port come from the environment when set, otherwise defaults.
var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.collection('test', function(err, collection) {

    // Remove all existing documents in collection
    collection.remove(function(err, result) {

      // Insert 3 records
      for(var i = 0; i < 3; i++) {
        collection.insert({'a':i});
      }

      // NOTE(review): the three introspection calls below are issued
      // concurrently; their output order is not deterministic.

      // Show collection names in the database
      db.collectionNames(function(err, names) {
        names.forEach(function(name) {
          console.dir(name);
        });
      });

      // More information about each collection
      db.collectionsInfo(function(err, cursor) {
        cursor.toArray(function(err, items) {
          items.forEach(function(item) {
            console.dir(item);
          });
        });
      })

      // Index information; this branch also drops the collection and
      // closes the connection when done.
      db.createIndex('test', 'a', function(err, indexName) {
        db.indexInformation('test', function(err, doc) {
          console.dir(doc);
          collection.drop(function(err, result) {
            db.close();
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/oplog.js:
--------------------------------------------------------------------------------
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server,
    Cursor = require('../lib/mongodb').Cursor;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

/**
 * Slave tails the master oplog (local.oplog.$main) and feeds every inserted
 * or updated document to the callbacks registered via onObject().
 */
var Slave = function() {  // FIX: was `Slave = function()` — an implicit global
  this.running = false;
  this.callbacks = [];
  //no native_parser right now (because timestamps)
  //no strict mode (because system db signed with $ db.js line 189)
  //connect without dbName for querying not only "local" db
  console.log("Connecting to " + host + ":" + port);
  this.db = new Db('testing', new Server(host, port, {}), {});
}

//start watching
Slave.prototype.start = function() {
  var self = this;
  if (this.running) return;

  this.db.open(function(err, db) {
    if (err) {
      console.log('> MongoSlave error' + err);
      process.exit(1);
    }

    db.collection('local.oplog.$main', function(err, collection) {
      if (! collection) {
        console.log('> MongoSlave - local.oplog.$main not found');
        self.stop();
        return false;
      }

      process.on('SIGINT', function () {
        self.stop(); //tailable cursor should be stopped manually
      });

      //get last row for init TS
      collection.find({}, {'limit': 1, 'sort': [['$natural', -1]]}, function(err, cursor) {
        cursor.toArray(function(err, items) {
          if (items.length) {
            console.log('> MongoSlave started');
            self.running = true;
            self._runSlave(collection, items[0]['ts']);
          } else if (err) {
            console.log(err);
            self.stop();
          }
        });
      });
    });
  });
}

//stop watching
Slave.prototype.stop = function() {
  if (!this.running) return;
  console.log('> MongoSlave stopped');
  this.running = false;
  this.db.close();
}

// Tail the oplog from `time` onward; restarts itself if the tailable
// cursor breaks while the slave is still running.
Slave.prototype._runSlave = function(collection, time) {

  var self = this;

  //watch oplog INFINITE (until Slave.stop())
  collection.find({'ts': {'$gt': time}}, {'tailable': 1, 'sort': [['$natural', 1]]}, function(err, cursor) {
    cursor.each(function(err, item) {
      if (cursor.state == Cursor.CLOSED) { //broken cursor
        self.running && self._runSlave(collection, time);
        return;
      }
      // FIX: each() can deliver a null item (end-of-batch signal); guard
      // before dereferencing item['ts'] below, which previously threw.
      if (item == null) return;
      time = item['ts'];

      switch(item['op']) {
        case 'i': //inserted
          self._emitObj(item['o']);
          break;
        case 'u': //updated — fetch the current version of the document
          self.db.collection(item['ns'], function(err, collection) {
            collection.findOne(item['o2']['_id'], {}, function(err, item) {
              item && self._emitObj(item);
            });
          });
          break;
        case 'd': //deleted
          //nothing to do
          break;
      }
    });
  });
}

// Fan an object out to every registered callback.
Slave.prototype._emitObj = function (obj) {
  for(var i in this.callbacks) this.callbacks[i].call(this, obj);
}

// Register a listener that receives every replicated object.
Slave.prototype.onObject = function(callback) {
  this.callbacks.push(callback);
}


//just for example
var watcher = new Slave();

watcher.onObject(function(obj) {
  console.dir(obj);
});

watcher.start();
--------------------------------------------------------------------------------
/examples/replSetServersSimple.js:
--------------------------------------------------------------------------------
// Connects to a three-member replica set and runs the basic insert /
// count / iterate / drop cycle against it.
var Db = require('../lib/mongodb').Db,
    Admin = require('../lib/mongodb').Admin,
    DbCommand = require('../lib/mongodb/commands/db_command').DbCommand,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server,
    ReplSetServers = require('../lib/mongodb').ReplSetServers,
    CheckMaster = require('../lib/mongodb').CheckMaster;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

var port1 = 27018;
var port2 = 27019;


console.log("Connecting to " + host + ":" + port);
console.log("Connecting to " + host + ":" + port1);
console.log("Connecting to " + host + ":" + port2);

// Seed list for the replica set (same order as the original example)
var server = new Server(host, port, {});
var server1 = new Server(host, port1, {});
var server2 = new Server(host, port2, {});
var servers = [server2, server1, server];

var replSet = new ReplSetServers(servers);

var db = new Db('mongo-example', replSet, {native_parser:true});
db.open(function(err, db) {

  db.dropDatabase(function(err, result) {
    db.collection('test', function(err, collection) {
      collection.remove(function(err, collection) {
        // Seed three documents
        for(var n = 0; n < 3; n++) {
          collection.insert({'a':n});
        }

        collection.count(function(err, count) {
          console.log("There are " + count + " records in the test collection. Here they are:");

          collection.find(function(err, cursor) {
            cursor.each(function(err, doc) {
              if(doc != null) {
                console.dir(doc);
                console.log("created at " + new Date(doc._id.generationTime) + "\n")
              } else {
                // Null signals end of the cursor: destroy the collection
                // and disconnect.
                collection.drop(function(err, collection) {
                  db.close();
                });
              }
            });
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/simple.js:
--------------------------------------------------------------------------------
// Basic end-to-end example: insert a few documents, count them, iterate
// the cursor, then drop the collection and disconnect.
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.dropDatabase(function(err, result) {
    db.collection('test', function(err, collection) {
      // Start from a clean collection
      collection.remove({}, function(err, result) {
        // Seed three documents
        for(var n = 0; n < 3; n++) {
          collection.insert({'a':n});
        }

        collection.count(function(err, count) {
          console.log("There are " + count + " records in the test collection. Here they are:");

          collection.find(function(err, cursor) {
            cursor.each(function(err, doc) {
              if(doc != null) {
                console.dir(doc);
                console.log("created at " + new Date(doc._id.generationTime) + "\n")
              } else {
                // Null signals end of the cursor: destroy the collection
                // and shut down.
                collection.drop(function(err, collection) {
                  db.close();
                });
              }
            });
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/strict.js:
--------------------------------------------------------------------------------
// Demonstrates "strict" mode: referencing a missing collection, or
// re-creating an existing one, yields an error instead of succeeding.
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {native_parser:true});
db.open(function(err, db) {
  db.dropCollection('does-not-exist', function(err, result) {
    db.createCollection('test', function(err, collection) {
      // Turn strict mode on
      db.strict = true;

      // Referencing a collection that does not exist fails in strict mode
      db.collection('does-not-exist', function(err, collection) {
        if(err instanceof Error) {
          console.log("expected error: " + err.message);
        }

        // Re-creating a collection that already exists fails too
        db.createCollection('test', function(err, collection) {
          if(err instanceof Error) {
            console.log("expected error: " + err.message);
          }

          // Turn strict mode back off and clean up
          db.strict = false;
          db.dropCollection('test', function(err, collection) {
            db.close();
          });
        });
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/types.js:
--------------------------------------------------------------------------------
// Inserts one document exercising every BSON value type the driver
// supports, reads it back, prints it, and cleans up.
var Db = require('../lib/mongodb').Db,
    Connection = require('../lib/mongodb').Connection,
    Server = require('../lib/mongodb').Server,
    BSON = require('../lib/mongodb').BSONPure;

var host = process.env['MONGO_NODE_DRIVER_HOST'] != null ? process.env['MONGO_NODE_DRIVER_HOST'] : 'localhost';
var port = process.env['MONGO_NODE_DRIVER_PORT'] != null ? process.env['MONGO_NODE_DRIVER_PORT'] : Connection.DEFAULT_PORT;

console.log("Connecting to " + host + ":" + port);
var db = new Db('node-mongo-examples', new Server(host, port, {}), {});
db.open(function(err, db) {
  db.collection('test', function(err, collection) {
    // Start from an empty collection
    collection.remove(function(err, result) {
      // One document covering all the available value types
      collection.insert({
        'array':[1,2,3],
        'string':'hello',
        'hash':{'a':1, 'b':2},
        'date':new Date(), // stored with millisecond resolution only
        'oid':new BSON.ObjectID(),
        'binary':new BSON.Binary("123"),
        'int':42,
        'float':33.3333,
        'regexp':/foobar/i,
        'regexp2':/foobar2/,
        'boolean':true,
        'where':new BSON.Code('this.x == 3'),
        'dbref':new BSON.DBRef(collection.collectionName, new BSON.ObjectID()),
        'null':null
      }, function(err, inserted) {
        // Read the document back and show it
        collection.findOne(function(err, doc) {
          console.dir(doc);
          collection.remove(function(err, collection) {
            db.close();
          });
        })
      });
    });
  });
});
--------------------------------------------------------------------------------
/examples/url.js:
--------------------------------------------------------------------------------
// Minimal example: connect through a mongodb:// URL instead of an
// explicit host/port pair.
var mongodb = require('../lib/mongodb'),
    Db = mongodb.Db,
    connect = mongodb.connect;

console.log('Connecting to ' + Db.DEFAULT_URL);
connect(Db.DEFAULT_URL, function(err, db) {
  db.dropDatabase(function(err, result) {
    db.collection('test', function(err, collection) {
      collection.insert({'a':1});
      db.close();
    });
  });
});
--------------------------------------------------------------------------------
/external-libs/bson/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | bson.node
3 | .lock-wscript
--------------------------------------------------------------------------------
/external-libs/bson/Makefile:
--------------------------------------------------------------------------------
NODE = node
name = all
JOBS = 1

# NOTE: a ./test directory sits next to this Makefile, so every target
# below must be declared .PHONY — otherwise `make test` (and any target
# shadowed by a same-named file) reports "up to date" and does nothing.

all:
	rm -rf build .lock-wscript bson.node
	node-waf configure build
	cp -R ./build/Release/bson.node . || true
	@$(NODE) --expose-gc test/test_bson.js
	@$(NODE) --expose-gc test/test_full_bson.js
# @$(NODE) --expose-gc test/test_stackless_bson.js

all_debug:
	rm -rf build .lock-wscript bson.node
	node-waf --debug configure build
	cp -R ./build/Release/bson.node . || true
	@$(NODE) --expose-gc test/test_bson.js
	@$(NODE) --expose-gc test/test_full_bson.js
# @$(NODE) --expose-gc test/test_stackless_bson.js

test:
	@$(NODE) --expose-gc test/test_bson.js
	@$(NODE) --expose-gc test/test_full_bson.js
# @$(NODE) --expose-gc test/test_stackless_bson.js

clang:
	rm -rf build .lock-wscript bson.node
	CXX=clang node-waf configure build
	cp -R ./build/Release/bson.node . || true
	@$(NODE) --expose-gc test/test_bson.js
	@$(NODE) --expose-gc test/test_full_bson.js
# @$(NODE) --expose-gc test/test_stackless_bson.js

clang_debug:
	rm -rf build .lock-wscript bson.node
	CXX=clang node-waf --debug configure build
	cp -R ./build/Release/bson.node . || true
	@$(NODE) --expose-gc test/test_bson.js
	@$(NODE) --expose-gc test/test_full_bson.js
# @$(NODE) --expose-gc test/test_stackless_bson.js

clean:
	rm -rf build .lock-wscript bson.node

# FIX: previously only `all` was phony; `test` collided with the ./test
# directory and silently became a no-op.
.PHONY: all all_debug test clang clang_debug clean
--------------------------------------------------------------------------------
/external-libs/bson/bson.h:
--------------------------------------------------------------------------------
// Native BSON serializer/deserializer addon for Node.js (v8 + node ObjectWrap).
//
// NOTE(review): this header appears mangled by text extraction — the
// #include targets and all template arguments (e.g. Handle<Value>,
// Persistent<FunctionTemplate>) have been stripped. Restore them from the
// original source before compiling.
#ifndef BSON_H_
#define BSON_H_

#include
#include
#include

using namespace v8;
using namespace node;

// Wraps the BSON encode/decode entry points exposed to JavaScript.
class BSON : public ObjectWrap {
  public:
  BSON() : ObjectWrap() {}
  ~BSON() {}

  // Registers the BSON constructor and its methods on `target`.
  static void Initialize(Handle target);
  static Handle BSONDeserializeStream(const Arguments &args);

  // JS based objects
  static Handle BSONSerialize(const Arguments &args);
  static Handle BSONDeserialize(const Arguments &args);

  // Calculate size of function
  static Handle CalculateObjectSize(const Arguments &args);
  static Handle SerializeWithBufferAndIndex(const Arguments &args);

  // Experimental
  static Handle CalculateObjectSize2(const Arguments &args);
  static Handle BSONSerialize2(const Arguments &args);

  // Constructor used for creating new BSON objects from C++
  static Persistent constructor_template;

  private:
  static Handle New(const Arguments &args);
  // Core recursive codec helpers shared by the public entry points.
  static Handle deserialize(BSON *bson, char *data, uint32_t dataLength, uint32_t startIndex, bool is_array_item);
  static uint32_t serialize(BSON *bson, char *serialized_object, uint32_t index, Handle name, Handle value, bool check_key, bool serializeFunctions);

  static char* extract_string(char *data, uint32_t offset);
  static const char* ToCString(const v8::String::Utf8Value& value);
  static uint32_t calculate_object_size(BSON *bson, Handle object, bool serializeFunctions);

  // Little-endian scalar read/write primitives used by the codec.
  static void write_int32(char *data, uint32_t value);
  static void write_int64(char *data, int64_t value);
  static void write_double(char *data, double value);
  static uint16_t deserialize_int8(char *data, uint32_t offset);
  static uint32_t deserialize_int32(char* data, uint32_t offset);
  static char *check_key(Local key);

  // BSON type instantiate functions
  Persistent longConstructor;
  Persistent objectIDConstructor;
  Persistent binaryConstructor;
  Persistent codeConstructor;
  Persistent dbrefConstructor;
  Persistent symbolConstructor;
  Persistent doubleConstructor;
  Persistent timestampConstructor;
  Persistent minKeyConstructor;
  Persistent maxKeyConstructor;

  // Equality Objects
  Persistent longString;
  Persistent objectIDString;
  Persistent binaryString;
  Persistent codeString;
  Persistent dbrefString;
  Persistent symbolString;
  Persistent doubleString;
  Persistent timestampString;
  Persistent minKeyString;
  Persistent maxKeyString;

  // Equality speed up comparison objects
  Persistent _bsontypeString;
  Persistent _longLowString;
  Persistent _longHighString;
  Persistent _objectIDidString;
  Persistent _binaryPositionString;
  Persistent _binarySubTypeString;
  Persistent _binaryBufferString;
  Persistent _doubleValueString;
  Persistent _symbolValueString;

  Persistent _dbRefRefString;
  Persistent _dbRefIdRefString;
  Persistent _dbRefDbRefString;
  Persistent _dbRefNamespaceString;
  Persistent _dbRefDbString;
  Persistent _dbRefOidString;

  // Decode JS function
  static Handle decodeLong(BSON *bson, char *data, uint32_t index);
  static Handle decodeTimestamp(BSON *bson, char *data, uint32_t index);
  static Handle decodeOid(BSON *bson, char *oid);
  static Handle decodeBinary(BSON *bson, uint32_t sub_type, uint32_t number_of_bytes, char *data);
  static Handle decodeCode(BSON *bson, char *code, Handle scope);
  static Handle decodeDBref(BSON *bson, Local ref, Local oid, Local db);

  // Experimental
  static uint32_t calculate_object_size2(Handle object);
  static uint32_t serialize2(char *serialized_object, uint32_t index, Handle name, Handle value, uint32_t object_size, bool check_key);
};

#endif  // BSON_H_
--------------------------------------------------------------------------------
/external-libs/bson/index.js:
--------------------------------------------------------------------------------
// Entry point for the native (C++) BSON parser: exposes the compiled
// bson.node addon alongside the pure-JS BSON type classes it
// interoperates with (Long, ObjectID, DBRef, ...).
var bson = require('./bson');
exports.BSON = bson.BSON;
exports.Long = require('../../lib/mongodb/bson/long').Long;
exports.ObjectID = require('../../lib/mongodb/bson/objectid').ObjectID;
exports.DBRef = require('../../lib/mongodb/bson/db_ref').DBRef;
exports.Code = require('../../lib/mongodb/bson/code').Code;
exports.Timestamp = require('../../lib/mongodb/bson/timestamp').Timestamp;
exports.Binary = require('../../lib/mongodb/bson/binary').Binary;
exports.Double = require('../../lib/mongodb/bson/double').Double;
exports.MaxKey = require('../../lib/mongodb/bson/max_key').MaxKey;
exports.MinKey = require('../../lib/mongodb/bson/min_key').MinKey;
exports.Symbol = require('../../lib/mongodb/bson/symbol').Symbol;

// Just add constants to the Native BSON parser
exports.BSON.BSON_BINARY_SUBTYPE_DEFAULT = 0;
exports.BSON.BSON_BINARY_SUBTYPE_FUNCTION = 1;
exports.BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY = 2;
exports.BSON.BSON_BINARY_SUBTYPE_UUID = 3;
exports.BSON.BSON_BINARY_SUBTYPE_MD5 = 4;
exports.BSON.BSON_BINARY_SUBTYPE_USER_DEFINED = 128;
--------------------------------------------------------------------------------
/external-libs/bson/wscript:
--------------------------------------------------------------------------------
1 | import Options
2 | from os import unlink, symlink, popen
3 | from os.path import exists
4 |
5 | srcdir = "."
6 | blddir = "build"
7 | VERSION = "0.1.0"
8 |
9 | def set_options(opt):
10 | opt.tool_options("compiler_cxx")
11 | opt.add_option( '--debug'
12 | , action='store_true'
13 | , default=False
14 | , help='Build debug variant [Default: False]'
15 | , dest='debug'
16 | )
17 |
18 | def configure(conf):
19 | conf.check_tool("compiler_cxx")
20 | conf.check_tool("node_addon")
21 | conf.env.append_value('CXXFLAGS', ['-O3', '-funroll-loops'])
22 |
23 | # conf.env.append_value('CXXFLAGS', ['-DDEBUG', '-g', '-O0', '-Wall', '-Wextra'])
24 | # conf.check(lib='node', libpath=['/usr/lib', '/usr/local/lib'], uselib_store='NODE')
25 |
def build(bld):
  """Define the build task: compile bson.cc into the bson node addon (shared lib)."""
  obj = bld.new_task_gen("cxx", "shlib", "node_addon")
  obj.target = "bson"
  obj.source = ["bson.cc"]
  # obj.uselib = "NODE"
31 |
def shutdown():
  """Post-build hook: symlink the built bson.node into the project root,
  or remove that symlink again when running `waf clean`."""
  # HACK to get bson.node out of build directory.
  # better way to do this?
  if Options.commands['clean']:
    if exists('bson.node'): unlink('bson.node')
  else:
    if exists('build/default/bson.node') and not exists('bson.node'):
      symlink('build/default/bson.node', 'bson.node')
40 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
// Package entry point: re-export the driver's public API from lib/mongodb.
module.exports = require('./lib/mongodb');
2 |
--------------------------------------------------------------------------------
/install.js:
--------------------------------------------------------------------------------
1 | var spawn = require('child_process').spawn,
2 | exec = require('child_process').exec;
3 |
// Informational banner shown on every install.
process.stdout.write("================================================================================\n");
process.stdout.write("=                                                                              =\n");
process.stdout.write("=  To install with C++ bson parser do                                          =\n");
process.stdout.write("=                                                                              =\n");
process.stdout.write("================================================================================\n");

// Check if we want to build the native code
// (driven by the npm package config flag `native`; defaults to 'false')
var build_native = process.env['npm_package_config_native'] != null ? process.env['npm_package_config_native'] : 'false';
build_native = build_native == 'true' ? true : false;
// If we are building the native bson extension ensure we use gmake if available
if(build_native) {
  // Check if we need to use gmake (e.g. BSD systems ship GNU make as gmake)
  exec('which gmake', function(err, stdout, stderr) {
    // Set up spawn command
    var make = null;
    // No gmake: build using plain make; `which` failing sets err
    if(err != null) {
      make = spawn('make', ['total']);
    } else {
      make = spawn('gmake', ['total']);
    }

    // Execute spawn: forward build output (stdout AND stderr) to our stdout
    make.stdout.on('data', function(data) {
      process.stdout.write(data);
    })

    make.stderr.on('data', function(data) {
      process.stdout.write(data);
    })

    make.on('exit', function(code) {
      process.stdout.write('child process exited with code ' + code + "\n");
    })
  });
}
40 |
41 |
--------------------------------------------------------------------------------
/lib/mongodb/commands/base_command.js:
--------------------------------------------------------------------------------
1 | /**
2 | Base object used for common functionality
3 | **/
4 | var BaseCommand = exports.BaseCommand = function() {
5 | };
6 |
7 | var id = 1;
8 | BaseCommand.prototype.getRequestId = function() {
9 | if (!this.requestId) this.requestId = id++;
10 | return this.requestId;
11 | };
12 |
13 | BaseCommand.prototype.updateRequestId = function() {
14 | this.requestId = id++;
15 | return this.requestId;
16 | };
17 |
18 | // OpCodes
19 | BaseCommand.OP_REPLY = 1;
20 | BaseCommand.OP_MSG = 1000;
21 | BaseCommand.OP_UPDATE = 2001;
22 | BaseCommand.OP_INSERT = 2002;
23 | BaseCommand.OP_GET_BY_OID = 2003;
24 | BaseCommand.OP_QUERY = 2004;
25 | BaseCommand.OP_GET_MORE = 2005;
26 | BaseCommand.OP_DELETE = 2006;
27 | BaseCommand.OP_KILL_CURSORS = 2007;
--------------------------------------------------------------------------------
/lib/mongodb/commands/delete_command.js:
--------------------------------------------------------------------------------
1 | var BaseCommand = require('./base_command').BaseCommand,
2 | inherits = require('util').inherits;
3 |
4 | /**
5 | Insert Document Command
6 | **/
7 | var DeleteCommand = exports.DeleteCommand = function(db, collectionName, selector) {
8 | BaseCommand.call(this);
9 |
10 | // Validate correctness off the selector
11 | var object = selector;
12 | if(Buffer.isBuffer(object)) {
13 | var object_size = object[0] | object[1] << 8 | object[2] << 16 | object[3] << 24;
14 | if(object_size != object.length) {
15 | var error = new Error("delete raw message size does not match message header size [" + object.length + "] != [" + object_size + "]");
16 | error.name = 'MongoError';
17 | throw error;
18 | }
19 | }
20 |
21 | this.collectionName = collectionName;
22 | this.selector = selector;
23 | this.db = db;
24 | };
25 |
26 | inherits(DeleteCommand, BaseCommand);
27 |
28 | DeleteCommand.OP_DELETE = 2006;
29 |
30 | /*
31 | struct {
32 | MsgHeader header; // standard message header
33 | int32 ZERO; // 0 - reserved for future use
34 | cstring fullCollectionName; // "dbname.collectionname"
35 | int32 ZERO; // 0 - reserved for future use
36 | mongo.BSON selector; // query object. See below for details.
37 | }
38 | */
39 | DeleteCommand.prototype.toBinary = function() {
40 | // Calculate total length of the document
41 | var totalLengthOfCommand = 4 + Buffer.byteLength(this.collectionName) + 1 + 4 + this.db.bson.calculateObjectSize(this.selector, false, true) + (4 * 4);
42 | // Let's build the single pass buffer command
43 | var _index = 0;
44 | var _command = new Buffer(totalLengthOfCommand);
45 | // Write the header information to the buffer
46 | _command[_index + 3] = (totalLengthOfCommand >> 24) & 0xff;
47 | _command[_index + 2] = (totalLengthOfCommand >> 16) & 0xff;
48 | _command[_index + 1] = (totalLengthOfCommand >> 8) & 0xff;
49 | _command[_index] = totalLengthOfCommand & 0xff;
50 | // Adjust index
51 | _index = _index + 4;
52 | // Write the request ID
53 | _command[_index + 3] = (this.requestId >> 24) & 0xff;
54 | _command[_index + 2] = (this.requestId >> 16) & 0xff;
55 | _command[_index + 1] = (this.requestId >> 8) & 0xff;
56 | _command[_index] = this.requestId & 0xff;
57 | // Adjust index
58 | _index = _index + 4;
59 | // Write zero
60 | _command[_index++] = 0;
61 | _command[_index++] = 0;
62 | _command[_index++] = 0;
63 | _command[_index++] = 0;
64 | // Write the op_code for the command
65 | _command[_index + 3] = (DeleteCommand.OP_DELETE >> 24) & 0xff;
66 | _command[_index + 2] = (DeleteCommand.OP_DELETE >> 16) & 0xff;
67 | _command[_index + 1] = (DeleteCommand.OP_DELETE >> 8) & 0xff;
68 | _command[_index] = DeleteCommand.OP_DELETE & 0xff;
69 | // Adjust index
70 | _index = _index + 4;
71 |
72 | // Write zero
73 | _command[_index++] = 0;
74 | _command[_index++] = 0;
75 | _command[_index++] = 0;
76 | _command[_index++] = 0;
77 |
78 | // Write the collection name to the command
79 | _index = _index + _command.write(this.collectionName, _index, 'utf8') + 1;
80 | _command[_index - 1] = 0;
81 |
82 | // Write zero
83 | _command[_index++] = 0;
84 | _command[_index++] = 0;
85 | _command[_index++] = 0;
86 | _command[_index++] = 0;
87 |
88 | // Document binary length
89 | var documentLength = 0
90 |
91 | // Serialize the selector
92 | // If we are passing a raw buffer, do minimal validation
93 | if(Buffer.isBuffer(this.selector)) {
94 | documentLength = this.selector.length;
95 | // Copy the data into the current buffer
96 | this.selector.copy(_command, _index);
97 | } else {
98 | documentLength = this.db.bson.serializeWithBufferAndIndex(this.selector, this.checkKeys, _command, _index) - _index + 1;
99 | }
100 |
101 | // Write the length to the document
102 | _command[_index + 3] = (documentLength >> 24) & 0xff;
103 | _command[_index + 2] = (documentLength >> 16) & 0xff;
104 | _command[_index + 1] = (documentLength >> 8) & 0xff;
105 | _command[_index] = documentLength & 0xff;
106 | // Update index in buffer
107 | _index = _index + documentLength;
108 | // Add terminating 0 for the object
109 | _command[_index - 1] = 0;
110 | return _command;
111 | };
--------------------------------------------------------------------------------
/lib/mongodb/commands/get_more_command.js:
--------------------------------------------------------------------------------
1 | var BaseCommand = require('./base_command').BaseCommand,
2 | inherits = require('util').inherits,
3 | binaryutils = require('../utils');
4 |
5 | /**
6 | Get More Document Command
7 | **/
8 | var GetMoreCommand = exports.GetMoreCommand = function(db, collectionName, numberToReturn, cursorId) {
9 | BaseCommand.call(this);
10 |
11 | this.collectionName = collectionName;
12 | this.numberToReturn = numberToReturn;
13 | this.cursorId = cursorId;
14 | this.db = db;
15 | };
16 |
17 | inherits(GetMoreCommand, BaseCommand);
18 |
19 | GetMoreCommand.OP_GET_MORE = 2005;
20 |
/**
 * Serializes this command into a single OP_GET_MORE wire-protocol message
 * Buffer: header, reserved int32, collection cstring, numberToReturn and
 * the 64-bit cursor id. All integers are written little-endian, byte by byte.
 *
 * @return {Buffer} the complete message ready to send to the server.
 */
GetMoreCommand.prototype.toBinary = function() {
  // Calculate total length of the document
  // = header int32 + collection cstring + numberToReturn int32 + cursorId int64 + 4 header int32s
  var totalLengthOfCommand = 4 + Buffer.byteLength(this.collectionName) + 1 + 4 + 8 + (4 * 4);
  // Let's build the single pass buffer command
  var _index = 0;
  var _command = new Buffer(totalLengthOfCommand);
  // Write the header information to the buffer (messageLength)
  _command[_index++] = totalLengthOfCommand & 0xff;
  _command[_index++] = (totalLengthOfCommand >> 8) & 0xff;
  _command[_index++] = (totalLengthOfCommand >> 16) & 0xff;
  _command[_index++] = (totalLengthOfCommand >> 24) & 0xff;

  // Write the request ID
  _command[_index++] = this.requestId & 0xff;
  _command[_index++] = (this.requestId >> 8) & 0xff;
  _command[_index++] = (this.requestId >> 16) & 0xff;
  _command[_index++] = (this.requestId >> 24) & 0xff;

  // Write zero (responseTo, unused for requests)
  _command[_index++] = 0;
  _command[_index++] = 0;
  _command[_index++] = 0;
  _command[_index++] = 0;

  // Write the op_code for the command
  _command[_index++] = GetMoreCommand.OP_GET_MORE & 0xff;
  _command[_index++] = (GetMoreCommand.OP_GET_MORE >> 8) & 0xff;
  _command[_index++] = (GetMoreCommand.OP_GET_MORE >> 16) & 0xff;
  _command[_index++] = (GetMoreCommand.OP_GET_MORE >> 24) & 0xff;

  // Write zero (reserved int32)
  _command[_index++] = 0;
  _command[_index++] = 0;
  _command[_index++] = 0;
  _command[_index++] = 0;

  // Write the collection name to the command (null-terminated cstring)
  _index = _index + _command.write(this.collectionName, _index, 'utf8') + 1;
  _command[_index - 1] = 0;

  // Number of documents to return
  _command[_index++] = this.numberToReturn & 0xff;
  _command[_index++] = (this.numberToReturn >> 8) & 0xff;
  _command[_index++] = (this.numberToReturn >> 16) & 0xff;
  _command[_index++] = (this.numberToReturn >> 24) & 0xff;

  // Encode the cursor id (64-bit, low word first)
  var low_bits = this.cursorId.getLowBits();
  // Encode low bits
  _command[_index++] = low_bits & 0xff;
  _command[_index++] = (low_bits >> 8) & 0xff;
  _command[_index++] = (low_bits >> 16) & 0xff;
  _command[_index++] = (low_bits >> 24) & 0xff;

  var high_bits = this.cursorId.getHighBits();
  // Encode high bits
  _command[_index++] = high_bits & 0xff;
  _command[_index++] = (high_bits >> 8) & 0xff;
  _command[_index++] = (high_bits >> 16) & 0xff;
  _command[_index++] = (high_bits >> 24) & 0xff;
  // Return command
  return _command;
};
--------------------------------------------------------------------------------
/lib/mongodb/commands/kill_cursor_command.js:
--------------------------------------------------------------------------------
1 | var BaseCommand = require('./base_command').BaseCommand,
2 | inherits = require('util').inherits,
3 | binaryutils = require('../utils');
4 |
5 | /**
6 | Insert Document Command
7 | **/
8 | var KillCursorCommand = exports.KillCursorCommand = function(db, cursorIds) {
9 | BaseCommand.call(this);
10 |
11 | this.cursorIds = cursorIds;
12 | this.db = db;
13 | };
14 |
15 | inherits(KillCursorCommand, BaseCommand);
16 |
17 | KillCursorCommand.OP_KILL_CURSORS = 2007;
18 |
19 | /*
20 | struct {
21 | MsgHeader header; // standard message header
22 | int32 ZERO; // 0 - reserved for future use
23 | int32 numberOfCursorIDs; // number of cursorIDs in message
24 | int64[] cursorIDs; // array of cursorIDs to close
25 | }
26 | */
27 | KillCursorCommand.prototype.toBinary = function() {
28 | // Calculate total length of the document
29 | var totalLengthOfCommand = 4 + 4 + (4 * 4) + (this.cursorIds.length * 8);
30 | // Let's build the single pass buffer command
31 | var _index = 0;
32 | var _command = new Buffer(totalLengthOfCommand);
33 | // Write the header information to the buffer
34 | _command[_index + 3] = (totalLengthOfCommand >> 24) & 0xff;
35 | _command[_index + 2] = (totalLengthOfCommand >> 16) & 0xff;
36 | _command[_index + 1] = (totalLengthOfCommand >> 8) & 0xff;
37 | _command[_index] = totalLengthOfCommand & 0xff;
38 | // Adjust index
39 | _index = _index + 4;
40 | // Write the request ID
41 | _command[_index + 3] = (this.requestId >> 24) & 0xff;
42 | _command[_index + 2] = (this.requestId >> 16) & 0xff;
43 | _command[_index + 1] = (this.requestId >> 8) & 0xff;
44 | _command[_index] = this.requestId & 0xff;
45 | // Adjust index
46 | _index = _index + 4;
47 | // Write zero
48 | _command[_index++] = 0;
49 | _command[_index++] = 0;
50 | _command[_index++] = 0;
51 | _command[_index++] = 0;
52 | // Write the op_code for the command
53 | _command[_index + 3] = (KillCursorCommand.OP_KILL_CURSORS >> 24) & 0xff;
54 | _command[_index + 2] = (KillCursorCommand.OP_KILL_CURSORS >> 16) & 0xff;
55 | _command[_index + 1] = (KillCursorCommand.OP_KILL_CURSORS >> 8) & 0xff;
56 | _command[_index] = KillCursorCommand.OP_KILL_CURSORS & 0xff;
57 | // Adjust index
58 | _index = _index + 4;
59 |
60 | // Write zero
61 | _command[_index++] = 0;
62 | _command[_index++] = 0;
63 | _command[_index++] = 0;
64 | _command[_index++] = 0;
65 |
66 | // Number of cursors to kill
67 | var numberOfCursors = this.cursorIds.length;
68 | _command[_index + 3] = (numberOfCursors >> 24) & 0xff;
69 | _command[_index + 2] = (numberOfCursors >> 16) & 0xff;
70 | _command[_index + 1] = (numberOfCursors >> 8) & 0xff;
71 | _command[_index] = numberOfCursors & 0xff;
72 | // Adjust index
73 | _index = _index + 4;
74 |
75 | // Encode all the cursors
76 | for(var i = 0; i < this.cursorIds.length; i++) {
77 | // Encode the cursor id
78 | var low_bits = this.cursorIds[i].getLowBits();
79 | // Encode low bits
80 | _command[_index + 3] = (low_bits >> 24) & 0xff;
81 | _command[_index + 2] = (low_bits >> 16) & 0xff;
82 | _command[_index + 1] = (low_bits >> 8) & 0xff;
83 | _command[_index] = low_bits & 0xff;
84 | // Adjust index
85 | _index = _index + 4;
86 |
87 | var high_bits = this.cursorIds[i].getHighBits();
88 | // Encode high bits
89 | _command[_index + 3] = (high_bits >> 24) & 0xff;
90 | _command[_index + 2] = (high_bits >> 16) & 0xff;
91 | _command[_index + 1] = (high_bits >> 8) & 0xff;
92 | _command[_index] = high_bits & 0xff;
93 | // Adjust index
94 | _index = _index + 4;
95 | }
96 |
97 | return _command;
98 | };
--------------------------------------------------------------------------------
/lib/mongodb/connection/connection_utils.js:
--------------------------------------------------------------------------------
1 | exports.setIntegerParameter = function(object, field, defaultValue) {
2 | if(object[field] == null) {
3 | object[field] = defaultValue;
4 | } else if(typeof object[field] !== "number" && object[field] !== parseInt(object[field], 10)) {
5 | throw "object field [" + field + "] must be a numeric integer value, attempted to set to [" + object[field] + "] type of [" + typeof object[field] + "]";
6 | }
7 | }
8 |
9 | exports.setBooleanParameter = function(object, field, defaultValue) {
10 | if(object[field] == null) {
11 | object[field] = defaultValue;
12 | } else if(typeof object[field] !== "boolean") {
13 | throw "object field [" + field + "] must be a boolean value, attempted to set to [" + object[field] + "] type of [" + typeof object[field] + "]";
14 | }
15 | }
16 |
17 | exports.setStringParameter = function(object, field, defaultValue) {
18 | if(object[field] == null) {
19 | object[field] = defaultValue;
20 | } else if(typeof object[field] !== "string") {
21 | throw "object field [" + field + "] must be a string value, attempted to set to [" + object[field] + "] type of [" + typeof object[field] + "]";
22 | }
23 | }
--------------------------------------------------------------------------------
/lib/mongodb/connection/strategies/ping_strategy.js:
--------------------------------------------------------------------------------
1 | var Server = require("../server").Server;
2 |
// The ping strategy pings each server and records the
// elapsed time for the server so it can pick a server based on lowest
// return time for the db command {ping:true}
var PingStrategy = exports.PingStrategy = function(replicaset) {
  this.replicaset = replicaset;
  this.state = 'disconnected';
  // Class instance -- Db is required lazily here, presumably to avoid a
  // circular dependency with db.js (verify)
  this.Db = require("../../db").Db;
}
12 |
// Starts any needed code
PingStrategy.prototype.start = function(callback) {
  // Mark as connected so the ping loop keeps rescheduling itself
  this.state = 'connected';
  // Start ping server
  this._pingServer(callback);
}
19 |
// Stops and kills any processes running
PingStrategy.prototype.stop = function(callback) {
  // Stop the ping process; the next scheduled pingFunction sees this and exits
  this.state = 'disconnected';
  // Call the callback
  callback(null, null);
}
27 |
/**
 * Picks the secondary with the lowest recorded ping time and checks out a
 * reader connection from it. A secondary with no recorded ping time yet is
 * preferred immediately (its time is initialized to 0 ms). Returns null
 * when no secondary is available.
 */
PingStrategy.prototype.checkoutSecondary = function() {
  var secondaries = this.replicaset._state.secondaries;
  var serverKeys = Object.keys(secondaries);
  // Lowest ping time seen so far and the server that produced it
  var bestPingMs = null;
  var pickedServer = null;

  for(var i = 0; i < serverKeys.length; i++) {
    var candidate = secondaries[serverKeys[i]];
    if(candidate.runtimeStats['pingMs'] == null) {
      // No measurement yet: seed with 0 ms and take this server outright
      candidate.runtimeStats['pingMs'] = 0;
      pickedServer = candidate;
      break;
    }

    if(bestPingMs == null || candidate.runtimeStats['pingMs'] < bestPingMs) {
      bestPingMs = candidate.runtimeStats['pingMs'];
      pickedServer = candidate;
    }
  }

  return pickedServer != null ? pickedServer.checkoutReader() : null;
}
57 |
/**
 * Schedules a recurring ping of every known replica-set member. Each pass
 * opens a fresh single-connection Db per server, issues {ping:1}, records
 * the round-trip time in the server's runtimeStats, then reschedules itself
 * once all servers have reported back (success or error), as long as the
 * strategy is still 'connected'. The callback fires immediately after the
 * first pass is scheduled, not after it completes.
 */
PingStrategy.prototype._pingServer = function(callback) {
  var self = this;

  // Ping server function
  var pingFunction = function() {
    if(self.state == 'disconnected') return;
    var addresses = self.replicaset._state != null && self.replicaset._state.addresses != null ? self.replicaset._state.addresses : null;
    // Grab all servers
    // NOTE(review): addresses can be null here, making Object.keys throw --
    // confirm _state.addresses is always set once the set is connected
    var serverKeys = Object.keys(addresses);
    // Number of server entries
    var numberOfEntries = serverKeys.length;
    // We got keys
    for(var i = 0; i < serverKeys.length; i++) {
      // We got a server instance
      var server = addresses[serverKeys[i]];
      // Create a new server object, avoid using internal connections as they might
      // be in an illegal state
      // (the `new function(...)(server)` construct is an immediately-invoked
      // function capturing `server` per loop iteration)
      new function(serverInstance) {
        var server = new Server(serverInstance.host, serverInstance.port, {poolSize:1, timeout:500});
        var db = new self.Db(self.replicaset.db.databaseName, server);
        // Add error listener
        db.on("error", function(err) {
          // Adjust the number of checks
          numberOfEntries = numberOfEntries - 1;
          // Close connection
          db.close();
          // If we are done with all results coming back trigger ping again
          if(numberOfEntries == 0 && self.state == 'connected') {
            setTimeout(pingFunction, 1000);
          }
        })

        // Open the db instance
        db.open(function(err, p_db) {
          if(err != null) {
            // Open failed; the "error" listener above handles the bookkeeping
            db.close();
          } else {
            // Startup time of the command
            var startTime = new Date().getTime();
            // Execute ping on this connection
            p_db.executeDbCommand({ping:1}, function(err, result) {
              // Adjust the number of checks
              numberOfEntries = numberOfEntries - 1;
              // Get end time of the command
              var endTime = new Date().getTime();
              // Store the ping time in the server instance state variable, if there is one
              if(serverInstance != null && serverInstance.runtimeStats != null && serverInstance.isConnected()) {
                serverInstance.runtimeStats['pingMs'] = (endTime - startTime);
              }

              // Close server
              p_db.close();
              // If we are done with all results coming back trigger ping again
              if(numberOfEntries == 0 && self.state == 'connected') {
                setTimeout(pingFunction, 1000);
              }
            })
          }
        })
      }(server);
    }
  }

  // Start pingFunction
  setTimeout(pingFunction, 1000);
  // Do the callback
  callback(null);
}
126 |
--------------------------------------------------------------------------------
/lib/mongodb/connection/strategies/statistics_strategy.js:
--------------------------------------------------------------------------------
// The Statistics strategy uses per-query end-start timing measurements to
// derive mean, variance and standard deviation per server, and selects the
// secondary with the lowest combined score (sScore).
var StatisticsStrategy = exports.StatisticsStrategy = function(replicaset) {
  // Replica set whose secondaries this strategy chooses among
  this.replicaset = replicaset;
}

// Nothing to initialize; report success immediately.
StatisticsStrategy.prototype.start = function(callback) {
  callback(null, null);
}

// Drop the replica set reference and report completion.
StatisticsStrategy.prototype.stop = function(callback) {
  this.replicaset = null;
  callback(null, null);
}

// Selects the secondary with the lowest sScore and checks out a reader
// connection from it; returns null when no secondary is available.
StatisticsStrategy.prototype.checkoutSecondary = function() {
  var secondaries = this.replicaset._state.secondaries;
  var serverKeys = Object.keys(secondaries);
  // Lowest score seen so far and the server that produced it
  var lowestScore = null;
  var pickedServer = null;

  for(var i = 0; i < serverKeys.length; i++) {
    var candidate = secondaries[serverKeys[i]];
    if(lowestScore == null || candidate.queryStats.sScore < lowestScore) {
      lowestScore = candidate.queryStats.sScore;
      pickedServer = candidate;
    }
  }

  return pickedServer != null ? pickedServer.checkoutReader() : null;
}
41 |
--------------------------------------------------------------------------------
/lib/mongodb/cursorstream.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Module dependecies.
3 | */
4 | var Stream = require('stream').Stream;
5 |
6 | /**
7 | * CursorStream
8 | *
9 | * Returns a stream interface for the **cursor**.
10 | *
11 | * Events
12 | * - **data** {function(item) {}} the data event triggers when a document is ready.
13 | * - **error** {function(err) {}} the error event triggers if an error happens.
14 | * - **end** {function() {}} the end event triggers when there is no more documents available.
15 | *
16 | * @class Represents a CursorStream.
17 | * @param {Cursor} cursor a cursor object that the stream wraps.
18 | * @return {Stream}
19 | */
20 | function CursorStream(cursor) {
21 | if(!(this instanceof CursorStream)) return new CursorStream(cursor);
22 |
23 | Stream.call(this);
24 |
25 | this.readable = true;
26 | this.paused = false;
27 | this._cursor = cursor;
28 | this._destroyed = null;
29 |
30 | // give time to hook up events
31 | var self = this;
32 | process.nextTick(function () {
33 | self._init();
34 | });
35 | }
36 |
37 | /**
38 | * Inherit from Stream
39 | * @ignore
40 | * @api private
41 | */
42 | CursorStream.prototype.__proto__ = Stream.prototype;
43 |
44 | /**
45 | * Flag stating whether or not this stream is readable.
46 | */
47 | CursorStream.prototype.readable;
48 |
49 | /**
50 | * Flag stating whether or not this stream is paused.
51 | */
52 | CursorStream.prototype.paused;
53 |
54 | /**
55 | * Initialize the cursor.
56 | * @ignore
57 | * @api private
58 | */
59 | CursorStream.prototype._init = function () {
60 | if (this._destroyed) return;
61 | this._next();
62 | }
63 |
64 | /**
65 | * Pull the next document from the cursor.
66 | * @ignore
67 | * @api private
68 | */
69 | CursorStream.prototype._next = function () {
70 | if (this.paused || this._destroyed) return;
71 |
72 | var self = this;
73 |
74 | // nextTick is necessary to avoid stack overflows when
75 | // dealing with large result sets.
76 | process.nextTick(function () {
77 | self._cursor.nextObject(function (err, doc) {
78 | self._onNextObject(err, doc);
79 | });
80 | });
81 | }
82 |
83 | /**
84 | * Handle each document as its returned from the cursor.
85 | * @ignore
86 | * @api private
87 | */
88 | CursorStream.prototype._onNextObject = function (err, doc) {
89 | if (err) return this.destroy(err);
90 |
91 | // when doc is null we hit the end of the cursor
92 | if (!doc) return this.destroy();
93 |
94 | this.emit('data', doc);
95 | this._next();
96 | }
97 |
98 | /**
99 | * Pauses the stream.
100 | *
101 | * @api public
102 | */
103 | CursorStream.prototype.pause = function () {
104 | this.paused = true;
105 | }
106 |
107 | /**
108 | * Resumes the stream.
109 | *
110 | * @api public
111 | */
112 | CursorStream.prototype.resume = function () {
113 | this.paused = false;
114 | this._next();
115 | }
116 |
117 | /**
118 | * Destroys the stream, closing the underlying
119 | * cursor. No more events will be emitted.
120 | *
121 | * @api public
122 | */
123 | CursorStream.prototype.destroy = function (err) {
124 | if (this._destroyed) return;
125 | this._destroyed = true;
126 | this.readable = false;
127 |
128 | this._cursor.close();
129 |
130 | if (err) {
131 | this.emit('error', err);
132 | }
133 |
134 | this.emit('close');
135 | }
136 |
137 | // TODO - maybe implement the raw option to pass binary?
138 | //CursorStream.prototype.setEncoding = function () {
139 | //}
140 |
141 | module.exports = exports = CursorStream;
142 |
--------------------------------------------------------------------------------
/lib/mongodb/gridfs/grid.js:
--------------------------------------------------------------------------------
1 | var GridStore = require('./gridstore').GridStore,
2 | ObjectID = require('bson').ObjectID;
3 |
4 | /**
5 | * A class representation of a simple Grid interface.
6 | *
7 | * @class Represents the Grid.
8 | * @param {Db} db A database instance to interact with.
9 | * @param {String} [fsName] optional different root collection for GridFS.
10 | * @return {Grid}
11 | */
12 | function Grid(db, fsName) {
13 |
14 | if(!(this instanceof Grid)) return new Grid(db, fsName);
15 |
16 | this.db = db;
17 | this.fsName = fsName == null ? GridStore.DEFAULT_ROOT_COLLECTION : fsName;
18 | }
19 |
20 | /**
21 | * Puts binary data to the grid
22 | *
23 | * @param {Buffer} data buffer with Binary Data.
24 | * @param {Object} [options] the options for the files.
25 | * @callback {Function} this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object.
26 | * @return {null}
27 | * @api public
28 | */
29 | Grid.prototype.put = function(data, options, callback) {
30 | var self = this;
31 | var args = Array.prototype.slice.call(arguments, 1);
32 | callback = args.pop();
33 | options = args.length ? args.shift() : {};
34 | // If root is not defined add our default one
35 | options['root'] = options['root'] == null ? this.fsName : options['root'];
36 |
37 | // Return if we don't have a buffer object as data
38 | if(!(Buffer.isBuffer(data))) return callback(new Error("Data object must be a buffer object"), null);
39 | // Get filename if we are using it
40 | var filename = options['filename'];
41 | // Create gridstore
42 | var gridStore = new GridStore(this.db, filename, "w", options);
43 | gridStore.open(function(err, gridStore) {
44 | if(err) return callback(err, null);
45 |
46 | gridStore.write(data, function(err, result) {
47 | if(err) return callback(err, null);
48 |
49 | gridStore.close(function(err, result) {
50 | if(err) return callback(err, null);
51 | callback(null, result);
52 | })
53 | })
54 | })
55 | }
56 |
57 | /**
58 | * Get binary data to the grid
59 | *
60 | * @param {ObjectID} id ObjectID for file.
61 | * @callback {Function} this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object.
62 | * @return {null}
63 | * @api public
64 | */
65 | Grid.prototype.get = function(id, callback) {
66 | // Validate that we have a valid ObjectId
67 | if(!(id instanceof ObjectID)) return callback(new Error("Not a valid ObjectID", null));
68 | // Create gridstore
69 | var gridStore = new GridStore(this.db, id, "r", {root:this.fsName});
70 | gridStore.open(function(err, gridStore) {
71 | if(err) return callback(err, null);
72 |
73 | // Return the data
74 | gridStore.read(function(err, data) {
75 | return callback(err, data)
76 | });
77 | })
78 | }
79 |
80 | /**
81 | * Delete file from grid
82 | *
83 | * @param {ObjectID} id ObjectID for file.
84 | * @callback {Function} this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object.
85 | * @return {null}
86 | * @api public
87 | */
88 | Grid.prototype.delete = function(id, callback) {
89 | // Validate that we have a valid ObjectId
90 | if(!(id instanceof ObjectID)) return callback(new Error("Not a valid ObjectID", null));
91 | // Create gridstore
92 | GridStore.unlink(this.db, id, {root:this.fsName}, function(err, result) {
93 | if(err) return callback(err, false);
94 | return callback(null, true);
95 | });
96 | }
97 |
98 | exports.Grid = Grid;
99 |
--------------------------------------------------------------------------------
/lib/mongodb/gridfs/readstream.js:
--------------------------------------------------------------------------------
1 | var Stream = require('stream').Stream,
2 | util = require('util');
3 |
4 | /**
5 | * ReadStream
6 | *
7 | * Returns a stream interface for the **file**.
8 | *
9 | * Events
10 | * - **data** {function(item) {}} the data event triggers when a document is ready.
11 | * - **end** {function() {}} the end event triggers when there is no more documents available.
12 | * - **close** {function() {}} the close event triggers when the stream is closed.
13 | * - **error** {function(err) {}} the error event triggers if an error happens.
14 | *
15 | * @class Represents a GridFS File Stream.
16 | * @param {Boolean} autoclose automatically close file when the stream reaches the end.
17 | * @param {GridStore} cursor a cursor object that the stream wraps.
18 | * @return {ReadStream}
19 | */
20 | function ReadStream(autoclose, gstore) {
21 | if (!(this instanceof ReadStream)) return new ReadStream(autoclose, gstore);
22 | Stream.call(this);
23 |
24 | this.autoclose = !!autoclose;
25 | this.gstore = gstore;
26 |
27 | this.finalLength = gstore.length - gstore.position;
28 | this.completedLength = 0;
29 |
30 | this.paused = false;
31 | this.readable = true;
32 | this.pendingChunk = null;
33 | this.executing = false;
34 |
35 | var self = this;
36 | process.nextTick(function() {
37 | self._execute();
38 | });
39 | };
40 |
41 | /**
42 | * Inherit from Stream
43 | * @ignore
44 | * @api private
45 | */
46 | ReadStream.prototype.__proto__ = Stream.prototype;
47 |
48 | /**
49 | * Flag stating whether or not this stream is readable.
50 | */
51 | ReadStream.prototype.readable;
52 |
53 | /**
54 | * Flag stating whether or not this stream is paused.
55 | */
56 | ReadStream.prototype.paused;
57 |
58 | /**
59 | * @ignore
60 | * @api private
61 | */
62 | ReadStream.prototype._execute = function() {
63 | if(this.paused === true || this.readable === false) {
64 | return;
65 | }
66 |
67 | var gstore = this.gstore;
68 | var self = this;
69 | // Set that we are executing
70 | this.executing = true;
71 |
72 | var last = false;
73 | var toRead = 0;
74 |
75 | if ((gstore.currentChunk.length() - gstore.currentChunk.position + 1 + self.completedLength) >= self.finalLength) {
76 | toRead = self.finalLength - self.completedLength;
77 | self.executing = false;
78 | last = true;
79 | } else {
80 | toRead = gstore.currentChunk.length();
81 | }
82 |
83 | var data = gstore.currentChunk.readSlice(toRead);
84 |
85 | if(data != null) {
86 | self.completedLength += data.length;
87 | self.pendingChunk = null;
88 | self.emit("data", data);
89 | }
90 |
91 | if(last === true) {
92 | self.readable = false;
93 | self.emit("end");
94 |
95 | if(self.autoclose === true) {
96 | if(gstore.mode[0] == "w") {
97 | gstore.close(function(err, doc) {
98 | if (err) {
99 | self.emit("error", err);
100 | return;
101 | }
102 | self.readable = false;
103 | self.emit("close", doc);
104 | });
105 | } else {
106 | self.readable = false;
107 | self.emit("close");
108 | }
109 | }
110 | } else {
111 | gstore._nthChunk(gstore.currentChunk.chunkNumber + 1, function(err, chunk) {
112 | if(err) {
113 | self.readable = false;
114 | self.emit("error", err);
115 | self.executing = false;
116 | return;
117 | }
118 |
119 | self.pendingChunk = chunk;
120 | if(self.paused === true) {
121 | self.executing = false;
122 | return;
123 | }
124 |
125 | gstore.currentChunk = self.pendingChunk;
126 | self._execute();
127 | });
128 | }
129 | };
130 |
131 | /**
132 | * Pauses this stream, then no farther events will be fired.
133 | *
134 | * @ignore
135 | * @api public
136 | */
137 | ReadStream.prototype.pause = function() {
138 | if(!this.executing) {
139 | this.paused = true;
140 | }
141 | };
142 |
143 | /**
144 | * Destroys the stream, then no farther events will be fired.
145 | *
146 | * @ignore
147 | * @api public
148 | */
149 | ReadStream.prototype.destroy = function() {
150 | this.readable = false;
151 | // Emit close event
152 | this.emit("close");
153 | };
154 |
155 | /**
156 | * Resumes this stream.
157 | *
158 | * @ignore
159 | * @api public
160 | */
161 | ReadStream.prototype.resume = function() {
162 | if(this.paused === false || !this.readable) {
163 | return;
164 | }
165 |
166 | this.paused = false;
167 | var self = this;
168 | if(self.pendingChunk != null) {
169 | self.currentChunk = self.pendingChunk;
170 | process.nextTick(function() {
171 | self._execute();
172 | });
173 | } else {
174 | self.readable = false;
175 | self.emit("close");
176 | }
177 | };
178 |
179 | exports.ReadStream = ReadStream;
180 |
--------------------------------------------------------------------------------
/lib/mongodb/index.js:
--------------------------------------------------------------------------------
// Try to expose both BSON parser flavours up front. The native (C++)
// parser is an optional compiled addon; when it is unavailable the
// require throws and we deliberately continue without these exports.
try {
  exports.BSONPure = require('bson').BSONPure;
  exports.BSONNative = require('bson').BSONNative;
} catch(err) {
  // do nothing
}
7 |
// Flatten the exports of every driver module onto this module's exports.
[ 'commands/base_command'
, 'commands/db_command'
, 'commands/delete_command'
, 'commands/get_more_command'
, 'commands/insert_command'
, 'commands/kill_cursor_command'
, 'commands/query_command'
, 'commands/update_command'
, 'responses/mongo_reply'
, 'admin'
, 'collection'
, 'connection/connection'
, 'connection/server'
, 'connection/repl_set'
, 'cursor'
, 'db'
, 'gridfs/grid'
, 'gridfs/chunk'
, 'gridfs/gridstore'].forEach(function (path) {
  var module = require('./' + path);
  for (var i in module) {
    exports[i] = module[i];
  }
});

// The assignments below previously lived inside the forEach callback and
// therefore re-ran once per module; they are hoisted out so each executes
// exactly once, after all driver modules have been loaded. The resulting
// exports are identical.

// backwards compat
exports.ReplSetServers = exports.ReplSet;

// Add BSON Classes
exports.Binary = require('bson').Binary;
exports.Code = require('bson').Code;
exports.DBRef = require('bson').DBRef;
exports.Double = require('bson').Double;
exports.Long = require('bson').Long;
exports.MinKey = require('bson').MinKey;
exports.MaxKey = require('bson').MaxKey;
exports.ObjectID = require('bson').ObjectID;
exports.Symbol = require('bson').Symbol;
exports.Timestamp = require('bson').Timestamp;

// Add BSON Parser
exports.BSON = require('bson').BSONPure.BSON;
50 |
51 | // Exports all the classes for the PURE JS BSON Parser
52 | exports.pure = function() {
53 | var classes = {};
54 | // Map all the classes
55 | [ 'commands/base_command'
56 | , 'commands/db_command'
57 | , 'commands/delete_command'
58 | , 'commands/get_more_command'
59 | , 'commands/insert_command'
60 | , 'commands/kill_cursor_command'
61 | , 'commands/query_command'
62 | , 'commands/update_command'
63 | , 'responses/mongo_reply'
64 | , 'admin'
65 | , 'collection'
66 | , 'connection/connection'
67 | , 'connection/server'
68 | , 'connection/repl_set'
69 | , 'cursor'
70 | , 'db'
71 | , 'gridfs/grid'
72 | , 'gridfs/chunk'
73 | , 'gridfs/gridstore'].forEach(function (path) {
74 | var module = require('./' + path);
75 | for (var i in module) {
76 | classes[i] = module[i];
77 | }
78 | });
79 |
80 | // backwards compat
81 | classes.ReplSetServers = exports.ReplSet;
82 |
83 | // Add BSON Classes
84 | classes.Binary = require('bson').Binary;
85 | classes.Code = require('bson').Code;
86 | classes.DBRef = require('bson').DBRef;
87 | classes.Double = require('bson').Double;
88 | classes.Long = require('bson').Long;
89 | classes.MinKey = require('bson').MinKey;
90 | classes.MaxKey = require('bson').MaxKey;
91 | classes.ObjectID = require('bson').ObjectID;
92 | classes.Symbol = require('bson').Symbol;
93 | classes.Timestamp = require('bson').Timestamp;
94 |
95 | // Add BSON Parser
96 | classes.BSON = require('bson').BSONPure.BSON;
97 |
98 | // Return classes list
99 | return classes;
100 | }
101 |
// Exports all the classes wired to the NATIVE (C++) BSON parser.
// NOTE(review): the final line dereferences BSONNative, which is only
// present when the optional native addon compiled -- confirm that throwing
// when it is missing is the intended behavior.
exports.native = function() {
  var classes = {};
  // Map all the classes
  [ 'commands/base_command'
    , 'commands/db_command'
    , 'commands/delete_command'
    , 'commands/get_more_command'
    , 'commands/insert_command'
    , 'commands/kill_cursor_command'
    , 'commands/query_command'
    , 'commands/update_command'
    , 'responses/mongo_reply'
    , 'admin'
    , 'collection'
    , 'connection/connection'
    , 'connection/server'
    , 'connection/repl_set'
    , 'cursor'
    , 'db'
    , 'gridfs/grid'
    , 'gridfs/chunk'
    , 'gridfs/gridstore'].forEach(function (path) {
      var module = require('./' + path);
      for (var i in module) {
        classes[i] = module[i];
      }
  });

  // Add BSON Classes
  classes.Binary = require('bson').Binary;
  classes.Code = require('bson').Code;
  classes.DBRef = require('bson').DBRef;
  classes.Double = require('bson').Double;
  classes.Long = require('bson').Long;
  classes.MinKey = require('bson').MinKey;
  classes.MaxKey = require('bson').MaxKey;
  classes.ObjectID = require('bson').ObjectID;
  classes.Symbol = require('bson').Symbol;
  classes.Timestamp = require('bson').Timestamp;

  // backwards compat
  classes.ReplSetServers = exports.ReplSet;

  // Add BSON Parser
  classes.BSON = require('bson').BSONNative.BSON;

  // Return classes list
  return classes;
}
152 |
--------------------------------------------------------------------------------
/lib/mongodb/utils.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Sort functions, Normalize and prepare sort parameters
3 | */
4 | var formatSortValue = exports.formatSortValue = function(sortDirection) {
5 | var value = ("" + sortDirection).toLowerCase();
6 |
7 | switch (value) {
8 | case 'ascending':
9 | case 'asc':
10 | case '1':
11 | return 1;
12 | case 'descending':
13 | case 'desc':
14 | case '-1':
15 | return -1;
16 | default:
17 | throw new Error("Illegal sort clause, must be of the form "
18 | + "[['field1', '(ascending|descending)'], "
19 | + "['field2', '(ascending|descending)']]");
20 | }
21 | };
22 |
/**
 * Build an orderBy document from any accepted sort specification: an array
 * of field names and/or [field, direction] pairs, a plain
 * { field: direction } object (used as-is), or a single field-name string
 * (which sorts ascending). Throws on anything else.
 */
var formattedOrderClause = exports.formattedOrderClause = function(sortValue) {
  var orderBy = {};

  if(Array.isArray(sortValue)) {
    // Entries are either bare field names or [field, direction] pairs.
    for(var idx = 0; idx < sortValue.length; idx++) {
      var entry = sortValue[idx];
      if(entry.constructor == String) {
        orderBy[entry] = 1;
      } else {
        orderBy[entry[0]] = formatSortValue(entry[1]);
      }
    }
  } else if(Object.prototype.toString.call(sortValue) === '[object Object]') {
    orderBy = sortValue;
  } else if(sortValue.constructor == String) {
    orderBy[sortValue] = 1;
  } else {
    throw new Error("Illegal sort clause, must be of the form " +
      "[['field1', '(ascending|descending)'], ['field2', '(ascending|descending)']]");
  }

  return orderBy;
};
45 |
46 | exports.encodeInt = function(value) {
47 | var buffer = new Buffer(4);
48 | buffer[3] = (value >> 24) & 0xff;
49 | buffer[2] = (value >> 16) & 0xff;
50 | buffer[1] = (value >> 8) & 0xff;
51 | buffer[0] = value & 0xff;
52 | return buffer;
53 | }
54 |
55 | exports.encodeIntInPlace = function(value, buffer, index) {
56 | buffer[index + 3] = (value >> 24) & 0xff;
57 | buffer[index + 2] = (value >> 16) & 0xff;
58 | buffer[index + 1] = (value >> 8) & 0xff;
59 | buffer[index] = value & 0xff;
60 | }
61 |
62 | exports.encodeCString = function(string) {
63 | var buf = new Buffer(string, 'utf8');
64 | return [buf, new Buffer([0])];
65 | }
66 |
67 | exports.decodeUInt32 = function(array, index) {
68 | return array[index] | array[index + 1] << 8 | array[index + 2] << 16 | array[index + 3] << 24;
69 | }
70 |
71 | // Decode the int
72 | exports.decodeUInt8 = function(array, index) {
73 | return array[index];
74 | }
75 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | { "name" : "mongodb"
2 | , "description" : "A node.js driver for MongoDB"
3 | , "keywords" : ["mongodb", "mongo", "driver", "db"]
4 | , "version" : "0.9.9-8"
, "author" : "Christian Amor Kvalheim"
6 | , "contributors" : [ "Aaron Heckmann",
7 | "Christoph Pojer",
8 | "Pau Ramon Revilla",
9 | "Nathan White",
10 | "Emmerman",
11 | "Seth LaForge",
12 | "Boris Filipov",
13 | "Stefan Schärmeli",
14 | "Tedde Lundgren",
15 | "renctan",
16 | "Sergey Ukustov",
17 | "Ciaran Jessup",
18 | "kuno",
19 | "srimonti",
20 | "Erik Abele",
21 | "Pratik Daga",
22 | "Slobodan Utvic",
23 | "Kristina Chodorow",
24 | "Yonathan Randolph",
25 | "Brian Noguchi",
26 | "Sam Epstein",
27 | "James Harrison Fisher",
28 | "Vladimir Dronnikov",
29 | "Ben Hockey",
30 | "Henrik Johansson",
31 | "Simon Weare",
32 | "Alex Gorbatchev",
33 | "Shimon Doodkin",
34 | "Kyle Mueller",
35 | "Eran Hammer-Lahav",
36 | "Marcin Ciszak",
37 | "François de Metz",
38 | "Vinay Pulim",
39 | "nstielau",
40 | "Adam Wiggins",
41 | "entrinzikyl",
42 | "Jeremy Selier",
43 | "Ian Millington",
44 | "Public Keating",
45 | "andrewjstone",
46 | "Christopher Stott",
47 | "Corey Jewett",
48 | "brettkiefer",
49 | "Rob Holland",
50 | "Senmiao Liu",
51 | "heroic",
52 | "gitfy",
53 | "Andrew Stone",
54 | "John Le Drew"]
55 |
56 | , "repository" : { "type" : "git"
57 | , "url" : "http://github.com/christkv/node-mongodb-native.git" }
58 | , "bugs" : { "mail" : "node-mongodb-native@googlegroups.com"
59 | , "url" : "http://github.com/christkv/node-mongodb-native/issues" }
60 | , "dependencies" : {
61 | "bson": "0.0.4"
62 | }
63 | , "devDependencies": {
64 | "dox": "0.2.0"
65 | , "uglify-js": "1.2.5"
66 | , "ejs": "0.6.1"
67 | , "nodeunit": "0.7.3"
68 | , "github3": ">=0.3.0"
69 | , "markdown": "0.3.1"
70 | , "gleak": "0.2.3"
71 | , "step": "0.0.5"
72 | }
73 | , "config": { "native" : false }
74 | , "main": "./lib/mongodb/index"
75 | , "directories" : { "lib" : "./lib/mongodb" }
76 | , "engines" : { "node" : ">=0.4.0" }
77 | , "scripts": { "test" : "make test_pure" }
78 | , "licenses" : [ { "type" : "Apache License, Version 2.0"
79 | , "url" : "http://www.apache.org/licenses/LICENSE-2.0" } ]
80 | }
81 |
--------------------------------------------------------------------------------
/test/auxilliary/repl_set_ssl_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
2 |
3 | var testCase = require('nodeunit').testCase,
4 | debug = require('util').debug,
5 | inspect = require('util').inspect,
6 | nodeunit = require('nodeunit'),
7 | gleak = require('../../dev/tools/gleak'),
8 | Db = mongodb.Db,
9 | Cursor = mongodb.Cursor,
10 | Collection = mongodb.Collection,
11 | Server = mongodb.Server,
12 | ReplSetServers = mongodb.ReplSetServers,
13 | ReplicaSetManager = require('../../test/tools/replica_set_manager').ReplicaSetManager,
14 | Step = require("step");
15 |
var MONGODB = 'integration_tests';
var serverManager = null;
// Replica set manager shared by setUp/tearDown below. The original
// `var RS = RS == null ? null : RS;` always evaluated to null, because the
// hoisted `RS` is undefined at that point -- initialize directly instead.
var RS = null;
var ssl = true;
20 |
21 | /**
22 | * Retrieve the server information for the current
23 | * instance of the db client
24 | *
25 | * @ignore
26 | */
27 | exports.setUp = function(callback) {
28 | RS = new ReplicaSetManager({retries:120,
29 | ssl:ssl,
30 | arbiter_count:1,
31 | secondary_count:1,
32 | passive_count:1});
33 | RS.startSet(true, function(err, result) {
34 | if(err != null) throw err;
35 | // Finish setup
36 | callback();
37 | });
38 | }
39 |
40 | /**
41 | * Retrieve the server information for the current
42 | * instance of the db client
43 | *
44 | * @ignore
45 | */
46 | exports.tearDown = function(callback) {
47 | RS.restartKilledNodes(function(err, result) {
48 | callback();
49 | });
50 | }
51 |
// Verifies a client can open a connection against the SSL replica set.
// NOTE(review): "Conncet" is a typo for "Connect" in the exported name;
// left as-is because nodeunit discovers tests by export name.
exports.shouldCorrectlyConncetToSSLBasedReplicaset = function(test) {
  // Seed the topology with two members of the running set.
  var replSet = new ReplSetServers( [
      new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
      new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
    ],
    {rs_name:RS.name, ssl:ssl}
  );

  // Connect to the replicaset
  var slaveDb = null;
  var db = new Db('foo', replSet, {native_parser: (process.env['TEST_NATIVE'] != null)});
  db.open(function(err, p_db) {
    test.equal(null, err);
    // NOTE(review): test.done() fires before the connection is closed.
    test.done();
    p_db.close();
  });
}
69 |
70 | /**
71 | * Retrieve the server information for the current
72 | * instance of the db client
73 | *
74 | * @ignore
75 | */
76 | exports.noGlobalsLeaked = function(test) {
77 | var leaks = gleak.detectNew();
78 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
79 | test.done();
80 | }
--------------------------------------------------------------------------------
/test/auxilliary/ssl_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
2 |
3 | var testCase = require('nodeunit').testCase,
4 | debug = require('util').debug,
5 | inspect = require('util').inspect,
6 | nodeunit = require('nodeunit'),
7 | gleak = require('../../dev/tools/gleak'),
8 | Db = mongodb.Db,
9 | Cursor = mongodb.Cursor,
10 | Collection = mongodb.Collection,
11 | Server = mongodb.Server,
12 | ServerManager = require('../../test/tools/server_manager').ServerManager,
13 | Step = require("step");
14 |
var MONGODB = 'integration_tests';
// Single mongod manager used by the SSL test below.
var serverManager = null;
var ssl = true;
18 |
19 | /**
20 | * Retrieve the server information for the current
21 | * instance of the db client
22 | *
23 | * @ignore
24 | */
25 | exports.setUp = function(callback) {
26 | callback();
27 | }
28 |
29 | /**
30 | * Retrieve the server information for the current
31 | * instance of the db client
32 | *
33 | * @ignore
34 | */
35 | exports.tearDown = function(callback) {
36 | callback();
37 | }
38 |
39 | exports.shouldCorrectlyCommunicateUsingSSLSocket = function(test) {
40 | var db1 = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: false, poolSize:4, ssl:ssl}), {native_parser: (process.env['TEST_NATIVE'] != null)});
41 | // All inserted docs
42 | var docs = [];
43 | var errs = [];
44 | var insertDocs = [];
45 |
46 | // Start server
47 | serverManager = new ServerManager({auth:false, purgedirectories:true, journal:true, ssl:ssl})
48 | serverManager.start(true, function() {
49 | db1.open(function(err, db) {
50 | // Create a collection
51 | db.createCollection('shouldCorrectlyCommunicateUsingSSLSocket', function(err, collection) {
52 | collection.insert([{a:1}, {b:2}, {c:'hello world'}]);
53 | collection.insert([{a:1}, {b:2}, {c:'hello world'}]);
54 | collection.insert([{a:1}, {b:2}, {c:'hello world'}]);
55 | collection.insert([{a:1}, {b:2}, {c:'hello world'}]);
56 | collection.insert([{a:1}, {b:2}, {c:'hello world'}], {safe:true}, function(err, result) {
57 | collection.find({}).toArray(function(err, items) {
58 | // test.equal(3, items.length);
59 | db.close();
60 | test.done();
61 | })
62 | });
63 | });
64 | })
65 | });
66 | }
67 |
68 | /**
69 | * Retrieve the server information for the current
70 | * instance of the db client
71 | *
72 | * @ignore
73 | */
74 | exports.noGlobalsLeaked = function(test) {
75 | var leaks = gleak.detectNew();
76 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
77 | test.done();
78 | }
--------------------------------------------------------------------------------
/test/certificates/mycert.pem:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | Proc-Type: 4,ENCRYPTED
3 | DEK-Info: DES-EDE3-CBC,5BC1D8935109F6C8
4 |
5 | cbr7PTUV8ySzjBsvtCLng+0m7aez0D/Q76JnsW265oLxwqqID9mgS3rIZUgbu2SQ
6 | +rfTTG+xcrONJs41Pbao1D1BNcUrmLF+8pl6055xFOPWrE1cxHQRShlhgG/pLVE3
7 | JqLFLV4Pd8Tf+o3FwbZ3zqgqwMPVZN/TLfzw94qcrXiidNvWuWf3oyU4w+CzD4Vt
8 | f9HYNOeZWCUtaGGM5koUU/qu/RYQdKXZTRPz9wCHTjSsrznE4BpAJgtBbaOpr850
9 | c3WP48aveK9NZ9aoR1c+BW6MN+HPN2HhwA9rQUBSwfwlVVxxY1Ir2ArbP7fStlvK
10 | TRtuE7Ro0ZEOUiHB5c9X7p6clKgshP7K19ZG6O0ns1P9d9z7l35f1WG/XQxA66tg
11 | h8haN8nOtPJfrAyn5NcmOS2kTA0kL6Lk2TWwqoVErvpCRgdyhQ94GxjMiHLvkfxx
12 | z5fVQqoXuYV8O6ozfdx+58qJnRTLC1cHf8iwWc9sDE/IP9OTpxwMUBKX4EYOL8MQ
13 | 4pjv0qnD/PQN4B5CbQ0RViBLykl22SScxqS3Zq14/sItEjM44ctjgAfmoPZSElTz
14 | n9zhc8VQzgyjuNRt02xAi+tx2RD5I44ylm7QTYnXdWVgftnSgY+Ir4npTK5bnxIB
15 | b9CLPljXbj8k5utoTyFkZa+bRES3a3+MEq5dNFRb0neQ3nJXqB83hMEDE35XWbU0
16 | gJwG7KsVS6Vr3SfBi47fsoIH1Ii92hZxWrtbTlzjy884zSps+mTWGA6TuU8jb6jn
17 | b2JyNJDhVqDk19DPP+TtMv+GgXuoj7EXenQbwRXO/NVyaWpyBU7dHA==
18 | -----END RSA PRIVATE KEY-----
19 | -----BEGIN CERTIFICATE-----
20 | MIIC4TCCAkqgAwIBAgIJAM0r5teNISyyMA0GCSqGSIb3DQEBBQUAMFUxCzAJBgNV
21 | BAYTAkVTMRIwEAYDVQQIEwlCYXJjZWxvbmExEjAQBgNVBAcTCUJhcmNlbG9uYTEO
22 | MAwGA1UEChMFMTBnZW4xDjAMBgNVBAMTBTEwZ2VuMB4XDTExMTIwMjEwMTcwM1oX
23 | DTEyMTIwMTEwMTcwM1owVTELMAkGA1UEBhMCRVMxEjAQBgNVBAgTCUJhcmNlbG9u
24 | YTESMBAGA1UEBxMJQmFyY2Vsb25hMQ4wDAYDVQQKEwUxMGdlbjEOMAwGA1UEAxMF
25 | MTBnZW4wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPBY5u20gJQAXw/m3hiY
26 | kC1zqDt4lahjUauPUDeshceAuBIgQLePZwckR3caPqS1a1uPsKMQIKN1E/z781MY
27 | iAdgRPwpQfKt1MRZRZdUVzOsGQdDtF1XEnIbQgdiahbXVqVeULLLuypWnw1ceAys
28 | ESu8kWqPAm1eu+utqJOENXWpAgMBAAGjgbgwgbUwHQYDVR0OBBYEFN1iayQf9puR
29 | 3OZxEy/FtFv6YSRoMIGFBgNVHSMEfjB8gBTdYmskH/abkdzmcRMvxbRb+mEkaKFZ
30 | pFcwVTELMAkGA1UEBhMCRVMxEjAQBgNVBAgTCUJhcmNlbG9uYTESMBAGA1UEBxMJ
31 | QmFyY2Vsb25hMQ4wDAYDVQQKEwUxMGdlbjEOMAwGA1UEAxMFMTBnZW6CCQDNK+bX
32 | jSEssjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAOvATDKZ95epzVjV
33 | qGnaHgu4u6NJuvZlGmBl2Mq/EA2ymw2LL+Pt38BqyQWvO/5TlxF1EFhH3kl6RENw
34 | 9U7C+w8tqhQL3hQsTASdX9gTczaYgHwHsKLc2JFcC7J2ffXM0aW5wUCKUwvvtf/h
35 | 7rUWJPi+evtVGTxRhCQGJcoI9lEK
36 | -----END CERTIFICATE-----
37 |
--------------------------------------------------------------------------------
/test/commands_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 |
3 | var testCase = require('nodeunit').testCase,
4 | debug = require('util').debug,
5 | inspect = require('util').inspect,
6 | gleak = require('../dev/tools/gleak'),
7 | fs = require('fs'),
8 | BSON = mongodb.BSON,
9 | Code = mongodb.Code,
10 | Binary = mongodb.Binary,
11 | Symbol = mongodb.Symbol,
12 | DBRef = mongodb.DBRef,
13 | Double = mongodb.Double,
14 | MinKey = mongodb.MinKey,
15 | MaxKey = mongodb.MaxKey,
16 | Timestamp = mongodb.Timestamp,
17 | Long = mongodb.Long,
18 | ObjectID = mongodb.ObjectID,
19 | DBRef = mongodb.DBRef,
20 | BaseCommand = mongodb.BaseCommand,
21 | InsertCommand = mongodb.InsertCommand,
22 | UpdateCommand = mongodb.UpdateCommand,
23 | DeleteCommand = mongodb.DeleteCommand,
24 | GetMoreCommand = mongodb.GetMoreCommand,
25 | KillCursorCommand = mongodb.KillCursorCommand,
26 | QueryCommand = mongodb.QueryCommand,
27 | MongoReply = mongodb.MongoReply,
28 | BinaryParser = mongodb.BinaryParser;
29 |
30 | /**
31 | * Retrieve the server information for the current
32 | * instance of the db client
33 | *
34 | * @ignore
35 | */
36 | exports.setUp = function(callback) {
37 | callback();
38 | }
39 |
40 | /**
41 | * Retrieve the server information for the current
42 | * instance of the db client
43 | *
44 | * @ignore
45 | */
46 | exports.tearDown = function(callback) {
47 | callback();
48 | }
49 |
// Serialize an OP_INSERT message carrying two documents and pin its exact
// on-the-wire size.
exports['Should Correctly Generate an Insert Command'] = function(test) {
  var full_collection_name = "db.users";
  var types = [Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey];
  var insert_command = new InsertCommand({bson: new BSON(types)}, full_collection_name);
  insert_command.add({name: 'peter pan'});
  insert_command.add({name: 'monkey king'});
  // assert the length of the binary
  test.equal(81, insert_command.toBinary().length);
  test.done();
}
59 |
// Serialize an upsert OP_UPDATE message and pin its exact on-the-wire size.
exports['Should Correctly Generate an Update Command'] = function(test) {
  var full_collection_name = "db.users";
  var flags = UpdateCommand.DB_UPSERT;
  var selector = {name: 'peter pan'};
  var document = {name: 'peter pan junior'};
  var types = [Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey];
  // Create the command
  var update_command = new UpdateCommand({bson: new BSON(types)}, full_collection_name, selector, document, flags);
  // assert the length of the binary
  test.equal(90, update_command.toBinary().length);
  test.done();
}
71 |
// Serialize an OP_DELETE message and pin its exact on-the-wire size.
exports['Should Correctly Generate a Delete Command'] = function(test) {
  var full_collection_name = "db.users";
  var selector = {name: 'peter pan'};
  var types = [Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey];
  // Create the command
  var delete_command = new DeleteCommand({bson: new BSON(types)}, full_collection_name, selector);
  // assert the length of the binary
  test.equal(58, delete_command.toBinary().length);
  test.done();
}
81 |
// Serialize an OP_GETMORE message and pin its exact on-the-wire size.
exports['Should Correctly Generate a Get More Command'] = function(test) {
  var full_collection_name = "db.users";
  var numberToReturn = 100;
  var cursorId = Long.fromNumber(10000222);
  var types = [Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey];
  // Create the command
  var get_more_command = new GetMoreCommand({bson: new BSON(types)}, full_collection_name, numberToReturn, cursorId);
  // assert the length of the binary
  test.equal(41, get_more_command.toBinary().length);
  test.done();
}
92 |
// Serialize an OP_KILL_CURSORS message and pin its exact on-the-wire size.
exports['Should Correctly Generate a Kill Cursors Command'] = function(test) {
  // NOTE(review): extending Array.prototype here appears deliberate -- it
  // checks serialization is not confused by enumerable additions to native
  // prototypes. Confirm intent before removing.
  Array.prototype.toXml = function() {}
  var cursorIds = [Long.fromNumber(1), Long.fromNumber(10000222)];
  // Create the command
  var kill_cursor_command = new KillCursorCommand({bson: new BSON([Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey])}, cursorIds);
  // assert the length of the binary
  test.equal(40, kill_cursor_command.toBinary().length);
  test.done();
}
102 |
// Serialize OP_QUERY messages (with and without a return-field filter) and
// pin their exact on-the-wire sizes.
exports['Should Correctly Generate a Query Command'] = function(test) {
  var full_collection_name = "db.users";
  var options = QueryCommand.OPTS_SLAVE;
  var numberToSkip = 100;
  var numberToReturn = 200;
  var query = {name:'peter pan'};
  var types = [Long, ObjectID, Binary, Code, DBRef, Symbol, Double, Timestamp, MaxKey, MinKey];
  var query_command = new QueryCommand({bson: new BSON(types)}, full_collection_name, options, numberToSkip, numberToReturn, query, null);
  // assert the length of the binary
  test.equal(62, query_command.toBinary().length);
  // Generate command with return field filter
  query_command = new QueryCommand({bson: new BSON(types)}, full_collection_name, options, numberToSkip, numberToReturn, query, { a : 1, b : 1, c : 1});
  test.equal(88, query_command.toBinary().length);
  test.done();
}
117 |
118 | /**
119 | * Retrieve the server information for the current
120 | * instance of the db client
121 | *
122 | * @ignore
123 | */
124 | exports.noGlobalsLeaked = function(test) {
125 | var leaks = gleak.detectNew();
126 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
127 | test.done();
128 | }
--------------------------------------------------------------------------------
/test/connect_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | Db = mongodb.Db,
9 | Cursor = mongodb.Cursor,
10 | connect = mongodb.connect,
11 | gleak = require('../dev/tools/gleak'),
12 | Script = require('vm'),
13 | Collection = mongodb.Collection,
14 | Server = mongodb.Server;
15 |
var MONGODB = 'integration_tests';
// Base url used by all tests below; appends ?ssl=true when USE_SSL is set.
// NOTE(review): the legacy 'mongo://' scheme is what these tests exercise --
// confirm against the url parser before changing to 'mongodb://'.
var clientUrl = 'mongo://localhost:27017/' + MONGODB + (useSSL == true ? '?ssl=true' : '');
18 |
19 | /**
20 | * @ignore
21 | */
22 | function connectionTester(test, testName) {
23 | return function(err, db) {
24 | test.equal(err, null);
25 | db.collection(testName, function(err, collection) {
26 | test.equal(err, null);
27 | var doc = {foo:123};
28 | collection.insert({foo:123}, {safe:true}, function(err, docs) {
29 | test.equal(err, null);
30 | db.dropDatabase(function(err, done) {
31 | db.close();
32 | test.equal(err, null);
33 | test.ok(done);
34 | test.done();
35 | });
36 | });
37 | });
38 | };
39 | };
40 |
41 | /**
42 | * @ignore
43 | */
44 | exports.testConnectNoOptions = function(test) {
45 | connect(clientUrl, connectionTester(test, 'testConnectNoOptions'));
46 | };
47 |
48 | /**
49 | * @ignore
50 | */
51 | exports.testConnectDbOptions = function(test) {
52 | connect(clientUrl,
53 | { db: {native_parser: (process.env['TEST_NATIVE'] != null)} },
54 | connectionTester(test, 'testConnectDbOptions'));
55 | };
56 |
57 | /**
58 | * @ignore
59 | */
60 | exports.testConnectServerOptions = function(test) {
61 | connect(clientUrl,
62 | { server: {auto_reconnect: true, poolSize: 4} },
63 | connectionTester(test, 'testConnectServerOptions'));
64 | };
65 |
66 | /**
67 | * @ignore
68 | */
69 | exports.testConnectAllOptions = function(test) {
70 | connect(clientUrl,
71 | { server: {auto_reconnect: true, poolSize: 4},
72 | db: {native_parser: (process.env['TEST_NATIVE'] != null)} },
73 | connectionTester(test, 'testConnectAllOptions'));
74 | };
75 |
76 | /**
77 | * @ignore
78 | */
79 | exports.testConnectGoodAuth = function(test) {
80 | var user = 'testConnectGoodAuth', password = 'password';
81 | // First add a user.
82 | connect(clientUrl, function(err, db) {
83 | test.equal(err, null);
84 | db.addUser(user, password, function(err, result) {
85 | test.equal(err, null);
86 | db.close();
87 | restOfTest();
88 | });
89 | });
90 |
91 | function restOfTest() {
92 | var url = 'mongo://' + user + ':' + password + '@localhost:27017/' + MONGODB + (useSSL == true ? '?ssl=true' : '');
93 | connect(url, connectionTester(test, 'testConnectGoodAuth'));
94 | }
95 | };
96 |
97 | /**
98 | * @ignore
99 | */
100 | exports.testConnectBadAuth = function(test) {
101 | var url = 'mongo://slithy:toves@localhost:27017/' + MONGODB + (useSSL == true ? '?ssl=true' : '');
102 | connect(url, function(err, db) {
103 | test.ok(err);
104 | test.ok(db);
105 | db.close();
106 | test.done();
107 | });
108 | };
109 |
110 | /**
111 | * @ignore
112 | */
113 | exports.testConnectNoOpen = function(test) {
114 | var db = connect('mongo://localhost:27017/' + MONGODB, {noOpen:true});
115 | test.ok(db != null);
116 | test.done();
117 | };
118 |
119 | /**
120 | * @ignore
121 | */
122 | exports.testConnectThrowsNoCallbackProvided = function(test) {
123 | test.throws(function() {
124 | var db = connect('mongo://localhost:27017/' + MONGODB);
125 | });
126 | test.done();
127 | };
128 |
129 | /**
130 | * @ignore
131 | */
132 | exports.testConnectBadUrl = function(test) {
133 | test.throws(function() {
134 | connect('mango://localhost:27017/' + MONGODB, function(err, db) {
135 | test.ok(false, 'Bad URL!');
136 | });
137 | });
138 | test.done();
139 | };
140 |
141 | /**
142 | * Example of a simple url connection string.
143 | *
144 | * @_class db
145 | * @_function Db.connect
146 | * @ignore
147 | */
148 | exports.shouldCorrectlyDoSimpleCountExamples = function(test) {
149 | // Connect to the server
150 | Db.connect('mongodb://localhost:27017/integration_tests' + (useSSL == true ? '?ssl=true' : ''), function(err, db) {
151 | test.equal(null, err);
152 |
153 | db.close();
154 | test.done();
155 | });
156 | }
157 |
158 |
159 | /**
160 | * @ignore
161 | */
162 | exports.noGlobalsLeaked = function(test) {
163 | var leaks = gleak.detectNew();
164 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
165 | test.done();
166 | }
167 |
--------------------------------------------------------------------------------
/test/connection/connection_pool_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
2 |
3 | var testCase = require('nodeunit').testCase,
4 | debug = require('util').debug,
5 | inspect = require('util').inspect,
6 | Buffer = require('buffer').Buffer,
7 | gleak = require('../../dev/tools/gleak'),
8 | fs = require('fs'),
9 | ConnectionPool = require('../../lib/mongodb/connection/connection_pool').ConnectionPool;
10 |
11 | /**
12 | * Retrieve the server information for the current
13 | * instance of the db client
14 | *
15 | * @ignore
16 | */
17 | exports.setUp = function(callback) {
18 | callback();
19 | }
20 |
21 | /**
22 | * Retrieve the server information for the current
23 | * instance of the db client
24 | *
25 | * @ignore
26 | */
27 | exports.tearDown = function(callback) {
28 | callback();
29 | }
30 |
// Constructor smoke test: socket options supplied in the 5-arg form
// (host, port, poolSize, bson, socketOptions) are applied, and omitted
// ones get defaults.
exports['Should Correctly create a pool instance with the expected values'] = function(test) {
  var connectionPool = new ConnectionPool('localhost', 2000, 1, null, {timeout:100, noDelay:true});
  test.equal(100, connectionPool.socketOptions.timeout);
  test.equal(true, connectionPool.socketOptions.noDelay);
  // Defaults applied when not supplied.
  test.equal(null, connectionPool.socketOptions.encoding);
  test.equal(0, connectionPool.socketOptions.bufferSize);
  test.done();
}
39 |
// Starting a pool against a port with no listener must fire "error" with
// zero open connections. NOTE(review): assumes nothing listens on
// localhost:2000 in the test environment.
exports['Should correctly fail due to no server'] = function(test) {
  var connectionPool = new ConnectionPool('localhost', 2000, 4, null, {timeout:100, noDelay:true});

  // // Add event handler that will fire once the pool is ready
  // (never expected to fire here; registered so the event is handled).
  connectionPool.on("poolReady", function(err, result) {
  })

  // Add event handler that will fire when it fails
  connectionPool.on("error", function(err, connection) {
    test.equal(0, connectionPool.openConnections.length)
    test.done();
  });

  // Start the pool
  connectionPool.start();
}
56 |
exports['Should Correctly create a pool of connections and receive an ok when all connections are active'] = function(test) {
  // The ConnectionPool constructor takes (host, port, poolSize, bson, socketOptions)
  // -- see the tests above, which pass null for bson. The original omitted the
  // bson argument here, so the socket options object was consumed as `bson`
  // and {timeout, noDelay} were silently ignored. Pass null for bson.
  var connectionPool = new ConnectionPool('localhost', 27017, 4, null, {timeout:100, noDelay:true});

  // Add event handler that will fire once the pool is ready
  connectionPool.on("poolReady", function() {
    connectionPool.stop();
    test.done();
  })

  // Start the pool
  connectionPool.start();
}
69 |
exports['Should Correctly connect and then force a restart creating new connections'] = function(test) {
  // ConnectionPool's signature is (host, port, poolSize, bson, socketOptions);
  // the original dropped the bson slot so the socket options were ignored.
  var connectionPool = new ConnectionPool('localhost', 27017, 4, null, {timeout:100, noDelay:true});
  var done = false;

  // Add event handler that will fire once the pool is ready
  connectionPool.on("poolReady", function() {
    if(done) {
      // Second poolReady: the restart completed, shut the pool down and finish
      connectionPool.stop();
      test.done();
    } else {
      // First poolReady: force a restart, which rebuilds the connections and
      // eventually emits poolReady a second time
      connectionPool.restart();
      done = true;
    }
  })

  // Start the pool
  // NOTE: the original terminated this assignment with a stray `},` which
  // chained it to the next statement via the comma operator; fixed to `}`.
  connectionPool.start();
}
90 |
91 | /**
92 | * Retrieve the server information for the current
93 | * instance of the db client
94 | *
95 | * @ignore
96 | */
97 | exports.noGlobalsLeaked = function(test) {
98 | var leaks = gleak.detectNew();
99 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
100 | test.done();
101 | }
--------------------------------------------------------------------------------
/test/custom_pk_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | ObjectID = mongodb.ObjectID,
13 | Server = mongodb.Server;
14 |
15 | var MONGODB = 'integration_tests';
16 | var client = null;
17 |
18 | /**
19 | * Retrieve the server information for the current
20 | * instance of the db client
21 | *
22 | * @ignore
23 | */
24 | exports.setUp = function(callback) {
25 | var self = exports;
26 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
27 | client.open(function(err, db_p) {
28 | if(numberOfTestsRun == (Object.keys(self).length)) {
29 | // If first test drop the db
30 | client.dropDatabase(function(err, done) {
31 | callback();
32 | });
33 | } else {
34 | return callback();
35 | }
36 | });
37 | }
38 |
39 | /**
40 | * Retrieve the server information for the current
41 | * instance of the db client
42 | *
43 | * @ignore
44 | */
45 | exports.tearDown = function(callback) {
46 | var self = this;
47 | numberOfTestsRun = numberOfTestsRun - 1;
48 | // Close connection
49 | client.close();
50 | callback();
51 | }
52 |
53 | exports.shouldCreateRecordsWithCustomPKFactory = function(test) {
54 | // Custom factory (need to provide a 12 byte array);
55 | var CustomPKFactory = function() {}
56 | CustomPKFactory.prototype = new Object();
57 | CustomPKFactory.createPk = function() {
58 | return new ObjectID("aaaaaaaaaaaa");
59 | }
60 |
61 | var p_client = new Db(MONGODB, new Server("127.0.0.1", 27017, {ssl:useSSL}), {'pk':CustomPKFactory, native_parser: (process.env['TEST_NATIVE'] != null)});
62 | p_client.open(function(err, p_client) {
63 | p_client.dropDatabase(function(err, done) {
64 | p_client.createCollection('test_custom_key', function(err, collection) {
65 | collection.insert({'a':1}, {safe:true}, function(err, doc) {
66 | collection.find({'_id':new ObjectID("aaaaaaaaaaaa")}).toArray(function(err, items) {
67 | test.equal(1, items.length);
68 |
69 | p_client.close();
70 | test.done();
71 | });
72 | });
73 | });
74 | });
75 | });
76 | }
77 |
78 | exports.testConnectBadUrl = function(test) {
79 | test.throws(function() {
80 | connect('mango://localhost:27017/' + MONGODB, function(err, db) {
81 | test.ok(false, 'Bad URL!');
82 | });
83 | });
84 | test.done();
85 | }
86 |
87 | /**
88 | * Retrieve the server information for the current
89 | * instance of the db client
90 | *
91 | * @ignore
92 | */
93 | exports.noGlobalsLeaked = function(test) {
94 | var leaks = gleak.detectNew();
95 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
96 | test.done();
97 | }
98 |
/**
 * Count of tests in this module: all exported keys (`this` is the module's
 * exports object) minus the setUp/tearDown pair. tearDown decrements it so
 * setUp can detect the first test and drop the database.
 * NOTE(review): setUp compares this against the FULL export count, which
 * (keys - 2) can never initially equal — confirm the first-test
 * dropDatabase branch is actually reachable.
 *
 * @ignore
 */
var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/exception_handling_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | Server = mongodb.Server;
13 |
14 | var MONGODB = 'integration_tests';
15 | var client = null;
16 |
17 | /**
18 | * Retrieve the server information for the current
19 | * instance of the db client
20 | *
21 | * @ignore
22 | */
23 | exports.setUp = function(callback) {
24 | var self = exports;
25 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
26 | client.open(function(err, db_p) {
27 | if(numberOfTestsRun == (Object.keys(self).length)) {
28 | // If first test drop the db
29 | client.dropDatabase(function(err, done) {
30 | callback();
31 | });
32 | } else {
33 | return callback();
34 | }
35 | });
36 | }
37 |
38 | /**
39 | * Retrieve the server information for the current
40 | * instance of the db client
41 | *
42 | * @ignore
43 | */
44 | exports.tearDown = function(callback) {
45 | var self = this;
46 | numberOfTestsRun = numberOfTestsRun - 1;
47 | // Close connection
48 | client.close();
49 | callback();
50 | }
51 |
/**
 * Verifies that an exception raised inside a collection callback propagates
 * out to the caller's try/catch. The try/catch here only works because
 * client.collection appears to invoke its callback synchronously in this
 * driver — the assertion depends on that.
 */
exports.shouldCorrectlyHandleThrownError = function(test) {
  client.createCollection('shouldCorrectlyHandleThrownError', function(err, r) {
    try {
      client.collection('shouldCorrectlyHandleThrownError', function(err, collection) {
        // Intentional ReferenceError: the variable is deliberately undefined
        debug(someUndefinedVariable);
      });
    } catch (err) {
      test.ok(err != null);
      test.done();
    }
  });
}
64 |
/**
 * Verifies that an exception thrown from a rename callback — which runs on a
 * later tick, outside any try/catch — surfaces as a process-level
 * uncaughtException.
 */
exports.shouldCorrectlyHandleThrownErrorInRename = function(test) {
  // Catch unhandled exception
  process.on("uncaughtException", function(err) {
    // Remove listener so later tests are not affected by this hook
    process.removeAllListeners("uncaughtException");
    test.done()
  })

  // Execute code
  client.createCollection('shouldCorrectlyHandleThrownErrorInRename', function(err, r) {
    client.collection('shouldCorrectlyHandleThrownError', function(err, collection) {
      collection.rename("shouldCorrectlyHandleThrownErrorInRename2", function(err, result) {
        // Intentional ReferenceError thrown asynchronously
        debug(someUndefinedVariable);
      })
    });
  });
}
82 |
83 | /**
84 | * Retrieve the server information for the current
85 | * instance of the db client
86 | *
87 | * @ignore
88 | */
89 | exports.noGlobalsLeaked = function(test) {
90 | var leaks = gleak.detectNew();
91 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
92 | test.done();
93 | }
94 |
/**
 * Count of tests in this module: all exported keys (`this` is the module's
 * exports object) minus the setUp/tearDown pair. tearDown decrements it so
 * setUp can detect the first test and drop the database.
 * NOTE(review): setUp compares this against the FULL export count, which
 * (keys - 2) can never initially equal — confirm the first-test
 * dropDatabase branch is actually reachable.
 *
 * @ignore
 */
var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/geo_search_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | ObjectID = mongodb.ObjectID,
13 | Long = mongodb.Long,
14 | Server = mongodb.Server;
15 |
16 | var MONGODB = 'integration_tests';
17 | var native_parser = (process.env['TEST_NATIVE'] != null);
18 | var client = null;
19 |
20 | /**
21 | * Retrieve the server information for the current
22 | * instance of the db client
23 | *
24 | * @ignore
25 | */
26 | exports.setUp = function(callback) {
27 | var self = exports;
28 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
29 | client.open(function(err, db_p) {
30 | if(numberOfTestsRun == (Object.keys(self).length)) {
31 | // If first test drop the db
32 | client.dropDatabase(function(err, done) {
33 | callback();
34 | });
35 | } else {
36 | return callback();
37 | }
38 | });
39 | }
40 |
41 | /**
42 | * Retrieve the server information for the current
43 | * instance of the db client
44 | *
45 | * @ignore
46 | */
47 | exports.tearDown = function(callback) {
48 | var self = this;
49 | numberOfTestsRun = numberOfTestsRun - 1;
50 | // Close connection
51 | client.close();
52 | callback();
53 | }
54 |
/**
 * Example of a simple geoNear query across some documents
 *
 * @_class collection
 * @_function geoNear
 * @ignore
 */
exports.shouldCorrectlyPerformSimpleGeoNearCommand = function(test) {
  var db = new Db('integration_tests', new Server("127.0.0.1", 27017,
    {auto_reconnect: false, poolSize: 4, ssl:useSSL}), {native_parser: native_parser});

  // Establish connection to db
  db.open(function(err, db) {

    // Fetch the collection
    db.collection("simple_geo_near_command", function(err, collection) {

      // Add a 2d location index (required before geoNear can run)
      collection.ensureIndex({loc:"2d"}, function(err, result) {

        // Save two location tagged documents
        collection.insert([{a:1, loc:[50, 30]}, {a:1, loc:[30, 50]}], {safe:true}, function(err, result) {

          // Use geoNear command to find documents near (50, 50); num:1
          // limits the result to the single nearest match for {a:1}
          collection.geoNear(50, 50, {query:{a:1}, num:1}, function(err, docs) {
            test.equal(1, docs.results.length);

            db.close();
            test.done();
          });
        });
      });
    });
  });
}
90 |
/**
 * Example of a simple geoHaystackSearch query across some documents
 *
 * @_class collection
 * @_function geoHaystackSearch
 * @ignore
 */
exports.shouldCorrectlyPerformSimpleGeoHaystackSearchCommand = function(test) {
  var db = new Db('integration_tests', new Server("127.0.0.1", 27017,
    {auto_reconnect: false, poolSize: 4, ssl:useSSL}), {native_parser: native_parser});

  // Establish connection to db
  db.open(function(err, db) {

    // Fetch the collection
    db.collection("simple_geo_haystack_command", function(err, collection) {

      // Add a geoHaystack index (required before geoHaystackSearch can run)
      collection.ensureIndex({loc: "geoHaystack", type: 1}, {bucketSize: 1}, function(err, result) {

        // Save two location tagged documents
        collection.insert([{a:1, loc:[50, 30]}, {a:1, loc:[30, 50]}], {safe:true}, function(err, result) {

          // Use geoHaystackSearch command (the original comment said geoNear)
          // to find at most one document matching {a:1} within maxDistance
          collection.geoHaystackSearch(50, 50, {search:{a:1}, limit:1, maxDistance:100}, function(err, docs) {
            test.equal(1, docs.results.length);

            db.close();
            test.done();
          });
        });
      });
    });
  });
}
126 |
127 | /**
128 | * Retrieve the server information for the current
129 | * instance of the db client
130 | *
131 | * @ignore
132 | */
133 | exports.noGlobalsLeaked = function(test) {
134 | var leaks = gleak.detectNew();
135 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
136 | test.done();
137 | }
138 |
/**
 * Count of tests in this module: all exported keys (`this` is the module's
 * exports object) minus the setUp/tearDown pair. tearDown decrements it so
 * setUp can detect the first test and drop the database.
 * NOTE(review): setUp compares this against the FULL export count, which
 * (keys - 2) can never initially equal — confirm the first-test
 * dropDatabase branch is actually reachable.
 *
 * @ignore
 */
var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/gridstore/iya_logo_final_bw.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/test/gridstore/iya_logo_final_bw.jpg
--------------------------------------------------------------------------------
/test/gridstore/test_gs_weird_bug.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/test/gridstore/test_gs_weird_bug.png
--------------------------------------------------------------------------------
/test/gridstore/test_gs_working_field_read.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/christkv/node-mongodb-native/2cac80902898dbcfa58de45c9b017fdee92f7f19/test/gridstore/test_gs_working_field_read.pdf
--------------------------------------------------------------------------------
/test/logging_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | Server = mongodb.Server;
13 |
14 | var MONGODB = 'integration_tests';
15 | var client = null;
16 |
17 | /**
18 | * Retrieve the server information for the current
19 | * instance of the db client
20 | *
21 | * @ignore
22 | */
23 | exports.setUp = function(callback) {
24 | var self = exports;
25 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
26 | client.open(function(err, db_p) {
27 | if(numberOfTestsRun == (Object.keys(self).length)) {
28 | // If first test drop the db
29 | client.dropDatabase(function(err, done) {
30 | callback();
31 | });
32 | } else {
33 | return callback();
34 | }
35 | });
36 | }
37 |
38 | /**
39 | * Retrieve the server information for the current
40 | * instance of the db client
41 | *
42 | * @ignore
43 | */
44 | exports.tearDown = function(callback) {
45 | var self = this;
46 | numberOfTestsRun = numberOfTestsRun - 1;
47 | // Close connection
48 | client.close();
49 | callback();
50 | }
51 |
52 | exports.shouldCorrectlyLogContent = function(test) {
53 | var loggedOutput = false;
54 | var logger = {
55 | doDebug:true,
56 | doError:true,
57 | doLog:true,
58 |
59 | error:function(message, object) {},
60 | log:function(message, object) {},
61 |
62 | debug:function(message, object) {
63 | loggedOutput = true;
64 | }
65 | }
66 |
67 | var automatic_connect_client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null), retryMiliSeconds:50, logger:logger});
68 | automatic_connect_client.open(function(err, automatic_connect_client) {
69 | automatic_connect_client.close();
70 | test.equal(true, loggedOutput);
71 | test.done();
72 | });
73 | }
74 |
75 | /**
76 | * Retrieve the server information for the current
77 | * instance of the db client
78 | *
79 | * @ignore
80 | */
81 | exports.noGlobalsLeaked = function(test) {
82 | var leaks = gleak.detectNew();
83 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
84 | test.done();
85 | }
86 |
/**
 * Count of tests in this module: all exported keys (`this` is the module's
 * exports object) minus the setUp/tearDown pair. tearDown decrements it so
 * setUp can detect the first test and drop the database.
 * NOTE(review): setUp compares this against the FULL export count, which
 * (keys - 2) can never initially equal — confirm the first-test
 * dropDatabase branch is actually reachable.
 *
 * @ignore
 */
var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/manual_tests/grids_fs_compatibility_test.js:
--------------------------------------------------------------------------------
var Server = require("../../lib/mongodb").Server,
    Db = require("../../lib/mongodb").Db,
    ObjectID = require("../../lib/mongodb").ObjectID,
    GridStore = require("../../lib/mongodb").GridStore;

var options = {
  auto_reconnect: true,
  poolSize: 1,
  socketOptions: { timeout:8000 }
};

// Source file to copy into GridFS. The original hard-coded an absolute path
// on the author's machine; default to the repo's test fixture instead and
// allow overriding from the command line.
var fileName = process.argv[2] || __dirname + '/../gridstore/iya_logo_final_bw.jpg';

var db = new Db("data", new Server( 'localhost', 27017, options));
db.open(function(err, client){
  var id = new ObjectID();
  // Write a file into gridfs and then verify that it's readable
  var gridStore = new GridStore(client, 'manual_test.jpg', "w");
  gridStore.writeFile(fileName, function(err, result) {
    db.close();
  })
});
--------------------------------------------------------------------------------
/test/manual_tests/issue_replicaset_test.js:
--------------------------------------------------------------------------------
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager;

// NOTE(review): `options`, `userObjects` and ReplicaSetManager are prepared
// but never used below — this manual test only opens and closes a connection
// against a manually started replica set (see the "Manual config" commands).
var options = {
  auto_reconnect: true,
  poolSize: 4,
  socketOptions: { keepAlive: 100, timeout:6000 }
};

var userObjects = [];

// Build user array
for(var i = 0; i < 122; i++) {
  userObjects.push({'user_id':i});
}

// Manual config
// mongod --rest --replSet mlocal --oplogSize 8 --dbpath=./d1
// mongod --port 27018 --rest --replSet mlocal --dbpath=./d2
// mongod --port=27019 --rest --replSet mlocal --dbpath=./d3
// {"_id" : "mlocal", "members" : [{"_id" : 0,"host" : "localhost:27017"},{"_id" : 1,"host" : "localhost:27018"},{"_id" : 2,"host" : "localhost:27019","arbiterOnly" : true}]}

// Replica configuration
var replSet = new mongodb.ReplSetServers( [
    new mongodb.Server( 'localhost', 27017, { auto_reconnect: true } ),
    new mongodb.Server( 'localhost', 27018, { auto_reconnect: true } ),
    new mongodb.Server( 'localhost', 27019, { auto_reconnect: true } )
  ],
  {rs_name:'mlocal'}
);

var queryCount = 0;
var users;
var db = new mongodb.Db("data", replSet);
// Surface connection-level errors loudly on the console
db.on("error", function(err) {
  console.log("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
  console.dir(err)
})

db.open(function(err, client){
  // Just close the connection
  db.close();
});
44 |
--------------------------------------------------------------------------------
/test/manual_tests/large_file_gridstore_stream.js:
--------------------------------------------------------------------------------
var http = require('http'),
    mongodb = require('../../lib/mongodb'),
    mongoClient = new mongodb.Db('music', new mongodb.Server('localhost', mongodb.Connection.DEFAULT_PORT, {}), {});

// Tiny HTTP server that streams the GridFS-stored file "testfile.mp3" to
// every incoming request on port 8080.
http.createServer(function(request, response) {
  var band = 'testfile';

  mongoClient.open(function(err, db) {
    var gs = new mongodb.GridStore(db, band+'.mp3', "r");
    gs.open(function(err, gs) {
      console.log("streaming...");
      // writeHead is the documented API; the original used the undocumented
      // writeHeader alias, which was removed in later Node.js versions.
      response.writeHead(200, {
        'Content-type': 'audio/mpeg, audio/x-mpeg, audio/x-mpeg-3, audio/mpeg3',
        'content-disposition': 'attachment; filename=' + band + '.mp3',
        'X-Pad': 'avoid browser bug',
        'Cache-Control': 'no-cache',
        'Content-Length': gs.length
      });

      // Pipe the GridFS stream straight into the HTTP response
      gs.stream(true).pipe(response);
    });
  });
}).listen(8080);
--------------------------------------------------------------------------------
/test/manual_tests/manual_lock.js:
--------------------------------------------------------------------------------
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../../test/tools/replica_set_manager').ReplicaSetManager;

var options = {
  auto_reconnect: true,
  poolSize: 4,
  socketOptions: { keepAlive: 100, timeout:30000 }
};

var userObjects = [];
var counter = 0;
var counter2 = 0;

// Build user array
for(var i = 0; i < 122; i++) {
  userObjects.push({a:true, b:true});
}

// FIX: the original assigned `RS = ...` without `var`, creating an implicit
// global.
var RS = new ReplicaSetManager({retries:120, secondary_count:1, passive_count:0, arbiter_count:1});
RS.startSet(true, function(err, result) {
  // Replica configuration
  var replSet = new mongodb.ReplSetServers( [
      new mongodb.Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[2], { auto_reconnect: true } )
    ],
    {rs_name:RS.name}
  );

  var collA;
  var collB;

  var db = new mongodb.Db("data", replSet);
  db.open(function(err, client){
    console.log("Connected");
    client.collection("collA", function(err, coll){
      collA = coll;

      coll.insert(userObjects, {safe:true}, function(err, result) {

        client.collection("collB", function(err, coll){
          collB = coll;

          coll.insert(userObjects, {safe:true}, function(err, result) {

            // Hammer the two collections with back-to-back paired lookups
            var timeoutFunc = function() {
              lookup(function(err, result) {
                console.log("-------------------------------------------- lookedup")
                process.nextTick(timeoutFunc, 1);
              })
            }

            process.nextTick(timeoutFunc, 1);
          });
        });
      });
    });
  });

  // Issues one findOne against each collection and calls cb once BOTH have
  // answered (the `waiting` countdown reaches zero exactly once per call).
  function lookup(cb){
    var a, b;
    var waiting = 2;

    collA.findOne({ a: true }, function(err, result){
      a = result;
      waiting--;
      if(waiting === 0){
        console.log("---------------------------------------------------------------------- collA :: " + counter);
        counter = counter + 1;
        cb(null, [a, b]);
      }
    });

    collB.findOne({ b: true }, function(err, result){
      b = result;
      waiting--;
      if(waiting === 0){
        console.log("---------------------------------------------------------------------- collB :: " + counter);
        counter = counter + 1;
        cb(null, [a, b]);
      }
    });
  }
});
--------------------------------------------------------------------------------
/test/manual_tests/motherload_pullback_test.js:
--------------------------------------------------------------------------------
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager;

var RS = new ReplicaSetManager({retries:120, secondary_count:2, passive_count:1, arbiter_count:1});
RS.startSet(true, function(err, result) {
  // Replica configuration
  var replSet = new mongodb.ReplSetServers( [
      new mongodb.Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[2], { auto_reconnect: true } )
    ],
    {rs_name:RS.name}
  );

  new mongodb.Db("data", replSet).open(function(err, db) {
    console.log("------------------------------------------------")
    console.dir(err)

    db.dropDatabase(function(err, result) {
      // Get collection
      var collection = db.collection('documents');
      var totalDocumentsToInsert = 100000;
      // total count
      var inserted = 0;

      // Insert 100 000 documents to verify correct pullback.
      // FIX: the write-concern key was misspelled `wtimout` in the original,
      // so the 10s replication timeout was silently ignored; it is `wtimeout`.
      for(var i = 0; i < totalDocumentsToInsert; i++) {
        collection.insert({games:1, country:1, username:1}, {safe:{w:2, wtimeout:10000}}, function(err, result) {
          inserted = inserted + 1;

          if(inserted == totalDocumentsToInsert) {

            // Fetch all the documents as an array and count them
            collection.find().toArray(function(err, items) {
              console.log("----------------------------------------------------")
              console.log(items.length)

              db.close();
            });
          }
        });
      }
    })
  });
});
46 |
47 |
--------------------------------------------------------------------------------
/test/manual_tests/replicaset_manual_kill_test.js:
--------------------------------------------------------------------------------
var http = require('http'),
    os = require('os'),
    mongodb = require('../../lib/mongodb'),
    Server = mongodb.Server,
    ReplSetServers = mongodb.ReplSetServers,
    Db = mongodb.Db;

console.log('launching simple mongo application...');

//open replicaset
var replSet = new ReplSetServers([
    new Server('127.0.0.1', 30000, { auto_reconnect: true }),
    new Server('127.0.0.1', 30001, { auto_reconnect: true }),
    new Server('127.0.0.1', 30002, { auto_reconnect: true })
  ],
  {
    rs_name: 'testappset',
    read_secondary: true,
    ha:true
  }
);

//opens the database
var db = new Db('testapp', replSet);
db.open(function(err) {
  if (err) return console.log('database open error %o', err);
  console.log('database opened');

  db.collection('stats', function(statsErr, stats) {
    // FIX: the original logged the outer `err` (already known to be null
    // here) instead of the collection error `statsErr`.
    if (statsErr) return console.log('error opening stats %o', statsErr);
    stats.remove({}, {safe:true}, function(err, result) {
      console.log("================================================================")
      console.dir(err)

      stats.insert({name:'reqcount', value:0}, {safe:true}, function(err, result) {
        console.log("================================================================")
        console.dir(err)
        //create server
        http.createServer(function (req, res) {
          if (req.url !== '/') {
            res.end();
            return console.log('invalid request performed');
          }

          //get amount of requests done
          stats.findOne({name: 'reqcount'}, function(err, reqstat) {
            if(err) {
              res.writeHead(200, {'Content-Type': 'text/plain'});
              res.end('Hello World, from server node: ' + os.hostname() + '...\nError #' + err + ', reqstat ' + reqstat);
              return console.log('reqstat is null!');
            }
            var reqcount = reqstat.value;

            //write to client
            res.writeHead(200, {'Content-Type': 'text/plain'});
            res.end('Hello World, from server node: ' + os.hostname() + '...\nThis is visit #' + reqcount);
          });

          //increment amount of requests (fire-and-forget update)
          console.log('incrementing request by 1!');
          stats.update({name: 'reqcount'}, {'$inc': {value: 1}}, {upsert: true});

        }).listen(8000);
      });
    });

    console.log('Server running at port 8000');
  });
});
--------------------------------------------------------------------------------
/test/manual_tests/replicaset_test.js:
--------------------------------------------------------------------------------
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager;

// NOTE(review): `options` is built but never passed to any constructor below.
var options = {
  auto_reconnect: true,
  poolSize: 4,
  socketOptions: { keepAlive: 100, timeout:6000 }
};

var userObjects = [];

// Build user array
for(var i = 0; i < 122; i++) {
  userObjects.push({'user_id':i});
}

// FIX: the original assigned `RS = ...` without `var`, creating an implicit
// global.
var RS = new ReplicaSetManager({retries:120, secondary_count:2, passive_count:1, arbiter_count:1});
RS.startSet(true, function(err, result) {
  // Replica configuration
  var replSet = new mongodb.ReplSetServers( [
      new mongodb.Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[2], { auto_reconnect: true } )
    ],
    {rs_name:RS.name}
  );

  var queryCount = 0;
  var users;
  var db = new mongodb.Db("data", replSet);
  db.on("error", function(err) {
    console.log("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
    console.dir(err)
  })

  db.open(function(err, client){
    if(err){
      console.log("[%s] %s", new Date, err.stack || err);
      return;
    }

    // `users` doubles as a "did we already connect" flag
    if(users){
      console.log("[%s] Reconnected?!", new Date);
      return;
    }

    client.collection("users", function(err, coll){
      coll.insert(userObjects, {safe:true}, function(err, result) {
        users = coll;
        query();
      })
    });
  });

  // Repeatedly count the users collection every 7 seconds so a manual
  // node kill/failover can be observed in the log output.
  function query(){
    var current = queryCount++;
    console.log("[%s] #%s querying all users", new Date, current);
    // setTimeout(query, 32 * 1000);
    setTimeout(query, 7 * 1000);
    users.find().count(function(err, all){
      if(err){
        console.log("[%s] #%s %s", new Date, current, err.stack || err);
      }else{
        console.log("[%s] #%s found %s users", new Date, current, all);
      }
    });
  }
});
69 |
70 |
71 |
--------------------------------------------------------------------------------
/test/manual_tests/server_load.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | var mongo = require("../../lib/mongodb");
3 | var express = require("express");
4 | var ObjectID = mongo.ObjectID;
5 | var DBRef = mongo.DBRef;
6 | var util = require("util");
7 |
8 | var app = express.createServer();
9 |
// Default connection settings for the backing mongod instance; read back
// later via app.set("dbconnection") when opening a connection.
app.configure(function() {
  app.set('dbconnection', {
    "port": 27017,
    "host": "localhost"
  });
});
16 |
/**
 * Serialize a command result (or error) back to the HTTP client as JSON.
 * On success the payload is {data, success} plus allCount when supplied;
 * on error the failure is logged and {success:false, error} is sent.
 */
app.renderResponse = function(res, err, data, allCount) {
  res.header('Content-Type', 'application/json');

  if(err != null) {
    util.log(util.inspect(err));
    console.log(err.stack);
    res.send({success: false, error:err.message});
    return;
  }

  if(typeof allCount == "undefined") {
    res.send({data: data, success: true});
  } else {
    res.send({allCount: allCount, data: data, success: true});
  }
};
32 |
33 | app.use(express.bodyParser());
34 | app.use(app.router);
35 | app.use(express.logger());
36 | app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
37 |
/**
 * Returns true when the given value's string form is an ISO-8601 date-time
 * (e.g. "2011-01-01T12:30:45Z", separators optional).
 *
 * FIX: the original pattern was unanchored, so ANY string merely containing
 * a date-like substring was classified as a date (and later converted by
 * decodeField). Anchor the pattern to require the whole string to match,
 * and use RegExp.test directly instead of cloning via new RegExp + match.
 */
var isISO8601 = function(dString) {
  var regexp = /^(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)(\.\d+)?(Z|([+-])(\d\d)(:)?(\d\d))?$/;
  return regexp.test(dString.toString());
};
48 |
49 |
/**
 * Promote a plain decoded JSON value to its rich driver type where
 * applicable: {namespace, oid} objects become DBRefs (with 24-hex-char
 * oids upgraded to real ObjectIDs), ISO-8601-looking strings become
 * Dates; null stays null and anything else is returned untouched.
 */
var decodeField = function(value) {
  if(value == null)
    return null;

  // DBRef-shaped object?
  if(typeof value == "object" && value['namespace'] && value['oid']) {
    return /^[0-9a-fA-F]{24}$/.test(value['oid'])
      ? new DBRef(value['namespace'], new ObjectID(value['oid']))
      : new DBRef(value['namespace'], value['oid']);
  }

  if(isISO8601(value))
    return new Date(value);

  return value;
};
66 |
// Walk an object (or array) in place: 24-hex-char "_id" strings become
// ObjectIDs, plain nested objects are recursed into, and every other
// non-null leaf is handed to decodeField. Objects that already look like
// DBRefs (carry namespace or oid) are decoded rather than recursed into.
var deepDecode = function(obj) {
  for(var key in obj) {
    var value = obj[key];

    if(value == null) continue; // leave null/undefined slots alone

    if(key == "_id" && /^[0-9a-fA-F]{24}$/.test(value)) {
      obj[key] = new ObjectID(value);
    } else if(typeof value == "object" && typeof value['namespace'] == "undefined" && typeof value['oid'] == "undefined") {
      deepDecode(value);
    } else {
      obj[key] = decodeField(value);
    }
  }
};
84 |
// Lazily-created singleton Db handle shared by all requests.
// (Was an implicit global — `db = null` with no `var`.)
var db = null;

// Open (or reuse) the database connection and hand it to callback(err, db).
// NOTE(review): concurrent callers arriving before the first open()
// completes will each open their own connection; acceptable for this
// manual load test, but not a general-purpose pool.
var openConnection = function(dbname, config, callback) {
  if(db) {
    callback(null, db);
  } else {
    var target = new mongo.Server(config.host, config.port, {'auto_reconnect':true, 'poolSize':4});
    db = new mongo.Db(dbname, target, {native_parser:false});
    db.open(callback);
  }
};
97 |
// Execute the query described by target/spec/options and pass the result to
// next(err, docsOrDoc). A spec containing an _id is treated as a point
// lookup (findOne); anything else runs a find capped at 10 documents.
var listCommand = function (target, spec, options, next){
  deepDecode(spec);
  openConnection(target.db, target.connection, function(err, db) {
    if(err) { next(err); return; }

    // open collection
    db.collection(target.collection, function(err, collection) {
      // Was silently ignored — a failed collection lookup crashed below.
      if(err) { next(err); return; }

      if(spec._id) {
        collection.findOne(spec, options, function(err, doc){
          next(err, doc);
        });
      } else {
        // Cap result size so a broad spec cannot pull the whole collection.
        options['limit'] = 10;

        collection.find(spec, options, function(err, cursor) {
          // Was silently ignored — a failed find crashed on a null cursor.
          if(err) { next(err); return; }

          cursor.toArray(function(err, docs) {
            next(err, docs);
          });
        });
      }
    });
  });
};
128 |
// GET /:db/:collection/:id? — query documents. The selector comes from the
// "query" (or "spec", which wins) query-string parameter as JSON; driver
// options come from "options"; a path id becomes a point lookup on _id.
app.get('/:db/:collection/:id?', function(req, res, next) {
  var spec = {};
  if(req.query.query) spec = JSON.parse(req.query.query);
  if(req.query.spec) spec = JSON.parse(req.query.spec);

  if(req.params.id)
    spec._id = req.params.id;

  // JSON decode options
  var options = {};
  if(req.query.options) options = JSON.parse(req.query.options);

  var target = {
    connection: app.set("dbconnection"),
    db: req.params.db,
    collection: req.params.collection
  };

  listCommand(target, spec, options, function(err, docs, allCount) {
    app.renderResponse(res, err, docs, allCount);
  });
});

app.listen(9999, '127.0.0.1');
--------------------------------------------------------------------------------
/test/manual_tests/simple_test.js:
--------------------------------------------------------------------------------
var mongodb = require('../../lib/mongodb');
var Db = mongodb.Db;
var Server = mongodb.Server;

// Manual smoke test: open one pooled connection, then run a remove and a
// find/toArray in parallel, logging progress to the console.
var _db = new Db('mydb', new Server('localhost', 27017, {auto_reconnect: true, poolSize: 2}));

_db.open(function(err, db) {

  // Remove everything from coll1 that expires within the next 24 hours.
  db.collection('coll1', function(err, coll) {
    var cutoff = new Date();
    cutoff.setHours(cutoff.getHours() + 24);
    coll.remove({valid_to: {$lt: cutoff}}, {safe: true}, function(err) {
      console.log('Deleted the items');
    });
  });

  // Read back the whole of coll2 as an array.
  db.collection('coll2', function(err, coll) {
    coll.find({}, {}, function(err, cursor) {
      console.log('Turning the cursor into an array');
      cursor.toArray(function(err, docs) {
        console.log('Got the array');
      });
    });
  });
});
--------------------------------------------------------------------------------
/test/manual_tests/single_test.js:
--------------------------------------------------------------------------------
// Manual single-server load test: seeds a "users" collection with 122
// documents, then counts them every 7 seconds forever, logging any driver
// errors along the way.
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager;

var options = {
  auto_reconnect: true,
  poolSize: 1,
  // socketOptions: { keepAlive: 100, timeout:8000 }
  socketOptions: { timeout:8000 }
};

// Seed data: 122 trivial user documents.
var userObjects = [];
for(var n = 0; n < 122; n++) {
  userObjects.push({'user_id': n});
}

var queryCount = 0;
var replSet = new mongodb.Server( 'localhost', 27017, options);

var users;
var db = new mongodb.Db("data", replSet);

// Surface any emitted driver errors loudly on the console.
db.on("error", function(err) {
  console.log("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
  console.dir(err)
})

db.open(function(err, client){
  if(err){
    console.log("[%s] %s", new Date, err.stack || err);
    return;
  }

  // open() can fire again after a reconnect; don't re-seed in that case.
  if(users){
    console.log("[%s] Reconnected?!", new Date);
    return;
  }

  // Clear out any previous run, insert the seed data, then start querying.
  client.collection("users", function(err, coll){
    coll.remove({}, {safe:true}, function(err) {
      coll.insert(userObjects, {safe:true}, function(err, result) {
        users = coll;
        query();
      })
    });
  });
});

// Count all users, rescheduling itself 7 seconds out each time.
function query(){
  var current = queryCount++;
  console.log("[%s] #%s querying all users", new Date, current);
  // setTimeout(query, 32 * 1000);
  setTimeout(query, 7 * 1000);
  users.find().count(function(err, all){
    if(err){
      console.log("[%s] #%s %s", new Date, current, err.stack || err);
    }else{
      console.log("[%s] #%s found %s users", new Date, current, all);
    }
  });
}
--------------------------------------------------------------------------------
/test/manual_tests/test.js:
--------------------------------------------------------------------------------
// Manual replica-set load test: boots a 4-node set, seeds a "users"
// collection with 122 documents and then re-counts them every 32 seconds.
//
// Fixed: require() paths are resolved relative to THIS file
// (test/manual_tests/), not the cwd — "./lib/mongodb" and
// "./test/tools/replica_set_manager" could never load. Siblings in this
// directory use the paths below.
var mongodb = require("../../lib/mongodb"),
    ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager;

var options = {
  auto_reconnect: true,
  poolSize: 4,
  socketOptions: { keepAlive: 100, timeout:30000 }
};

// Build user array
var userObjects = [];
for(var i = 0; i < 122; i++) {
  userObjects.push({'user_id':i});
}

// Boot the replica set (primary + 2 secondaries + 1 passive + 1 arbiter).
// Was an implicit global (`RS = ...` without var).
var RS = new ReplicaSetManager({retries:120, secondary_count:2, passive_count:1, arbiter_count:1});
RS.startSet(true, function(err, result) {
  // Replica configuration
  var replSet = new mongodb.ReplSetServers( [
      new mongodb.Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
      new mongodb.Server( RS.host, RS.ports[2], { auto_reconnect: true } )
    ],
    {rs_name:RS.name}
  );

  var users;
  var db = new mongodb.Db("data", replSet);
  db.open(function(err, client){
    if(err){
      console.log("[%s] %s", new Date, err.stack || err);
      return;
    }

    // open() fires again on reconnect; don't re-seed the collection then.
    if(users){
      console.log("[%s] Reconnected?!", new Date);
      return;
    }

    client.collection("users", function(err, coll){
      console.log("Connected");
      coll.insert(userObjects, {safe:true}, function(err, result) {
        users = coll;
        query();
      })
    });
  });

  // Count all users, rescheduling itself every 32 seconds.
  function query(){
    console.log("[%s] querying all users", new Date);
    setTimeout(query, 32 * 1000);
    db.collection('users', function(err, coll) {
      if(err){
        console.log("[0] :: [%s] %s", new Date, err.stack || err);
      }

      coll.find().count(function(err, all){
        if(err){
          console.log("[1] :: [%s] %s", new Date, err.stack || err);
        }else{
          console.log("[2] :: [%s] found %s users", new Date, all);
        }
      });
    })
  }
});
82 |
83 |
84 |
--------------------------------------------------------------------------------
/test/reaper_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | Server = mongodb.Server;
13 |
14 | var MONGODB = 'integration_tests';
15 | var client = null;
16 |
17 | /**
18 | * Retrieve the server information for the current
19 | * instance of the db client
20 | *
21 | * @ignore
22 | */
23 | exports.setUp = function(callback) {
24 | var self = exports;
25 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
26 | client.open(function(err, db_p) {
27 | if(numberOfTestsRun == (Object.keys(self).length)) {
28 | // If first test drop the db
29 | client.dropDatabase(function(err, done) {
30 | callback();
31 | });
32 | } else {
33 | return callback();
34 | }
35 | });
36 | }
37 |
38 | /**
39 | * Retrieve the server information for the current
40 | * instance of the db client
41 | *
42 | * @ignore
43 | */
44 | exports.tearDown = function(callback) {
45 | var self = this;
46 | numberOfTestsRun = numberOfTestsRun - 1;
47 | // Close connection
48 | client.close();
49 | callback();
50 | }
51 |
52 | exports.shouldCorrectlySaveUnicodeContainingDocument = function(test) {
53 | var reaperClient = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: false, ssl:useSSL}), {reaper:true, native_parser: (process.env['TEST_NATIVE'] != null)});
54 | reaperClient.open(function(err, reaperClient) {
55 | reaperClient._lastReaperTimestamp = (new Date().getTime() - 1000000);
56 | var con = reaperClient.serverConfig.checkoutReader();
57 | // Prime the reaper with a bogus call
58 | reaperClient._callBackStore._notReplied["3"] = {start: (new Date().getTime() - 50000), 'raw': false, chained:null, connection:con};
59 | reaperClient._callBackStore.once("3", function(err, result) {
60 | reaperClient.close();
61 | test.done();
62 | })
63 |
64 | reaperClient.collection("test", {safe:true}, function(err, col) {
65 | // Does not matter
66 | });
67 | })
68 | }
69 |
70 | /**
71 | * Retrieve the server information for the current
72 | * instance of the db client
73 | *
74 | * @ignore
75 | */
76 | exports.noGlobalsLeaked = function(test) {
77 | var leaks = gleak.detectNew();
78 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
79 | test.done();
80 | }
81 |
82 | /**
83 | * Retrieve the server information for the current
84 | * instance of the db client
85 | *
86 | * @ignore
87 | */
88 | var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/regexp_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | Server = mongodb.Server;
13 |
14 | var MONGODB = 'integration_tests';
15 | var client = null;
16 |
17 | /**
18 | * Retrieve the server information for the current
19 | * instance of the db client
20 | *
21 | * @ignore
22 | */
23 | exports.setUp = function(callback) {
24 | var self = exports;
25 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
26 | client.open(function(err, db_p) {
27 | if(numberOfTestsRun == (Object.keys(self).length)) {
28 | // If first test drop the db
29 | client.dropDatabase(function(err, done) {
30 | callback();
31 | });
32 | } else {
33 | return callback();
34 | }
35 | });
36 | }
37 |
38 | /**
39 | * Retrieve the server information for the current
40 | * instance of the db client
41 | *
42 | * @ignore
43 | */
44 | exports.tearDown = function(callback) {
45 | var self = this;
46 | numberOfTestsRun = numberOfTestsRun - 1;
47 | // Close connection
48 | client.close();
49 | callback();
50 | }
51 |
52 | exports.shouldCorrectlyInsertSimpleRegExpDocument = function(test) {
53 | var regexp = /foobar/i;
54 |
55 | client.createCollection('test_regex', function(err, collection) {
56 | collection.insert({'b':regexp}, {safe:true}, function(err, ids) {
57 | collection.find({}, {'fields': ['b']}).toArray(function(err, items) {
58 | test.equal(("" + regexp), ("" + items[0].b));
59 | // Let's close the db
60 | test.done();
61 | });
62 | });
63 | });
64 | }
65 |
66 | exports.shouldCorrectlyInsertSimpleUTF8Regexp = function(test) {
67 | var regexp = /foobaré/;
68 |
69 | client.createCollection('test_utf8_regex', function(err, collection) {
70 | collection.insert({'b':regexp}, {safe:true}, function(err, ids) {
71 | collection.find({}, {'fields': ['b']}).toArray(function(err, items) {
72 | test.equal(("" + regexp), ("" + items[0].b));
73 | // Let's close the db
74 | test.done();
75 | });
76 | });
77 | });
78 | }
79 |
80 | exports.shouldCorrectlyFindDocumentsByRegExp = function(test) {
81 | // Serialized regexes contain extra trailing chars. Sometimes these trailing chars contain / which makes
82 | // the original regex invalid, and leads to segmentation fault.
83 | client.createCollection('test_regex_serialization', function(err, collection) {
84 | collection.insert({keywords: ["test", "segmentation", "fault", "regex", "serialization", "native"]}, {safe:true}, function(err, r) {
85 |
86 | var count = 20,
87 | run = function(i) {
88 | // search by regex
89 | collection.findOne({keywords: {$all: [/ser/, /test/, /seg/, /fault/, /nat/]}}, function(err, item) {
90 | test.equal(6, item.keywords.length);
91 | if (i === 0) {
92 | test.done()
93 | }
94 | });
95 | };
96 | // loop a few times to catch the / in trailing chars case
97 | while (count--) {
98 | run(count);
99 | }
100 | });
101 | });
102 | }
103 |
104 | /**
105 | * Retrieve the server information for the current
106 | * instance of the db client
107 | *
108 | * @ignore
109 | */
110 | exports.noGlobalsLeaked = function(test) {
111 | var leaks = gleak.detectNew();
112 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
113 | test.done();
114 | }
115 |
116 | /**
117 | * Retrieve the server information for the current
118 | * instance of the db client
119 | *
120 | * @ignore
121 | */
122 | var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/replicaset/insert_and_query_on_dead_primary_test.js:
--------------------------------------------------------------------------------
1 |
2 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
3 | var noReplicasetStart = process.env['NO_REPLICASET_START'] != null ? true : false;
4 |
5 | var testCase = require('nodeunit').testCase,
6 | debug = require('util').debug,
7 | inspect = require('util').inspect,
8 | gleak = require('../../dev/tools/gleak'),
9 | ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager,
10 | Db = mongodb.Db,
11 | ReplSetServers = mongodb.ReplSetServers,
12 | Server = mongodb.Server;
13 |
14 | // Keep instance of ReplicaSetManager
15 | var serversUp = false;
16 | var retries = 120;
17 | var RS = RS == null ? null : RS;
18 |
19 | var ensureConnection = function(test, numberOfTries, callback) {
20 | // Replica configuration
21 | var replSet = new ReplSetServers( [
22 | new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
23 | new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
24 | new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
25 | ],
26 | {rs_name:RS.name}
27 | );
28 |
29 | if(numberOfTries <= 0) return callback(new Error("could not connect correctly"), null);
30 |
31 | var db = new Db('integration_test_', replSet);
32 | // Print any errors
33 | db.on("error", function(err) {
34 | console.log("============================= ensureConnection caught error")
35 | console.dir(err)
36 | if(err != null && err.stack != null) console.log(err.stack)
37 | db.close();
38 | })
39 |
40 | // Open the db
41 | db.open(function(err, p_db) {
42 | db.close();
43 |
44 | if(err != null) {
45 | // Wait for a sec and retry
46 | setTimeout(function() {
47 | numberOfTries = numberOfTries - 1;
48 | ensureConnection(test, numberOfTries, callback);
49 | }, 3000);
50 | } else {
51 | return callback(null);
52 | }
53 | })
54 | }
55 |
56 | /**
57 | * Retrieve the server information for the current
58 | * instance of the db client
59 | *
60 | * @ignore
61 | */
62 | exports.setUp = function(callback) {
63 | // Create instance of replicaset manager but only for the first call
64 | if(!serversUp && !noReplicasetStart) {
65 | serversUp = true;
66 | RS = new ReplicaSetManager({retries:120, secondary_count:2, passive_count:1, arbiter_count:1});
67 | RS.startSet(true, function(err, result) {
68 | if(err != null) throw err;
69 | // Finish setup
70 | callback();
71 | });
72 | } else {
73 | RS.restartKilledNodes(function(err, result) {
74 | if(err != null) throw err;
75 | callback();
76 | })
77 | }
78 | }
79 |
80 | /**
81 | * Retrieve the server information for the current
82 | * instance of the db client
83 | *
84 | * @ignore
85 | */
86 | exports.tearDown = function(callback) {
87 | numberOfTestsRun = numberOfTestsRun - 1;
88 | if(numberOfTestsRun == 0) {
89 | // Finished kill all instances
90 | RS.killAll(function() {
91 | callback();
92 | })
93 | } else {
94 | callback();
95 | }
96 | }
97 |
98 | exports.shouldNotTimeout = function (test) {
99 | var replSet = new ReplSetServers([
100 | new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
101 | new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
102 | new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
103 | ],
104 | {}
105 | );
106 |
107 | var db = new Db('integration_test_', replSet);
108 |
109 | db.open(function(err, p_db) {
110 | test.equal(null, err);
111 |
112 | db.collection('shouldnottimeout', function (err, coll) {
113 | test.equal(null, err);
114 |
115 | RS.killPrimary(function(node) {
116 |
117 | var pending = 2;
118 |
119 | coll.update({name: 'a'}, {'$inc': {v: 1}}, {upsert: true, safe:true}, done);
120 | coll.findOne({name: 'a'}, done);
121 |
122 | function done (err, result) {
123 | debug('should not timeout:', pending, err);
124 | test.equal(null, err);
125 | if (--pending) return;
126 | test.done();
127 | p_db.close();
128 | }
129 | });
130 | });
131 |
132 | });
133 | }
134 |
135 | /**
136 | * Retrieve the server information for the current
137 | * instance of the db client
138 | * @ignore
139 | */
140 | exports.noGlobalsLeaked = function(test) {
141 | var leaks = gleak.detectNew();
142 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
143 | test.done();
144 | }
145 |
146 | /**
147 | * Retrieve the server information for the current
148 | * instance of the db client
149 | *
150 | * @ignore
151 | */
152 | var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/replicaset/two_server_tests.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
2 | var noReplicasetStart = process.env['NO_REPLICASET_START'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | gleak = require('../../dev/tools/gleak'),
8 | ReplicaSetManager = require('../tools/replica_set_manager').ReplicaSetManager,
9 | Db = mongodb.Db,
10 | ReplSetServers = mongodb.ReplSetServers,
11 | Server = mongodb.Server,
12 | Step = require("step");
13 |
14 | // Keep instance of ReplicaSetManager
15 | var serversUp = false;
16 | var retries = 120;
17 | var RS = RS == null ? null : RS;
18 |
19 | var ensureConnection = function(test, numberOfTries, callback) {
20 | // Replica configuration
21 | var replSet = new ReplSetServers( [
22 | new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
23 | new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
24 | new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
25 | ],
26 | {rs_name:RS.name}
27 | );
28 |
29 | if(numberOfTries <= 0) return callback(new Error("could not connect correctly"), null);
30 |
31 | var db = new Db('integration_test_', replSet);
32 | db.open(function(err, p_db) {
33 | db.close();
34 | if(err != null) {
35 | // Wait for a sec and retry
36 | setTimeout(function() {
37 | numberOfTries = numberOfTries - 1;
38 | ensureConnection(test, numberOfTries, callback);
39 | }, 1000);
40 | } else {
41 | return callback(null, p_db);
42 | }
43 | })
44 | }
45 |
46 | /**
47 | * Retrieve the server information for the current
48 | * instance of the db client
49 | *
50 | * @ignore
51 | */
52 | exports.setUp = function(callback) {
53 | // Create instance of replicaset manager but only for the first call
54 | if(!serversUp && !noReplicasetStart) {
55 | serversUp = true;
56 | RS = new ReplicaSetManager({retries:120,
57 | arbiter_count:0,
58 | secondary_count:1,
59 | passive_count:0});
60 | RS.startSet(true, function(err, result) {
61 | if(err != null) throw err;
62 | // Finish setup
63 | callback();
64 | });
65 | } else {
66 | RS.restartKilledNodes(function(err, result) {
67 | if(err != null) throw err;
68 | callback();
69 | })
70 | }
71 | }
72 |
73 | /**
74 | * Retrieve the server information for the current
75 | * instance of the db client
76 | *
77 | * @ignore
78 | */
79 | exports.tearDown = function(callback) {
80 | numberOfTestsRun = numberOfTestsRun - 1;
81 | if(numberOfTestsRun == 0) {
82 | // Finished kill all instances
83 | RS.killAll(function() {
84 | callback();
85 | })
86 | } else {
87 | callback();
88 | }
89 | }
90 |
91 | exports.shouldCorrectlyExecuteSafeFindAndModify = function(test) {
92 | // Replica configuration
93 | var replSet = new ReplSetServers( [
94 | new Server( RS.host, RS.ports[1], { auto_reconnect: true } ),
95 | new Server( RS.host, RS.ports[0], { auto_reconnect: true } ),
96 | // new Server( RS.host, RS.ports[2], { auto_reconnect: true } )
97 | ],
98 | {rs_name:RS.name, read_secondary:false}
99 | );
100 |
101 | // Insert some data
102 | var db = new Db('integration_test_', replSet);
103 | db.open(function(err, p_db) {
104 | // Check if we got an error
105 | if(err != null) debug("shouldWorkCorrectlyWithInserts :: " + inspect(err));
106 |
107 | // Drop collection on replicaset
108 | p_db.dropCollection('testsets', function(err, r) {
109 | if(err != null) debug("shouldWorkCorrectlyWithInserts :: " + inspect(err));
110 | // Recreate collection on replicaset
111 | p_db.createCollection('testsets', function(err, collection) {
112 | if(err != null) debug("shouldWorkCorrectlyWithInserts :: " + inspect(err));
113 | // Insert a dummy document
114 | collection.insert({a:20}, {safe: {w:1, wtimeout: 10000}}, function(err, r) {
115 | // Execute a findAndModify
116 | collection.findAndModify({'a':20}, [['a', 1]], {'$set':{'b':3}}, {'new':true, safe: {w:7, wtimeout: 10000}}, function(err, updated_doc) {
117 | test.equal('timeout', err.err)
118 | test.equal(true, err.wtimeout)
119 | p_db.close();
120 | test.done();
121 | });
122 | });
123 | });
124 | });
125 | });
126 | }
127 |
128 | /**
129 | * Retrieve the server information for the current
130 | * instance of the db client
131 | *
132 | * @ignore
133 | */
134 | exports.noGlobalsLeaked = function(test) {
135 | var leaks = gleak.detectNew();
136 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
137 | test.done();
138 | }
139 |
140 | /**
141 | * Retrieve the server information for the current
142 | * instance of the db client
143 | *
144 | * @ignore
145 | */
146 | var numberOfTestsRun = Object.keys(this).length - 2;
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
--------------------------------------------------------------------------------
/test/streaming_test.js:
--------------------------------------------------------------------------------
1 | var mongodb = process.env['TEST_NATIVE'] != null ? require('../lib/mongodb').native() : require('../lib/mongodb').pure();
2 | var useSSL = process.env['USE_SSL'] != null ? true : false;
3 |
4 | var testCase = require('nodeunit').testCase,
5 | debug = require('util').debug,
6 | inspect = require('util').inspect,
7 | nodeunit = require('nodeunit'),
8 | gleak = require('../dev/tools/gleak'),
9 | Db = mongodb.Db,
10 | Cursor = mongodb.Cursor,
11 | Collection = mongodb.Collection,
12 | Server = mongodb.Server;
13 |
14 | var MONGODB = 'integration_tests';
15 | var native_parser = (process.env['TEST_NATIVE'] != null);
16 | var client = null;
17 |
18 | /**
19 | * Retrieve the server information for the current
20 | * instance of the db client
21 | *
22 | * @ignore
23 | */
24 | exports.setUp = function(callback) {
25 | var self = exports;
26 | client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 4, ssl:useSSL}), {native_parser: (process.env['TEST_NATIVE'] != null)});
27 | client.open(function(err, db_p) {
28 | if(numberOfTestsRun == (Object.keys(self).length)) {
29 | // If first test drop the db
30 | client.dropDatabase(function(err, done) {
31 | callback();
32 | });
33 | } else {
34 | return callback();
35 | }
36 | });
37 | }
38 |
39 | /**
40 | * Retrieve the server information for the current
41 | * instance of the db client
42 | *
43 | * @ignore
44 | */
45 | exports.tearDown = function(callback) {
46 | var self = this;
47 | numberOfTestsRun = numberOfTestsRun - 1;
48 | // Close connection
49 | client.close();
50 | callback();
51 | }
52 |
53 | exports.shouldStreamRecordsCallsDataTheRightNumberOfTimes = function(test) {
54 | client.createCollection('test_stream_records', function(err, collection) {
55 | test.ok(collection instanceof Collection);
56 | collection.insert([{'a':1}, {'b' : 2}, {'c' : 3}, {'d' : 4}, {'e' : 5}], {safe:true}, function(err, ids) {
57 | var stream = collection.find({}, {'limit' : 3}).streamRecords();
58 | var callsToEnd = 0;
59 | stream.on('end', function() {
60 | test.done();
61 | });
62 |
63 | var callsToData = 0;
64 | stream.on('data',function(data){
65 | callsToData += 1;
66 | test.ok(callsToData <= 3);
67 | });
68 | });
69 | });
70 | }
71 |
72 | exports.shouldStreamRecordsCallsEndTheRightNumberOfTimes = function(test) {
73 | client.createCollection('test_stream_records', function(err, collection) {
74 | test.ok(collection instanceof Collection);
75 | collection.insert([{'a':1}, {'b' : 2}, {'c' : 3}, {'d' : 4}, {'e' : 5}], {safe:true}, function(err, ids) {
76 | var cursor = collection.find({}, {'limit' : 3});
77 | var stream = cursor.streamRecords(function(er,item) {});
78 | var callsToEnd = 0;
79 | stream.on('end', function() {
80 | callsToEnd += 1;
81 | test.equal(1, callsToEnd);
82 | setTimeout(function() {
83 | // Let's close the db
84 | if (callsToEnd == 1) {
85 | test.done();
86 | }
87 | }.bind(this), 1000);
88 | });
89 |
90 | stream.on('data',function(data){ /* nothing here */ });
91 | });
92 | });
93 | }
94 |
95 | exports.shouldStreamDocumentsWithLimitForFetching = function(test) {
96 | var docs = []
97 |
98 | for(var i = 0; i < 3000; i++) {
99 | docs.push({'a':i})
100 | }
101 |
102 | client.createCollection('test_streaming_function_with_limit_for_fetching', function(err, collection) {
103 | test.ok(collection instanceof Collection);
104 |
105 | collection.insert(docs, {safe:true}, function(err, ids) {
106 | var cursor = collection.find({});
107 | // Execute find on all the documents
108 | var stream = cursor.streamRecords({fetchSize:1000});
109 | var callsToEnd = 0;
110 | stream.on('end', function() {
111 | test.done();
112 | });
113 |
114 | var callsToData = 0;
115 | stream.on('data',function(data){
116 | callsToData += 1;
117 | test.ok(callsToData <= 3000);
118 | });
119 | });
120 | });
121 | }
122 |
123 | /**
124 | * Retrieve the server information for the current
125 | * instance of the db client
126 | *
127 | * @ignore
128 | */
129 | exports.noGlobalsLeaked = function(test) {
130 | var leaks = gleak.detectNew();
131 | test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
132 | test.done();
133 | }
134 |
135 | /**
136 | * Retrieve the server information for the current
137 | * instance of the db client
138 | *
139 | * @ignore
140 | */
141 | var numberOfTestsRun = Object.keys(this).length - 2;
--------------------------------------------------------------------------------
/test/tools/keyfile.txt:
--------------------------------------------------------------------------------
1 | THIS IS A SECRET KEYFILE FOR REPLICA SETS BWAHAHAHAH
2 |
--------------------------------------------------------------------------------