summaryrefslogtreecommitdiff
path: root/jstests
diff options
context:
space:
mode:
authorAntonin Kral <a.kral@bobek.cz>2010-03-25 19:21:32 +0100
committerAntonin Kral <a.kral@bobek.cz>2010-03-25 19:21:32 +0100
commit0ca01a91ae0a3562e54c226e7b9512feb2ea83d0 (patch)
tree2b3886e435b0217d6afd63a213b04d32bb4b4f6f /jstests
parenta696359b248adef0cc8576fce3f473535e995136 (diff)
downloadmongodb-0ca01a91ae0a3562e54c226e7b9512feb2ea83d0.tar.gz
Imported Upstream version 1.4.0
Diffstat (limited to 'jstests')
-rw-r--r--jstests/array_match1.js31
-rw-r--r--jstests/arrayfind1.js6
-rw-r--r--jstests/arrayfind2.js35
-rw-r--r--jstests/auth/auth1.js73
-rw-r--r--jstests/auth/copyauth.js29
-rw-r--r--jstests/auth1.js2
-rw-r--r--jstests/auth2.js5
-rw-r--r--jstests/capped3.js22
-rw-r--r--jstests/capped5.js32
-rw-r--r--jstests/clone/clonecollection.js89
-rw-r--r--jstests/copydb2.js17
-rw-r--r--jstests/cursor8.js23
-rw-r--r--jstests/dbadmin.js7
-rw-r--r--jstests/dbhash.js43
-rw-r--r--jstests/disk/directoryperdb.js62
-rw-r--r--jstests/disk/diskfull.js7
-rw-r--r--jstests/disk/newcollection.js13
-rw-r--r--jstests/disk/preallocate.js10
-rw-r--r--jstests/disk/repair.js18
-rw-r--r--jstests/drop.js2
-rw-r--r--jstests/dropIndex.js16
-rw-r--r--jstests/exists2.js14
-rw-r--r--jstests/explain2.js27
-rw-r--r--jstests/find6.js30
-rw-r--r--jstests/find7.js8
-rw-r--r--jstests/geo1.js41
-rw-r--r--jstests/geo2.js43
-rw-r--r--jstests/geo3.js87
-rw-r--r--jstests/geo4.js10
-rw-r--r--jstests/geo5.js18
-rw-r--r--jstests/geo6.js23
-rw-r--r--jstests/geo7.js20
-rw-r--r--jstests/geo8.js13
-rw-r--r--jstests/geo9.js28
-rw-r--r--jstests/geo_box1.js43
-rw-r--r--jstests/geo_box2.js19
-rw-r--r--jstests/geo_circle1.js50
-rw-r--r--jstests/geoa.js12
-rw-r--r--jstests/geob.js35
-rw-r--r--jstests/geoc.js24
-rw-r--r--jstests/group2.js4
-rw-r--r--jstests/group3.js2
-rw-r--r--jstests/hint1.js4
-rw-r--r--jstests/in.js1
-rw-r--r--jstests/in3.js11
-rw-r--r--jstests/inc2.js2
-rw-r--r--jstests/index10.js8
-rw-r--r--jstests/index7.js26
-rw-r--r--jstests/index8.js13
-rw-r--r--jstests/index_check2.js2
-rw-r--r--jstests/index_diag.js38
-rw-r--r--jstests/indexg.js13
-rw-r--r--jstests/insert1.js41
-rw-r--r--jstests/json1.js4
-rw-r--r--jstests/mod1.js1
-rw-r--r--jstests/mr5.js27
-rw-r--r--jstests/mr_bigobject.js41
-rw-r--r--jstests/mr_errorhandling.js47
-rw-r--r--jstests/nin.js3
-rw-r--r--jstests/not2.js139
-rw-r--r--jstests/parallel/basic.js5
-rw-r--r--jstests/parallel/basicPlus.js6
-rw-r--r--jstests/parallel/repl.js55
-rw-r--r--jstests/profile1.js2
-rw-r--r--jstests/pullall.js2
-rw-r--r--jstests/regex4.js2
-rw-r--r--jstests/regex5.js44
-rw-r--r--jstests/regex6.js15
-rw-r--r--jstests/regex7.js26
-rw-r--r--jstests/regex8.js19
-rw-r--r--jstests/regex9.js11
-rw-r--r--jstests/regex_embed1.js25
-rw-r--r--jstests/repl/basic1.js45
-rw-r--r--jstests/repl/master1.js49
-rw-r--r--jstests/repl/pair1.js1
-rw-r--r--jstests/repl/pair3.js2
-rw-r--r--jstests/repl/pair4.js1
-rw-r--r--jstests/repl/pair5.js2
-rw-r--r--jstests/repl/pair7.js85
-rw-r--r--jstests/repl/repl10.js38
-rw-r--r--jstests/repl/repl11.js59
-rw-r--r--jstests/repl/repl4.js8
-rw-r--r--jstests/repl/replacePeer1.js25
-rw-r--r--jstests/repl/replacePeer2.js31
-rw-r--r--jstests/repl/snapshot1.js34
-rw-r--r--jstests/repl/snapshot2.js50
-rw-r--r--jstests/repl/snapshot3.js50
-rw-r--r--jstests/run_program1.js19
-rw-r--r--jstests/set5.js17
-rw-r--r--jstests/set6.js20
-rw-r--r--jstests/set7.js40
-rw-r--r--jstests/sharding/findandmodify1.js57
-rw-r--r--jstests/sharding/key_many.js15
-rw-r--r--jstests/sharding/moveshard1.js6
-rw-r--r--jstests/sharding/shard2.js2
-rw-r--r--jstests/sharding/sync1.js21
-rw-r--r--jstests/sharding/sync2.js48
-rw-r--r--jstests/shellkillop.js18
-rw-r--r--jstests/shellspawn.js6
-rw-r--r--jstests/slow/indexbg1.js117
-rw-r--r--jstests/slow/indexbg2.js83
-rw-r--r--jstests/sort5.js8
-rw-r--r--jstests/sort6.js38
-rw-r--r--jstests/storefunc.js11
-rw-r--r--jstests/testminmax.js14
-rw-r--r--jstests/tool/csv1.js11
-rw-r--r--jstests/tool/tool1.js4
-rw-r--r--jstests/type1.js1
-rw-r--r--jstests/unset2.js23
-rw-r--r--jstests/update6.js2
-rw-r--r--jstests/update_addToSet.js41
-rw-r--r--jstests/update_arraymatch1.js16
-rw-r--r--jstests/update_arraymatch2.js16
-rw-r--r--jstests/update_arraymatch3.js17
-rw-r--r--jstests/updatec.js14
115 files changed, 2735 insertions, 156 deletions
diff --git a/jstests/array_match1.js b/jstests/array_match1.js
new file mode 100644
index 0000000..f764fb9
--- /dev/null
+++ b/jstests/array_match1.js
@@ -0,0 +1,31 @@
+
+t = db.array_match1
+t.drop();
+
+t.insert( { _id : 1 , a : [ 5 , 5 ] } )
+t.insert( { _id : 2 , a : [ 6 , 6 ] } )
+t.insert( { _id : 3 , a : [ 5 , 5 ] } )
+
+function test( f , m ){
+ var q = {};
+
+ q[f] = [5,5];
+ assert.eq( 2 , t.find( q ).itcount() , m + "1" )
+
+ q[f] = [6,6];
+ assert.eq( 1 , t.find( q ).itcount() , m + "2" )
+}
+
+test( "a" , "A" );
+t.ensureIndex( { a : 1 } )
+test( "a" , "B" );
+
+t.drop();
+
+t.insert( { _id : 1 , a : { b : [ 5 , 5 ] } } )
+t.insert( { _id : 2 , a : { b : [ 6 , 6 ] } } )
+t.insert( { _id : 3 , a : { b : [ 5 , 5 ] } } )
+
+test( "a.b" , "C" );
+t.ensureIndex( { a : 1 } )
+test( "a.b" , "D" );
diff --git a/jstests/arrayfind1.js b/jstests/arrayfind1.js
index 422369e..539fa61 100644
--- a/jstests/arrayfind1.js
+++ b/jstests/arrayfind1.js
@@ -33,6 +33,8 @@ t.find( { "a.x" : 1 } ).count();
t.find( { "a.x" : { $gt : 1 } } ).count();
res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain()
-assert( res.cursor.indexOf( "BtreeC" ) == 0 , "C1" );
-assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D2" );
+assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" );
+assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" );
+assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" );
+assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" );
diff --git a/jstests/arrayfind2.js b/jstests/arrayfind2.js
new file mode 100644
index 0000000..59bf2b0
--- /dev/null
+++ b/jstests/arrayfind2.js
@@ -0,0 +1,35 @@
+
+t = db.arrayfind2;
+t.drop();
+
+function go( prefix ){
+ assert.eq( 3 , t.count() , prefix + " A1" );
+ assert.eq( 3 , t.find( { a : { $elemMatch : { x : { $gt : 4 } } } } ).count() , prefix + " A2" );
+ assert.eq( 1 , t.find( { a : { $elemMatch : { x : { $lt : 2 } } } } ).count() , prefix + " A3" );
+ assert.eq( 1 , t.find( { a : { $all : [ { $elemMatch : { x : { $lt : 4 } } } ,
+ { $elemMatch : { x : { $gt : 5 } } } ] } } ).count() , prefix + " A4" );
+
+ assert.throws( function() { return t.findOne( { a : { $all : [ 1, { $elemMatch : { x : 3 } } ] } } ) } );
+ assert.throws( function() { return t.findOne( { a : { $all : [ /a/, { $elemMatch : { x : 3 } } ] } } ) } );
+
+}
+
+t.save( { a : [ { x : 1 } , { x : 5 } ] } )
+t.save( { a : [ { x : 3 } , { x : 5 } ] } )
+t.save( { a : [ { x : 3 } , { x : 6 } ] } )
+
+go( "no index" );
+t.ensureIndex( { a : 1 } );
+go( "index(a)" );
+
+assert.eq( [], t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds );
+
+t.ensureIndex( { "a.x": 1 } );
+
+assert.eq( [ [ {"a.x":3},{"a.x":3} ] ], t.find( { a : { $all : [ { $elemMatch : { x : 3 } } ] } } ).explain().indexBounds );
+// only first $elemMatch used to find bounds
+assert.eq( [ [ {"a.x":3},{"a.x":3} ] ], t.find( { a : { $all : [ { $elemMatch : { x : 3 } }, { $elemMatch : { y : 5 } } ] } } ).explain().indexBounds );
+
+t.ensureIndex( { "a.x":1,"a.y":-1 } );
+
+assert.eq( [ [ {"a.x":3,"a.y":1.7976931348623157e+308},{"a.x":3,"a.y":4} ] ], t.find( { a : { $all : [ { $elemMatch : { x : 3, y : { $gt: 4 } } } ] } } ).explain().indexBounds );
diff --git a/jstests/auth/auth1.js b/jstests/auth/auth1.js
new file mode 100644
index 0000000..6fc6dc5
--- /dev/null
+++ b/jstests/auth/auth1.js
@@ -0,0 +1,73 @@
+// test read/write permissions
+
+port = allocatePorts( 1 )[ 0 ];
+baseName = "jstests_auth_auth1";
+
+m = startMongod( "--auth", "--port", port, "--dbpath", "/data/db/" + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( "test" );
+
+t = db[ baseName ];
+t.drop();
+
+users = db.getCollection( "system.users" );
+users.remove( {} );
+
+db.addUser( "eliot" , "eliot" );
+db.addUser( "guest" , "guest", true );
+db.getSisterDB( "admin" ).addUser( "super", "super" );
+
+assert.throws( function() { t.findOne() }, [], "read without login" );
+
+assert( db.auth( "eliot" , "eliot" ) , "auth failed" );
+
+for( i = 0; i < 999; ++i ) {
+ t.save( {i:i} );
+}
+assert.eq( 999, t.count() , "A1" );
+assert.eq( 999, t.find().toArray().length , "A2" );
+
+assert.eq( 999, db.eval( function() { return db[ "jstests_auth_auth1" ].count(); } ) , "A3" );
+db.eval( function() { db[ "jstests_auth_auth1" ].save( {i:999} ) } );
+assert.eq( 1000, db.eval( function() { return db[ "jstests_auth_auth1" ].count(); } ) , "A4" );
+
+var p = { key : { i : true } ,
+ reduce : function(obj,prev) { prev.count++; },
+initial: { count: 0 }
+};
+
+assert.eq( 1000, t.group( p ).length , "A5" );
+
+if ( db.runCommand( "features" ).readlock ){
+ print( "doing readonly test" );
+ assert( db.auth( "guest", "guest" ), "auth failed 2" );
+
+ assert.eq( 1000, t.count() , "B1" );
+ assert.eq( 1000, t.find().toArray().length , "B2" ); // make sure we have a getMore in play
+ assert.commandWorked( db.runCommand( {ismaster:1} ) , "B3" );
+
+ assert( !db.getLastError() , "B4" );
+ t.save( {} ); // fail
+ assert( db.getLastError() , "B5: " + tojson( db.getLastErrorObj() ) );
+ assert.eq( 1000, t.count() , "B6" );
+
+ assert.eq( 2, db.system.users.count() , "B7" );
+ assert( !db.getLastError() , "B8" );
+ db.addUser( "a", "b" );
+ assert( db.getLastError() , "B9" );
+ assert.eq( 2, db.system.users.count() , "B10");
+
+ assert.eq( 1000, db.eval( function() { return db[ "jstests_auth_auth1" ].count(); } ) , "C1" );
+ assert.eq( 1000, db.eval( function() { return db[ "jstests_auth_auth1" ].find().toArray().length; } ) , "C2" );
+ db.eval( function() { db[ "jstests_auth_auth1" ].save( {i:1} ) } , "C3" );
+ assert.eq( 1000, db.eval( function() { return db[ "jstests_auth_auth1" ].count(); } ) , "C4" );
+
+ assert.eq( 1000, t.group( p ).length , "C5" );
+
+ var p = { key : { i : true } ,
+ reduce : function(obj,prev) { db.jstests_auth_auth1.save( {i:10000} ); prev.count++; },
+ initial: { count: 0 }
+ };
+
+ assert.throws( function() { return t.group( p ) }, "write reduce didn't fail" );
+}
+
diff --git a/jstests/auth/copyauth.js b/jstests/auth/copyauth.js
new file mode 100644
index 0000000..043b863
--- /dev/null
+++ b/jstests/auth/copyauth.js
@@ -0,0 +1,29 @@
+// test copyDatabase from an auth enabled source
+
+ports = allocatePorts( 2 );
+
+var baseName = "jstests_clone_copyauth";
+
+var source = startMongod( "--auth", "--port", ports[ 0 ], "--dbpath", "/data/db/" + baseName + "_source", "--nohttpinterface", "--bind_ip", "127.0.0.1", "--smallfiles" );
+var target = startMongod( "--port", ports[ 1 ], "--dbpath", "/data/db/" + baseName + "_target", "--nohttpinterface", "--bind_ip", "127.0.0.1", "--smallfiles" );
+
+source.getDB( baseName )[ baseName ].save( {i:1} );
+source.getDB( baseName ).addUser( "foo", "bar" );
+source.getDB( "admin" ).addUser( "super", "super" );
+assert.throws( function() { source.getDB( baseName )[ baseName ].findOne(); } );
+
+target.getDB( baseName ).copyDatabase( baseName, baseName, source.host, "foo", "bar" );
+assert.eq( 1, target.getDB( baseName )[ baseName ].count() );
+assert.eq( 1, target.getDB( baseName )[ baseName ].findOne().i );
+
+stopMongod( ports[ 1 ] );
+
+var target = startMongod( "--auth", "--port", ports[ 1 ], "--dbpath", "/data/db/" + baseName + "_target", "--nohttpinterface", "--bind_ip", "127.0.0.1", "--smallfiles" );
+
+target.getDB( "admin" ).addUser( "super1", "super1" );
+assert.throws( function() { source.getDB( baseName )[ baseName ].findOne(); } );
+target.getDB( "admin" ).auth( "super1", "super1" );
+
+target.getDB( baseName ).copyDatabase( baseName, baseName, source.host, "foo", "bar" );
+assert.eq( 1, target.getDB( baseName )[ baseName ].count() );
+assert.eq( 1, target.getDB( baseName )[ baseName ].findOne().i );
diff --git a/jstests/auth1.js b/jstests/auth1.js
index f6890cc..ce0159b 100644
--- a/jstests/auth1.js
+++ b/jstests/auth1.js
@@ -1,5 +1,3 @@
-
-
users = db.getCollection( "system.users" );
users.remove( {} );
diff --git a/jstests/auth2.js b/jstests/auth2.js
new file mode 100644
index 0000000..9b6dfad
--- /dev/null
+++ b/jstests/auth2.js
@@ -0,0 +1,5 @@
+// just make sure logout doesn't break anything
+
+// SERVER-724
+db.runCommand({logout : 1});
+db.runCommand({logout : 1});
diff --git a/jstests/capped3.js b/jstests/capped3.js
index f3b29b7..c4f1a3c 100644
--- a/jstests/capped3.js
+++ b/jstests/capped3.js
@@ -5,12 +5,12 @@ t2.drop();
for( i = 0; i < 1000; ++i ) {
t.save( {i:i} );
}
-assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ) );
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ), "A" );
c = t2.find();
for( i = 0; i < 1000; ++i ) {
- assert.eq( i, c.next().i );
+ assert.eq( i, c.next().i, "B" );
}
-assert( !c.hasNext() );
+assert( !c.hasNext(), "C" );
t.drop();
t2.drop();
@@ -18,13 +18,15 @@ t2.drop();
for( i = 0; i < 1000; ++i ) {
t.save( {i:i} );
}
-assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ) );
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ), "D" );
c = t2.find().sort( {$natural:-1} );
i = 999;
while( c.hasNext() ) {
- assert.eq( i--, c.next().i );
+ assert.eq( i--, c.next().i, "E" );
}
-assert( i < 990 );
+print( "i: " + i );
+print( "stats: " + tojson( t2.stats() ) );
+assert( i < 990, "F" );
t.drop();
t2.drop();
@@ -32,11 +34,11 @@ t2.drop();
for( i = 0; i < 1000; ++i ) {
t.save( {i:i} );
}
-assert.commandWorked( t.convertToCapped( 1000 ) );
+assert.commandWorked( t.convertToCapped( 1000 ), "G" );
c = t.find().sort( {$natural:-1} );
i = 999;
while( c.hasNext() ) {
- assert.eq( i--, c.next().i );
+ assert.eq( i--, c.next().i, "H" );
}
-assert( i < 990 );
-assert( i > 900 );
+assert( i < 990, "I" );
+assert( i > 900, "J" );
diff --git a/jstests/capped5.js b/jstests/capped5.js
index a5d04de..1c7ec3d 100644
--- a/jstests/capped5.js
+++ b/jstests/capped5.js
@@ -16,3 +16,35 @@ t.ensureIndex( { x : 1 } )
assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" );
assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" );
+
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 6 , x : 11 } );
+t.ensureIndex( { x:1 }, {unique:true, dropDups:true } );
+assert.eq( 0, db.system.indexes.count( {ns:"test."+tn} ) );
+assert.eq( 2, t.find().toArray().length );
+
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 5 , x : 12 } );
+t.ensureIndex( { _id:1 } );
+assert.eq( 0, db.system.indexes.count( {ns:"test."+tn} ) );
+assert.eq( 2, t.find().toArray().length );
+
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 6 , x : 12 } );
+t.ensureIndex( { x:1 }, {unique:true, dropDups:true } );
+assert.eq( 1, db.system.indexes.count( {ns:"test."+tn} ) );
+assert.eq( 2, t.find().hint( {x:1} ).toArray().length );
+
+// SERVER-525
+t.drop();
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.ensureIndex( { _id:1 } );
+t.insert( { _id : 5 , x : 11 } );
+t.insert( { _id : 5 , x : 12 } );
+assert.eq( 1, t.find().toArray().length );
diff --git a/jstests/clone/clonecollection.js b/jstests/clone/clonecollection.js
index 64d4ff0..123369f 100644
--- a/jstests/clone/clonecollection.js
+++ b/jstests/clone/clonecollection.js
@@ -23,6 +23,45 @@ waitParallel = function() {
assert.soon( function() { return doneParallel(); }, "parallel did not finish in time", 300000, 1000 );
}
+cloneNo = -1;
+startstartclone = function( spec ) {
+ spec = spec || "";
+ cloneNo++;
+ doParallel( "z = db.runCommand( {startCloneCollection:\"jstests_clonecollection.a\", from:\"localhost:" + ports[ 0 ] + "\"" + spec + " } ); print( \"clone_clone_clone_commandResult::" + cloneNo + "::\" + tojson( z , '' , true ) + \":::::\" );" );
+}
+
+finishstartclone = function() {
+ waitParallel();
+ // even after parallel shell finished, must wait for finishToken line to appear in log
+ assert.soon( function() {
+ raw = rawMongoProgramOutput().replace( /[\r\n]/gm , " " )
+ ret = raw.match( new RegExp( "clone_clone_clone_commandResult::" + cloneNo + "::(.*):::::" ) );
+ if ( ret == null ) {
+ return false;
+ }
+ ret = ret[ 1 ];
+ return true;
+ } );
+
+ eval( "ret = " + ret );
+
+ assert.commandWorked( ret );
+ return ret;
+}
+
+dofinishclonecmd = function( ret ) {
+ finishToken = ret.finishToken;
+ // Round-tripping through JS can corrupt the cursor ids we store as BSON
+ // Date elements. Date( 0 ) will correspond to a cursorId value of 0, which
+ // makes the db start scanning from the beginning of the collection.
+ finishToken.cursorId = new Date( 0 );
+ return t.runCommand( {finishCloneCollection:finishToken} );
+}
+
+finishclone = function( ret ) {
+ assert.commandWorked( dofinishclonecmd( ret ) );
+}
+
ports = allocatePorts( 2 );
f = startMongod( "--port", ports[ 0 ], "--dbpath", "/data/db/" + baseName + "_from", "--nohttpinterface", "--bind_ip", "127.0.0.1" ).getDB( baseName );
@@ -52,7 +91,7 @@ if ( t.system.indexes.find().count() != 2 ) {
}
assert.eq( 2, t.system.indexes.find().count(), "expected index missing" );
// Verify index works
-assert.eq( 50, t.a.find( { i: 50 } ).hint( { i: 1 } ).explain().startKey.i );
+assert.eq( 50, t.a.find( { i: 50 } ).hint( { i: 1 } ).explain().indexBounds[0][0].i );
assert.eq( 1, t.a.find( { i: 50 } ).hint( { i: 1 } ).toArray().length, "match length did not match expected" );
// Check that capped-ness is preserved on clone
@@ -71,16 +110,17 @@ t.a.drop();
for( i = 0; i < 100000; ++i ) {
f.a.save( { i: i } );
}
+assert.eq( 100000, f.a.count() );
-doParallel( "assert.commandWorked( db.cloneCollection( \"localhost:" + ports[ 0 ] + "\", \"a\", {i:{$gte:0}} ) );" );
+startstartclone( ", query:{i:{$gte:0}}" );
sleep( 200 );
f.a.save( { i: 200000 } );
f.a.save( { i: -1 } );
f.a.remove( { i: 0 } );
f.a.update( { i: 99998 }, { i: 99998, x: "y" } );
-assert( !doneParallel(), "test run invalid" );
-waitParallel();
+ret = finishstartclone();
+finishclone( ret );
assert.eq( 100000, t.a.find().count() );
assert.eq( 1, t.a.find( { i: 200000 } ).count() );
@@ -96,15 +136,16 @@ t.a.drop();
for( i = 0; i < 200000; ++i ) {
f.a.save( { i: i } );
}
+assert.eq( 200000, f.a.count() );
-doParallel( "assert.commandFailed( db.runCommand( { cloneCollection: \"jstests_clonecollection.a\", from: \"localhost:" + ports[ 0 ] + "\", logSizeMb:1 } ) );" );
+startstartclone( ", logSizeMb:1" );
+ret = finishstartclone();
-sleep( 200 );
for( i = 200000; i < 250000; ++i ) {
f.a.save( { i: i } );
}
-waitParallel();
+assert.commandFailed( dofinishclonecmd( ret ) );
// Make sure the same works with standard size op log.
f.a.drop();
@@ -113,15 +154,17 @@ t.a.drop();
for( i = 0; i < 200000; ++i ) {
f.a.save( { i: i } );
}
+assert.eq( 200000, f.a.count() );
-doParallel( "assert.commandWorked( db.cloneCollection( \"localhost:" + ports[ 0 ] + "\", \"a\" ) );" );
+startstartclone();
+ret = finishstartclone();
-sleep( 200 );
for( i = 200000; i < 250000; ++i ) {
f.a.save( { i: i } );
}
+assert.eq( 250000, f.a.count() );
-waitParallel();
+finishclone( ret );
assert.eq( 250000, t.a.find().count() );
// Test startCloneCollection and finishCloneCollection commands.
@@ -131,35 +174,17 @@ t.a.drop();
for( i = 0; i < 100000; ++i ) {
f.a.save( { i: i } );
}
+assert.eq( 100000, f.a.count() );
-doParallel( "z = db.runCommand( {startCloneCollection:\"jstests_clonecollection.a\", from:\"localhost:" + ports[ 0 ] + "\" } ); print( \"clone_clone_clone_commandResult:::::\" + tojson( z , '' , true ) + \":::::\" );" );
+startstartclone();
sleep( 200 );
f.a.save( { i: -1 } );
-waitParallel();
-// even after parallel shell finished, must wait for finishToken line to appear in log
-assert.soon( function() {
- raw = rawMongoProgramOutput().replace( /[\r\n]/gm , " " )
- ret = raw.match( /clone_clone_clone_commandResult:::::(.*):::::/ );
- if ( ret == null ) {
- return false;
- }
- ret = ret[ 1 ];
- return true;
- } );
-
-eval( "ret = " + ret );
-
-assert.commandWorked( ret );
+ret = finishstartclone();
assert.eq( 100001, t.a.find().count() );
f.a.save( { i: -2 } );
assert.eq( 100002, f.a.find().count() );
-finishToken = ret.finishToken;
-// Round-tripping through JS can corrupt the cursor ids we store as BSON
-// Date elements. Date( 0 ) will correspond to a cursorId value of 0, which
-// makes the db start scanning from the beginning of the collection.
-finishToken.cursorId = new Date( 0 );
-assert.commandWorked( t.runCommand( {finishCloneCollection:finishToken} ) );
+finishclone( ret );
assert.eq( 100002, t.a.find().count() );
diff --git a/jstests/copydb2.js b/jstests/copydb2.js
new file mode 100644
index 0000000..90ef943
--- /dev/null
+++ b/jstests/copydb2.js
@@ -0,0 +1,17 @@
+a = db.getSisterDB( "copydb2-test-a" );
+b = db.getSisterDB( "copydb2-test-b" );
+
+a.dropDatabase();
+b.dropDatabase();
+
+a.foo.save( { a : 1 } );
+
+a.addUser( "chevy" , "chase" );
+
+assert.eq( 1 , a.foo.count() , "A" );
+assert.eq( 0 , b.foo.count() , "B" );
+
+// SERVER-727
+a.copyDatabase( a._name , b._name, "" , "chevy" , "chase" );
+assert.eq( 1 , a.foo.count() , "C" );
+assert.eq( 1 , b.foo.count() , "D" );
diff --git a/jstests/cursor8.js b/jstests/cursor8.js
index 169bb5d..5ebd4f5 100644
--- a/jstests/cursor8.js
+++ b/jstests/cursor8.js
@@ -1,10 +1,19 @@
-db.f.drop();
-db.f.save( {} );
-db.f.save( {} );
-db.f.save( {} );
+t = db.f
+t.drop();
+t.save( {} );
+t.save( {} );
+t.save( {} );
db.getMongo().getDB( "admin" ).runCommand( {closeAllDatabases:1} );
-assert.eq( 0, db.runCommand( {cursorInfo:1} ).clientCursors_size );
-assert.eq( 2, db.f.find( {} ).limit( 2 ).toArray().length );
-assert.eq( 1, db.runCommand( {cursorInfo:1} ).clientCursors_size );
+function test( want , msg ){
+ var res = db.runCommand( { cursorInfo:1 } );
+ assert.eq( want , res.clientCursors_size , msg + " " + tojson( res ) );
+}
+
+test( 0 , "A1" );
+assert.eq( 3 , t.find().count() , "A1" );
+assert.eq( 3 , t.find( {} ).count() , "A2" );
+assert.eq( 2, t.find( {} ).limit( 2 ).itcount() , "A3" );
+test( 1 , "B1" );
+
diff --git a/jstests/dbadmin.js b/jstests/dbadmin.js
index c7b7bc8..8d0e7d1 100644
--- a/jstests/dbadmin.js
+++ b/jstests/dbadmin.js
@@ -4,9 +4,10 @@ t.save( { x : 1 } );
before = db._adminCommand( "serverStatus" )
if ( before.mem.supported ){
- db._adminCommand( "closeAllDatabases" );
+ cmdres = db._adminCommand( "closeAllDatabases" );
after = db._adminCommand( "serverStatus" );
- assert( before.mem.mapped > after.mem.mapped , "closeAllDatabases does something before:" + tojson( before ) + " after:" + tojson( after ) );
+ assert( before.mem.mapped > after.mem.mapped , "closeAllDatabases does something before:" + tojson( before.mem ) + " after:" + tojson( after.mem ) + " cmd res:" + tojson( cmdres ) );
+ print( before.mem.mapped + " -->> " + after.mem.mapped );
}
else {
print( "can't test serverStatus on this machine" );
@@ -17,6 +18,4 @@ t.save( { x : 1 } );
res = db._adminCommand( "listDatabases" );
assert( res.databases.length > 0 , "listDatabases 1" );
-print( "BEFORE: " + tojson( before ) );
-print( "AFTER : " + tojson( after ) );
// TODO: add more tests here
diff --git a/jstests/dbhash.js b/jstests/dbhash.js
new file mode 100644
index 0000000..101be18
--- /dev/null
+++ b/jstests/dbhash.js
@@ -0,0 +1,43 @@
+
+a = db.dbhasha;
+b = db.dbhashb;
+
+a.drop();
+b.drop();
+
+function gh( coll , mydb ){
+ if ( ! mydb ) mydb = db;
+ var x = mydb.runCommand( "dbhash" ).collections[coll.getName()];
+ if ( ! x )
+ return "";
+ return x;
+}
+
+function dbh( mydb ){
+ return mydb.runCommand( "dbhash" ).md5;
+}
+
+assert.eq( gh( a ) , gh( b ) , "A1" );
+
+a.insert( { _id : 5 } );
+assert.neq( gh( a ) , gh( b ) , "A2" );
+
+b.insert( { _id : 5 } );
+assert.eq( gh( a ) , gh( b ) , "A3" );
+
+dba = db.getSisterDB( "dbhasha" );
+dbb = db.getSisterDB( "dbhashb" );
+
+dba.dropDatabase();
+dbb.dropDatabase();
+
+assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B1" );
+assert.eq( dbh( dba ) , dbh( dbb ) , "C1" );
+
+dba.foo.insert( { _id : 5 } );
+assert.neq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B2" );
+assert.neq( dbh( dba ) , dbh( dbb ) , "C2" );
+
+dbb.foo.insert( { _id : 5 } );
+assert.eq( gh( dba.foo , dba ) , gh( dbb.foo , dbb ) , "B3" );
+assert.eq( dbh( dba ) , dbh( dbb ) , "C3" );
diff --git a/jstests/disk/directoryperdb.js b/jstests/disk/directoryperdb.js
new file mode 100644
index 0000000..a5fd18e
--- /dev/null
+++ b/jstests/disk/directoryperdb.js
@@ -0,0 +1,62 @@
+var baseDir = "jstests_disk_directoryper";
+var baseName = "directoryperdb"
+port = allocatePorts( 1 )[ 0 ];
+dbpath = "/data/db/" + baseDir + "/";
+
+var m = startMongod( "--directoryperdb", "--port", port, "--dbpath", dbpath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( baseName );
+db[ baseName ].save( {} );
+assert.eq( 1, db[ baseName ].count() );
+
+checkDir = function( dir ) {
+ db.runCommand( {fsync:1} );
+ files = listFiles( dir );
+ found = false;
+ for( f in files ) {
+ if ( new RegExp( baseName ).test( files[ f ].name ) ) {
+ found = true;
+ assert( files[ f ].isDirectory, "file not directory" );
+ }
+ }
+ assert( found, "no directory" );
+
+ files = listFiles( dir + baseName );
+ for( f in files ) {
+ assert( new RegExp( baseName + "/" + baseName + "." ).test( files[ f ].name ) );
+ }
+}
+checkDir( dbpath );
+
+// file iterator
+assert( m.getDBs().totalSize > 0, "bad size calc" );
+
+// repair
+db.runCommand( {repairDatabase:1, backupOriginalFiles:true} );
+checkDir( dbpath );
+files = listFiles( dbpath );
+for( f in files ) {
+ if ( new RegExp( "^" + dbpath + "backup_" ).test( files[ f ].name ) ) {
+ backupDir = files[ f ].name + "/";
+ }
+}
+checkDir( backupDir );
+assert.eq( 1, db[ baseName ].count() );
+
+// tool test
+stopMongod( port );
+
+externalPath = "/data/db/" + baseDir + "_external/";
+
+runMongoProgram( "mongodump", "--dbpath", dbpath, "--directoryperdb", "--out", externalPath );
+resetDbpath( dbpath );
+runMongoProgram( "mongorestore", "--dbpath", dbpath, "--directoryperdb", "--dir", externalPath );
+m = startMongoProgram( "mongod", "--directoryperdb", "--port", port, "--dbpath", dbpath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( baseName );
+checkDir( dbpath );
+assert.eq( 1, db[ baseName ].count() );
+assert( m.getDBs().totalSize > 0, "bad size calc" );
+
+// drop db test
+db.dropDatabase();
+files = listFiles( dbpath );
+files.forEach( function( f ) { assert( !new RegExp( baseName ).test( f.name ), "drop database - dir not cleared" ); } );
diff --git a/jstests/disk/diskfull.js b/jstests/disk/diskfull.js
index 7f75266..8057174 100644
--- a/jstests/disk/diskfull.js
+++ b/jstests/disk/diskfull.js
@@ -15,6 +15,9 @@ if ( doIt ) {
port = allocatePorts( 1 )[ 0 ];
m = startMongoProgram( "mongod", "--port", port, "--dbpath", "/data/db/diskfulltest", "--nohttpinterface", "--bind_ip", "127.0.0.1" );
m.getDB( "diskfulltest" ).getCollection( "diskfulltest" ).save( { a: 6 } );
- assert.soon( function() { return rawMongoProgramOutput().match( /dbexit: really exiting now/ ); }, "didn't see 'really exiting now'" );
- assert( !rawMongoProgramOutput().match( /Got signal/ ), "saw 'Got signal', not expected. Output: " + rawMongoProgramOutput() );
+ assert.soon( function() { return rawMongoProgramOutput().match( /file allocation failure/ ); }, "didn't see 'file allocation failure'" );
+ assert.soon( function() { return rawMongoProgramOutput().match( /Caught Assertion in insert , continuing/ ); }, "didn't see 'Caught Assertion...'" );
+ sleep( 3000 );
+ m2 = new Mongo( m.host );
+ printjson( m2.getDBs() );
}
diff --git a/jstests/disk/newcollection.js b/jstests/disk/newcollection.js
new file mode 100644
index 0000000..944ad1c
--- /dev/null
+++ b/jstests/disk/newcollection.js
@@ -0,0 +1,13 @@
+// SERVER-594 test
+
+port = allocatePorts( 1 )[ 0 ]
+var baseName = "jstests_disk_newcollection";
+var m = startMongod( "--noprealloc", "--smallfiles", "--port", port, "--dbpath", "/data/db/" + baseName );
+db = m.getDB( "test" );
+
+db.createCollection( baseName, {size:15.9*1024*1024} );
+db.baseName.drop();
+
+size = m.getDBs().totalSize;
+db.baseName.save( {} );
+assert.eq( size, m.getDBs().totalSize );
diff --git a/jstests/disk/preallocate.js b/jstests/disk/preallocate.js
index 69f9a47..c3c9bd0 100644
--- a/jstests/disk/preallocate.js
+++ b/jstests/disk/preallocate.js
@@ -1,4 +1,4 @@
-port = allocatePorts( 1 )[ 0 ]
+port = allocatePorts( 1 )[ 0 ];
var baseName = "jstests_preallocate";
@@ -10,12 +10,14 @@ var m = startMongod( "--port", port, "--dbpath", "/data/db/" + baseName );
m.getDB( baseName ).createCollection( baseName + "1" );
-vs = vsize();
-
stopMongod( port );
var m = startMongoProgram( "mongod", "--port", port, "--dbpath", "/data/db/" + baseName );
+size = m.getDBs().totalSize;
+
m.getDB( baseName ).createCollection( baseName + "2" );
-assert.eq( vs, vsize() );
+sleep( 2000 ); // give prealloc a chance
+
+assert.eq( size, m.getDBs().totalSize );
diff --git a/jstests/disk/repair.js b/jstests/disk/repair.js
new file mode 100644
index 0000000..6c8d81b
--- /dev/null
+++ b/jstests/disk/repair.js
@@ -0,0 +1,18 @@
+var baseName = "jstests_disk_repair";
+
+port = allocatePorts( 1 )[ 0 ];
+dbpath = "/data/db/" + baseName + "/";
+repairpath = dbpath + "repairDir/"
+
+resetDbpath( dbpath );
+resetDbpath( repairpath );
+
+m = startMongoProgram( "mongod", "--port", port, "--dbpath", dbpath, "--repairpath", repairpath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+db = m.getDB( baseName );
+db[ baseName ].save( {} );
+db.runCommand( {repairDatabase:1, backupOriginalFiles:true} );
+
+files = listFiles( dbpath );
+for( f in files ) {
+ assert( ! new RegExp( "^" + dbpath + "backup_" ).test( files[ f ].name ), "backup dir in dbpath" );
+}
diff --git a/jstests/drop.js b/jstests/drop.js
index b233409..1bd539e 100644
--- a/jstests/drop.js
+++ b/jstests/drop.js
@@ -18,4 +18,4 @@ assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "G" )
// make sure we can still use it
f.save( {} );
-assert.eq( 1, f.find().hint( {_id:new ObjectId( "000000000000000000000000" )} ).toArray().length , "H" );
+assert.eq( 1, f.find().hint( "_id_" ).toArray().length , "H" );
diff --git a/jstests/dropIndex.js b/jstests/dropIndex.js
new file mode 100644
index 0000000..a6e5f46
--- /dev/null
+++ b/jstests/dropIndex.js
@@ -0,0 +1,16 @@
+
+t = db.dropIndex;
+t.drop();
+
+t.insert( { _id : 1 , a : 2 , b : 3 } );
+assert.eq( 1 , t.getIndexes().length , "A1" );
+
+t.ensureIndex( { a : 1 } );
+t.ensureIndex( { b : 1 } );
+assert.eq( 3 , t.getIndexes().length , "A2" );
+
+x = db._dbCommand( { dropIndexes: t.getName() , index : t._genIndexName( { a : 1 } ) } );
+assert.eq( 2 , t.getIndexes().length , "B1" );
+
+x = db._dbCommand( { dropIndexes: t.getName() , index : { b : 1 } } )
+assert.eq( 1 , t.getIndexes().length , "B2" );
diff --git a/jstests/exists2.js b/jstests/exists2.js
new file mode 100644
index 0000000..a9b4d1e
--- /dev/null
+++ b/jstests/exists2.js
@@ -0,0 +1,14 @@
+
+t = db.exists2;
+t.drop();
+
+t.save( { a : 1 , b : 1 } )
+t.save( { a : 1 , b : 1 , c : 1 } )
+
+assert.eq( 2 , t.find().itcount() , "A1" );
+assert.eq( 2 , t.find( { a : 1 , b : 1 } ).itcount() , "A2" );
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "A3" );
+
+t.ensureIndex( { a : 1 , b : 1 , c : 1 } )
+assert.eq( 1 , t.find( { a : 1 , b : 1 , c : { "$exists" : true } } ).itcount() , "B1" );
+
diff --git a/jstests/explain2.js b/jstests/explain2.js
new file mode 100644
index 0000000..5a36552
--- /dev/null
+++ b/jstests/explain2.js
@@ -0,0 +1,27 @@
+
+t = db.explain2
+t.drop();
+
+t.ensureIndex( { a : 1 , b : 1 } );
+
+for ( i=1; i<10; i++ ){
+ t.insert( { _id : i , a : i , b : i , c : i } );
+}
+
+function go( q , c , b , o ){
+ var e = t.find( q ).explain();
+ assert.eq( c , e.n , "count " + tojson( q ) )
+ assert.eq( b , e.nscanned , "nscanned " + tojson( q ) )
+ assert.eq( o , e.nscannedObjects , "nscannedObjects " + tojson( q ) )
+}
+
+q = { a : { $gt : 3 } }
+go( q , 6 , 7 , 6 );
+
+q.b = 5
+go( q , 1 , 6 , 1 );
+
+delete q.b
+q.c = 5
+go( q , 1 , 7 , 6 );
+
diff --git a/jstests/find6.js b/jstests/find6.js
index baa5969..c4efd3b 100644
--- a/jstests/find6.js
+++ b/jstests/find6.js
@@ -9,3 +9,33 @@ assert.eq( 2 , t.find().count() , "A" );
assert.eq( 1 , t.find( { b : null } ).count() , "B" );
assert.eq( 1 , t.find( "function() { return this.b == null; }" ).itcount() , "C" );
assert.eq( 1 , t.find( "function() { return this.b == null; }" ).count() , "D" );
+
+/* test some stuff with dot array notation */
+q = db.find6a;
+q.drop();
+q.insert( { "a" : [ { "0" : 1 } ] } );
+q.insert( { "a" : [ { "0" : 2 } ] } );
+q.insert( { "a" : [ 1 ] } );
+q.insert( { "a" : [ 9, 1 ] } );
+
+function f() {
+
+ assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da1");
+ assert.eq( 2, q.find( { 'a.0' : 1 } ).count(), "da2");
+
+ assert.eq( 1, q.find( { 'a.0' : { $gt : 8 } } ).count(), "da3");
+ assert.eq( 0, q.find( { 'a.0' : { $lt : 0 } } ).count(), "da4");
+
+}
+
+for( var pass = 0; pass <= 1 ; pass++ ) {
+ f();
+ q.ensureIndex({a:1});
+}
+
+t = db.multidim;
+t.drop();
+t.insert({"a" : [ [ ], 1, [ 3, 4 ] ] });
+assert.eq(1, t.find({"a.2":[3,4]}).count(), "md1");
+assert.eq(1, t.find({"a.2.1":4}).count(), "md2");
+assert.eq(0, t.find({"a.2.1":3}).count(), "md3");
diff --git a/jstests/find7.js b/jstests/find7.js
new file mode 100644
index 0000000..ca4c7d4
--- /dev/null
+++ b/jstests/find7.js
@@ -0,0 +1,8 @@
+t = db.find7;
+t.drop();
+
+x = { "_id" : { "d" : 3649, "w" : "signed" }, "u" : { "3649" : 5 } };
+t.insert(x );
+assert.eq( x , t.findOne() , "A1" );
+assert.eq( x , t.findOne( { _id : x._id } ) , "A2" );
+
diff --git a/jstests/geo1.js b/jstests/geo1.js
new file mode 100644
index 0000000..8f31e8e
--- /dev/null
+++ b/jstests/geo1.js
@@ -0,0 +1,41 @@
+
+t = db.geo1
+t.drop();
+
+idx = { loc : "2d" , zip : 1 }
+
+t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } )
+t.insert( { zip : "10024" , loc : [ 40.786387 , 73.97709 ] } )
+t.insert( { zip : "94061" , loc : [ 37.463911 , 122.23396 ] } )
+assert.isnull( db.getLastError() )
+
+// test "2d" has to be first
+assert.eq( 1 , t.getIndexKeys().length , "S1" );
+t.ensureIndex( { zip : 1 , loc : "2d" } );
+assert.eq( 1 , t.getIndexKeys().length , "S2" );
+
+t.ensureIndex( idx );
+assert.eq( 2 , t.getIndexKeys().length , "S3" );
+
+assert.eq( 3 , t.count() , "B1" );
+t.insert( { loc : [ 200 , 200 ] } )
+assert( db.getLastError() , "B2" )
+assert.eq( 3 , t.count() , "B3" );
+
+// test normal access
+
+wb = t.findOne( { zip : "06525" } )
+assert( wb , "C1" );
+
+assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" )
+assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" )
+assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" )
+
+// test config options
+
+t.drop();
+
+t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } );
+t.insert( { loc : [ 200 , 200 ] } )
+assert.isnull( db.getLastError() , "D1" )
+
diff --git a/jstests/geo2.js b/jstests/geo2.js
new file mode 100644
index 0000000..b9452c8
--- /dev/null
+++ b/jstests/geo2.js
@@ -0,0 +1,43 @@
+
+t = db.geo2
+t.drop();
+
+n = 1
+for ( var x=-100; x<100; x+=2 ){
+ for ( var y=-100; y<100; y+=2 ){
+ t.insert( { _id : n++ , loc : [ x , y ] } )
+ }
+}
+
+t.ensureIndex( { loc : "2d" } )
+
+fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
+slow = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , start : "11" } );
+
+printjson(fast.stats);
+printjson(slow.stats);
+
+v = "\n" + tojson( fast ) + "\n" + tojson( slow );
+
+assert.lt( fast.stats.nscanned * 10 , slow.stats.nscanned , "A1" + v );
+assert.lt( fast.stats.objectsLoaded , slow.stats.objectsLoaded , "A2" + v );
+assert.eq( fast.stats.avgDistance , slow.stats.avgDistance , "A3" + v );
+
+function a( cur ){
+ var total = 0;
+ var outof = 0;
+ while ( cur.hasNext() ){
+ var o = cur.next();
+ total += Geo.distance( [ 50 , 50 ] , o.loc );
+ outof++;
+ }
+ return total/outof;
+}
+
+assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B1" )
+assert.close( 1.33333 , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(3) ) , "B2" );
+assert.close( fast.stats.avgDistance , a( t.find( { loc : { $near : [ 50 , 50 ] } } ).limit(10) ) , "B3" );
+
+printjson( t.find( { loc : { $near : [ 50 , 50 ] } } ).explain() )
+
+
diff --git a/jstests/geo3.js b/jstests/geo3.js
new file mode 100644
index 0000000..6bf27f9
--- /dev/null
+++ b/jstests/geo3.js
@@ -0,0 +1,87 @@
+
+t = db.geo3
+t.drop();
+
+n = 1
+for ( var x=-100; x<100; x+=2 ){
+ for ( var y=-100; y<100; y+=2 ){
+ t.insert( { _id : n++ , loc : [ x , y ] , a : Math.abs( x ) % 5 , b : Math.abs( y ) % 5 } )
+ }
+}
+
+
+t.ensureIndex( { loc : "2d" } )
+
+fast = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 } );
+
+//printjson( fast.stats );
+
+slow = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , start : "11" } );
+
+//printjson( slow.stats );
+
+assert.lt( fast.stats.nscanned * 10 , slow.stats.nscanned , "A1" );
+assert.lt( fast.stats.objectsLoaded , slow.stats.objectsLoaded , "A2" );
+assert.eq( fast.stats.avgDistance , slow.stats.avgDistance , "A3" );
+
+// test filter
+
+filtered1 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
+assert.eq( 10 , filtered1.results.length , "B1" );
+filtered1.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B2: " + tojson( z ) ); } )
+//printjson( filtered1.stats );
+
+function avgA( q , len ){
+ if ( ! len )
+ len = 10;
+ var realq = { loc : { $near : [ 50 , 50 ] } };
+ if ( q )
+ Object.extend( realq , q );
+ var as =
+ t.find( realq ).limit(len).map(
+ function(z){
+ return z.a;
+ }
+ );
+ assert.eq( len , as.length , "length in avgA" );
+ return Array.avg( as );
+}
+
+function testFiltering( msg ){
+ assert.gt( 2 , avgA( {} ) , msg + " testFiltering 1 " );
+ assert.eq( 2 , avgA( { a : 2 } ) , msg + " testFiltering 2 " );
+ assert.eq( 4 , avgA( { a : 4 } ) , msg + " testFiltering 3 " );
+}
+
+testFiltering( "just loc" );
+
+t.dropIndex( { loc : "2d" } )
+assert.eq( 1 , t.getIndexKeys().length , "setup 3a" )
+t.ensureIndex( { loc : "2d" , a : 1 } )
+assert.eq( 2 , t.getIndexKeys().length , "setup 3b" )
+
+filtered2 = db.runCommand( { geoNear : t.getName() , near : [ 50 , 50 ] , num : 10 , query : { a : 2 } } );
+assert.eq( 10 , filtered2.results.length , "B3" );
+filtered2.results.forEach( function(z){ assert.eq( 2 , z.obj.a , "B4: " + tojson( z ) ); } )
+
+assert.eq( filtered1.stats.avgDistance , filtered2.stats.avgDistance , "C1" )
+assert.eq( filtered1.stats.nscanned , filtered2.stats.nscanned , "C2" )
+assert.gt( filtered1.stats.objectsLoaded , filtered2.stats.objectsLoaded , "C3" )
+
+testFiltering( "loc and a" );
+
+t.dropIndex( { loc : "2d" , a : 1 } )
+assert.eq( 1 , t.getIndexKeys().length , "setup 4a" )
+t.ensureIndex( { loc : "2d" , b : 1 } )
+assert.eq( 2 , t.getIndexKeys().length , "setup 4b" )
+
+testFiltering( "loc and b" );
+
+
+q = { loc : { $near : [ 50 , 50 ] } }
+assert.eq( 100 , t.find( q ).limit(100).itcount() , "D1" )
+assert.eq( 100 , t.find( q ).limit(100).count() , "D2" )
+
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "D3" )
+assert.eq( 20 , t.find( q ).limit(20).size() , "D4" )
+
diff --git a/jstests/geo4.js b/jstests/geo4.js
new file mode 100644
index 0000000..73b4020
--- /dev/null
+++ b/jstests/geo4.js
@@ -0,0 +1,10 @@
+var t = db.geo4;
+t.drop();
+
+t.insert( { zip : "06525" , loc : [ 41.352964 , 73.01212 ] } );
+
+t.ensureIndex( { loc : "2d" }, { bits : 33 } );
+assert.eq( db.getLastError() , "can't have more than 32 bits in geo index" , "a" );
+
+t.ensureIndex( { loc : "2d" }, { bits : 32 } );
+assert( !db.getLastError(), "b" );
diff --git a/jstests/geo5.js b/jstests/geo5.js
new file mode 100644
index 0000000..67b00f8
--- /dev/null
+++ b/jstests/geo5.js
@@ -0,0 +1,18 @@
+t = db.geo5;
+t.drop();
+
+t.insert( { p : [ 0,0 ] } )
+t.ensureIndex( { p : "2d" } )
+
+res = t.runCommand( "geoNear" , { near : [1,1] } );
+assert.eq( 1 , res.results.length , "A1" );
+
+t.insert( { p : [ 1,1 ] } )
+t.insert( { p : [ -1,-1 ] } )
+res = t.runCommand( "geoNear" , { near : [50,50] } );
+assert.eq( 3 , res.results.length , "A2" );
+
+t.insert( { p : [ -1,-1 ] } )
+res = t.runCommand( "geoNear" , { near : [50,50] } );
+assert.eq( 4 , res.results.length , "A3" );
+
diff --git a/jstests/geo6.js b/jstests/geo6.js
new file mode 100644
index 0000000..47e3bf8
--- /dev/null
+++ b/jstests/geo6.js
@@ -0,0 +1,23 @@
+
+t = db.geo6;
+t.drop();
+
+t.ensureIndex( { loc : "2d" } );
+
+assert.eq( 0 , t.find().itcount() , "pre0" );
+assert.eq( 0 , t.find( { loc : { $near : [50,50] } } ).itcount() , "pre1" )
+
+t.insert( { _id : 1 , loc : [ 1 , 1 ] } )
+t.insert( { _id : 2 , loc : [ 1 , 2 ] } )
+t.insert( { _id : 3 } )
+
+assert.eq( 3 , t.find().itcount() , "A1" )
+assert.eq( 2 , t.find().hint( { loc : "2d" } ).itcount() , "A2" )
+assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).itcount() , "A3" )
+
+assert.eq( 1 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : 1 } ).next()._id , "B1" )
+assert.eq( 2 , t.find( { loc : { $near : [50,50] } } ).sort( { _id : -1 } ).next()._id , "B2" )
+
+
+t.insert( { _id : 4 , loc : [] } )
+assert.eq( 4 , t.find().itcount() , "C1" )
diff --git a/jstests/geo7.js b/jstests/geo7.js
new file mode 100644
index 0000000..c220da5
--- /dev/null
+++ b/jstests/geo7.js
@@ -0,0 +1,20 @@
+
+t = db.geo7;
+t.drop();
+
+t.insert({_id:1,y:[1,1]})
+t.insert({_id:2,y:[1,1],z:3})
+t.insert({_id:3,y:[1,1],z:4})
+t.insert({_id:4,y:[1,1],z:5})
+
+t.ensureIndex({y:"2d",z:1})
+
+assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A1" );
+
+t.dropIndex({y:"2d",z:1})
+
+t.ensureIndex({y:"2d"})
+assert.eq( 1 , t.find({y:[1,1],z:3}).itcount() , "A2" );
+
+t.insert( { _id : 5 , y : 5 } );
+assert.eq( 5 , t.findOne( { y : 5 } )._id , "B1" );
diff --git a/jstests/geo8.js b/jstests/geo8.js
new file mode 100644
index 0000000..301f3bc
--- /dev/null
+++ b/jstests/geo8.js
@@ -0,0 +1,13 @@
+
+t = db.geo8
+t.drop()
+
+t.insert( { loc : [ 5 , 5 ] } )
+t.insert( { loc : [ 5 , 6 ] } )
+t.insert( { loc : [ 5 , 7 ] } )
+t.insert( { loc : [ 4 , 5 ] } )
+t.insert( { loc : [ 100 , 100 ] } )
+
+t.ensureIndex( { loc : "2d" } )
+
+t.runCommand( "geoWalk" );
diff --git a/jstests/geo9.js b/jstests/geo9.js
new file mode 100644
index 0000000..8b6510f
--- /dev/null
+++ b/jstests/geo9.js
@@ -0,0 +1,28 @@
+
+t = db.geo9
+t.drop();
+
+t.save( { _id : 1 , a : [ 10 , 10 ] , b : [ 50 , 50 ] } )
+t.save( { _id : 2 , a : [ 11 , 11 ] , b : [ 51 , 52 ] } )
+t.save( { _id : 3 , a : [ 12 , 12 ] , b : [ 52 , 52 ] } )
+
+t.save( { _id : 4 , a : [ 50 , 50 ] , b : [ 10 , 10 ] } )
+t.save( { _id : 5 , a : [ 51 , 51 ] , b : [ 11 , 11 ] } )
+t.save( { _id : 6 , a : [ 52 , 52 ] , b : [ 12 , 12 ] } )
+
+t.ensureIndex( { a : "2d" } )
+t.ensureIndex( { b : "2d" } )
+
+function check( field ){
+ var q = {}
+ q[field] = { $near : [ 11 , 11 ] }
+ arr = t.find( q ).limit(3).map(
+ function(z){
+ return Geo.distance( [ 11 , 11 ] , z[field] );
+ }
+ );
+ assert.eq( 2 * Math.sqrt( 2 ) , Array.sum( arr ) , "test " + field );
+}
+
+check( "a" )
+check( "b" )
diff --git a/jstests/geo_box1.js b/jstests/geo_box1.js
new file mode 100644
index 0000000..5ef3351
--- /dev/null
+++ b/jstests/geo_box1.js
@@ -0,0 +1,43 @@
+
+t = db.geo_box1;
+t.drop();
+
+num = 0;
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ }
+}
+
+t.ensureIndex( { loc : "2d" } );
+
+searches = [
+ [ [ 1 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 2 ] ] ,
+ [ [ 0 , 2 ] , [ 4 , 5 ] ] ,
+ [ [ 1 , 1 ] , [ 2 , 8 ] ] ,
+];
+
+
+for ( i=0; i<searches.length; i++ ){
+ b = searches[i];
+ //printjson( b );
+
+ q = { loc : { $within : { $box : b } } }
+    numWanted = ( 1 + b[1][0] - b[0][0] ) * ( 1 + b[1][1] - b[0][1] );
+    assert.eq( numWanted , t.find(q).itcount() , "itcount: " + tojson( q ) );
+ printjson( t.find(q).explain() )
+}
+
+
+
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).itcount() , "E1" )
+assert.eq( 0 , t.find( { loc : { $within : { $box : [ [100 , 100 ] , [ 110 , 110 ] ] } } } ).count() , "E2" )
+
+
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).count() , "E3" )
+assert.eq( num , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).itcount() , "E4" )
+
+assert.eq( 57 , t.find( { loc : { $within : { $box : [ [ 0 , 0 ] , [ 110 , 110 ] ] } } } ).limit(57).itcount() , "E5" )
+
diff --git a/jstests/geo_box2.js b/jstests/geo_box2.js
new file mode 100644
index 0000000..2aa65d0
--- /dev/null
+++ b/jstests/geo_box2.js
@@ -0,0 +1,19 @@
+
+t = db.geo_box2;
+
+t.drop()
+
+for (i=1; i<10; i++) {
+ for(j=1; j<10; j++) {
+ t.insert({loc : [i,j]});
+ }
+}
+
+t.ensureIndex({"loc" : "2d"} )
+assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "A1" );
+
+t.dropIndex( { "loc" : "2d" } )
+
+t.ensureIndex({"loc" : "2d"} , {"min" : 0, "max" : 10})
+assert.eq( 9 , t.find({loc : {$within : {$box : [[4,4],[6,6]]}}}).itcount() , "B1" );
+
diff --git a/jstests/geo_circle1.js b/jstests/geo_circle1.js
new file mode 100644
index 0000000..9208511
--- /dev/null
+++ b/jstests/geo_circle1.js
@@ -0,0 +1,50 @@
+
+t = db.geo_circle1;
+t.drop();
+
+searches = [
+ [ [ 5 , 5 ] , 3 ] ,
+ [ [ 5 , 5 ] , 1 ] ,
+ [ [ 5 , 5 ] , 5 ] ,
+ [ [ 0 , 5 ] , 5 ] ,
+];
+correct = searches.map( function(z){ return []; } );
+
+num = 0;
+
+for ( x=0; x<=20; x++ ){
+ for ( y=0; y<=20; y++ ){
+ o = { _id : num++ , loc : [ x , y ] }
+ t.save( o )
+ for ( i=0; i<searches.length; i++ )
+ if ( Geo.distance( [ x , y ] , searches[i][0] ) <= searches[i][1] )
+ correct[i].push( o );
+ }
+}
+
+t.ensureIndex( { loc : "2d" } );
+
+for ( i=0; i<searches.length; i++ ){
+ //print( tojson( searches[i] ) + "\t" + correct[i].length )
+ q = { loc : { $within : { $center : searches[i] } } }
+
+ //correct[i].forEach( printjson )
+ //printjson( q );
+ //t.find( q ).forEach( printjson )
+
+ //printjson( Array.sort( correct[i].map( function(z){ return z._id; } ) ) )
+ //printjson( Array.sort( t.find(q).map( function(z){ return z._id; } ) ) )
+
+ assert.eq( correct[i].length , t.find( q ).itcount() , "itcount : " + tojson( searches[i] ) );
+    assert.eq( correct[i].length , t.find( q ).count() , "count : " + tojson( searches[i] ) );
+ assert.gt( correct[i].length * 2 , t.find(q).explain().nscanned , "nscanned : " + tojson( searches[i] ) )
+}
+
+
+
+
+
+
+
+
+
diff --git a/jstests/geoa.js b/jstests/geoa.js
new file mode 100644
index 0000000..3081f6c
--- /dev/null
+++ b/jstests/geoa.js
@@ -0,0 +1,12 @@
+
+t = db.geoa
+t.drop();
+
+t.save( { _id : 1 , a : { loc : [ 5 , 5 ] } } )
+t.save( { _id : 2 , a : { loc : [ 6 , 6 ] } } )
+t.save( { _id : 3 , a : { loc : [ 7 , 7 ] } } )
+
+t.ensureIndex( { "a.loc" : "2d" } );
+
+cur = t.find( { "a.loc" : { $near : [ 6 , 6 ] } } );
+assert.eq( 2 , cur.next()._id , "A1" );
diff --git a/jstests/geob.js b/jstests/geob.js
new file mode 100644
index 0000000..0dcc265
--- /dev/null
+++ b/jstests/geob.js
@@ -0,0 +1,35 @@
+var t = db.geob;
+t.drop();
+
+var a = {p: [0, 0]};
+var b = {p: [1, 0]};
+var c = {p: [3, 4]};
+var d = {p: [0, 6]};
+
+t.save(a);
+t.save(b);
+t.save(c);
+t.save(d);
+t.ensureIndex({p: "2d"});
+
+var res = t.runCommand("geoNear", {near: [0,0]});
+assert.close(3, res.stats.avgDistance, "A");
+
+assert.close(0, res.results[0].dis, "B1");
+assert.eq(a._id, res.results[0].obj._id, "B2");
+
+assert.close(1, res.results[1].dis, "C1");
+assert.eq(b._id, res.results[1].obj._id, "C2");
+
+assert.close(5, res.results[2].dis, "D1");
+assert.eq(c._id, res.results[2].obj._id, "D2");
+
+assert.close(6, res.results[3].dis, "E1");
+assert.eq(d._id, res.results[3].obj._id, "E2");
+
+res = t.runCommand("geoNear", {near: [0,0], distanceMultiplier: 2});
+assert.close(6, res.stats.avgDistance, "F");
+assert.close(0, res.results[0].dis, "G");
+assert.close(2, res.results[1].dis, "H");
+assert.close(10, res.results[2].dis, "I");
+assert.close(12, res.results[3].dis, "J");
diff --git a/jstests/geoc.js b/jstests/geoc.js
new file mode 100644
index 0000000..8b01780
--- /dev/null
+++ b/jstests/geoc.js
@@ -0,0 +1,24 @@
+
+t = db.geoc;
+t.drop()
+
+N = 1000;
+
+for (var i=0; i<N; i++) t.insert({loc:[100+Math.random(), 100+Math.random()], z:0})
+for (var i=0; i<N; i++) t.insert({loc:[0+Math.random(), 0+Math.random()], z:1})
+for (var i=0; i<N; i++) t.insert({loc:[-100+Math.random(), -100+Math.random()], z:2})
+
+t.ensureIndex({loc:'2d'})
+
+function test( z , l ){
+ assert.lt( 0 , t.find({loc:{$near:[100,100]}, z:z}).limit(l).itcount() , "z: " + z + " l: " + l );
+}
+
+test( 1 , 1 );
+test( 1 , 2 );
+test( 2 , 2 );
+test( 2 , 10 );
+test( 2 , 1000 );
+test( 2 , 100000 );
+test( 2 , 10000000 );
+
diff --git a/jstests/group2.js b/jstests/group2.js
index f687e88..a8e6653 100644
--- a/jstests/group2.js
+++ b/jstests/group2.js
@@ -28,11 +28,11 @@ delete cmd.key
cmd["$keyf"] = function(x){ return { a : x.a }; };
result2 = t.group( cmd );
-assert.eq( result , result2 );
+assert.eq( result , result2, "check result2" );
delete cmd.$keyf
cmd["keyf"] = function(x){ return { a : x.a }; };
result3 = t.group( cmd );
-assert.eq( result , result3 );
+assert.eq( result , result3, "check result3" );
diff --git a/jstests/group3.js b/jstests/group3.js
index afa32f1..d113b9d 100644
--- a/jstests/group3.js
+++ b/jstests/group3.js
@@ -1,4 +1,4 @@
-t = db.group2;
+t = db.group3;
t.drop();
t.save({a: 1});
diff --git a/jstests/hint1.js b/jstests/hint1.js
index 416eb4a..c222aa3 100644
--- a/jstests/hint1.js
+++ b/jstests/hint1.js
@@ -6,5 +6,5 @@ p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true
p.ensureIndex( { ts: 1 } );
e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: " alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain();
-assert.eq( e.startKey.ts.getTime(), new Date( 1234119308272 ).getTime() , "A" );
-assert.eq( 0 , e.endKey.ts.getTime() , "B" );
+assert.eq( e.indexBounds[0][0].ts.getTime(), new Date( 1234119308272 ).getTime() , "A" );
+assert.eq( 0 , e.indexBounds[0][1].ts.getTime() , "B" );
diff --git a/jstests/in.js b/jstests/in.js
index 5442bbe..b8ba159 100644
--- a/jstests/in.js
+++ b/jstests/in.js
@@ -17,3 +17,4 @@ assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" );
assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" );
+assert.eq( 1 , t.find( { a : { $gt: 1, $in : [ 2 ] } } ).itcount() , "E" ); \ No newline at end of file
diff --git a/jstests/in3.js b/jstests/in3.js
new file mode 100644
index 0000000..1ec53ca
--- /dev/null
+++ b/jstests/in3.js
@@ -0,0 +1,11 @@
+t = db.jstests_in3;
+
+t.drop();
+t.ensureIndex( {i:1} );
+assert.eq( [ [ {i:3}, {i:3} ] ], t.find( {i:{$in:[3]}} ).explain().indexBounds , "A1" );
+assert.eq( [ [ {i:3}, {i:3} ], [ {i:6}, {i:6} ] ], t.find( {i:{$in:[3,6]}} ).explain().indexBounds , "A2" );
+
+for ( var i=0; i<20; i++ )
+ t.insert( { i : i } );
+
+assert.eq( 2 , t.find( {i:{$in:[3,6]}} ).explain().nscanned , "B1" )
diff --git a/jstests/inc2.js b/jstests/inc2.js
index 8442f14..75a8e65 100644
--- a/jstests/inc2.js
+++ b/jstests/inc2.js
@@ -1,5 +1,5 @@
-t = db.inc1
+t = db.inc2
t.drop();
t.save( { _id : 1 , x : 1 } );
diff --git a/jstests/index10.js b/jstests/index10.js
index 105fcc1..c638264 100644
--- a/jstests/index10.js
+++ b/jstests/index10.js
@@ -13,10 +13,18 @@ t.ensureIndex( {i:1} );
assert.eq( 5, t.count() );
t.dropIndexes();
t.ensureIndex( {i:1}, true );
+err = db.getLastErrorObj();
+assert( err.err );
+assert.eq( 11000, err.code );
assert.eq( 1, db.system.indexes.count( {ns:"test.jstests_index10" } ) ); // only id index
// t.dropIndexes();
+ts = t.totalIndexSize();
t.ensureIndex( {i:1}, [ true, true ] );
+ts2 = t.totalIndexSize();
+
+assert.eq( ts * 2, ts2, "totalIndexSize fail" );
+
assert.eq( 3, t.count() );
assert.eq( 1, t.count( {i:1} ) );
diff --git a/jstests/index7.js b/jstests/index7.js
index cf5050b..a3b88d5 100644
--- a/jstests/index7.js
+++ b/jstests/index7.js
@@ -8,18 +8,16 @@ function noIndex( q ) {
assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" );
}
-function start( k, q ) {
- var s = q.explain().startKey;
+function start( k, q, rev) {
+ var s = q.explain().indexBounds[rev?1:0][0];
assert.eq( k.a, s.a );
assert.eq( k.b, s.b );
}
-
-function end( k, q ) {
- var e = q.explain().endKey;
+function end( k, q, rev) {
+ var e = q.explain().indexBounds[rev?1:0][1];
assert.eq( k.a, e.a );
assert.eq( k.b, e.b );
}
-
function both( k, q ) {
start( k, q );
end( k, q );
@@ -35,20 +33,20 @@ noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) );
f.drop();
f.ensureIndex( { a: 1, b: 1 } );
-assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.a );
-assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.a );
-assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.a );
-assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.a );
-assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.c );
-assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.c );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][0].a );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][1].a );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][0].a );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][1].a );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][0].c );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().indexBounds[0][1].c );
start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
-end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ), true );
end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
diff --git a/jstests/index8.js b/jstests/index8.js
index 09a0645..719ad2d 100644
--- a/jstests/index8.js
+++ b/jstests/index8.js
@@ -9,11 +9,14 @@ t.ensureIndex( { c: 1 }, [ false, "cIndex" ] );
checkIndexes = function( num ) {
// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() );
- indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } );
- assert( !indexes[ 0 ].unique , "A" + num );
- assert( indexes[ 1 ].unique , "B" + num );
- assert( !indexes[ 2 ].unique , "C" + num );
- assert.eq( "cIndex", indexes[ 2 ].name , "D" + num );
+ indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } ).toArray();
+ var start = 0;
+ if ( indexes[0].name == "_id_" )
+ start = 1;
+ assert( !indexes[ start ].unique , "A" + num );
+ assert( indexes[ start + 1 ].unique , "B" + num + " " + tojson( indexes[start+1] ) );
+ assert( !indexes[ start + 2 ].unique , "C" + num );
+ assert.eq( "cIndex", indexes[ start + 2 ].name , "D" + num );
}
checkIndexes( 1 );
diff --git a/jstests/index_check2.js b/jstests/index_check2.js
index 56796ac..a489fd6 100644
--- a/jstests/index_check2.js
+++ b/jstests/index_check2.js
@@ -38,4 +38,4 @@ scanned3 = t.find(q3).explain().nscanned;
assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" );
exp3 = t.find( q3 ).explain();
-assert.eq( exp3.startKey, exp3.endKey, "$all range not a single key" );
+assert.eq( exp3.indexBounds[0][0], exp3.indexBounds[0][1], "$all range not a single key" );
diff --git a/jstests/index_diag.js b/jstests/index_diag.js
new file mode 100644
index 0000000..38169b3
--- /dev/null
+++ b/jstests/index_diag.js
@@ -0,0 +1,38 @@
+
+t = db.index_diag
+t.drop();
+
+t.ensureIndex( { x : 1 } );
+
+all = []
+ids = []
+xs = []
+
+function r( a ){
+ var n = []
+ for ( var x=a.length-1; x>=0; x-- )
+ n.push( a[x] );
+ return n;
+}
+
+for ( i=1; i<4; i++ ){
+ o = { _id : i , x : -i }
+ t.insert( o );
+ all.push( o );
+ ids.push( { _id : i } );
+ xs.push( { x : -i } );
+}
+
+assert.eq( all , t.find().sort( { _id : 1 } ).toArray() , "A1" );
+assert.eq( r( all ) , t.find().sort( { _id : -1 } ).toArray() , "A2" );
+
+assert.eq( all , t.find().sort( { x : -1 } ).toArray() , "A3" );
+assert.eq( r( all ) , t.find().sort( { x : 1 } ).toArray() , "A4" );
+
+assert.eq( ids , t.find().sort( { _id : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B1" )
+assert.eq( r( ids ) , t.find().sort( { _id : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B2" )
+assert.eq( xs , t.find().sort( { x : -1 } )._addSpecial( "$returnKey" , true ).toArray() , "B3" )
+assert.eq( r( xs ) , t.find().sort( {x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B4" )
+
+assert.eq( r( xs ) , t.find().hint( { x : 1 } )._addSpecial( "$returnKey" , true ).toArray() , "B5" )
+
diff --git a/jstests/indexg.js b/jstests/indexg.js
new file mode 100644
index 0000000..a0709fd
--- /dev/null
+++ b/jstests/indexg.js
@@ -0,0 +1,13 @@
+
+f = db.jstests_indexg;
+f.drop();
+f.save( { list: [1, 2] } );
+f.save( { list: [1, 3] } );
+
+doit = function() {
+ assert.eq( 1, f.count( { list: { $in: [1], $ne: 3 } } ) );
+ assert.eq( 1, f.count( { list: { $in: [1], $not:{$in: [3] } } } ) );
+}
+doit();
+f.ensureIndex( { list: 1 } );
+doit(); \ No newline at end of file
diff --git a/jstests/insert1.js b/jstests/insert1.js
new file mode 100644
index 0000000..76edca1
--- /dev/null
+++ b/jstests/insert1.js
@@ -0,0 +1,41 @@
+t = db.insert1;
+t.drop();
+
+o = {a:1};
+t.insert(o);
+id = t._lastID
+assert.eq(o, {a:1}, "input unchanged 1");
+assert.eq(typeof(id), "object", "1");
+assert.eq(id.constructor, ObjectId, "1");
+assert.eq(t.findOne({_id:id}).a, 1, "find by id 1");
+assert.eq(t.findOne({a:1})._id, id , "find by val 1");
+
+o = {a:2, _id:new ObjectId()};
+id1 = o._id
+t.insert(o);
+id2 = t._lastID
+assert.eq(id1, id2, "ids match 2");
+assert.eq(o, {a:2, _id:id1}, "input unchanged 2");
+assert.eq(typeof(id2), "object", "2");
+assert.eq(id2.constructor, ObjectId, "2");
+assert.eq(t.findOne({_id:id1}).a, 2, "find by id 2");
+assert.eq(t.findOne({a:2})._id, id1 , "find by val 2");
+
+o = {a:3, _id:"asdf"};
+id1 = o._id
+t.insert(o);
+id2 = t._lastID
+assert.eq(id1, id2, "ids match 3");
+assert.eq(o, {a:3, _id:id1}, "input unchanged 3");
+assert.eq(typeof(id2), "string", "3");
+assert.eq(t.findOne({_id:id1}).a, 3, "find by id 3");
+assert.eq(t.findOne({a:3})._id, id1 , "find by val 3");
+
+o = {a:4, _id:null};
+id1 = o._id
+t.insert(o);
+id2 = t._lastID
+assert.eq(id1, id2, "ids match 4");
+assert.eq(o, {a:4, _id:id1}, "input unchanged 4");
+assert.eq(t.findOne({_id:id1}).a, 4, "find by id 4");
+assert.eq(t.findOne({a:4})._id, id1 , "find by val 4");
diff --git a/jstests/json1.js b/jstests/json1.js
index a3dc820..e045df7 100644
--- a/jstests/json1.js
+++ b/jstests/json1.js
@@ -5,8 +5,8 @@ assert.eq( tojson( x ) , tojson( y ) , "A" );
assert.eq( typeof( x.nulls ) , typeof( y.nulls ) , "B" );
// each type is parsed properly
-x = {"x" : null, "y" : true, "z" : 123, "w" : "foo"};
-assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo"\n}' , "C" );
+x = {"x" : null, "y" : true, "z" : 123, "w" : "foo", "a": undefined};
+assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo",\n\t"a" : undefined\n}' , "C" );
x = {"x" : [], "y" : {}};
assert.eq(tojson(x,"",false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}' , "D" );
diff --git a/jstests/mod1.js b/jstests/mod1.js
index eca35b7..46e3482 100644
--- a/jstests/mod1.js
+++ b/jstests/mod1.js
@@ -22,3 +22,4 @@ assert.eq( 1 , t.find( "this.a % 10 == 0" ).itcount() , "B3" );
assert.eq( 1 , t.find( { a : { $mod : [ 10 , 0 ] } } ).itcount() , "B4" );
assert.eq( 4 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "B5" );
+assert.eq( 1, t.find( { a: { $gt: 5, $mod : [ 10, 1 ] } } ).itcount() ); \ No newline at end of file
diff --git a/jstests/mr5.js b/jstests/mr5.js
index 50eb366..bbac3fe 100644
--- a/jstests/mr5.js
+++ b/jstests/mr5.js
@@ -26,12 +26,31 @@ r = function( k , v ){
}
res = t.mapReduce( m , r , { scope : { xx : 1 } } );
-res.find().forEach( printjson )
+//res.find().forEach( printjson )
z = res.convertToSingleObject()
-assert.eq( 2 , Object.keySet( z ).length , "A" )
-assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B" )
-assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B" )
+assert.eq( 2 , Object.keySet( z ).length , "A1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "A2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "A3" )
+
+
+res.drop()
+
+m = function(){
+ var x = "partner";
+ var y = "visits";
+ emit( this[x] , { stats : [ this[y] ] } )
+}
+
+
+
+res = t.mapReduce( m , r , { scope : { xx : 1 } } );
+//res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "B1" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B2" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "B3" )
res.drop()
diff --git a/jstests/mr_bigobject.js b/jstests/mr_bigobject.js
new file mode 100644
index 0000000..8224209
--- /dev/null
+++ b/jstests/mr_bigobject.js
@@ -0,0 +1,41 @@
+
+t = db.mr_bigobject
+t.drop()
+
+s = "";
+while ( s.length < ( 1024 * 1024 ) ){
+ s += "asdasdasd";
+}
+
+for ( i=0; i<10; i++ )
+ t.insert( { _id : i , s : s } )
+
+m = function(){
+ emit( 1 , this.s + this.s );
+}
+
+r = function( k , v ){
+ return 1;
+}
+
+assert.throws( function(){ t.mapReduce( m , r ); } , "emit should fail" )
+
+m = function(){
+ emit( 1 , this.s );
+}
+
+assert.eq( { 1 : 1 } , t.mapReduce( m , r ).convertToSingleObject() , "A1" )
+
+r = function( k , v ){
+ total = 0;
+ for ( var i=0; i<v.length; i++ ){
+ var x = v[i];
+ if ( typeof( x ) == "number" )
+ total += x
+ else
+ total += x.length;
+ }
+ return total;
+}
+
+assert.eq( { 1 : 10 * s.length } , t.mapReduce( m , r ).convertToSingleObject() , "A2" )
diff --git a/jstests/mr_errorhandling.js b/jstests/mr_errorhandling.js
new file mode 100644
index 0000000..57724f1
--- /dev/null
+++ b/jstests/mr_errorhandling.js
@@ -0,0 +1,47 @@
+
+t = db.mr_errorhandling;
+t.drop();
+
+t.save( { a : [ 1 , 2 , 3 ] } )
+t.save( { a : [ 2 , 3 , 4 ] } )
+
+m_good = function(){
+ for ( var i=0; i<this.a.length; i++ ){
+ emit( this.a[i] , 1 );
+ }
+}
+
+m_bad = function(){
+ for ( var i=0; i<this.a.length; i++ ){
+ emit( this.a[i] );
+ }
+}
+
+r = function( k , v ){
+ var total = 0;
+ for ( var i=0; i<v.length; i++ )
+ total += v[i];
+ return total;
+}
+
+res = t.mapReduce( m_good , r );
+assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "A" );
+res.drop()
+
+res = null;
+
+theerror = null;
+try {
+ res = t.mapReduce( m_bad , r );
+}
+catch ( e ){
+ theerror = e.toString();
+}
+assert.isnull( res , "B1" );
+assert( theerror , "B2" );
+assert( theerror.indexOf( "emit" ) >= 0 , "B3" );
+
+// test things are still in an ok state
+res = t.mapReduce( m_good , r );
+assert.eq( { 1 : 1 , 2 : 2 , 3 : 2 , 4 : 1 } , res.convertToSingleObject() , "C" );
+res.drop()
diff --git a/jstests/nin.js b/jstests/nin.js
index 4afd344..0658278 100644
--- a/jstests/nin.js
+++ b/jstests/nin.js
@@ -29,6 +29,7 @@ doTest = function( n ) {
assert.eq( 4, t.find( { a: { $nin: [ 9 ] } } ).count() , n + " G" );
assert.eq( 4, t.find( { a: { $nin: [ 3 ] } } ).count() , n + " H" );
assert.eq( 3, t.find( { a: { $nin: [ 2, 3 ] } } ).count() , n + " I" );
+ assert.eq( 1, t.find( { a: { $ne: 8, $nin: [ 2, 3 ] } } ).count() , n + " I2" );
checkEqual( n + " A" , "a" , 5 );
@@ -43,7 +44,7 @@ doTest = function( n ) {
checkEqual( n + " C" , "a.b" , 5 );
assert.eq( 7, t.find( { 'a.b': { $nin: [ 10 ] } } ).count() , n + " L" );
- assert.eq( 8, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
+ assert.eq( 7, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
assert.eq( 7, t.find( { a: { $nin: [ 11 ] } } ).count() , n + " N" );
t.save( { a: { b: [ 20, 30 ] } } );
diff --git a/jstests/not2.js b/jstests/not2.js
new file mode 100644
index 0000000..5d33baa
--- /dev/null
+++ b/jstests/not2.js
@@ -0,0 +1,139 @@
+t = db.jstests_not2;
+t.drop();
+
+check = function( query, expected, size ) {
+ if ( size == null ) {
+ size = 1;
+ }
+ assert.eq( size, t.count( query ), tojson( query ) );
+ if ( size > 0 ) {
+ assert.eq( expected, t.findOne( query ).i, tojson( query ) );
+ }
+}
+
+fail = function( query ) {
+ try {
+ t.count( query );
+ } catch ( e ) {
+ }
+ assert( db.getLastError(), tojson( query ) );
+}
+
+doTest = function() {
+
+t.remove( {} );
+
+t.save( {i:"a"} );
+t.save( {i:"b"} );
+
+fail( {i:{$not:"a"}} );
+fail( {i:{$not:{$not:{$gt:"a"}}}} );
+fail( {i:{$not:{$ref:"foo"}}} );
+fail( {i:{$not:{}}} );
+check( {i:{$gt:"a"}}, "b" );
+check( {i:{$not:{$gt:"a"}}}, "a" );
+check( {i:{$not:{$ne:"a"}}}, "a" );
+check( {i:{$not:{$gte:"b"}}}, "a" );
+check( {i:{$exists:true}}, "a", 2 );
+check( {i:{$not:{$exists:true}}}, "", 0 );
+check( {j:{$not:{$exists:false}}}, "", 0 );
+check( {j:{$not:{$exists:true}}}, "a", 2 );
+check( {i:{$not:{$in:["a"]}}}, "b" );
+check( {i:{$not:{$in:["a", "b"]}}}, "", 0 );
+check( {i:{$not:{$in:["g"]}}}, "a", 2 );
+check( {i:{$not:{$nin:["a"]}}}, "a" );
+check( {i:{$not:/a/}}, "b" );
+check( {i:{$not:/(a|b)/}}, "", 0 );
+check( {i:{$not:/a/,$regex:"a"}}, "", 0 );
+check( {i:{$not:/aa/}}, "a", 2 );
+fail( {i:{$not:{$regex:"a"}}} );
+fail( {i:{$not:{$options:"a"}}} );
+check( {i:{$type:2}}, "a", 2 );
+check( {i:{$not:{$type:1}}}, "a", 2 );
+check( {i:{$not:{$type:2}}}, "", 0 );
+
+check( {i:{$not:{$gt:"c",$lt:"b"}}}, "b" );
+
+t.remove( {} );
+t.save( {i:1} );
+check( {i:{$not:{$mod:[5,1]}}}, null, 0 );
+check( {i:{$mod:[5,2]}}, null, 0 );
+check( {i:{$not:{$mod:[5,2]}}}, 1, 1 );
+
+t.remove( {} );
+t.save( {i:["a","b"]} );
+check( {i:{$not:{$size:2}}}, null, 0 );
+check( {i:{$not:{$size:3}}}, ["a","b"] );
+check( {i:{$not:{$gt:"a"}}}, null, 0 );
+check( {i:{$not:{$gt:"c"}}}, ["a","b"] );
+check( {i:{$not:{$all:["a","b"]}}}, null, 0 );
+check( {i:{$not:{$all:["c"]}}}, ["a","b"] );
+
+t.remove( {} );
+t.save( {i:{j:"a"}} );
+t.save( {i:{j:"b"}} );
+check( {i:{$not:{$elemMatch:{j:"a"}}}}, {j:"b"} );
+check( {i:{$not:{$elemMatch:{j:"f"}}}}, {j:"a"}, 2 );
+
+}
+
+doTest();
+t.ensureIndex( {i:1} );
+doTest();
+
+t.drop();
+t.save( {i:"a"} );
+t.save( {i:"b"} );
+t.ensureIndex( {i:1} );
+
+indexed = function( query, min, max ) {
+ exp = t.find( query ).explain();
+// printjson( exp );
+ assert( exp.cursor.match( /Btree/ ), tojson( query ) );
+ assert( exp.allPlans.length == 1, tojson( query ) );
+ // just expecting one element per key
+ for( i in exp.indexBounds[0][0] ) {
+ assert.eq( exp.indexBounds[0][0][ i ], min );
+ }
+ for( i in exp.indexBounds[0][1] ) {
+ assert.eq( exp.indexBounds[0][1][ i ], max );
+ }
+}
+
+not = function( query ) {
+ exp = t.find( query ).explain();
+// printjson( exp );
+ assert( !exp.cursor.match( /Btree/ ), tojson( query ) );
+ assert( exp.allPlans.length == 1, tojson( query ) );
+}
+
+indexed( {i:1}, 1, 1 );
+not( {i:{$ne:1}} );
+
+indexed( {i:{$not:{$ne:"a"}}}, "a", "a" );
+not( {i:{$not:/^a/}} );
+
+indexed( {i:{$gt:"a"}}, "a", {} );
+indexed( {i:{$not:{$gt:"a"}}}, "", "a" );
+
+indexed( {i:{$gte:"a"}}, "a", {} );
+indexed( {i:{$not:{$gte:"a"}}}, "", "a" );
+
+indexed( {i:{$lt:"b"}}, "", "b" );
+indexed( {i:{$not:{$lt:"b"}}}, "b", {} );
+
+indexed( {i:{$lte:"b"}}, "", "b" );
+indexed( {i:{$not:{$lte:"b"}}}, "b", {} );
+
+not( {i:{$not:{$all:["a"]}}} );
+not( {i:{$not:{$mod:[2,1]}}} );
+not( {i:{$not:{$type:2}}} );
+
+indexed( {i:{$in:[1]}}, 1, 1 );
+not( {i:{$not:{$in:[1]}}} );
+
+t.drop();
+t.ensureIndex( {"i.j":1} );
+indexed( {i:{$elemMatch:{j:1}}}, 1, 1 );
+not( {i:{$not:{$elemMatch:{j:1}}}} );
+indexed( {i:{$not:{$elemMatch:{j:{$ne:1}}}}}, 1, 1 );
diff --git a/jstests/parallel/basic.js b/jstests/parallel/basic.js
index 9c10306..bcb4d65 100644
--- a/jstests/parallel/basic.js
+++ b/jstests/parallel/basic.js
@@ -9,3 +9,8 @@ for( i in params ) {
}
t.run( "one or more tests failed", true );
+
+db.getCollectionNames().forEach( function( x ) {
+ v = db[ x ].validate();
+ assert( v.valid, "validate failed for " + x + " with " + tojson( v ) );
+ } ); \ No newline at end of file
diff --git a/jstests/parallel/basicPlus.js b/jstests/parallel/basicPlus.js
index d6f9a4d..4d65d25 100644
--- a/jstests/parallel/basicPlus.js
+++ b/jstests/parallel/basicPlus.js
@@ -23,4 +23,8 @@ for( var i = 4; i < 8; ++i ) {
t.run( "one or more tests failed", true );
-assert( c.validate().valid, "validate failed" ); \ No newline at end of file
+assert( c.validate().valid, "validate failed" );
+db.getCollectionNames().forEach( function( x ) {
+ v = db[ x ].validate();
+ assert( v.valid, "validate failed for " + x + " with " + tojson( v ) );
+ } ); \ No newline at end of file
diff --git a/jstests/parallel/repl.js b/jstests/parallel/repl.js
new file mode 100644
index 0000000..cb9b770
--- /dev/null
+++ b/jstests/parallel/repl.js
@@ -0,0 +1,55 @@
+// test all operations in parallel
+
+baseName = "parallel_repl"
+
+rt = new ReplTest( baseName );
+
+m = rt.start( true );
+s = rt.start( false );
+
+db = m.getDB( "test" );
+
+Random.setRandomSeed();
+
+t = new ParallelTester();
+
+for( id = 0; id < 10; ++id ) {
+ var g = new EventGenerator( id, baseName, Random.randInt( 20 ) );
+ for( var j = 0; j < 1000; ++j ) {
+ var op = Random.randInt( 3 );
+ switch( op ) {
+ case 0: // insert
+ g.addInsert( { _id:Random.randInt( 1000 ) } );
+ break;
+ case 1: // remove
+ g.addRemove( { _id:Random.randInt( 1000 ) } );
+ break;
+ case 2: // update
+ g.addUpdate( {_id:{$lt:1000}}, {a:{$inc:5}} );
+ break;
+ default:
+ assert( false, "Invalid op code" );
+ }
+ }
+ t.add( EventGenerator.dispatch, g.getEvents() );
+}
+
+var g = new EventGenerator( id, baseName, Random.randInt( 5 ) );
+for( var j = 1000; j < 3000; ++j ) {
+ g.addCheckCount( j - 1000, { _id: {$gte:1000} }, j % 100 == 0, j % 500 == 0 );
+ g.addInsert( {_id:j} );
+}
+t.add( EventGenerator.dispatch, g.getEvents() );
+
+t.run( "one or more tests failed" );
+
+assert( m.getDB( "test" )[ baseName ].validate().valid );
+assert( s.getDB( "test" )[ baseName ].validate().valid );
+
+assert.soon( function() {
+ mh = m.getDB( "test" ).runCommand( "dbhash" );
+// printjson( mh );
+ sh = s.getDB( "test" ).runCommand( "dbhash" );
+// printjson( sh );
+ return mh.md5 == sh.md5;
+ } );
diff --git a/jstests/profile1.js b/jstests/profile1.js
index ea53b09..49f6838 100644
--- a/jstests/profile1.js
+++ b/jstests/profile1.js
@@ -13,6 +13,8 @@ var capped_size = db.system.profile.storageSize();
assert.gt(capped_size, 999, "D");
assert.lt(capped_size, 2000, "E");
+db.foo.findOne()
+
assert.eq( 4 , db.system.profile.find().count() , "E2" );
/* Make sure we can't drop if profiling is still on */
diff --git a/jstests/pullall.js b/jstests/pullall.js
index b720ce5..76b1b47 100644
--- a/jstests/pullall.js
+++ b/jstests/pullall.js
@@ -1,4 +1,4 @@
-t = db.jstests_pushall;
+t = db.jstests_pullall;
t.drop();
t.save( { a: [ 1, 2, 3 ] } );
diff --git a/jstests/regex4.js b/jstests/regex4.js
index 568c937..fc26d69 100644
--- a/jstests/regex4.js
+++ b/jstests/regex4.js
@@ -1,5 +1,5 @@
-t = db.regex3;
+t = db.regex4;
t.drop();
t.save( { name : "eliot" } );
diff --git a/jstests/regex5.js b/jstests/regex5.js
index 7fe39d5..418752b 100644
--- a/jstests/regex5.js
+++ b/jstests/regex5.js
@@ -2,12 +2,46 @@
t = db.regex5
t.drop()
-t.save( { x : [ "abc" , "xyz" ] } )
-t.save( { x : [ "ac" , "xyz" ] } )
+t.save( { x : [ "abc" , "xyz1" ] } )
+t.save( { x : [ "ac" , "xyz2" ] } )
a = /.*b.*c/
x = /.*y.*/
-assert.eq( 1 , t.find( { x : a } ).count() , "A" )
-assert.eq( 2 , t.find( { x : x } ).count() , "B" )
-// assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "C" ) // SERVER-505
+doit = function() {
+
+ assert.eq( 1 , t.find( { x : a } ).count() , "A" );
+ assert.eq( 2 , t.find( { x : x } ).count() , "B" );
+ assert.eq( 2 , t.find( { x : { $in: [ x ] } } ).count() , "C" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $in: [ a, "xyz1" ] } } ).count() , "D" ); // SERVER-322
+ assert.eq( 2 , t.find( { x : { $in: [ a, "xyz2" ] } } ).count() , "E" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "F" ); // SERVER-505
+ assert.eq( 1 , t.find( { x : { $all : [ a , "abc" ] } } ).count() , "G" ); // SERVER-505
+ assert.eq( 0 , t.find( { x : { $all : [ a , "ac" ] } } ).count() , "H" ); // SERVER-505
+ assert.eq( 0 , t.find( { x : { $nin: [ x ] } } ).count() , "I" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $nin: [ a, "xyz1" ] } } ).count() , "J" ); // SERVER-322
+ assert.eq( 0 , t.find( { x : { $nin: [ a, "xyz2" ] } } ).count() , "K" ); // SERVER-322
+ assert.eq( 2 , t.find( { x : { $not: { $nin: [ x ] } } } ).count() , "L" ); // SERVER-322
+ assert.eq( 1 , t.find( { x : { $nin: [ /^a.c/ ] } } ).count() , "M" ) // SERVER-322
+}
+
+doit();
+t.ensureIndex( {x:1} );
+print( "now indexed" );
+doit();
+
+// check bound unions SERVER-322
+assert.eq( [
+ [ {x:1},{x:1} ],
+ [ {x:2.5},{x:2.5} ],
+ [ {x:"a"},{x:"a"} ],
+ [ {x:"b"},{x:"e"} ],
+ [ {x:/^b/},{x:/^b/} ],
+ [ {x:/^c/},{x:/^c/} ],
+ [ {x:/^d/},{x:/^d/} ]
+ ],
+ t.find( { x : { $in: [ 1, 2.5, "a", "b", /^b/, /^c/, /^d/ ] } } ).explain().indexBounds );
+
+// SERVER-505
+assert.eq( [ [ {x:"a"}, {x:"a"} ] ], t.find( { x : { $all: [ "a", /^a/ ] } } ).explain().indexBounds );
+assert.eq( [ [ {x:"a"}, {x:"b"} ] ], t.find( { x : { $all: [ /^a/ ] } } ).explain().indexBounds );
diff --git a/jstests/regex6.js b/jstests/regex6.js
index d25367c..12ed85b 100644
--- a/jstests/regex6.js
+++ b/jstests/regex6.js
@@ -10,10 +10,13 @@ t.save( { name : "aaron" } );
t.ensureIndex( { name : 1 } );
assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
-assert.eq( 0 , t.find( { name : /^\// } ).explain().nscanned , "index explain" );
-assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain" );
-assert.eq( 0 , t.find( { name : /^\é/ } ).explain().nscanned , "index explain" );
-assert.eq( 0 , t.find( { name : /^\./ } ).explain().nscanned , "index explain" );
-assert.eq( 4 , t.find( { name : /^./ } ).explain().nscanned , "index explain" );
+assert.eq( 0 , t.find( { name : /^\// } ).explain().nscanned , "index explain 1" );
+assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain 2" );
+assert.eq( 0 , t.find( { name : /^\é/ } ).explain().nscanned , "index explain 3" );
+assert.eq( 0 , t.find( { name : /^\./ } ).explain().nscanned , "index explain 4" );
+assert.eq( 4 , t.find( { name : /^./ } ).explain().nscanned , "index explain 5" );
-assert.eq( 4 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain" );
+assert.eq( 4 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain 6" );
+
+assert.eq( 1, t.find( { name : { $regex : "^e", $gte: "emily" } } ).explain().nscanned , "ie7" );
+assert.eq( 1, t.find( { name : { $gt : "a", $regex: "^emily" } } ).explain().nscanned , "ie8" );
diff --git a/jstests/regex7.js b/jstests/regex7.js
new file mode 100644
index 0000000..ab4f608
--- /dev/null
+++ b/jstests/regex7.js
@@ -0,0 +1,26 @@
+t = db.regex_matches_self;
+t.drop();
+
+t.insert({r:/^a/});
+t.insert({r:/^a/i});
+t.insert({r:/^b/});
+
+// no index
+assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 a')
+assert.eq( 1, t.count({r:/^a/}), '1 2')
+assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 a')
+assert.eq( 1, t.count({r:/^a/i}), '2 2 a')
+assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 a')
+assert.eq( 1, t.count({r:/^b/}), '3 2 a')
+
+// with index
+t.ensureIndex({r:1})
+assert.eq( /^a/, t.findOne({r:/^a/}).r, '1 1 b')
+assert.eq( 1, t.count({r:/^a/}), '1 2 b')
+assert.eq( /^a/i, t.findOne({r:/^a/i}).r, '2 1 b')
+assert.eq( 1, t.count({r:/^a/i}), '2 2 b')
+assert.eq( /^b/, t.findOne({r:/^b/}).r, '3 1 b')
+assert.eq( 1, t.count({r:/^b/}), '3 2 b')
+
+t.insert( {r:"a"} );
+assert.eq( 2, t.count({r:/^a/}), 'c' ); \ No newline at end of file
diff --git a/jstests/regex8.js b/jstests/regex8.js
new file mode 100644
index 0000000..33dd74f
--- /dev/null
+++ b/jstests/regex8.js
@@ -0,0 +1,19 @@
+
+t = db.regex8;
+t.drop()
+
+t.insert( { _id : 1 , a : "abc" } )
+t.insert( { _id : 2 , a : "abc" } )
+t.insert( { _id : 3 , a : "bdc" } )
+
+function test( msg ){
+ assert.eq( 3 , t.find().itcount() , msg + "1" )
+ assert.eq( 2 , t.find( { a : /a.*/ } ).itcount() , msg + "2" )
+ assert.eq( 3 , t.find( { a : /[ab].*/ } ).itcount() , msg + "3" )
+ assert.eq( 3 , t.find( { a : /[a|b].*/ } ).itcount() , msg + "4" )
+}
+
+test( "A" );
+
+t.ensureIndex( { a : 1 } )
+test( "B" )
diff --git a/jstests/regex9.js b/jstests/regex9.js
new file mode 100644
index 0000000..559efd9
--- /dev/null
+++ b/jstests/regex9.js
@@ -0,0 +1,11 @@
+
+t = db.regex9;
+t.drop();
+
+t.insert( { _id : 1 , a : [ "a" , "b" , "c" ] } )
+t.insert( { _id : 2 , a : [ "a" , "b" , "c" , "d" ] } )
+t.insert( { _id : 3 , a : [ "b" , "c" , "d" ] } )
+
+assert.eq( 2 , t.find( { a : /a/ } ).itcount() , "A1" )
+assert.eq( 2 , t.find( { a : { $regex : "a" } } ).itcount() , "A2" )
+assert.eq( 2 , t.find( { a : { $regex : /a/ } } ).itcount() , "A3" )
diff --git a/jstests/regex_embed1.js b/jstests/regex_embed1.js
new file mode 100644
index 0000000..61b1b9a
--- /dev/null
+++ b/jstests/regex_embed1.js
@@ -0,0 +1,25 @@
+
+t = db.regex_embed1
+
+t.drop()
+
+t.insert( { _id : 1 , a : [ { x : "abc" } , { x : "def" } ] } )
+t.insert( { _id : 2 , a : [ { x : "ab" } , { x : "de" } ] } )
+t.insert( { _id : 3 , a : [ { x : "ab" } , { x : "de" } , { x : "abc" } ] } )
+
+function test( m ){
+ assert.eq( 3 , t.find().itcount() , m + "1" );
+ assert.eq( 2 , t.find( { "a.x" : "abc" } ).itcount() , m + "2" );
+ assert.eq( 2 , t.find( { "a.x" : /.*abc.*/ } ).itcount() , m + "3" );
+
+ assert.eq( 1 , t.find( { "a.0.x" : "abc" } ).itcount() , m + "4" );
+ assert.eq( 1 , t.find( { "a.0.x" : /abc/ } ).itcount() , m + "5" );
+}
+
+test( "A" );
+
+t.ensureIndex( { "a.x" : 1 } )
+test( "B" );
+
+
+
diff --git a/jstests/repl/basic1.js b/jstests/repl/basic1.js
index 9668a91..e0acf5c 100644
--- a/jstests/repl/basic1.js
+++ b/jstests/repl/basic1.js
@@ -52,6 +52,51 @@ check( "B" );
am.a.update( {} , { $inc : { x : 1 } } , false , true );
check( "C" );
+// ----- check features -------
+
+// map/reduce
+am.mr.insert( { tags : [ "a" ] } )
+am.mr.insert( { tags : [ "a" , "b" ] } )
+am.getLastError();
+check( "mr setup" );
+
+m = function(){
+ for ( var i=0; i<this.tags.length; i++ ){
+ print( "\t " + i );
+ emit( this.tags[i] , 1 );
+ }
+}
+
+r = function( key , v ){
+ return Array.sum( v );
+}
+
+correct = { a : 2 , b : 1 };
+
+function checkMR( t ){
+ var res = t.mapReduce( m , r );
+ assert.eq( correct , res.convertToSingleObject() , "checkMR: " + tojson( t ) );
+}
+
+function checkNumCollections( msg , diff ){
+ if ( ! diff ) diff = 0;
+ var m = am.getCollectionNames();
+ var s = as.getCollectionNames();
+ assert.eq( m.length + diff , s.length , "lengths bad \n" + tojson( m ) + "\n" + tojson( s ) );
+}
+
+checkNumCollections( "MR1" );
+checkMR( am.mr );
+checkMR( as.mr );
+checkNumCollections( "MR2" );
+
+sleep( 3000 );
+checkNumCollections( "MR3" );
+
+var res = am.mr.mapReduce( m , r , { out : "xyz" } );
+sleep( 3000 );
+checkNumCollections( "MR4" );
+
rt.stop();
diff --git a/jstests/repl/master1.js b/jstests/repl/master1.js
new file mode 100644
index 0000000..9f021fc
--- /dev/null
+++ b/jstests/repl/master1.js
@@ -0,0 +1,49 @@
+// Test handling of clock skew and optimes across mongod instances
+
+var baseName = "jstests_repl_master1test";
+
+oplog = function() {
+ return m.getDB( "local" ).oplog.$main;
+}
+
+lastop = function() {
+ return oplog().find().sort( {$natural:-1} ).next();
+}
+
+am = function() {
+ return m.getDB( baseName ).a;
+}
+
+rt = new ReplTest( baseName );
+
+m = rt.start( true );
+
+am().save( {} );
+assert.eq( "i", lastop().op );
+
+op = lastop();
+printjson( op );
+op.ts.t = op.ts.t + 600000 // 10 minutes
+m.getDB( "local" ).runCommand( {godinsert:"oplog.$main", obj:op} );
+
+rt.stop( true );
+m = rt.start( true, null, true );
+
+assert.eq( op.ts.t, lastop().ts.t );
+am().save( {} );
+assert.eq( op.ts.t, lastop().ts.t );
+assert.eq( op.ts.i + 1, lastop().ts.i );
+
+op = lastop();
+printjson( op );
+op.ts.i = Math.pow(2,31);
+printjson( op );
+m.getDB( "local" ).runCommand( {godinsert:"oplog.$main", obj:op} );
+
+rt.stop( true );
+m = rt.start( true, null, true );
+assert.eq( op.ts.i, lastop().ts.i );
+am().save( {} );
+sleep( 3000 ); // make sure dies on its own before stop() called
+
+assert.eq( 47 /*EXIT_CLOCK_SKEW*/, rt.stop( true ) ); \ No newline at end of file
diff --git a/jstests/repl/pair1.js b/jstests/repl/pair1.js
index 7004048..b8b7ffd 100644
--- a/jstests/repl/pair1.js
+++ b/jstests/repl/pair1.js
@@ -34,6 +34,7 @@ check = function( s ) {
assert.soon( function() {
return 1 == getCount( s );
} );
+ sleep( 500 ); // wait for sync clone to finish up
}
// check that slave reads and writes are guarded
diff --git a/jstests/repl/pair3.js b/jstests/repl/pair3.js
index 506e173..d1cf99a 100644
--- a/jstests/repl/pair3.js
+++ b/jstests/repl/pair3.js
@@ -226,6 +226,8 @@ doTest4 = function( signal ) {
// now can only talk to arbiter
pair.start( true );
pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
}
doTest1();
diff --git a/jstests/repl/pair4.js b/jstests/repl/pair4.js
index 5a59c16..c04433e 100644
--- a/jstests/repl/pair4.js
+++ b/jstests/repl/pair4.js
@@ -134,6 +134,7 @@ doTest = function( recover, newMaster, newSlave ) {
}
+// right will be master on recovery b/c both sides will have completed initial sync
debug( "basic test" );
doTest( function() {
connect();
diff --git a/jstests/repl/pair5.js b/jstests/repl/pair5.js
index ed8c72d..de7e2d5 100644
--- a/jstests/repl/pair5.js
+++ b/jstests/repl/pair5.js
@@ -62,7 +62,7 @@ doTest = function( nSlave, opIdMem ) {
disconnect();
pair.waitForSteadyState( [ 1, 1 ], null, true );
- // left will become slave
+ // left will become slave (b/c both completed initial sync)
for( i = 0; i < nSlave; ++i ) {
write( pair.left(), i, i );
}
diff --git a/jstests/repl/pair7.js b/jstests/repl/pair7.js
new file mode 100644
index 0000000..52ef91f
--- /dev/null
+++ b/jstests/repl/pair7.js
@@ -0,0 +1,85 @@
+// pairing with auth
+
+var baseName = "jstests_pair7test";
+
+setAdmin = function( n ) {
+ n.getDB( "admin" ).addUser( "super", "super" );
+ n.getDB( "local" ).addUser( "repl", "foo" );
+ n.getDB( "local" ).system.users.findOne();
+}
+
+auth = function( n ) {
+ return n.getDB( baseName ).auth( "test", "test" );
+}
+
+doTest = function( signal ) {
+
+ ports = allocatePorts( 3 );
+
+ m = startMongod( "--port", ports[ 1 ], "--dbpath", "/data/db/" + baseName + "-left", "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+ setAdmin( m );
+ stopMongod( ports[ 1 ] );
+
+ m = startMongod( "--port", ports[ 2 ], "--dbpath", "/data/db/" + baseName + "-right", "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+ setAdmin( m );
+ stopMongod( ports[ 2 ] );
+
+ a = new MongodRunner( ports[ 0 ], "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ], [ "--auth" ] );
+ r = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ], [ "--auth" ] );
+
+ rp = new ReplPair( l, r, a );
+ rp.start( true );
+ rp.waitForSteadyState();
+
+ rp.master().getDB( "admin" ).auth( "super", "super" );
+ rp.master().getDB( baseName ).addUser( "test", "test" );
+ auth( rp.master() ); // reauth
+ assert.soon( function() { return auth( rp.slave() ); } );
+ rp.slave().setSlaveOk();
+
+ ma = rp.master().getDB( baseName ).a;
+ ma.save( {} );
+ sa = rp.slave().getDB( baseName ).a;
+ assert.soon( function() { return 1 == sa.count(); } );
+
+ rp.killNode( rp.slave(), signal );
+ rp.waitForSteadyState( [ 1, null ] );
+ ma.save( {} );
+
+ rp.start( true );
+ rp.waitForSteadyState();
+ assert.soon( function() { return auth( rp.slave() ); } );
+ rp.slave().setSlaveOk();
+ sa = rp.slave().getDB( baseName ).a;
+ assert.soon( function() { return 2 == sa.count(); } );
+
+ ma.save( {a:1} );
+ assert.soon( function() { return 1 == sa.count( {a:1} ); } );
+
+ ma.update( {a:1}, {b:2} );
+ assert.soon( function() { return 1 == sa.count( {b:2} ); } );
+
+ ma.remove( {b:2} );
+ assert.soon( function() { return 0 == sa.count( {b:2} ); } );
+
+ rp.killNode( rp.master(), signal );
+ rp.waitForSteadyState( [ 1, null ] );
+ ma = sa;
+ ma.save( {} );
+
+ rp.start( true );
+ rp.waitForSteadyState();
+ assert.soon( function() { return auth( rp.slave() ); } );
+ rp.slave().setSlaveOk();
+ sa = rp.slave().getDB( baseName ).a;
+ assert.soon( function() { return 3 == sa.count(); } );
+
+ ma.save( {} );
+ assert.soon( function() { return 4 == sa.count(); } );
+
+ ports.forEach( function( x ) { stopMongod( x ); } );
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl10.js b/jstests/repl/repl10.js
new file mode 100644
index 0000000..67c5db1
--- /dev/null
+++ b/jstests/repl/repl10.js
@@ -0,0 +1,38 @@
+// Test slave delay
+
+var baseName = "jstests_repl10test";
+
+soonCount = function( count ) {
+ assert.soon( function() {
+ // print( "check count" );
+ // print( "count: " + s.getDB( baseName ).z.find().count() );
+ return s.getDB( baseName ).a.find().count() == count;
+ } );
+}
+
+doTest = function( signal ) {
+
+ rt = new ReplTest( "repl10tests" );
+
+ m = rt.start( true );
+ s = rt.start( false, { "slavedelay": "10" } );
+
+ am = m.getDB( baseName ).a
+
+ am.save( {i:1} );
+
+ soonCount( 1 );
+
+ am.save( {i:2} );
+ assert.eq( 2, am.count() );
+ sleep( 3000 );
+
+ rt.stop( true, signal );
+ sleep( 3000 );
+ assert.eq( 1, s.getDB( baseName ).a.count() );
+
+ rt.stop();
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl11.js b/jstests/repl/repl11.js
new file mode 100644
index 0000000..c5c63b3
--- /dev/null
+++ b/jstests/repl/repl11.js
@@ -0,0 +1,59 @@
+// Test repl with auth enabled
+
+var baseName = "jstests_repl11test";
+
+setAdmin = function( n ) {
+ n.getDB( "admin" ).addUser( "super", "super" );
+ n.getDB( "local" ).addUser( "repl", "foo" );
+ n.getDB( "local" ).system.users.findOne();
+}
+
+auth = function( n ) {
+ return n.getDB( baseName ).auth( "test", "test" );
+}
+
+doTest = function( signal ) {
+
+ rt = new ReplTest( baseName );
+
+ m = rt.start( true, {}, false, true );
+ m.getDB( baseName ).addUser( "test", "test" );
+ setAdmin( m );
+ rt.stop( true );
+
+ s = rt.start( false, {}, false, true );
+ setAdmin( s );
+ rt.stop( false );
+
+ m = rt.start( true, { auth:null }, true );
+ auth( m );
+ s = rt.start( false, { auth:null }, true );
+ assert.soon( function() { return auth( s ); } );
+
+ ma = m.getDB( baseName ).a;
+ ma.save( {} );
+ sa = s.getDB( baseName ).a;
+ assert.soon( function() { return 1 == sa.count(); } );
+
+ rt.stop( false, signal );
+
+ ma.save( {} );
+ s = rt.start( false, { auth:null }, true );
+ assert.soon( function() { return auth( s ); } );
+ sa = s.getDB( baseName ).a;
+ assert.soon( function() { return 2 == sa.count(); } );
+
+ ma.save( {a:1} );
+ assert.soon( function() { return 1 == sa.count( {a:1} ); } );
+
+ ma.update( {a:1}, {b:2} );
+ assert.soon( function() { return 1 == sa.count( {b:2} ); } );
+
+ ma.remove( {b:2} );
+ assert.soon( function() { return 0 == sa.count( {b:2} ); } );
+
+ rt.stop();
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl4.js b/jstests/repl/repl4.js
index de7ca43..56719b7 100644
--- a/jstests/repl/repl4.js
+++ b/jstests/repl/repl4.js
@@ -25,6 +25,14 @@ doTest = function() {
printjson( s.getDBNames() );
assert.eq( -1, s.getDBNames().indexOf( "b" ) );
assert.eq( 0, s.getDB( "b" ).b.find().count() );
+
+ rt.stop( false );
+
+ cm.save( { x:3 } );
+ bm.save( { x:4 } );
+
+ s = rt.start( false, { only: "c" }, true );
+ soonCount( "c", "c", 2 );
}
doTest();
diff --git a/jstests/repl/replacePeer1.js b/jstests/repl/replacePeer1.js
index 45ee544..b3743ce 100644
--- a/jstests/repl/replacePeer1.js
+++ b/jstests/repl/replacePeer1.js
@@ -38,7 +38,8 @@ doTest = function( signal ) {
rp = new ReplPair( l, r, a );
rp.start();
- rp.waitForSteadyState( [ 1, 0 ], rp.right().host );
+ rp.waitForSteadyState( [ 1, 0 ] );
+ rightMaster = ( rp.master().host == rp.right().host );
checkWrite( rp.master(), rp.slave() );
@@ -51,16 +52,26 @@ doTest = function( signal ) {
rp.killNode( rp.master(), signal );
rp.killNode( rp.arbiter(), signal );
- o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
- r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ if ( rightMaster ) {
+ o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
+ r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ rp = new ReplPair( o, r, a );
+ resetDbpath( "/data/db/" + baseName + "-left" );
+ } else {
+ l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+ rp = new ReplPair( l, o, a );
+ resetDbpath( "/data/db/" + baseName + "-right" );
+ }
- rp = new ReplPair( o, r, a );
- resetDbpath( "/data/db/" + baseName + "-left" );
rp.start( true );
- rp.waitForSteadyState( [ 1, 0 ], rp.right().host );
+ rp.waitForSteadyState( [ 1, 0 ] );
- checkWrite( rp.master(), rp.slave() );
rp.slave().setSlaveOk();
+ assert.eq( 2, rp.master().getDB( baseName ).z.find().toArray().length );
+ assert.eq( 2, rp.slave().getDB( baseName ).z.find().toArray().length );
+
+ checkWrite( rp.master(), rp.slave() );
assert.eq( 3, rp.slave().getDB( baseName ).z.find().toArray().length );
ports.forEach( function( x ) { stopMongod( x ); } );
diff --git a/jstests/repl/replacePeer2.js b/jstests/repl/replacePeer2.js
index 09c8177..f519b17 100644
--- a/jstests/repl/replacePeer2.js
+++ b/jstests/repl/replacePeer2.js
@@ -38,8 +38,9 @@ doTest = function( signal ) {
rp = new ReplPair( l, r, a );
rp.start();
- rp.waitForSteadyState( [ 1, 0 ], rp.right().host );
-
+ rp.waitForSteadyState( [ 1, 0 ] );
+ leftSlave = ( rp.slave().host == rp.left().host );
+
checkWrite( rp.master(), rp.slave() );
// allow slave to finish initial sync
@@ -52,16 +53,26 @@ doTest = function( signal ) {
ports.forEach( function( x ) { stopMongod( x, signal ); } );
- l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
- o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
-
- rp = new ReplPair( l, o, a );
- resetDbpath( "/data/db/" + baseName + "-right" );
- rp.start( true );
- rp.waitForSteadyState( [ 1, 0 ], rp.left().host );
+ if ( leftSlave ) {
+ l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+ rp = new ReplPair( l, o, a );
+ resetDbpath( "/data/db/" + baseName + "-right" );
+ } else {
+ o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
+ r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ rp = new ReplPair( o, r, a );
+ resetDbpath( "/data/db/" + baseName + "-left" );
+ }
- checkWrite( rp.master(), rp.slave() );
+ rp.start( true );
+ rp.waitForSteadyState( [ 1, 0 ] );
+
rp.slave().setSlaveOk();
+ assert.eq( 1, rp.slave().getDB( baseName ).z.find().toArray().length );
+ assert.eq( 1, rp.master().getDB( baseName ).z.find().toArray().length );
+
+ checkWrite( rp.master(), rp.slave() );
assert.eq( 2, rp.slave().getDB( baseName ).z.find().toArray().length );
ports.forEach( function( x ) { stopMongod( x ); } );
diff --git a/jstests/repl/snapshot1.js b/jstests/repl/snapshot1.js
new file mode 100644
index 0000000..3be37aa
--- /dev/null
+++ b/jstests/repl/snapshot1.js
@@ -0,0 +1,34 @@
+// Test SERVER-623 - starting slave from a new snapshot
+
+ports = allocatePorts( 3 );
+
+var baseName = "repl_snapshot1";
+
+rt1 = new ReplTest( "repl_snapshot1-1", [ ports[ 0 ], ports[ 1 ] ] );
+rt2 = new ReplTest( "repl_snapshot1-2", [ ports[ 0 ], ports[ 2 ] ] );
+m = rt1.start( true );
+
+big = new Array( 2000 ).toString();
+for( i = 0; i < 1000; ++i )
+ m.getDB( baseName )[ baseName ].save( { _id: new ObjectId(), i: i, b: big } );
+
+m.getDB( "admin" ).runCommand( {fsync:1,lock:1} );
+copyDbpath( rt1.getPath( true ), rt1.getPath( false ) );
+m.getDB( "admin" ).$cmd.sys.unlock.findOne();
+
+s1 = rt1.start( false, null, true );
+assert.eq( 1000, s1.getDB( baseName )[ baseName ].count() );
+m.getDB( baseName )[ baseName ].save( {i:1000} );
+assert.soon( function() { return 1001 == s1.getDB( baseName )[ baseName ].count(); } );
+
+s1.getDB( "admin" ).runCommand( {fsync:1,lock:1} );
+copyDbpath( rt1.getPath( false ), rt2.getPath( false ) );
+s1.getDB( "admin" ).$cmd.sys.unlock.findOne();
+
+s2 = rt2.start( false, null, true );
+assert.eq( 1001, s2.getDB( baseName )[ baseName ].count() );
+m.getDB( baseName )[ baseName ].save( {i:1001} );
+assert.soon( function() { return 1002 == s2.getDB( baseName )[ baseName ].count(); } );
+assert.soon( function() { return 1002 == s1.getDB( baseName )[ baseName ].count(); } );
+
+assert( !rawMongoProgramOutput().match( /resync/ ) ); \ No newline at end of file
diff --git a/jstests/repl/snapshot2.js b/jstests/repl/snapshot2.js
new file mode 100644
index 0000000..4ebd786
--- /dev/null
+++ b/jstests/repl/snapshot2.js
@@ -0,0 +1,50 @@
+// Test SERVER-623 - starting repl peer from a new snapshot of master
+
+ports = allocatePorts( 3 );
+
+var baseName = "repl_snapshot2";
+var basePath = "/data/db/" + baseName;
+
+a = new MongodRunner( ports[ 0 ], basePath + "-arbiter" );
+l = new MongodRunner( ports[ 1 ], basePath + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+r = new MongodRunner( ports[ 2 ], basePath + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+
+rp = new ReplPair( l, r, a );
+rp.start();
+rp.waitForSteadyState();
+
+big = new Array( 2000 ).toString();
+rp.slave().setSlaveOk();
+for( i = 0; i < 1000; ++i ) {
+ rp.master().getDB( baseName )[ baseName ].save( { _id: new ObjectId(), i: i, b: big } );
+ if ( i % 250 == 249 ) {
+ assert.soon( function() { return i+1 == rp.slave().getDB( baseName )[ baseName ].count(); } );
+ }
+}
+
+rp.master().getDB( "admin" ).runCommand( {fsync:1,lock:1} );
+leftMaster = ( rp.master().host == rp.left().host );
+rp.killNode( rp.slave() );
+if ( leftMaster ) {
+ copyDbpath( basePath + "-left", basePath + "-right" );
+} else {
+ copyDbpath( basePath + "-right", basePath + "-left" );
+}
+rp.master().getDB( "admin" ).$cmd.sys.unlock.findOne();
+rp.killNode( rp.master() );
+
+clearRawMongoProgramOutput();
+
+rp.right_.extraArgs_ = [ "--fastsync" ];
+rp.left_.extraArgs_ = [ "--fastsync" ];
+
+rp.start( true );
+rp.waitForSteadyState();
+assert.eq( 1000, rp.master().getDB( baseName )[ baseName ].count() );
+rp.slave().setSlaveOk();
+assert.eq( 1000, rp.slave().getDB( baseName )[ baseName ].count() );
+rp.master().getDB( baseName )[ baseName ].save( {i:1000} );
+assert.soon( function() { return 1001 == rp.slave().getDB( baseName )[ baseName ].count(); } );
+
+assert( !rawMongoProgramOutput().match( /resync/ ) );
+assert( !rawMongoProgramOutput().match( /SyncException/ ) ); \ No newline at end of file
diff --git a/jstests/repl/snapshot3.js b/jstests/repl/snapshot3.js
new file mode 100644
index 0000000..5380bbf
--- /dev/null
+++ b/jstests/repl/snapshot3.js
@@ -0,0 +1,50 @@
+// Test SERVER-623 - starting repl peer from a new snapshot of slave
+
+ports = allocatePorts( 3 );
+
+var baseName = "repl_snapshot2";
+var basePath = "/data/db/" + baseName;
+
+a = new MongodRunner( ports[ 0 ], basePath + "-arbiter" );
+l = new MongodRunner( ports[ 1 ], basePath + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+r = new MongodRunner( ports[ 2 ], basePath + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+
+rp = new ReplPair( l, r, a );
+rp.start();
+rp.waitForSteadyState();
+
+big = new Array( 2000 ).toString();
+rp.slave().setSlaveOk();
+for( i = 0; i < 1000; ++i ) {
+ rp.master().getDB( baseName )[ baseName ].save( { _id: new ObjectId(), i: i, b: big } );
+ if ( i % 250 == 249 ) {
+ assert.soon( function() { return i+1 == rp.slave().getDB( baseName )[ baseName ].count(); } );
+ }
+}
+
+rp.slave().getDB( "admin" ).runCommand( {fsync:1,lock:1} );
+leftSlave = ( rp.slave().host == rp.left().host );
+rp.killNode( rp.master() );
+if ( leftSlave ) {
+ copyDbpath( basePath + "-left", basePath + "-right" );
+} else {
+ copyDbpath( basePath + "-right", basePath + "-left" );
+}
+rp.slave().getDB( "admin" ).$cmd.sys.unlock.findOne();
+rp.killNode( rp.slave() );
+
+clearRawMongoProgramOutput();
+
+rp.right_.extraArgs_ = [ "--fastsync" ];
+rp.left_.extraArgs_ = [ "--fastsync" ];
+
+rp.start( true );
+rp.waitForSteadyState();
+assert.eq( 1000, rp.master().getDB( baseName )[ baseName ].count() );
+rp.slave().setSlaveOk();
+assert.eq( 1000, rp.slave().getDB( baseName )[ baseName ].count() );
+rp.master().getDB( baseName )[ baseName ].save( {i:1000} );
+assert.soon( function() { return 1001 == rp.slave().getDB( baseName )[ baseName ].count(); } );
+
+assert( !rawMongoProgramOutput().match( /resync/ ) );
+assert( !rawMongoProgramOutput().match( /SyncException/ ) ); \ No newline at end of file
diff --git a/jstests/run_program1.js b/jstests/run_program1.js
new file mode 100644
index 0000000..7a994b2
--- /dev/null
+++ b/jstests/run_program1.js
@@ -0,0 +1,19 @@
+if ( ! _isWindows() ) {
+
+ // note that normal program exit returns 0
+ assert.eq (0, runProgram('true'))
+ assert.neq(0, runProgram('false'))
+ assert.neq(0, runProgram('this_program_doesnt_exit'));
+
+ //verify output visually
+ runProgram('echo', 'Hello', 'World.', 'How are you?');
+ runProgram('bash', '-c', 'echo Hello World. "How are you?"'); // only one space is printed between Hello and World
+
+ // numbers can be passed as numbers or strings
+ runProgram('sleep', 0.5);
+ runProgram('sleep', '0.5');
+
+} else {
+
+ runProgram('cmd', '/c', 'echo hello windows');
+}
diff --git a/jstests/set5.js b/jstests/set5.js
new file mode 100644
index 0000000..10f26ad
--- /dev/null
+++ b/jstests/set5.js
@@ -0,0 +1,17 @@
+
+t = db.set5;
+t.drop();
+
+function check( want , err ){
+ var x = t.findOne();
+ delete x._id;
+ assert.eq( want , x , err );
+}
+
+t.update( { a : 5 } , { $set : { a : 6 , b : null } } , true );
+check( { a : 6 , b : null } , "A" )
+
+t.drop();
+
+t.update( { z : 5 } , { $set : { z : 6 , b : null } } , true );
+check( { b : null , z : 6 } , "B" )
diff --git a/jstests/set6.js b/jstests/set6.js
new file mode 100644
index 0000000..d41e7ab
--- /dev/null
+++ b/jstests/set6.js
@@ -0,0 +1,20 @@
+
+t = db.set6;
+t.drop();
+
+x = { _id : 1 , r : new DBRef( "foo" , new ObjectId() ) }
+t.insert( x )
+assert.eq( x , t.findOne() , "A" );
+
+x.r.$id = new ObjectId()
+t.update({}, { $set : { r : x.r } } );
+assert.eq( x , t.findOne() , "B");
+
+x.r2 = new DBRef( "foo2" , 5 )
+t.update( {} , { $set : { "r2" : x.r2 } } );
+assert.eq( x , t.findOne() , "C" )
+
+x.r.$id = 2;
+t.update( {} , { $set : { "r.$id" : 2 } } )
+assert.eq( x.r.$id , t.findOne().r.$id , "D");
+
diff --git a/jstests/set7.js b/jstests/set7.js
new file mode 100644
index 0000000..b46fe9e
--- /dev/null
+++ b/jstests/set7.js
@@ -0,0 +1,40 @@
+// test $set with array indices
+
+t = db.jstests_set7;
+
+t.drop();
+
+t.save( {a:[0,1,2,3]} );
+t.update( {}, {$set:{"a.0":2}} );
+assert.eq( [2,1,2,3], t.findOne().a );
+
+t.update( {}, {$set:{"a.4":5}} );
+assert.eq( [2,1,2,3,5], t.findOne().a );
+
+t.update( {}, {$set:{"a.9":9}} );
+assert.eq( [2,1,2,3,5,null,null,null,null,9], t.findOne().a );
+
+t.drop();
+t.save( {a:[0,1,2,3]} );
+t.update( {}, {$set:{"a.9":9,"a.7":7}} );
+assert.eq( [0,1,2,3,null,null,null,7,null,9], t.findOne().a );
+
+t.drop();
+t.save( {a:[0,1,2,3,4,5,6,7,8,9,10]} );
+t.update( {}, {$set:{"a.11":11} } );
+assert.eq( [0,1,2,3,4,5,6,7,8,9,10,11], t.findOne().a );
+
+t.drop();
+t.save( {} );
+t.update( {}, {$set:{"a.0":4}} );
+assert.eq( {"0":4}, t.findOne().a );
+
+t.drop();
+t.update( {"a.0":4}, {$set:{b:1}}, true );
+assert.eq( {"0":4}, t.findOne().a );
+
+t.drop();
+t.save( {a:[]} );
+t.update( {}, {$set:{"a.f":1}} );
+assert( db.getLastError() );
+assert.eq( [], t.findOne().a );
diff --git a/jstests/sharding/findandmodify1.js b/jstests/sharding/findandmodify1.js
new file mode 100644
index 0000000..774701f
--- /dev/null
+++ b/jstests/sharding/findandmodify1.js
@@ -0,0 +1,57 @@
+s = new ShardingTest( "find_and_modify_sharded" , 2 );
+
+s.adminCommand( { enablesharding : "test" } );
+db = s.getDB( "test" );
+primary = s.getServer( "test" ).getDB( "test" );
+seconday = s.getOther( primary ).getDB( "test" );
+
+numObjs = 20;
+
+s.adminCommand( { shardcollection : "test.stuff" , key : {_id:1} } );
+
+for (var i=0; i < numObjs; i++){
+ db.stuff.insert({_id: i});
+}
+
+for (var i=0; i < numObjs; i+=2){
+ s.adminCommand( { split: "test.stuff" , middle : {_id: i} } );
+}
+
+for (var i=0; i < numObjs; i+=4){
+ s.adminCommand( { movechunk : "test.stuff" , find : {_id: i} , to : seconday.getMongo().name } );
+}
+
+//sorted update
+for (var i=0; i < numObjs; i++){
+ assert.eq(db.stuff.count({a:1}), i, "1 A");
+
+ var out = db.stuff.findAndModify({query: {a:null}, update: {$set: {a:1}}, sort: {_id:1}});
+
+ assert.eq(db.stuff.count({a:1}), i+1, "1 B");
+ assert.eq(db.stuff.findOne({_id:i}).a, 1, "1 C");
+ assert.eq(out._id, i, "1 D");
+}
+
+// unsorted update
+for (var i=0; i < numObjs; i++){
+ assert.eq(db.stuff.count({b:1}), i, "2 A");
+
+ var out = db.stuff.findAndModify({query: {b:null}, update: {$set: {b:1}}});
+
+ assert.eq(db.stuff.count({b:1}), i+1, "2 B");
+ assert.eq(db.stuff.findOne({_id:out._id}).a, 1, "2 C");
+}
+
+//sorted remove (no query)
+for (var i=0; i < numObjs; i++){
+ assert.eq(db.stuff.count(), numObjs - i, "3 A");
+ assert.eq(db.stuff.count({_id: i}), 1, "3 B");
+
+ var out = db.stuff.findAndModify({remove: true, sort: {_id:1}});
+
+ assert.eq(db.stuff.count(), numObjs - i - 1, "3 C");
+ assert.eq(db.stuff.count({_id: i}), 0, "3 D");
+ assert.eq(out._id, i, "3 E");
+}
+
+s.stop();
diff --git a/jstests/sharding/key_many.js b/jstests/sharding/key_many.js
index 43e7cc5..d1644ac 100644
--- a/jstests/sharding/key_many.js
+++ b/jstests/sharding/key_many.js
@@ -1,13 +1,14 @@
// key_many.js
// values have to be sorted
-types =
- [ { name : "string" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield: "k" } ,
- { name : "double" , values : [ 1.2 , 3.5 , 4.5 , 4.6 , 6.7 , 9.9 ] , keyfield : "a" } ,
- { name : "string_id" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "_id" },
- { name : "embedded" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b" } ,
- { name : "embedded 2" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b.c" } ,
- { name : "object" , values : [ {a:1, b:1.2}, {a:1, b:3.5}, {a:1, b:4.5}, {a:2, b:1.2}, {a:2, b:3.5}, {a:2, b:4.5} ] , keyfield : "o" } ,
+types = [
+ { name : "string" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield: "k" } ,
+ { name : "double" , values : [ 1.2 , 3.5 , 4.5 , 4.6 , 6.7 , 9.9 ] , keyfield : "a" } ,
+ { name : "date" , values : [ new Date( 1000000 ) , new Date( 2000000 ) , new Date( 3000000 ) , new Date( 4000000 ) , new Date( 5000000 ) , new Date( 6000000 ) ] , keyfield : "a" } ,
+ { name : "string_id" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "_id" },
+ { name : "embedded" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b" } ,
+ { name : "embedded 2" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b.c" } ,
+ { name : "object" , values : [ {a:1, b:1.2}, {a:1, b:3.5}, {a:1, b:4.5}, {a:2, b:1.2}, {a:2, b:3.5}, {a:2, b:4.5} ] , keyfield : "o" } ,
]
s = new ShardingTest( "key_many" , 2 );
diff --git a/jstests/sharding/moveshard1.js b/jstests/sharding/moveshard1.js
index b074b4c..9220983 100644
--- a/jstests/sharding/moveshard1.js
+++ b/jstests/sharding/moveshard1.js
@@ -16,8 +16,8 @@ assert.eq( ldb.things.count() , 3 );
assert.eq( rdb.things.count() , 0 );
startResult = l.getDB( "admin" ).runCommand( { "movechunk.start" : "foo.things" ,
- "to" : s._serverNames[1] ,
- "from" : s._serverNames[0] ,
+ "to" : s._connections[1].name ,
+ "from" : s._connections[0].name ,
filter : { a : { $gt : 2 } }
} );
print( "movechunk.start: " + tojson( startResult ) );
@@ -25,7 +25,7 @@ assert( startResult.ok == 1 , "start failed!" );
finishResult = l.getDB( "admin" ).runCommand( { "movechunk.finish" : "foo.things" ,
finishToken : startResult.finishToken ,
- to : s._serverNames[1] ,
+ to : s._connections[1].name ,
newVersion : 1 } );
print( "movechunk.finish: " + tojson( finishResult ) );
assert( finishResult.ok == 1 , "finishResult failed!" );
diff --git a/jstests/sharding/shard2.js b/jstests/sharding/shard2.js
index 566a0db..5932210 100644
--- a/jstests/sharding/shard2.js
+++ b/jstests/sharding/shard2.js
@@ -140,7 +140,7 @@ function countCursor( c ){
return num;
}
assert.eq( 6 , countCursor( db.foo.find()._exec() ) , "getMore 2" );
-assert.eq( 6 , countCursor( db.foo.find().limit(1)._exec() ) , "getMore 3" );
+assert.eq( 6 , countCursor( db.foo.find().batchSize(1)._exec() ) , "getMore 3" );
// find by non-shard-key
db.foo.find().forEach(
diff --git a/jstests/sharding/sync1.js b/jstests/sharding/sync1.js
new file mode 100644
index 0000000..905b488
--- /dev/null
+++ b/jstests/sharding/sync1.js
@@ -0,0 +1,21 @@
+
+test = new SyncCCTest( "sync1" )
+
+db = test.conn.getDB( "test" )
+t = db.sync1
+t.save( { x : 1 } )
+assert.eq( 1 , t.find().itcount() , "A1" );
+assert.eq( 1 , t.find().count() , "A2" );
+t.save( { x : 2 } )
+assert.eq( 2 , t.find().itcount() , "A3" );
+assert.eq( 2 , t.find().count() , "A4" );
+
+test.checkHashes( "test" , "A3" );
+
+test.tempKill();
+assert.throws( function(){ t.save( { x : 3 } ) } , "B1" )
+assert.eq( 2 , t.find().itcount() , "B2" );
+test.tempStart();
+test.checkHashes( "test" , "B3" );
+
+test.stop();
diff --git a/jstests/sharding/sync2.js b/jstests/sharding/sync2.js
new file mode 100644
index 0000000..b0bbcb6
--- /dev/null
+++ b/jstests/sharding/sync2.js
@@ -0,0 +1,48 @@
+// sync2.js
+
+s = new ShardingTest( "sync2" , 3 , 50 , 2 , { sync : true } );
+
+s2 = s._mongos[1];
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+
+s.getDB( "test" ).foo.save( { num : 1 } );
+s.getDB( "test" ).foo.save( { num : 2 } );
+s.getDB( "test" ).foo.save( { num : 3 } );
+s.getDB( "test" ).foo.save( { num : 4 } );
+s.getDB( "test" ).foo.save( { num : 5 } );
+s.getDB( "test" ).foo.save( { num : 6 } );
+s.getDB( "test" ).foo.save( { num : 7 } );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal A" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other A" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 4 } } );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getFirstOther( s.getServer( "test" ) ).name } );
+
+assert( s._connections[0].getDB( "test" ).foo.find().toArray().length > 0 , "blah 1" );
+assert( s._connections[1].getDB( "test" ).foo.find().toArray().length > 0 , "blah 2" );
+assert.eq( 7 , s._connections[0].getDB( "test" ).foo.find().toArray().length +
+ s._connections[1].getDB( "test" ).foo.find().toArray().length , "blah 3" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+s.printChunks();
+
+print( "* A" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B 1" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B 2" );
+print( "* B" );
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B 3" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B 4" );
+
+for ( var i=0; i<10; i++ ){
+ print( "* C " + i );
+ assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B " + i );
+}
+
+s.stop();
diff --git a/jstests/shellkillop.js b/jstests/shellkillop.js
new file mode 100644
index 0000000..e8a9763
--- /dev/null
+++ b/jstests/shellkillop.js
@@ -0,0 +1,18 @@
+baseName = "jstests_shellkillop";
+
+db[ baseName ].drop();
+
+for( i = 0; i < 100000; ++i ) {
+ db[ baseName ].save( {i:1} );
+}
+assert.eq( 100000, db[ baseName ].count() );
+
+spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "db." + baseName + ".update( {}, {$set:{i:\"abcdefghijkl\"}}, false, true ); db." + baseName + ".count();" );
+sleep( 100 );
+stopMongoProgramByPid( spawn );
+sleep( 100 );
+inprog = db.currentOp().inprog
+printjson( inprog );
+for( i in inprog ) {
+ assert( inprog[ i ].ns != "test." + baseName, "still running op" );
+}
diff --git a/jstests/shellspawn.js b/jstests/shellspawn.js
index ea2b671..5b0de6b 100644
--- a/jstests/shellspawn.js
+++ b/jstests/shellspawn.js
@@ -6,13 +6,13 @@ if ( typeof( _startMongoProgram ) == "undefined" ){
print( "no fork support" );
}
else {
- spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "sleep( 2000 ); db.getCollection( \"" + baseName + "\" ).save( {a:1} );" );
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "sleep( 2000 ); db.getCollection( '" + baseName + "' ).save( {a:1} );" );
assert.soon( function() { return 1 == t.count(); } );
stopMongoProgramByPid( spawn );
- spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "print( \"I am a shell\" );" );
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "print( 'I am a shell' );" );
spawn = startMongoProgramNoConnect( "mongo", "--port", myPort() );
@@ -21,4 +21,4 @@ else {
stopMongoProgramByPid( spawn );
// all these shells should be killed
-} \ No newline at end of file
+}
diff --git a/jstests/slow/indexbg1.js b/jstests/slow/indexbg1.js
new file mode 100644
index 0000000..5e34d44
--- /dev/null
+++ b/jstests/slow/indexbg1.js
@@ -0,0 +1,117 @@
+// Test background index creation
+
+parallel = function() {
+ return db[ baseName + "_parallelStatus" ];
+}
+
+resetParallel = function() {
+ parallel().drop();
+}
+
+doParallel = function(work) {
+ resetParallel();
+ print("doParallel: " + work);
+ startMongoProgramNoConnect("mongo", "--eval", work + "; db." + baseName + "_parallelStatus.save( {done:1} );", db.getMongo().host);
+}
+
+doneParallel = function() {
+ return !!parallel().findOne();
+}
+
+waitParallel = function() {
+ assert.soon( function() { return doneParallel(); }, "parallel did not finish in time", 300000, 1000 );
+}
+
+// waiting on SERVER-620
+
+print( "index11.js host:" );
+print( db.getMongo().host );
+
+if (1) {
+
+size = 500000;
+while( 1 ) { // if indexing finishes before we can run checks, try indexing w/ more data
+ print( "size: " + size );
+ baseName = "jstests_index11";
+ fullName = "db." + baseName;
+ t = db[ baseName ];
+ t.drop();
+
+ db.eval( function( size ) {
+ for( i = 0; i < size; ++i ) {
+ db.jstests_index11.save( {i:i} );
+ }
+ },
+ size );
+ assert.eq( size, t.count() );
+
+ doParallel( fullName + ".ensureIndex( {i:1}, {background:true} )" );
+ try {
+ // wait for indexing to start
+ print("wait for indexing to start");
+ assert.soon( function() { return 2 == db.system.indexes.count( {ns:"test."+baseName} ) }, "no index created", 30000, 50 );
+ print("started.");
+ assert.eq( size, t.count() );
+ assert.eq( 100, t.findOne( {i:100} ).i );
+ q = t.find();
+ for( i = 0; i < 120; ++i ) { // getmore
+ q.next();
+ assert( q.hasNext(), "no next" );
+ }
+ assert.eq( "BasicCursor", t.find( {i:100} ).explain().cursor, "used btree cursor" );
+ t.remove( {i:40} );
+ t.update( {i:10}, {i:-10} );
+ id = t.find().hint( {$natural:-1} )._id;
+ t.update( {_id:id}, {i:-2} );
+ t.save( {i:-50} );
+ t.save( {i:size+2} );
+ assert( !db.getLastError() );
+
+ print("calling ensureIndex");
+ t.ensureIndex( {i:1} );
+
+ printjson( db.getLastError() );
+ assert( db.getLastError() );
+ assert.eq( size + 1, t.count() );
+ assert( !db.getLastError() );
+
+ print("calling dropIndex");
+ t.dropIndex( {i:1} );
+ printjson( db.getLastError() );
+ assert( db.getLastError() );
+ } catch( e ) {
+ // only a failure if we're still indexing
+ // wait for parallel status to update to reflect indexing status
+ print("caught exception");
+ sleep( 1000 );
+ if ( !doneParallel() ) {
+ throw e;
+ }
+ print("but that's OK")
+ }
+ if ( !doneParallel() ) {
+ break;
+ }
+ print( "indexing finished too soon, retrying..." );
+ size *= 2;
+ assert( size < 20000000, "unable to run checks in parallel with index creation" );
+}
+
+print("our tests done, waiting for parallel to finish");
+waitParallel();
+print("finished");
+
+assert.eq( "BtreeCursor i_1", t.find( {i:100} ).explain().cursor );
+assert.eq( 1, t.count( {i:-10} ) );
+assert.eq( 1, t.count( {i:-2} ) );
+assert.eq( 1, t.count( {i:-50} ) );
+assert.eq( 1, t.count( {i:size+2} ) );
+assert.eq( 0, t.count( {i:40} ) );
+assert( !db.getLastError() );
+print("about to drop index");
+t.dropIndex( {i:1} );
+printjson( db.getLastError() );
+assert( !db.getLastError() );
+
+} // if 1
+
diff --git a/jstests/slow/indexbg2.js b/jstests/slow/indexbg2.js
new file mode 100644
index 0000000..1830f42
--- /dev/null
+++ b/jstests/slow/indexbg2.js
@@ -0,0 +1,83 @@
+// Test background index creation w/ constraints
+
+parallel = function() {
+ return db[ baseName + "_parallelStatus" ];
+}
+
+resetParallel = function() {
+ parallel().drop();
+}
+
+doParallel = function( work ) {
+ resetParallel();
+ startMongoProgramNoConnect( "mongo", "--eval", work + "; db." + baseName + "_parallelStatus.save( {done:1} );", db.getMongo().host );
+}
+
+doneParallel = function() {
+ return !!parallel().findOne();
+}
+
+waitParallel = function() {
+ assert.soon( function() { return doneParallel(); }, "parallel did not finish in time", 300000, 1000 );
+}
+
+doTest = function(dropDups) {
+
+ size = 10000;
+ while (1) { // if indexing finishes before we can run checks, try indexing w/ more data
+ print("size: " + size);
+ baseName = "jstests_index12";
+ fullName = "db." + baseName;
+ t = db[baseName];
+ t.drop();
+
+ db.eval(function(size) {
+ for (i = 0; i < size; ++i) {
+ db.jstests_index12.save({ i: i });
+ }
+ },
+ size);
+ assert.eq(size, t.count());
+
+ doParallel(fullName + ".ensureIndex( {i:1}, {background:true, unique:true, dropDups:" + dropDups + "} )");
+ try {
+ // wait for indexing to start
+ assert.soon(function() { return 2 == db.system.indexes.count({ ns: "test." + baseName }) }, "no index created", 30000, 50);
+ t.save({ i: 0, n: true });
+ //printjson(db.getLastError());
+ t.save({ i: size - 1, n: true });
+ //printjson(db.getLastError());
+ } catch (e) {
+ // only a failure if we're still indexing
+ // wait for parallel status to update to reflect indexing status
+ sleep(1000);
+ if (!doneParallel()) {
+ throw e;
+ }
+ }
+ if (!doneParallel()) {
+ break;
+ }
+ print("indexing finished too soon, retrying...");
+ size *= 2;
+ assert(size < 5000000, "unable to run checks in parallel with index creation");
+ }
+
+ waitParallel();
+
+ if( dropDups == "true" ) {
+ assert.eq(size, t.find().toArray().length, "full query failed");
+ assert.eq(size, t.count(), "count failed");
+ }
+ else {
+        /* Without dropDups, the collection may legitimately hold more than `size`
+           documents while the unique index build failed - which is valid. In that
+           case we verify the index was not created. */
+ if (t.count() != size)
+ assert.eq(1, t.getIndexes().length, "change in # of elems yet index is there");
+ }
+
+}
+
+doTest( "false" );
+doTest( "true" );
diff --git a/jstests/sort5.js b/jstests/sort5.js
index a589355..b90256e 100644
--- a/jstests/sort5.js
+++ b/jstests/sort5.js
@@ -15,7 +15,7 @@ assert(t.validate().valid, "A valid");
// test sorting on compound key involving _id
-// assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
-// t.ensureIndex({"y.b": 1, "_id": -1});
-// assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
-// assert(t.validate().valid, "B valid");
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
+t.ensureIndex({"y.b": 1, "_id": -1});
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
+assert(t.validate().valid, "B valid");
diff --git a/jstests/sort6.js b/jstests/sort6.js
new file mode 100644
index 0000000..027ba7a
--- /dev/null
+++ b/jstests/sort6.js
@@ -0,0 +1,38 @@
+
+t = db.sort6;
+
+function get( x ){
+ return t.find().sort( { c : x } ).map( function(z){ return z._id; } );
+}
+
+// part 1
+t.drop();
+
+t.insert({_id:1,c:null})
+t.insert({_id:2,c:1})
+t.insert({_id:3,c:2})
+
+
+assert.eq( [3,2,1] , get( -1 ) , "A1" ) // SERVER-635
+assert.eq( [1,2,3] , get( 1 ) , "A2" )
+
+t.ensureIndex( { c : 1 } );
+
+assert.eq( [3,2,1] , get( -1 ) , "B1" )
+assert.eq( [1,2,3] , get( 1 ) , "B2" )
+
+
+// part 2
+t.drop();
+
+t.insert({_id:1})
+t.insert({_id:2,c:1})
+t.insert({_id:3,c:2})
+
+assert.eq( [3,2,1] , get( -1 ) , "C1" ) // SERVER-635
+assert.eq( [1,2,3] , get( 1 ) , "C2" )
+
+t.ensureIndex( { c : 1 } );
+
+assert.eq( [3,2,1] , get( -1 ) , "D1" )
+assert.eq( [1,2,3] , get( 1 ) , "X2" )
diff --git a/jstests/storefunc.js b/jstests/storefunc.js
index bae1090..4cf7e30 100644
--- a/jstests/storefunc.js
+++ b/jstests/storefunc.js
@@ -12,6 +12,8 @@ assert.eq( 0 , s.count() , "setup - D" );
s.save( { _id : "x" , value : "4" } );
assert.eq( 1 , s.count() , "setup - E" );
+assert.eq( 4 , s.findOne( { _id : "x" } ).value , "E2 " );
+
assert.eq( 4 , s.findOne().value , "setup - F" );
s.update( { _id : "x" } , { $set : { value : 5 } } );
assert.eq( 1 , s.count() , "setup - G" );
@@ -29,3 +31,12 @@ assert.eq( 6 , db.eval( "return x" ) , "exec - 2 " );
s.insert( { _id : "bar" , value : function( z ){ return 17 + z; } } );
assert.eq( 22 , db.eval( "return bar(5);" ) , "exec - 3 " );
+
+assert( s.getIndexKeys().length > 0 , "no indexes" );
+assert( s.getIndexKeys()[0]._id , "no _id index" );
+
+assert.eq( "undefined" , db.eval( function(){ return typeof(zzz); } ) , "C1" );
+s.save( { _id : "zzz" , value : 5 } )
+assert.eq( "number" , db.eval( function(){ return typeof(zzz); } ) , "C2" );
+s.remove( { _id : "zzz" } );
+assert.eq( "undefined" , db.eval( function(){ return typeof(zzz); } ) , "C3" );
diff --git a/jstests/testminmax.js b/jstests/testminmax.js
new file mode 100644
index 0000000..803f1b4
--- /dev/null
+++ b/jstests/testminmax.js
@@ -0,0 +1,14 @@
+t = db.minmaxtest;
+t.drop();
+t.insert({"_id" : "IBM.N|00001264779918428889", "DESCRIPTION" : { "n" : "IBMSTK2", "o" : "IBM STK", "s" : "changed" } });
+t.insert({ "_id" : "VOD.N|00001264779918433344", "COMPANYNAME" : { "n" : "Vodafone Group PLC 2", "o" : "Vodafone Group PLC", "s" : "changed" } });
+t.insert({ "_id" : "IBM.N|00001264779918437075", "DESCRIPTION" : { "n" : "IBMSTK3", "o" : "IBM STK2", "s" : "changed" } });
+t.insert({ "_id" : "VOD.N|00001264779918441426", "COMPANYNAME" : { "n" : "Vodafone Group PLC 3", "o" : "Vodafone Group PLC 2", "s" : "changed" } });
+
+// temp:
+printjson( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).toArray() );
+
+// this should print 2 -- add an assertion once SERVER-675 is fixed
+// http://jira.mongodb.org/browse/SERVER-675
+print( t.find().min({"_id":"IBM.N|00000000000000000000"}).max({"_id":"IBM.N|99999999999999999999"}).count() );
+
diff --git a/jstests/tool/csv1.js b/jstests/tool/csv1.js
index df8aa10..edf9dc2 100644
--- a/jstests/tool/csv1.js
+++ b/jstests/tool/csv1.js
@@ -4,26 +4,25 @@ t = new ToolTest( "csv1" )
c = t.startDB( "foo" );
-base = { a : 1 , b : "foo,bar" , c: 5 };
+base = { a : 1 , b : "foo,bar\"baz,qux" , c: 5, 'd d': 6 };
assert.eq( 0 , c.count() , "setup1" );
c.insert( base );
delete base._id
assert.eq( 1 , c.count() , "setup2" );
-t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c" )
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c,d d" )
c.drop()
assert.eq( 0 , c.count() , "after drop" )
-t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
-assert.soon( "c.findOne()" , "no data after sleep" );
-assert.eq( 2 , c.count() , "after restore 2" );
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c,d d" );
+assert.soon( "2 == c.count()" , "restore 2" );
a = c.find().sort( { a : 1 } ).toArray();
delete a[0]._id
delete a[1]._id
-assert.eq( tojson( { a : "a" , b : "b" , c : "c" } ) , tojson( a[1] ) , "csv parse 1" );
+assert.eq( tojson( { a : "a" , b : "b" , c : "c" , 'd d': "d d"} ) , tojson( a[1] ) , "csv parse 1" );
assert.eq( tojson( base ) , tojson(a[0]) , "csv parse 0" )
c.drop()
diff --git a/jstests/tool/tool1.js b/jstests/tool/tool1.js
index 00e92e7..91fce80 100644
--- a/jstests/tool/tool1.js
+++ b/jstests/tool/tool1.js
@@ -2,8 +2,8 @@
baseName = "jstests_tool_tool1";
dbPath = "/data/db/" + baseName + "/";
-externalPath = "/data/db/" + baseName + "_external/"
-externalFile = externalPath + "export.json"
+externalPath = "/data/db/" + baseName + "_external/";
+externalFile = externalPath + "export.json";
function fileSize(){
var l = listFiles( externalPath );
diff --git a/jstests/type1.js b/jstests/type1.js
index 94385fa..518e367 100644
--- a/jstests/type1.js
+++ b/jstests/type1.js
@@ -21,3 +21,4 @@ assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "B2" );
assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "B3" );
assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "B4" );
assert.eq( 1 , t.find( { x : { $type : 1 } } ).explain().nscanned , "B5" );
+assert.eq( 1 , t.find( { x : { $regex:"f", $type : 2 } } ).count() , "B3" ); \ No newline at end of file
diff --git a/jstests/unset2.js b/jstests/unset2.js
new file mode 100644
index 0000000..e1dc445
--- /dev/null
+++ b/jstests/unset2.js
@@ -0,0 +1,23 @@
+t = db.unset2;
+t.drop();
+
+t.save( {a:["a","b","c","d"]} );
+t.update( {}, {$unset:{"a.3":1}} );
+assert.eq( ["a","b","c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.1":1}} );
+assert.eq( ["a",null,"c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.0":1}} );
+assert.eq( [null,null,"c",null], t.findOne().a );
+t.update( {}, {$unset:{"a.4":1}} );
+assert.eq( [null,null,"c",null], t.findOne().a ); // no change
+
+t.drop();
+t.save( {a:["a","b","c","d","e"]} );
+t.update( {}, {$unset:{"a.2":1},$set:{"a.3":3,"a.4":4,"a.5":5}} );
+assert.eq( ["a","b",null,3,4,5], t.findOne().a );
+
+t.drop();
+t.save( {a:["a","b","c","d","e"]} );
+t.update( {}, {$unset:{"a.2":1},$set:{"a.2":4}} );
+assert( db.getLastError() );
+assert.eq( ["a","b","c","d","e"], t.findOne().a ); \ No newline at end of file
diff --git a/jstests/update6.js b/jstests/update6.js
index 1f42fe5..f547677 100644
--- a/jstests/update6.js
+++ b/jstests/update6.js
@@ -10,7 +10,7 @@ assert.eq( "c,d" , Object.keySet( t.findOne().b ).toString() , "B" );
t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
assert.eq( 1 , t.findOne().b["0e"] , "C" );
-assert.eq( "0e,c,d" , Object.keySet( t.findOne().b ).toString() , "D" );
+assert.eq( "c,d,0e" , Object.keySet( t.findOne().b ).toString() , "D" );
// -----
diff --git a/jstests/update_addToSet.js b/jstests/update_addToSet.js
new file mode 100644
index 0000000..123bacb
--- /dev/null
+++ b/jstests/update_addToSet.js
@@ -0,0 +1,41 @@
+
+t = db.update_addToSet1;
+t.drop();
+
+o = { _id : 1 , a : [ 2 , 1 ] }
+t.insert( o );
+
+assert.eq( o , t.findOne() , "A1" );
+
+t.update( {} , { $addToSet : { a : 3 } } );
+o.a.push( 3 );
+assert.eq( o , t.findOne() , "A2" );
+
+t.update( {} , { $addToSet : { a : 3 } } );
+assert.eq( o , t.findOne() , "A3" );
+
+// SERVER-628
+t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
+o.a.push( 5 )
+o.a.push( 6 )
+assert.eq( o , t.findOne() , "B1" )
+
+t.drop()
+o = { _id : 1 , a : [ 3 , 5 , 6 ] }
+t.insert( o );
+t.update( {} , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } );
+assert.eq( o , t.findOne() , "B2" );
+
+t.drop();
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
+assert.eq( o , t.findOne() , "B3" );
+t.update( { _id : 1 } , { $addToSet : { a : { $each : [ 3 , 5 , 6 ] } } } , true );
+assert.eq( o , t.findOne() , "B4" );
+
+
+// SERVER-630
+t.drop();
+t.update( { _id : 2 } , { $addToSet : { a : 3 } } , true );
+assert.eq( 1 , t.count() , "C1" );
+assert.eq( { _id : 2 , a : [ 3 ] } , t.findOne() , "C2" );
+
diff --git a/jstests/update_arraymatch1.js b/jstests/update_arraymatch1.js
new file mode 100644
index 0000000..521271d
--- /dev/null
+++ b/jstests/update_arraymatch1.js
@@ -0,0 +1,16 @@
+
+t = db.update_arraymatch1
+t.drop();
+
+o = { _id : 1 , a : [ { x : 1 , y : 1 } , { x : 2 , y : 2 } , { x : 3 , y : 3 } ] }
+t.insert( o );
+assert.eq( o , t.findOne() , "A1" );
+
+q = { "a.x" : 2 }
+t.update( q , { $set : { b : 5 } } )
+o.b = 5
+assert.eq( o , t.findOne() , "A2" )
+
+t.update( { "a.x" : 2 } , { $inc : { "a.$.y" : 1 } } )
+o.a[1].y++;
+assert.eq( o , t.findOne() , "A3" );
diff --git a/jstests/update_arraymatch2.js b/jstests/update_arraymatch2.js
new file mode 100644
index 0000000..7eb810b
--- /dev/null
+++ b/jstests/update_arraymatch2.js
@@ -0,0 +1,16 @@
+t = db.tilde;
+t.drop();
+
+t.insert( { } );
+t.insert( { x : [1,2,3] } );
+t.insert( { x : 99 } );
+t.update( {x : 2}, { $inc : { "x.$" : 1 } } , false, true );
+assert( t.findOne({x:1}).x[1] == 3, "A1" );
+
+t.insert( { x : { y : [8,7,6] } } )
+t.update( {'x.y' : 7}, { $inc : { "x.y.$" : 1 } } , false, true )
+assert.eq( 8 , t.findOne({"x.y" : 8}).x.y[1] , "B1" );
+
+t.insert( { x : [90,91,92], y : ['a', 'b', 'c'] } );
+t.update( { x : 92} , { $set : { 'y.$' : 'z' } }, false, true );
+assert.eq( 'z', t.findOne({x:92}).y[2], "B2" );
diff --git a/jstests/update_arraymatch3.js b/jstests/update_arraymatch3.js
new file mode 100644
index 0000000..116ac6b
--- /dev/null
+++ b/jstests/update_arraymatch3.js
@@ -0,0 +1,17 @@
+
+t = db.update_arraymatch3;
+t.drop();
+
+o = { _id : 1 ,
+ title : "ABC",
+ comments : [ { "by" : "joe", "votes" : 3 },
+ { "by" : "jane", "votes" : 7 }
+ ]
+ }
+
+t.save( o );
+assert.eq( o , t.findOne() , "A1" );
+
+t.update( {'comments.by':'joe'}, {$inc:{'comments.$.votes':1}}, false, true )
+o.comments[0].votes++;
+assert.eq( o , t.findOne() , "A2" );
diff --git a/jstests/updatec.js b/jstests/updatec.js
new file mode 100644
index 0000000..12b1325
--- /dev/null
+++ b/jstests/updatec.js
@@ -0,0 +1,14 @@
+
+t = db.updatec;
+t.drop();
+
+t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
+t.update( { "_id" : 123 }, { $set : { "v" : { "i" : 123, "a":456 } }, $push : { "f" : 234} }, 1, 0 );
+
+assert.eq(
+ {
+ "_id" : 123,
+ "f" : [ 234, 234 ] ,
+ "v" : { "i" : 123, "a" : 456 }
+ } , t.findOne() );
+