summaryrefslogtreecommitdiff
path: root/jstests
diff options
context:
space:
mode:
Diffstat (limited to 'jstests')
-rw-r--r--jstests/_lodeRunner.js4
-rw-r--r--jstests/_runner.js24
-rw-r--r--jstests/_runner_leak.js44
-rw-r--r--jstests/_runner_leak_nojni.js42
-rw-r--r--jstests/_runner_sharding.js35
-rw-r--r--jstests/all.js45
-rw-r--r--jstests/all2.js86
-rw-r--r--jstests/apitest_db.js70
-rw-r--r--jstests/apitest_dbcollection.js115
-rw-r--r--jstests/array1.js14
-rw-r--r--jstests/array3.js8
-rw-r--r--jstests/arrayfind1.js38
-rw-r--r--jstests/auth1.js42
-rw-r--r--jstests/autoid.js11
-rw-r--r--jstests/basic1.js21
-rw-r--r--jstests/basic2.js16
-rw-r--r--jstests/basic3.js24
-rw-r--r--jstests/basic4.js12
-rw-r--r--jstests/basic5.js6
-rw-r--r--jstests/basic6.js8
-rw-r--r--jstests/basic7.js11
-rw-r--r--jstests/basic8.js11
-rw-r--r--jstests/basic9.js25
-rw-r--r--jstests/basica.js33
-rw-r--r--jstests/basicb.js7
-rw-r--r--jstests/capped.js11
-rw-r--r--jstests/capped1.js11
-rw-r--r--jstests/capped2.js62
-rw-r--r--jstests/capped3.js42
-rw-r--r--jstests/capped4.js28
-rw-r--r--jstests/capped5.js18
-rw-r--r--jstests/clone/clonecollection.js165
-rw-r--r--jstests/copydb.js20
-rw-r--r--jstests/count.js25
-rw-r--r--jstests/count2.js23
-rw-r--r--jstests/count3.js26
-rw-r--r--jstests/count4.js17
-rw-r--r--jstests/count5.js30
-rw-r--r--jstests/cursor1.js20
-rw-r--r--jstests/cursor2.js24
-rw-r--r--jstests/cursor3.js35
-rw-r--r--jstests/cursor4.js47
-rw-r--r--jstests/cursor5.js36
-rw-r--r--jstests/cursor6.js100
-rw-r--r--jstests/cursor7.js42
-rw-r--r--jstests/cursor8.js10
-rw-r--r--jstests/datasize.js28
-rw-r--r--jstests/date1.js14
-rw-r--r--jstests/dbadmin.js22
-rw-r--r--jstests/dbref1.js10
-rw-r--r--jstests/dbref2.js13
-rw-r--r--jstests/disk/dbNoCreate.js19
-rw-r--r--jstests/disk/diskfull.js20
-rw-r--r--jstests/disk/norepeat.js61
-rw-r--r--jstests/disk/preallocate.js21
-rw-r--r--jstests/distinct1.js25
-rw-r--r--jstests/distinct2.js13
-rw-r--r--jstests/drop.js21
-rw-r--r--jstests/error1.js41
-rw-r--r--jstests/error2.js21
-rw-r--r--jstests/error3.js5
-rw-r--r--jstests/error4.js7
-rw-r--r--jstests/error5.js8
-rw-r--r--jstests/eval0.js3
-rw-r--r--jstests/eval1.js17
-rw-r--r--jstests/eval2.js28
-rw-r--r--jstests/eval3.js21
-rw-r--r--jstests/eval4.js23
-rw-r--r--jstests/eval5.js23
-rw-r--r--jstests/eval6.js15
-rw-r--r--jstests/eval7.js3
-rw-r--r--jstests/eval8.js19
-rw-r--r--jstests/eval9.js19
-rw-r--r--jstests/evala.js9
-rw-r--r--jstests/evalb.js14
-rw-r--r--jstests/exists.js48
-rw-r--r--jstests/explain1.js24
-rw-r--r--jstests/extent.js11
-rw-r--r--jstests/find1.js30
-rw-r--r--jstests/find2.js16
-rw-r--r--jstests/find3.js10
-rw-r--r--jstests/find4.js26
-rw-r--r--jstests/find5.js51
-rw-r--r--jstests/find6.js11
-rw-r--r--jstests/find_and_modify.js38
-rw-r--r--jstests/fm1.js12
-rw-r--r--jstests/fm2.js9
-rw-r--r--jstests/fm3.js37
-rw-r--r--jstests/fsync.js22
-rw-r--r--jstests/fsync2.js15
-rw-r--r--jstests/group1.js64
-rw-r--r--jstests/group2.js38
-rw-r--r--jstests/group3.js43
-rw-r--r--jstests/group4.js45
-rw-r--r--jstests/group5.js38
-rw-r--r--jstests/hint1.js10
-rw-r--r--jstests/id1.js16
-rw-r--r--jstests/in.js19
-rw-r--r--jstests/in2.js33
-rw-r--r--jstests/inc1.js32
-rw-r--r--jstests/inc2.js22
-rw-r--r--jstests/inc3.js16
-rw-r--r--jstests/index1.js33
-rw-r--r--jstests/index10.js24
-rw-r--r--jstests/index2.js40
-rw-r--r--jstests/index3.js16
-rw-r--r--jstests/index4.js33
-rw-r--r--jstests/index5.js24
-rw-r--r--jstests/index6.js8
-rw-r--r--jstests/index7.js67
-rw-r--r--jstests/index8.js59
-rw-r--r--jstests/index9.js17
-rw-r--r--jstests/index_check1.js31
-rw-r--r--jstests/index_check2.js41
-rw-r--r--jstests/index_check3.js63
-rw-r--r--jstests/index_check5.js17
-rw-r--r--jstests/index_check6.js17
-rw-r--r--jstests/index_check7.js15
-rw-r--r--jstests/index_many.js34
-rw-r--r--jstests/indexa.js22
-rw-r--r--jstests/indexapi.js40
-rw-r--r--jstests/indexb.js30
-rw-r--r--jstests/indexc.js20
-rw-r--r--jstests/indexd.js10
-rw-r--r--jstests/indexe.js21
-rw-r--r--jstests/indexf.js13
-rw-r--r--jstests/jni1.js12
-rw-r--r--jstests/jni2.js22
-rw-r--r--jstests/jni3.js74
-rw-r--r--jstests/jni4.js49
-rw-r--r--jstests/jni5.js10
-rw-r--r--jstests/jni7.js7
-rw-r--r--jstests/jni8.js14
-rw-r--r--jstests/jni9.js24
-rw-r--r--jstests/json1.js20
-rw-r--r--jstests/map1.js24
-rw-r--r--jstests/median.js74
-rw-r--r--jstests/minmax.js40
-rw-r--r--jstests/mod1.js24
-rw-r--r--jstests/mr1.js176
-rw-r--r--jstests/mr2.js50
-rw-r--r--jstests/mr3.js73
-rw-r--r--jstests/mr4.js45
-rw-r--r--jstests/mr5.js39
-rw-r--r--jstests/multi.js24
-rw-r--r--jstests/multi2.js23
-rw-r--r--jstests/ne1.js11
-rw-r--r--jstests/nin.js57
-rw-r--r--jstests/not1.js20
-rw-r--r--jstests/null.js14
-rw-r--r--jstests/objid1.js16
-rw-r--r--jstests/objid2.js7
-rw-r--r--jstests/objid3.js9
-rw-r--r--jstests/objid4.js16
-rw-r--r--jstests/objid5.js6
-rw-r--r--jstests/parallel/allops.js40
-rw-r--r--jstests/parallel/basic.js11
-rw-r--r--jstests/parallel/basicPlus.js26
-rw-r--r--jstests/parallel/insert.js24
-rw-r--r--jstests/parallel/manyclients.js26
-rw-r--r--jstests/parallel/shellfork.js33
-rw-r--r--jstests/perf/find1.js90
-rw-r--r--jstests/perf/index1.js20
-rw-r--r--jstests/perf/remove1.js68
-rw-r--r--jstests/profile1.js40
-rw-r--r--jstests/pull.js19
-rw-r--r--jstests/pull2.js31
-rw-r--r--jstests/pullall.js18
-rw-r--r--jstests/push.js22
-rw-r--r--jstests/push2.js20
-rw-r--r--jstests/pushall.js20
-rw-r--r--jstests/query1.js20
-rw-r--r--jstests/queryoptimizer1.js26
-rw-r--r--jstests/quota/quota1.js48
-rw-r--r--jstests/recstore.js24
-rw-r--r--jstests/ref.js19
-rw-r--r--jstests/ref2.js14
-rw-r--r--jstests/ref3.js19
-rw-r--r--jstests/ref4.js23
-rw-r--r--jstests/regex.js24
-rw-r--r--jstests/regex2.js62
-rw-r--r--jstests/regex3.js36
-rw-r--r--jstests/regex4.js18
-rw-r--r--jstests/regex5.js13
-rw-r--r--jstests/regex6.js19
-rw-r--r--jstests/remove.js25
-rw-r--r--jstests/remove2.js41
-rw-r--r--jstests/remove3.js18
-rw-r--r--jstests/remove4.js10
-rw-r--r--jstests/remove5.js24
-rw-r--r--jstests/remove6.js38
-rw-r--r--jstests/remove7.js35
-rw-r--r--jstests/remove8.js21
-rw-r--r--jstests/rename.js48
-rw-r--r--jstests/rename2.js19
-rw-r--r--jstests/rename3.js25
-rw-r--r--jstests/repair.js6
-rw-r--r--jstests/repl/basic1.js59
-rw-r--r--jstests/repl/pair1.js99
-rw-r--r--jstests/repl/pair2.js71
-rw-r--r--jstests/repl/pair3.js235
-rw-r--r--jstests/repl/pair4.js159
-rw-r--r--jstests/repl/pair5.js95
-rw-r--r--jstests/repl/pair6.js115
-rw-r--r--jstests/repl/repl1.js55
-rw-r--r--jstests/repl/repl2.js45
-rw-r--r--jstests/repl/repl3.js47
-rw-r--r--jstests/repl/repl4.js30
-rw-r--r--jstests/repl/repl5.js32
-rw-r--r--jstests/repl/repl6.js73
-rw-r--r--jstests/repl/repl7.js45
-rw-r--r--jstests/repl/repl8.js30
-rw-r--r--jstests/repl/repl9.js48
-rw-r--r--jstests/repl/replacePeer1.js71
-rw-r--r--jstests/repl/replacePeer2.js72
-rw-r--r--jstests/set1.js9
-rw-r--r--jstests/set2.js18
-rw-r--r--jstests/set3.js11
-rw-r--r--jstests/set4.js15
-rw-r--r--jstests/sharding/auto1.js51
-rw-r--r--jstests/sharding/auto2.js44
-rw-r--r--jstests/sharding/count1.js55
-rw-r--r--jstests/sharding/diffservers1.js20
-rw-r--r--jstests/sharding/error1.js47
-rw-r--r--jstests/sharding/features1.js139
-rw-r--r--jstests/sharding/features2.js114
-rw-r--r--jstests/sharding/key_many.js121
-rw-r--r--jstests/sharding/key_string.js44
-rw-r--r--jstests/sharding/movePrimary1.js31
-rw-r--r--jstests/sharding/moveshard1.js39
-rw-r--r--jstests/sharding/passthrough1.js10
-rw-r--r--jstests/sharding/shard1.js32
-rw-r--r--jstests/sharding/shard2.js194
-rw-r--r--jstests/sharding/shard3.js130
-rw-r--r--jstests/sharding/shard4.js49
-rw-r--r--jstests/sharding/shard5.js52
-rw-r--r--jstests/sharding/shard6.js39
-rw-r--r--jstests/sharding/splitpick.js33
-rw-r--r--jstests/sharding/update1.js33
-rw-r--r--jstests/sharding/version1.js23
-rw-r--r--jstests/sharding/version2.js36
-rw-r--r--jstests/shellspawn.js24
-rw-r--r--jstests/slow/ns1.js49
-rw-r--r--jstests/sort1.js50
-rw-r--r--jstests/sort2.js22
-rw-r--r--jstests/sort3.js16
-rw-r--r--jstests/sort4.js43
-rw-r--r--jstests/sort5.js21
-rw-r--r--jstests/sort_numeric.js35
-rw-r--r--jstests/stats.js9
-rw-r--r--jstests/storefunc.js31
-rw-r--r--jstests/sub1.js14
-rw-r--r--jstests/tool/csv1.js43
-rw-r--r--jstests/tool/dumprestore1.js20
-rw-r--r--jstests/tool/dumprestore2.js26
-rw-r--r--jstests/tool/exportimport1.js20
-rw-r--r--jstests/tool/exportimport2.js24
-rw-r--r--jstests/tool/tool1.js64
-rw-r--r--jstests/type1.js23
-rw-r--r--jstests/unique2.js41
-rw-r--r--jstests/uniqueness.js45
-rw-r--r--jstests/unset.js19
-rw-r--r--jstests/update.js25
-rw-r--r--jstests/update2.js18
-rw-r--r--jstests/update3.js23
-rw-r--r--jstests/update4.js33
-rw-r--r--jstests/update5.js41
-rw-r--r--jstests/update6.js46
-rw-r--r--jstests/update7.js138
-rw-r--r--jstests/update8.js11
-rw-r--r--jstests/update9.js19
-rw-r--r--jstests/updatea.js50
-rw-r--r--jstests/updateb.js11
-rw-r--r--jstests/where1.js14
-rw-r--r--jstests/where2.js10
275 files changed, 9529 insertions, 0 deletions
diff --git a/jstests/_lodeRunner.js b/jstests/_lodeRunner.js
new file mode 100644
index 0000000..6e23dbb
--- /dev/null
+++ b/jstests/_lodeRunner.js
@@ -0,0 +1,4 @@
+// Start mongod and run jstests/_runner.js
+
+db = startMongod( "--port", "27018", "--dbpath", "/data/db/jstests" ).getDB( "test" );
+load( "jstests/_runner.js" );
diff --git a/jstests/_runner.js b/jstests/_runner.js
new file mode 100644
index 0000000..f0ce49d
--- /dev/null
+++ b/jstests/_runner.js
@@ -0,0 +1,24 @@
+//
+// simple runner to run toplevel tests in jstests
+//
+var files = listFiles("jstests");
+
+files.forEach(
+ function(x) {
+
+ if ( /_runner/.test(x.name) ||
+ /_lodeRunner/.test(x.name) ||
+ ! /\.js$/.test(x.name ) ){
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+
+
+ print(" *******************************************");
+ print(" Test : " + x.name + " ...");
+ print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
+
+ }
+);
+
+
diff --git a/jstests/_runner_leak.js b/jstests/_runner_leak.js
new file mode 100644
index 0000000..18d7fb2
--- /dev/null
+++ b/jstests/_runner_leak.js
@@ -0,0 +1,44 @@
+//
+// simple runner to run toplevel tests in jstests
+//
+var files = listFiles("jstests");
+
+var dummyDb = db.getSisterDB( "dummyDBdummydummy" );
+
+dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
+prev = dummyDb.serverStatus();
+
+print( "START : " + tojson( prev ) );
+
+files.forEach(
+ function(x) {
+
+ if ( /_runner/.test(x.name) ||
+ /_lodeRunner/.test(x.name) ||
+ ! /\.js$/.test(x.name ) ){
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+
+
+ print(" *******************************************");
+ print(" Test : " + x.name + " ...");
+ print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
+
+ assert( dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" ).ok == 1 , "closeAllDatabases failed" );
+ var now = dummyDb.serverStatus();
+ var leaked = now.mem.virtual - prev.mem.virtual;
+ if ( leaked > 0 ){
+ print( " LEAK : " + prev.mem.virtual + " -->> " + now.mem.virtual );
+ printjson( now );
+ if ( leaked > 20 )
+ throw -1;
+ }
+ prev = now;
+ }
+);
+
+
+
+dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
+print( "END : " + tojson( dummyDb.serverStatus() ) );
diff --git a/jstests/_runner_leak_nojni.js b/jstests/_runner_leak_nojni.js
new file mode 100644
index 0000000..fe2c6b2
--- /dev/null
+++ b/jstests/_runner_leak_nojni.js
@@ -0,0 +1,42 @@
+//
+// simple runner to run toplevel tests in jstests
+//
+var files = listFiles("jstests");
+
+var dummyDb = db.getSisterDB( "dummyDBdummydummy" );
+
+dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
+prev = dummyDb.runCommand( "meminfo" );
+
+print( "START : " + tojson( prev ) );
+
+files.forEach(
+ function(x) {
+
+ if ( /_runner/.test(x.name) ||
+ /_lodeRunner/.test(x.name) ||
+ /jni/.test(x.name) ||
+ /eval/.test(x.name) ||
+ /where/.test(x.name) ||
+ ! /\.js$/.test(x.name ) ){
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+
+
+ print(" *******************************************");
+ print(" Test : " + x.name + " ...");
+ print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
+
+ assert( dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" ).ok == 1 , "closeAllDatabases failed" );
+ var now = dummyDb.runCommand( "meminfo" );
+ if ( now.virtual > prev.virtual )
+ print( " LEAK : " + prev.virtual + " -->> " + now.virtual );
+ prev = now;
+ }
+);
+
+
+
+dummyDb.getSisterDB( "admin" ).runCommand( "closeAllDatabases" );
+print( "END : " + tojson( dummyDb.runCommand( "meminfo" ) ) );
diff --git a/jstests/_runner_sharding.js b/jstests/_runner_sharding.js
new file mode 100644
index 0000000..761b9df
--- /dev/null
+++ b/jstests/_runner_sharding.js
@@ -0,0 +1,35 @@
+//
+// simple runner to run toplevel tests in jstests
+//
+var files = listFiles("jstests/sharding");
+
+var num = 0;
+
+files.forEach(
+ function(x) {
+
+ if ( /_runner/.test(x.name) ||
+ /_lodeRunner/.test(x.name) ||
+ ! /\.js$/.test(x.name ) ){
+ print(" >>>>>>>>>>>>>>> skipping " + x.name);
+ return;
+ }
+
+ if ( num++ > 0 ){
+ sleep( 1000 ); // let things fully come down
+ }
+
+ print(" *******************************************");
+ print(" Test : " + x.name + " ...");
+ try {
+ print(" " + Date.timeFunc( function() { load(x.name); }, 1) + "ms");
+ }
+ catch ( e ){
+ print( " ERROR on " + x.name + "!! " + e );
+ throw e;
+ }
+
+ }
+);
+
+
diff --git a/jstests/all.js b/jstests/all.js
new file mode 100644
index 0000000..3d642ee
--- /dev/null
+++ b/jstests/all.js
@@ -0,0 +1,45 @@
+t = db.jstests_all;
+t.drop();
+
+doTest = function() {
+
+ t.save( { a:[ 1,2,3 ] } );
+ t.save( { a:[ 1,2,4 ] } );
+ t.save( { a:[ 1,8,5 ] } );
+ t.save( { a:[ 1,8,6 ] } );
+ t.save( { a:[ 1,9,7 ] } );
+
+ assert.eq( 5, t.find( { a: { $all: [ 1 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 1, 2 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 1, 8 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 1, 3 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 2 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 2, 3 ] } } ).count() );
+ assert.eq( 2, t.find( { a: { $all: [ 2, 1 ] } } ).count() );
+
+ t.save( { a: [ 2, 2 ] } );
+ assert.eq( 3, t.find( { a: { $all: [ 2, 2 ] } } ).count() );
+
+ t.save( { a: [ [ 2 ] ] } );
+ assert.eq( 3, t.find( { a: { $all: [ 2 ] } } ).count() );
+
+ t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 10 ] } } ).count() );
+ assert.eq( 1, t.find( { a: { $all: [ 11 ] } } ).count() );
+
+ t.save( { a: { b: [ 20, 30 ] } } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() );
+
+
+ assert.eq( 5 , t.find( { a : { $all : [1] } } ).count() , "E1" );
+ assert.eq( 0 , t.find( { a : { $all : [19] } } ).count() , "E2" );
+ assert.eq( 0 , t.find( { a : { $all : [] } } ).count() , "E3" );
+
+
+}
+
+doTest();
+t.drop();
+t.ensureIndex( {a:1} );
+doTest();
diff --git a/jstests/all2.js b/jstests/all2.js
new file mode 100644
index 0000000..64372ca
--- /dev/null
+++ b/jstests/all2.js
@@ -0,0 +1,86 @@
+
+t = db.all2;
+t.drop();
+
+t.save( { a : [ { x : 1 } , { x : 2 } ] } )
+t.save( { a : [ { x : 2 } , { x : 3 } ] } )
+t.save( { a : [ { x : 3 } , { x : 4 } ] } )
+
+state = "no index";
+
+function check( n , q , e ){
+ assert.eq( n , t.find( q ).count() , tojson( q ) + " " + e + " count " + state );
+ assert.eq( n , t.find( q ).itcount() , tojson( q ) + " " + e + " itcount" + state );
+}
+
+check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+
+t.ensureIndex( { "a.x" : 1 } );
+state = "index";
+
+check( 1 , { "a.x" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a.x" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a.x" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a.x" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a.x" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a.x" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a.x" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a.x" : { $all : [ 1 , 3 ] } } , "H" );
+
+// --- more
+
+t.drop();
+
+t.save( { a : [ 1 , 2 ] } )
+t.save( { a : [ 2 , 3 ] } )
+t.save( { a : [ 3 , 4 ] } )
+
+state = "more no index";
+
+check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+
+t.ensureIndex( { "a" : 1 } );
+state = "more index";
+
+check( 1 , { "a" : { $in : [ 1 ] } } , "A" );
+check( 2 , { "a" : { $in : [ 2 ] } } , "B" );
+
+check( 2 , { "a" : { $in : [ 1 , 2 ] } } , "C" );
+check( 3 , { "a" : { $in : [ 2 , 3 ] } } , "D" );
+check( 3 , { "a" : { $in : [ 1 , 3 ] } } , "E" );
+
+check( 1 , { "a" : { $all : [ 1 , 2 ] } } , "F" );
+check( 1 , { "a" : { $all : [ 2 , 3 ] } } , "G" );
+check( 0 , { "a" : { $all : [ 1 , 3 ] } } , "H" );
+
+
+// more 2
+
+state = "more 2"
+
+t.drop();
+t.save( { name : [ "harry","jack","tom" ] } )
+check( 0 , { name : { $all : ["harry","john"] } } , "A" );
+t.ensureIndex( { name : 1 } );
+check( 0 , { name : { $all : ["harry","john"] } } , "B" );
+
diff --git a/jstests/apitest_db.js b/jstests/apitest_db.js
new file mode 100644
index 0000000..45e25b6
--- /dev/null
+++ b/jstests/apitest_db.js
@@ -0,0 +1,70 @@
+/**
+ * Tests for the db object enhancement
+ */
+
+dd = function( x ){
+ //print( x );
+}
+
+dd( "a" );
+
+
+dd( "b" );
+
+/*
+ * be sure the public collection API is complete
+ */
+assert(db.createCollection , "createCollection" );
+assert(db.getProfilingLevel , "getProfilingLevel" );
+assert(db.setProfilingLevel , "setProfilingLevel" );
+assert(db.dbEval , "dbEval" );
+assert(db.group , "group" );
+
+dd( "c" );
+
+/*
+ * test createCollection
+ */
+
+db.getCollection( "test" ).drop();
+db.getCollection( "system.namespaces" ).find().forEach( function(x) { assert(x.name != "test.test"); });
+
+dd( "d" );
+
+db.createCollection("test");
+var found = false;
+db.getCollection( "system.namespaces" ).find().forEach( function(x) { if (x.name == "test.test") found = true; });
+assert(found);
+
+dd( "e" );
+
+/*
+ * profile level
+ */
+
+db.setProfilingLevel(0);
+assert(db.getProfilingLevel() == 0);
+
+db.setProfilingLevel(1);
+assert(db.getProfilingLevel() == 1);
+
+db.setProfilingLevel(2);
+assert(db.getProfilingLevel() == 2);
+
+db.setProfilingLevel(0);
+assert(db.getProfilingLevel() == 0);
+
+dd( "f" );
+asserted = false;
+try {
+ db.setProfilingLevel(10);
+ assert(false);
+}
+catch (e) {
+ asserted = true;
+ assert(e.dbSetProfilingException);
+}
+assert( asserted );
+
+dd( "g" );
+
diff --git a/jstests/apitest_dbcollection.js b/jstests/apitest_dbcollection.js
new file mode 100644
index 0000000..f6e74da
--- /dev/null
+++ b/jstests/apitest_dbcollection.js
@@ -0,0 +1,115 @@
+/**
+ * Tests for the db collection
+ */
+
+
+
+/*
+ * test drop
+ */
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).find().length() == 0,1);
+
+db.getCollection( "test_db" ).save({a:1});
+assert(db.getCollection( "test_db" ).find().length() == 1,2);
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).find().length() == 0,3);
+
+/*
+ * test count
+ */
+
+assert(db.getCollection( "test_db" ).count() == 0,4);
+db.getCollection( "test_db" ).save({a:1});
+assert(db.getCollection( "test_db" ).count() == 1,5);
+for (i = 0; i < 100; i++) {
+ db.getCollection( "test_db" ).save({a:1});
+}
+assert(db.getCollection( "test_db" ).count() == 101,6);
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,7);
+
+/*
+ * test clean (not sure... just be sure it doesn't blow up, I guess)
+ */
+
+ db.getCollection( "test_db" ).clean();
+
+ /*
+ * test validate
+ */
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,8);
+
+for (i = 0; i < 100; i++) {
+ db.getCollection( "test_db" ).save({a:1});
+}
+
+var v = db.getCollection( "test_db" ).validate();
+if( v.ns != "test.test_db" ) {
+ print("Error: wrong ns name");
+ print(tojson(v));
+}
+assert (v.ns == "test.test_db",9);
+assert (v.ok == 1,10);
+
+assert(v.result.toString().match(/nrecords\?:(\d+)/)[1] == 100,11);
+
+/*
+ * test deleteIndex, deleteIndexes
+ */
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).count() == 0,12);
+db.getCollection( "test_db" ).dropIndexes();
+assert(db.getCollection( "test_db" ).getIndexes().length == 0,13);
+
+db.getCollection( "test_db" ).save({a:10});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,14);
+
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).save({a:10});
+
+print( tojson( db.getCollection( "test_db" ).getIndexes() ) );
+assert.eq(db.getCollection( "test_db" ).getIndexes().length , 2,15);
+
+db.getCollection( "test_db" ).dropIndex({a:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,16);
+
+db.getCollection( "test_db" ).save({a:10});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).save({a:10});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 2,17);
+
+db.getCollection( "test_db" ).dropIndex("a_1");
+assert.eq( db.getCollection( "test_db" ).getIndexes().length , 1,18);
+
+db.getCollection( "test_db" ).save({a:10, b:11});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).ensureIndex({b:1});
+db.getCollection( "test_db" ).save({a:10, b:12});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 3,19);
+
+db.getCollection( "test_db" ).dropIndex({b:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 2,20);
+db.getCollection( "test_db" ).dropIndex({a:1});
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,21);
+
+db.getCollection( "test_db" ).save({a:10, b:11});
+db.getCollection( "test_db" ).ensureIndex({a:1});
+db.getCollection( "test_db" ).ensureIndex({b:1});
+db.getCollection( "test_db" ).save({a:10, b:12});
+
+assert(db.getCollection( "test_db" ).getIndexes().length == 3,22);
+
+db.getCollection( "test_db" ).dropIndexes();
+assert(db.getCollection( "test_db" ).getIndexes().length == 1,23);
+
+db.getCollection( "test_db" ).find();
+
+db.getCollection( "test_db" ).drop();
+assert(db.getCollection( "test_db" ).getIndexes().length == 0,24);
diff --git a/jstests/array1.js b/jstests/array1.js
new file mode 100644
index 0000000..4409b7b
--- /dev/null
+++ b/jstests/array1.js
@@ -0,0 +1,14 @@
+t = db.array1
+t.drop()
+
+x = { a : [ 1 , 2 ] };
+
+t.save( { a : [ [1,2] ] } );
+assert.eq( 1 , t.find( x ).count() , "A" );
+
+t.save( x );
+delete x._id;
+assert.eq( 2 , t.find( x ).count() , "B" );
+
+t.ensureIndex( { a : 1 } );
+assert.eq( 2 , t.find( x ).count() , "C" ); // TODO SERVER-146
diff --git a/jstests/array3.js b/jstests/array3.js
new file mode 100644
index 0000000..3d053f9
--- /dev/null
+++ b/jstests/array3.js
@@ -0,0 +1,8 @@
+
+assert.eq( 5 , Array.sum( [ 1 , 4 ] ), "A" )
+assert.eq( 2.5 , Array.avg( [ 1 , 4 ] ), "B" )
+
+arr = [ 2 , 4 , 4 , 4 , 5 , 5 , 7 , 9 ]
+assert.eq( 5 , Array.avg( arr ) , "C" )
+assert.eq( 2 , Array.stdDev( arr ) , "D" )
+
diff --git a/jstests/arrayfind1.js b/jstests/arrayfind1.js
new file mode 100644
index 0000000..422369e
--- /dev/null
+++ b/jstests/arrayfind1.js
@@ -0,0 +1,38 @@
+
+t = db.arrayfind1;
+t.drop();
+
+t.save( { a : [ { x : 1 } ] } )
+t.save( { a : [ { x : 1 , y : 2 , z : 1 } ] } )
+t.save( { a : [ { x : 1 , y : 1 , z : 3 } ] } )
+
+function test( exptected , q , name ){
+ assert.eq( exptected , t.find( q ).itcount() , name + " " + tojson( q ) + " itcount" );
+ assert.eq( exptected , t.find( q ).count() , name + " " + tojson( q ) + " count" );
+}
+
+test( 3 , {} , "A1" );
+test( 1 , { "a.y" : 2 } , "A2" );
+test( 1 , { "a" : { x : 1 } } , "A3" );
+test( 3 , { "a" : { $elemMatch : { x : 1 } } } , "A4" ); // SERVER-377
+
+
+t.save( { a : [ { x : 2 } ] } )
+t.save( { a : [ { x : 3 } ] } )
+t.save( { a : [ { x : 4 } ] } )
+
+assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "B1" );
+assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "B2" );
+
+t.ensureIndex( { "a.x" : 1 } );
+assert( t.find( { "a" : { $elemMatch : { x : 1 } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "C1" );
+
+assert.eq( 1 , t.find( { a : { $elemMatch : { x : 2 } } } ).count() , "D1" );
+
+t.find( { "a.x" : 1 } ).count();
+t.find( { "a.x" : { $gt : 1 } } ).count();
+
+res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain()
+assert( res.cursor.indexOf( "BtreeC" ) == 0 , "C1" );
+assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D2" );
+
diff --git a/jstests/auth1.js b/jstests/auth1.js
new file mode 100644
index 0000000..f6890cc
--- /dev/null
+++ b/jstests/auth1.js
@@ -0,0 +1,42 @@
+
+
+users = db.getCollection( "system.users" );
+users.remove( {} );
+
+pass = "a" + Math.random();
+//print( "password [" + pass + "]" );
+
+db.addUser( "eliot" , pass );
+
+assert( db.auth( "eliot" , pass ) , "auth failed" );
+assert( ! db.auth( "eliot" , pass + "a" ) , "auth should have failed" );
+
+pass2 = "b" + Math.random();
+db.addUser( "eliot" , pass2 );
+
+assert( ! db.auth( "eliot" , pass ) , "failed to change password failed" );
+assert( db.auth( "eliot" , pass2 ) , "new password didn't take" );
+
+assert( db.auth( "eliot" , pass2 ) , "what?" );
+db.removeUser( "eliot" );
+assert( ! db.auth( "eliot" , pass2 ) , "didn't remove user" );
+
+
+var a = db.getMongo().getDB( "admin" );
+users = a.getCollection( "system.users" );
+users.remove( {} );
+pass = "c" + Math.random();
+a.addUser( "super", pass );
+assert( a.auth( "super" , pass ) , "auth failed" );
+assert( !a.auth( "super" , pass + "a" ) , "auth should have failed" );
+
+db2 = new Mongo( db.getMongo().host ).getDB( db.getName() );
+
+users = db2.getCollection( "system.users" );
+users.remove( {} );
+
+pass = "a" + Math.random();
+
+db2.addUser( "eliot" , pass );
+
+assert.commandFailed( db2.runCommand( { authenticate: 1, user: "eliot", nonce: "foo", key: "bar" } ) );
diff --git a/jstests/autoid.js b/jstests/autoid.js
new file mode 100644
index 0000000..6c8062f
--- /dev/null
+++ b/jstests/autoid.js
@@ -0,0 +1,11 @@
+f = db.jstests_autoid;
+f.drop();
+
+f.save( {z:1} );
+a = f.findOne( {z:1} );
+f.update( {z:1}, {z:2} );
+b = f.findOne( {z:2} );
+assert.eq( a._id.str, b._id.str );
+c = f.update( {z:2}, {z:"abcdefgabcdefgabcdefg"} );
+c = f.findOne( {} );
+assert.eq( a._id.str, c._id.str );
diff --git a/jstests/basic1.js b/jstests/basic1.js
new file mode 100644
index 0000000..e5fa577
--- /dev/null
+++ b/jstests/basic1.js
@@ -0,0 +1,21 @@
+
+t = db.getCollection( "basic1" );
+t.drop();
+
+o = { a : 1 };
+t.save( o );
+
+assert.eq( 1 , t.findOne().a , "first" );
+assert( o._id , "now had id" );
+assert( o._id.str , "id not a real id" );
+
+o.a = 2;
+t.save( o );
+
+assert.eq( 2 , t.findOne().a , "second" );
+
+assert(t.validate().valid);
+
+// not a very good test of currentOp, but tests that it at least
+// is sort of there:
+assert( db.currentOp().inprog != null );
diff --git a/jstests/basic2.js b/jstests/basic2.js
new file mode 100644
index 0000000..aaa3de4
--- /dev/null
+++ b/jstests/basic2.js
@@ -0,0 +1,16 @@
+
+t = db.getCollection( "basic2" );
+t.drop();
+
+o = { n : 2 };
+t.save( o );
+
+assert.eq( 1 , t.find().count() );
+
+assert.eq( 2 , t.find( o._id ).toArray()[0].n );
+assert.eq( 2 , t.find( o._id , { n : 1 } ).toArray()[0].n );
+
+t.remove( o._id );
+assert.eq( 0 , t.find().count() );
+
+assert(t.validate().valid);
diff --git a/jstests/basic3.js b/jstests/basic3.js
new file mode 100644
index 0000000..b1ebafd
--- /dev/null
+++ b/jstests/basic3.js
@@ -0,0 +1,24 @@
+
+t = db.getCollection( "foo" );
+
+t.find( { "a.b" : 1 } ).toArray();
+
+ok = false;
+
+try{
+ t.save( { "a.b" : 5 } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , ". in names aren't allowed doesn't work" );
+
+try{
+ t.save( { "x" : { "a.b" : 5 } } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , ". in embedded names aren't allowed doesn't work" );
diff --git a/jstests/basic4.js b/jstests/basic4.js
new file mode 100644
index 0000000..0cf7a26
--- /dev/null
+++ b/jstests/basic4.js
@@ -0,0 +1,12 @@
+t = db.getCollection( "basic4" );
+t.drop();
+
+t.save( { a : 1 , b : 1.0 } );
+
+assert( t.findOne() );
+assert( t.findOne( { a : 1 } ) );
+assert( t.findOne( { a : 1.0 } ) );
+assert( t.findOne( { b : 1 } ) );
+assert( t.findOne( { b : 1.0 } ) );
+
+assert( ! t.findOne( { b : 2.0 } ) );
diff --git a/jstests/basic5.js b/jstests/basic5.js
new file mode 100644
index 0000000..bfa40fb
--- /dev/null
+++ b/jstests/basic5.js
@@ -0,0 +1,6 @@
+t = db.getCollection( "basic5" );
+t.drop();
+
+t.save( { a : 1 , b : [ 1 , 2 , 3 ] } );
+assert.eq( 3 , t.findOne().b.length );
+
diff --git a/jstests/basic6.js b/jstests/basic6.js
new file mode 100644
index 0000000..e0cd6f1
--- /dev/null
+++ b/jstests/basic6.js
@@ -0,0 +1,8 @@
+
+t = db.basic6;
+
+t.findOne();
+t.a.findOne();
+
+assert.eq( "test.basic6" , t.toString() );
+assert.eq( "test.basic6.a" , t.a.toString() );
diff --git a/jstests/basic7.js b/jstests/basic7.js
new file mode 100644
index 0000000..7bb0d47
--- /dev/null
+++ b/jstests/basic7.js
@@ -0,0 +1,11 @@
+
+t = db.basic7;
+t.drop();
+
+t.save( { a : 1 } )
+t.ensureIndex( { a : 1 } );
+
+assert.eq( t.find().toArray()[0].a , 1 );
+assert.eq( t.find().arrayAccess(0).a , 1 );
+assert.eq( t.find()[0].a , 1 );
+
diff --git a/jstests/basic8.js b/jstests/basic8.js
new file mode 100644
index 0000000..513da0d
--- /dev/null
+++ b/jstests/basic8.js
@@ -0,0 +1,11 @@
+
+t = db.basic8;
+t.drop();
+
+t.save( { a : 1 } );
+o = t.findOne();
+o.b = 2;
+t.save( o );
+
+assert.eq( 1 , t.find().count() , "A" );
+assert.eq( 2 , t.findOne().b , "B" );
diff --git a/jstests/basic9.js b/jstests/basic9.js
new file mode 100644
index 0000000..5920418
--- /dev/null
+++ b/jstests/basic9.js
@@ -0,0 +1,25 @@
+
+t = db.getCollection( "foo" );
+
+t.save( { "foo$bar" : 5 } );
+
+ok = false;
+
+try{
+ t.save( { "$foo" : 5 } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , "key names aren't allowed to start with $ doesn't work" );
+
+try{
+ t.save( { "x" : { "$foo" : 5 } } );
+ ok = false;
+}
+catch ( e ){
+ ok = true;
+}
+assert( ok , "embedded key names aren't allowed to start with $ doesn't work" );
+
diff --git a/jstests/basica.js b/jstests/basica.js
new file mode 100644
index 0000000..0cc364b
--- /dev/null
+++ b/jstests/basica.js
@@ -0,0 +1,33 @@
+
+t = db.basica;
+
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].x = 4;
+x.b["0"].z = 4;
+x.b[0].m = 9;
+x.b[0]["asd"] = 11;
+x.a = 2;
+x.z = 11;
+
+tojson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "FIRST" );
+
+// -----
+
+t.drop();
+
+t.save( { a : 1 , b : [ { x : 2 , y : 2 } , { x : 3 , y : 3 } ] } );
+
+x = t.findOne();
+x.b["0"].z = 4;
+
+//printjson( x );
+t.save( x );
+assert.eq( tojson( x ) , tojson( t.findOne() ) , "SECOND" );
+
diff --git a/jstests/basicb.js b/jstests/basicb.js
new file mode 100644
index 0000000..571b88c
--- /dev/null
+++ b/jstests/basicb.js
@@ -0,0 +1,7 @@
+
+t = db.basicb;
+t.drop();
+
+assert.throws( "t.insert( { '$a' : 5 } );" );
+t.insert( { '$a' : 5 } , true );
+
diff --git a/jstests/capped.js b/jstests/capped.js
new file mode 100644
index 0000000..bae7472
--- /dev/null
+++ b/jstests/capped.js
@@ -0,0 +1,11 @@
+db.jstests_capped.drop();
+db.createCollection("jstests_capped", {capped:true, size:30000});
+assert.eq( 0, db.system.indexes.find( {ns:"test.jstests_capped"} ).count() );
+t = db.jstests_capped;
+
+t.save({x:1});
+t.save({x:2});
+
+assert( t.find().sort({$natural:1})[0].x == 1 );
+assert( t.find().sort({$natural:-1})[0].x == 2 );
+
diff --git a/jstests/capped1.js b/jstests/capped1.js
new file mode 100644
index 0000000..0bbeaa4
--- /dev/null
+++ b/jstests/capped1.js
@@ -0,0 +1,11 @@
+
+t = db.capped1;
+t.drop();
+
+db.createCollection("capped1" , {capped:true, size:1024 });
+v = t.validate();
+assert( v.valid , "A : " + tojson( v ) ); // SERVER-485
+
+t.save( { x : 1 } )
+assert( t.validate().valid , "B" )
+
diff --git a/jstests/capped2.js b/jstests/capped2.js
new file mode 100644
index 0000000..2d2f6a8
--- /dev/null
+++ b/jstests/capped2.js
@@ -0,0 +1,62 @@
+db.capped2.drop();
+db._dbCommand( { create: "capped2", capped: true, size: 1000, $nExtents: 11, autoIndexId: false } );
+tzz = db.capped2;
+
+function debug( x ) {
+// print( x );
+}
+
+var val = new Array( 2000 );
+var c = "";
+for( i = 0; i < 2000; ++i, c += "-" ) {
+ val[ i ] = { a: c };
+}
+
+function checkIncreasing( i ) {
+ res = tzz.find().sort( { $natural: -1 } );
+ assert( res.hasNext(), "A" );
+ var j = i;
+ while( res.hasNext() ) {
+ try {
+ assert.eq( val[ j-- ].a, res.next().a, "B" );
+ } catch( e ) {
+ debug( "capped2 err " + j );
+ throw e;
+ }
+ }
+ res = tzz.find().sort( { $natural: 1 } );
+ assert( res.hasNext(), "C" );
+ while( res.hasNext() )
+ assert.eq( val[ ++j ].a, res.next().a, "D" );
+ assert.eq( j, i, "E" );
+}
+
+function checkDecreasing( i ) {
+ res = tzz.find().sort( { $natural: -1 } );
+ assert( res.hasNext(), "F" );
+ var j = i;
+ while( res.hasNext() ) {
+ assert.eq( val[ j++ ].a, res.next().a, "G" );
+ }
+ res = tzz.find().sort( { $natural: 1 } );
+ assert( res.hasNext(), "H" );
+ while( res.hasNext() )
+ assert.eq( val[ --j ].a, res.next().a, "I" );
+ assert.eq( j, i, "J" );
+}
+
+for( i = 0 ;; ++i ) {
+ debug( "capped 2: " + i );
+ tzz.save( val[ i ] );
+ if ( tzz.count() == 0 ) {
+ assert( i > 100, "K" );
+ break;
+ }
+ checkIncreasing( i );
+}
+
+for( i = 600 ; i >= 0 ; --i ) {
+ debug( "capped 2: " + i );
+ tzz.save( val[ i ] );
+ checkDecreasing( i );
+}
diff --git a/jstests/capped3.js b/jstests/capped3.js
new file mode 100644
index 0000000..f3b29b7
--- /dev/null
+++ b/jstests/capped3.js
@@ -0,0 +1,42 @@
+t = db.jstests_capped3;
+t2 = db.jstests_capped3_clone;
+t.drop();
+t2.drop();
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:100000 } ) );
+c = t2.find();
+for( i = 0; i < 1000; ++i ) {
+ assert.eq( i, c.next().i );
+}
+assert( !c.hasNext() );
+
+t.drop();
+t2.drop();
+
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( db.runCommand( { cloneCollectionAsCapped:"jstests_capped3", toCollection:"jstests_capped3_clone", size:1000 } ) );
+c = t2.find().sort( {$natural:-1} );
+i = 999;
+while( c.hasNext() ) {
+ assert.eq( i--, c.next().i );
+}
+assert( i < 990 );
+
+t.drop();
+t2.drop();
+
+for( i = 0; i < 1000; ++i ) {
+ t.save( {i:i} );
+}
+assert.commandWorked( t.convertToCapped( 1000 ) );
+c = t.find().sort( {$natural:-1} );
+i = 999;
+while( c.hasNext() ) {
+ assert.eq( i--, c.next().i );
+}
+assert( i < 990 );
+assert( i > 900 );
diff --git a/jstests/capped4.js b/jstests/capped4.js
new file mode 100644
index 0000000..14d5bd0
--- /dev/null
+++ b/jstests/capped4.js
@@ -0,0 +1,28 @@
+t = db.jstests_capped4;
+t.drop();
+
+db.createCollection( "jstests_capped4", {size:1000,capped:true} );
+t.ensureIndex( { i: 1 } );
+for( i = 0; i < 20; ++i ) {
+ t.save( { i : i } );
+}
+c = t.find().sort( { $natural: -1 } ).limit( 2 );
+c.next();
+c.next();
+d = t.find().sort( { i: -1 } ).limit( 2 );
+d.next();
+d.next();
+
+for( i = 20; t.findOne( { i:19 } ); ++i ) {
+ t.save( { i : i } );
+}
+//assert( !t.findOne( { i : 19 } ), "A" );
+assert( !c.hasNext(), "B" );
+assert( !d.hasNext(), "C" );
+assert( t.find().sort( { i : 1 } ).hint( { i : 1 } ).toArray().length > 10, "D" );
+
+assert( t.findOne( { i : i - 1 } ), "E" );
+t.remove( { i : i - 1 } );
+assert( db.getLastError().indexOf( "capped" ) >= 0, "F" );
+
+assert( t.validate().valid, "G" );
diff --git a/jstests/capped5.js b/jstests/capped5.js
new file mode 100644
index 0000000..a5d04de
--- /dev/null
+++ b/jstests/capped5.js
@@ -0,0 +1,18 @@
+
+tn = "capped5"
+
+t = db[tn]
+t.drop();
+
+db.createCollection( tn , {capped: true, size: 1024 * 1024 * 1 } );
+t.insert( { _id : 5 , x : 11 , z : 52 } );
+
+assert.eq( 0 , t.getIndexKeys().length , "A0" )
+assert.eq( 52 , t.findOne( { x : 11 } ).z , "A1" );
+assert.eq( 52 , t.findOne( { _id : 5 } ).z , "A2" );
+
+t.ensureIndex( { _id : 1 } )
+t.ensureIndex( { x : 1 } )
+
+assert.eq( 52 , t.findOne( { x : 11 } ).z , "B1" );
+assert.eq( 52 , t.findOne( { _id : 5 } ).z , "B2" );
diff --git a/jstests/clone/clonecollection.js b/jstests/clone/clonecollection.js
new file mode 100644
index 0000000..64d4ff0
--- /dev/null
+++ b/jstests/clone/clonecollection.js
@@ -0,0 +1,165 @@
+// Test cloneCollection command
+
+var baseName = "jstests_clonecollection";
+
+parallel = function() {
+ return t.parallelStatus;
+}
+
+resetParallel = function() {
+ parallel().drop();
+}
+
+doParallel = function( work ) {
+ resetParallel();
+ startMongoProgramNoConnect( "mongo", "--port", ports[ 1 ], "--eval", work + "; db.parallelStatus.save( {done:1} );", baseName );
+}
+
+doneParallel = function() {
+ return !!parallel().findOne();
+}
+
+waitParallel = function() {
+ assert.soon( function() { return doneParallel(); }, "parallel did not finish in time", 300000, 1000 );
+}
+
+ports = allocatePorts( 2 );
+
+f = startMongod( "--port", ports[ 0 ], "--dbpath", "/data/db/" + baseName + "_from", "--nohttpinterface", "--bind_ip", "127.0.0.1" ).getDB( baseName );
+t = startMongod( "--port", ports[ 1 ], "--dbpath", "/data/db/" + baseName + "_to", "--nohttpinterface", "--bind_ip", "127.0.0.1" ).getDB( baseName );
+
+for( i = 0; i < 1000; ++i ) {
+ f.a.save( { i: i } );
+}
+assert.eq( 1000, f.a.find().count() );
+
+assert.commandWorked( t.cloneCollection( "localhost:" + ports[ 0 ], "a" ) );
+assert.eq( 1000, t.a.find().count() );
+
+t.a.drop();
+
+assert.commandWorked( t.cloneCollection( "localhost:" + ports[ 0 ], "a", { i: { $gte: 10, $lt: 20 } } ) );
+assert.eq( 10, t.a.find().count() );
+
+t.a.drop();
+assert.eq( 0, t.system.indexes.find().count() );
+
+f.a.ensureIndex( { i: 1 } );
+assert.eq( 2, f.system.indexes.find().count(), "expected index missing" );
+assert.commandWorked( t.cloneCollection( "localhost:" + ports[ 0 ], "a" ) );
+if ( t.system.indexes.find().count() != 2 ) {
+ printjson( t.system.indexes.find().toArray() );
+}
+assert.eq( 2, t.system.indexes.find().count(), "expected index missing" );
+// Verify index works
+assert.eq( 50, t.a.find( { i: 50 } ).hint( { i: 1 } ).explain().startKey.i );
+assert.eq( 1, t.a.find( { i: 50 } ).hint( { i: 1 } ).toArray().length, "match length did not match expected" );
+
+// Check that capped-ness is preserved on clone
+f.a.drop();
+t.a.drop();
+
+f.createCollection( "a", {capped:true,size:1000} );
+assert( f.a.isCapped() );
+assert.commandWorked( t.cloneCollection( "localhost:" + ports[ 0 ], "a" ) );
+assert( t.a.isCapped(), "cloned collection not capped" );
+
+// Now test insert + delete + update during clone
+f.a.drop();
+t.a.drop();
+
+for( i = 0; i < 100000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+doParallel( "assert.commandWorked( db.cloneCollection( \"localhost:" + ports[ 0 ] + "\", \"a\", {i:{$gte:0}} ) );" );
+
+sleep( 200 );
+f.a.save( { i: 200000 } );
+f.a.save( { i: -1 } );
+f.a.remove( { i: 0 } );
+f.a.update( { i: 99998 }, { i: 99998, x: "y" } );
+assert( !doneParallel(), "test run invalid" );
+waitParallel();
+
+assert.eq( 100000, t.a.find().count() );
+assert.eq( 1, t.a.find( { i: 200000 } ).count() );
+assert.eq( 0, t.a.find( { i: -1 } ).count() );
+assert.eq( 0, t.a.find( { i: 0 } ).count() );
+assert.eq( 1, t.a.find( { i: 99998, x: "y" } ).count() );
+
+
+// Now test oplog running out of space -- specify small size clone oplog for test.
+f.a.drop();
+t.a.drop();
+
+for( i = 0; i < 200000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+doParallel( "assert.commandFailed( db.runCommand( { cloneCollection: \"jstests_clonecollection.a\", from: \"localhost:" + ports[ 0 ] + "\", logSizeMb:1 } ) );" );
+
+sleep( 200 );
+for( i = 200000; i < 250000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+waitParallel();
+
+// Make sure the same works with standard size op log.
+f.a.drop();
+t.a.drop();
+
+for( i = 0; i < 200000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+doParallel( "assert.commandWorked( db.cloneCollection( \"localhost:" + ports[ 0 ] + "\", \"a\" ) );" );
+
+sleep( 200 );
+for( i = 200000; i < 250000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+waitParallel();
+assert.eq( 250000, t.a.find().count() );
+
+// Test startCloneCollection and finishCloneCollection commands.
+f.a.drop();
+t.a.drop();
+
+for( i = 0; i < 100000; ++i ) {
+ f.a.save( { i: i } );
+}
+
+doParallel( "z = db.runCommand( {startCloneCollection:\"jstests_clonecollection.a\", from:\"localhost:" + ports[ 0 ] + "\" } ); print( \"clone_clone_clone_commandResult:::::\" + tojson( z , '' , true ) + \":::::\" );" );
+
+sleep( 200 );
+f.a.save( { i: -1 } );
+
+waitParallel();
+// even after parallel shell finished, must wait for finishToken line to appear in log
+assert.soon( function() {
+ raw = rawMongoProgramOutput().replace( /[\r\n]/gm , " " )
+ ret = raw.match( /clone_clone_clone_commandResult:::::(.*):::::/ );
+ if ( ret == null ) {
+ return false;
+ }
+ ret = ret[ 1 ];
+ return true;
+ } );
+
+eval( "ret = " + ret );
+
+assert.commandWorked( ret );
+assert.eq( 100001, t.a.find().count() );
+
+f.a.save( { i: -2 } );
+assert.eq( 100002, f.a.find().count() );
+finishToken = ret.finishToken;
+// Round-tripping through JS can corrupt the cursor ids we store as BSON
+// Date elements. Date( 0 ) will correspond to a cursorId value of 0, which
+// makes the db start scanning from the beginning of the collection.
+finishToken.cursorId = new Date( 0 );
+assert.commandWorked( t.runCommand( {finishCloneCollection:finishToken} ) );
+assert.eq( 100002, t.a.find().count() );
diff --git a/jstests/copydb.js b/jstests/copydb.js
new file mode 100644
index 0000000..7c7c025
--- /dev/null
+++ b/jstests/copydb.js
@@ -0,0 +1,20 @@
+
+
+
+
+a = db.getSisterDB( "copydb-test-a" );
+b = db.getSisterDB( "copydb-test-b" );
+
+a.dropDatabase();
+b.dropDatabase();
+
+a.foo.save( { a : 1 } );
+
+assert.eq( 1 , a.foo.count() , "A" );
+assert.eq( 0 , b.foo.count() , "B" );
+
+a.copyDatabase( a._name , b._name );
+
+assert.eq( 1 , a.foo.count() , "C" );
+assert.eq( 1 , b.foo.count() , "D" );
+
diff --git a/jstests/count.js b/jstests/count.js
new file mode 100644
index 0000000..5502d71
--- /dev/null
+++ b/jstests/count.js
@@ -0,0 +1,25 @@
+t = db.jstests_count;
+
+t.drop();
+t.save( { i: 1 } );
+t.save( { i: 2 } );
+assert.eq( 1, t.find( { i: 1 } ).count(), "A" );
+assert.eq( 1, t.count( { i: 1 } ) , "B" );
+assert.eq( 2, t.find().count() , "C" );
+assert.eq( 2, t.find( undefined ).count() , "D" );
+assert.eq( 2, t.find( null ).count() , "E" );
+assert.eq( 2, t.count() , "F" );
+
+t.drop();
+t.save( {a:true,b:false} );
+t.ensureIndex( {b:1,a:1} );
+assert.eq( 1, t.find( {a:true,b:false} ).count() , "G" );
+assert.eq( 1, t.find( {b:false,a:true} ).count() , "H" );
+
+t.drop();
+t.save( {a:true,b:false} );
+t.ensureIndex( {b:1,a:1,c:1} );
+
+assert.eq( 1, t.find( {a:true,b:false} ).count() , "I" );
+assert.eq( 1, t.find( {b:false,a:true} ).count() , "J" );
+
diff --git a/jstests/count2.js b/jstests/count2.js
new file mode 100644
index 0000000..33ff712
--- /dev/null
+++ b/jstests/count2.js
@@ -0,0 +1,23 @@
+t = db.count2;
+t.drop();
+
+for ( var i=0; i<1000; i++ ){
+ t.save( { num : i , m : i % 20 } );
+}
+
+assert.eq( 1000 , t.count() , "A" )
+assert.eq( 1000 , t.find().count() , "B" )
+assert.eq( 1000 , t.find().toArray().length , "C" )
+
+assert.eq( 50 , t.find( { m : 5 } ).toArray().length , "D" )
+assert.eq( 50 , t.find( { m : 5 } ).count() , "E" )
+
+assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).toArray().length , "F" )
+assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).count() , "G" )
+assert.eq( 40 , t.find( { m : 5 } ).skip( 10 ).countReturn() , "H" )
+
+assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).toArray().length , "I" )
+assert.eq( 50 , t.find( { m : 5 } ).skip( 10 ).limit(20).count() , "J" )
+assert.eq( 20 , t.find( { m : 5 } ).skip( 10 ).limit(20).countReturn() , "K" )
+
+assert.eq( 5 , t.find( { m : 5 } ).skip( 45 ).limit(20).countReturn() , "L" )
diff --git a/jstests/count3.js b/jstests/count3.js
new file mode 100644
index 0000000..a8c3ef5
--- /dev/null
+++ b/jstests/count3.js
@@ -0,0 +1,26 @@
+
+t = db.count3;
+
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 1 , b : 2 } );
+
+assert.eq( 2 , t.find( { a : 1 } ).itcount() , "A" );
+assert.eq( 2 , t.find( { a : 1 } ).count() , "B" );
+
+assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).itcount() , "C" );
+assert.eq( 2 , t.find( { a : 1 } , { b : 1 } ).count() , "D" );
+
+t.drop();
+
+t.save( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : 1 } ).itcount() , "E" );
+assert.eq( 1 , t.find( { a : 1 } ).count() , "F" );
+
+assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).itcount() , "G" );
+assert.eq( 1 , t.find( { a : 1 } , { b : 1 } ).count() , "H" );
+
+
+
diff --git a/jstests/count4.js b/jstests/count4.js
new file mode 100644
index 0000000..7be7436
--- /dev/null
+++ b/jstests/count4.js
@@ -0,0 +1,17 @@
+
+t = db.count4;
+t.drop();
+
+for ( i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 25 , $lte : 75 } }
+
+assert.eq( 50 , t.find( q ).count() , "A" );
+assert.eq( 50 , t.find( q ).itcount() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 50 , t.find( q ).count() , "C" );
+assert.eq( 50 , t.find( q ).itcount() , "D" );
diff --git a/jstests/count5.js b/jstests/count5.js
new file mode 100644
index 0000000..b6bbc54
--- /dev/null
+++ b/jstests/count5.js
@@ -0,0 +1,30 @@
+
+t = db.count5;
+t.drop();
+
+for ( i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 25 , $lte : 75 } };
+
+assert.eq( 50 , t.find( q ).count() , "A" );
+assert.eq( 50 , t.find( q ).itcount() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 50 , t.find( q ).count() , "C" );
+assert.eq( 50 , t.find( q ).itcount() , "D" );
+
+assert.eq( 50 , t.find( q ).limit(1).count() , "E" );
+assert.eq( 1 , t.find( q ).limit(1).itcount() , "F" );
+
+assert.eq( 5 , t.find( q ).limit(5).size() , "G" );
+assert.eq( 5 , t.find( q ).skip(5).limit(5).size() , "H" );
+assert.eq( 2 , t.find( q ).skip(48).limit(5).size() , "I" );
+
+assert.eq( 20 , t.find().limit(20).size() , "J" );
+
+assert.eq( 0 , t.find().skip(120).size() , "K" );
+assert.eq( 1 , db.$cmd.findOne( { count: "count5" } )["ok"] , "L" );
+assert.eq( 1 , db.$cmd.findOne( { count: "count5", skip: 120 } )["ok"] , "M" );
diff --git a/jstests/cursor1.js b/jstests/cursor1.js
new file mode 100644
index 0000000..8448752
--- /dev/null
+++ b/jstests/cursor1.js
@@ -0,0 +1,20 @@
+
+t = db.cursor1
+t.drop();
+
+big = "";
+while ( big.length < 50000 )
+ big += "asdasdasdasdsdsdadsasdasdasD";
+
+num = Math.ceil( 10000000 / big.length );
+
+for ( var i=0; i<num; i++ ){
+ t.save( { num : i , str : big } );
+}
+
+assert.eq( num , t.find().count() );
+assert.eq( num , t.find().itcount() );
+
+assert.eq( num / 2 , t.find().limit(num/2).itcount() );
+
+t.drop(); // save some space
diff --git a/jstests/cursor2.js b/jstests/cursor2.js
new file mode 100644
index 0000000..2389a6a
--- /dev/null
+++ b/jstests/cursor2.js
@@ -0,0 +1,24 @@
+
+/**
+ * test to see if the count returned from the cursor is the number of objects that would be returned
+ *
+ * BUG 884
+ */
+function testCursorCountVsArrLen(dbConn) {
+
+ var coll = dbConn.ed_db_cursor2_ccvsal;
+
+ coll.drop();
+
+ coll.save({ a: 1, b : 1});
+ coll.save({ a: 2, b : 1});
+ coll.save({ a: 3});
+
+ var fromCount = coll.find({}, {b:1}).count();
+ var fromArrLen = coll.find({}, {b:1}).toArray().length;
+
+ assert(fromCount == fromArrLen, "count from cursor [" + fromCount + "] != count from arrlen [" + fromArrLen + "]");
+}
+
+
+testCursorCountVsArrLen(db);
diff --git a/jstests/cursor3.js b/jstests/cursor3.js
new file mode 100644
index 0000000..d23264c
--- /dev/null
+++ b/jstests/cursor3.js
@@ -0,0 +1,35 @@
+// Test inequality bounds combined with ordering for a single-field index.
+// BUG 1079 (fixed)
+
+testNum = 1;
+
+function checkResults( expected, cursor , testNum ) {
+ assert.eq( expected.length, cursor.count() , "testNum: " + testNum + " A : " + tojson( cursor.toArray() ) + " " + tojson( cursor.explain() ) );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ], cursor[ i ][ "a" ] , "testNum: " + testNum + " B" );
+ }
+}
+
+t = db.cursor3;
+t.drop()
+
+t.save( { a: 0 } );
+t.save( { a: 1 } );
+t.save( { a: 2 } );
+
+t.ensureIndex( { a: 1 } );
+
+
+
+checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1 ], t.find( { a: 1 } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+
+checkResults( [ 1, 2 ], t.find( { a: { $gt: 0 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 2, 1 ], t.find( { a: { $gt: 0 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 2 ], t.find( { a: { $gte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 2, 1 ], t.find( { a: { $gte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+
+checkResults( [ 0, 1 ], t.find( { a: { $lt: 2 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 0 ], t.find( { a: { $lt: 2 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 0, 1 ], t.find( { a: { $lte: 1 } } ).sort( { a: 1 } ).hint( { a: 1 } ) , testNum++ )
+checkResults( [ 1, 0 ], t.find( { a: { $lte: 1 } } ).sort( { a: -1 } ).hint( { a: 1 } ) , testNum++ )
diff --git a/jstests/cursor4.js b/jstests/cursor4.js
new file mode 100644
index 0000000..b08a72f
--- /dev/null
+++ b/jstests/cursor4.js
@@ -0,0 +1,47 @@
+// Test inequality bounds with multi-field sorting
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a, cursor[ i ].a );
+ assert.eq( expected[ i ].b, cursor[ i ].b );
+ }
+}
+
+function testConstrainedFindMultiFieldSorting( db ) {
+ r = db.ed_db_cursor4_cfmfs;
+ r.drop();
+
+ entries = [ { a: 0, b: 0 },
+ { a: 0, b: 1 },
+ { a: 1, b: 1 },
+ { a: 1, b: 1 },
+ { a: 2, b: 0 } ];
+ for( i = 0; i < entries.length; ++i )
+ r.save( entries[ i ] );
+ r.ensureIndex( { a: 1, b: 1 } );
+ reverseEntries = entries.slice();
+ reverseEntries.reverse();
+
+ checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 2, 4 ), r.find( { a: 1, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 2, 5 ), r.find( { a: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 0, 3 ), r.find( { a: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 0, 4 ), r.find( { a: { $lt: 2 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 5 ), r.find( { a: { $lt: 2 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 4, 5 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 2, 4 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( reverseEntries.slice( 0, 1 ), r.find( { a: { $gt: 0 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 3 ), r.find( { a: { $gt: 0 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( entries.slice( 0, 1 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( entries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+ checkResults( reverseEntries.slice( 4, 5 ), r.find( { a: { $lt: 2 }, b: { $lt: 1 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+ checkResults( reverseEntries.slice( 1, 4 ), r.find( { a: { $lt: 2 }, b: { $gt: 0 } } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+}
+
+testConstrainedFindMultiFieldSorting( db );
diff --git a/jstests/cursor5.js b/jstests/cursor5.js
new file mode 100644
index 0000000..6434d2b
--- /dev/null
+++ b/jstests/cursor5.js
@@ -0,0 +1,36 @@
+// Test bounds with subobject indexes.
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a.b, cursor[ i ].a.b );
+ assert.eq( expected[ i ].a.c, cursor[ i ].a.c );
+ assert.eq( expected[ i ].a.d, cursor[ i ].a.d );
+ assert.eq( expected[ i ].e, cursor[ i ].e );
+ }
+}
+
+function testBoundsWithSubobjectIndexes( db ) {
+ r = db.ed_db_cursor5_bwsi;
+ r.drop();
+
+ z = [ { a: { b: 1, c: 2, d: 3 }, e: 4 },
+ { a: { b: 1, c: 2, d: 3 }, e: 5 },
+ { a: { b: 1, c: 2, d: 4 }, e: 4 },
+ { a: { b: 1, c: 2, d: 4 }, e: 5 },
+ { a: { b: 2, c: 2, d: 3 }, e: 4 },
+ { a: { b: 2, c: 2, d: 3 }, e: 5 } ];
+ for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+ idx = { "a.d": 1, a: 1, e: -1 };
+ rIdx = { "a.d": -1, a: -1, e: 1 };
+ r.ensureIndex( idx );
+
+ checkResults( [ z[ 0 ], z[ 4 ], z[ 2 ] ], r.find( { e: 4 } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 1 ], z[ 3 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( idx ).hint( idx ) );
+
+ checkResults( [ z[ 2 ], z[ 4 ], z[ 0 ] ], r.find( { e: 4 } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 3 ], z[ 1 ] ], r.find( { e: { $gt: 4 }, "a.b": 1 } ).sort( rIdx ).hint( idx ) );
+}
+
+testBoundsWithSubobjectIndexes( db );
diff --git a/jstests/cursor6.js b/jstests/cursor6.js
new file mode 100644
index 0000000..9a45f93
--- /dev/null
+++ b/jstests/cursor6.js
@@ -0,0 +1,100 @@
+// Test different directions for compound indexes
+
+function eq( one, two ) {
+ assert.eq( one.a, two.a );
+ assert.eq( one.b, two.b );
+}
+
+function checkExplain( e, idx, reverse, nScanned ) {
+ if ( !reverse ) {
+ if ( idx ) {
+ assert.eq( "BtreeCursor a_1_b_-1", e.cursor );
+ } else {
+ assert.eq( "BasicCursor", e.cursor );
+ }
+ } else {
+ if ( idx ) {
+ assert.eq( "BtreeCursor a_1_b_-1 reverse", e.cursor );
+ } else {
+ assert( false );
+ }
+ }
+ assert.eq( nScanned, e.nscanned );
+}
+
+function check( indexed ) {
+ var hint;
+ if ( indexed ) {
+ hint = { a: 1, b: -1 };
+ } else {
+ hint = { $natural: 1 };
+ }
+
+ e = r.find().sort( { a: 1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: 1, b: 1 } ).hint( hint );
+ eq( z[ 0 ], f[ 0 ] );
+ eq( z[ 1 ], f[ 1 ] );
+ eq( z[ 2 ], f[ 2 ] );
+ eq( z[ 3 ], f[ 3 ] );
+
+ e = r.find().sort( { a: 1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: 1, b: -1 } ).hint( hint );
+ eq( z[ 1 ], f[ 0 ] );
+ eq( z[ 0 ], f[ 1 ] );
+ eq( z[ 3 ], f[ 2 ] );
+ eq( z[ 2 ], f[ 3 ] );
+
+ e = r.find().sort( { a: -1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, true && indexed, 4 );
+ f = r.find().sort( { a: -1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+ eq( z[ 0 ], f[ 2 ] );
+ eq( z[ 1 ], f[ 3 ] );
+
+ e = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: -1 } ).hint( hint );
+ eq( z[ 3 ], f[ 0 ] );
+ eq( z[ 2 ], f[ 1 ] );
+
+ e = r.find( { a : { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, true && indexed, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: -1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+
+ e = r.find( { a : { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, indexed ? 2 : 4 );
+ f = r.find( { a: { $gte: 2 } } ).sort( { a: 1, b: 1 } ).hint( hint );
+ eq( z[ 2 ], f[ 0 ] );
+ eq( z[ 3 ], f[ 1 ] );
+
+ e = r.find().sort( { a: -1, b: -1 } ).hint( hint ).explain();
+ checkExplain( e, indexed, false, 4 );
+ f = r.find().sort( { a: -1, b: -1 } ).hint( hint );
+ eq( z[ 3 ], f[ 0 ] );
+ eq( z[ 2 ], f[ 1 ] );
+ eq( z[ 1 ], f[ 2 ] );
+ eq( z[ 0 ], f[ 3 ] );
+}
+
+db.setProfilingLevel( 1 );
+r = db.ed_db_cursor6;
+r.drop();
+
+z = [ { a: 1, b: 1 },
+ { a: 1, b: 2 },
+ { a: 2, b: 1 },
+ { a: 2, b: 2 } ];
+for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+
+r.ensureIndex( { a: 1, b: -1 } );
+
+check( false );
+check( true );
+
+assert.eq( "BasicCursor", r.find().sort( { a: 1, b: -1, z: 1 } ).hint( { $natural: -1 } ).explain().cursor );
diff --git a/jstests/cursor7.js b/jstests/cursor7.js
new file mode 100644
index 0000000..97cfbb7
--- /dev/null
+++ b/jstests/cursor7.js
@@ -0,0 +1,42 @@
+// Test bounds with multiple inequalities and sorting.
+
+function checkResults( expected, cursor ) {
+ assert.eq( expected.length, cursor.count() );
+ for( i = 0; i < expected.length; ++i ) {
+ assert.eq( expected[ i ].a, cursor[ i ].a );
+ assert.eq( expected[ i ].b, cursor[ i ].b );
+ }
+}
+
+function testMultipleInequalities( db ) {
+ r = db.ed_db_cursor_mi;
+ r.drop();
+
+ z = [ { a: 1, b: 2 },
+ { a: 3, b: 4 },
+ { a: 5, b: 6 },
+ { a: 7, b: 8 } ];
+ for( i = 0; i < z.length; ++i )
+ r.save( z[ i ] );
+ idx = { a: 1, b: 1 };
+ rIdx = { a: -1, b: -1 };
+ r.ensureIndex( idx );
+
+ checkResults( [ z[ 2 ], z[ 3 ] ], r.find( { a: { $gt: 3 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( idx ).hint( idx ) );
+
+ checkResults( [ z[ 3 ], z[ 2 ] ], r.find( { a: { $gt: 3 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gt: 3 } } ).sort( rIdx ).hint( idx ) );
+ checkResults( [ z[ 2 ] ], r.find( { a: { $gt: 3, $lt: 7, $lte: 5 } } ).sort( rIdx ).hint( idx ) );
+
+ checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gt: 1, $lt: 7, $gte: 3, $lte: 5 }, b: { $gt: 2, $lt: 8, $gte: 4, $lte: 6 } } ).sort( rIdx ).hint( idx ) );
+
+ checkResults( [ z[ 1 ], z[ 2 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( idx ).hint( idx ) );
+ checkResults( [ z[ 2 ], z[ 1 ] ], r.find( { a: { $gte: 1, $lte: 7, $gt: 2, $lt: 6 }, b: { $gte: 2, $lte: 8, $gt: 3, $lt: 7 } } ).sort( rIdx ).hint( idx ) );
+}
+
+testMultipleInequalities( db );
diff --git a/jstests/cursor8.js b/jstests/cursor8.js
new file mode 100644
index 0000000..169bb5d
--- /dev/null
+++ b/jstests/cursor8.js
@@ -0,0 +1,10 @@
+db.f.drop();
+db.f.save( {} );
+db.f.save( {} );
+db.f.save( {} );
+
+db.getMongo().getDB( "admin" ).runCommand( {closeAllDatabases:1} );
+
+assert.eq( 0, db.runCommand( {cursorInfo:1} ).clientCursors_size );
+assert.eq( 2, db.f.find( {} ).limit( 2 ).toArray().length );
+assert.eq( 1, db.runCommand( {cursorInfo:1} ).clientCursors_size );
diff --git a/jstests/datasize.js b/jstests/datasize.js
new file mode 100644
index 0000000..396d24d
--- /dev/null
+++ b/jstests/datasize.js
@@ -0,0 +1,28 @@
+f = db.jstests_datasize;
+f.drop();
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'c'} );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'fg'} );
+assert.eq( 65, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+
+f.drop();
+f.ensureIndex( {qq:1} );
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'c'} );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+f.save( {qq:'fg'} );
+assert.eq( 65, db.runCommand( {datasize:"test.jstests_datasize"} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}} ).ok );
+
+assert.eq( 65, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'z' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{qq:1}} ).size );
+assert.eq( 33, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'d'}, max:{qq:'z' }, keyPattern:{qq:1}} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'c' }} ).size );
+assert.eq( 32, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'c'}, max:{qq:'d' }} ).size );
+
+assert.eq( 0, db.runCommand( {datasize:"test.jstests_datasize", min:{qq:'a'}, max:{qq:'d' }, keyPattern:{a:1}} ).ok );
diff --git a/jstests/date1.js b/jstests/date1.js
new file mode 100644
index 0000000..ca2e616
--- /dev/null
+++ b/jstests/date1.js
@@ -0,0 +1,14 @@
+
+t = db.date1;
+
+
+function go( d , msg ){
+ t.drop();
+ t.save( { a : 1 , d : d } );
+ assert.eq( d , t.findOne().d , msg )
+}
+
+go( new Date() , "A" )
+go( new Date( 1 ) , "B")
+go( new Date( 0 ) , "C (old spidermonkey lib fails this test)")
+
diff --git a/jstests/dbadmin.js b/jstests/dbadmin.js
new file mode 100644
index 0000000..c7b7bc8
--- /dev/null
+++ b/jstests/dbadmin.js
@@ -0,0 +1,22 @@
+
+t = db.dbadmin;
+t.save( { x : 1 } );
+
+before = db._adminCommand( "serverStatus" )
+if ( before.mem.supported ){
+ db._adminCommand( "closeAllDatabases" );
+ after = db._adminCommand( "serverStatus" );
+ assert( before.mem.mapped > after.mem.mapped , "closeAllDatabases does something before:" + tojson( before ) + " after:" + tojson( after ) );
+}
+else {
+ print( "can't test serverStatus on this machine" );
+}
+
+t.save( { x : 1 } );
+
+res = db._adminCommand( "listDatabases" );
+assert( res.databases.length > 0 , "listDatabases 1" );
+
+print( "BEFORE: " + tojson( before ) );
+print( "AFTER : " + tojson( after ) );
+// TODO: add more tests here
diff --git a/jstests/dbref1.js b/jstests/dbref1.js
new file mode 100644
index 0000000..4a82766
--- /dev/null
+++ b/jstests/dbref1.js
@@ -0,0 +1,10 @@
+
+a = db.dbref1a;
+b = db.dbref1b;
+
+a.drop();
+b.drop();
+
+a.save( { name : "eliot" } );
+b.save( { num : 1 , link : new DBPointer( "dbref1a" , a.findOne()._id ) } );
+assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
diff --git a/jstests/dbref2.js b/jstests/dbref2.js
new file mode 100644
index 0000000..6ea7305
--- /dev/null
+++ b/jstests/dbref2.js
@@ -0,0 +1,13 @@
+
+a = db.dbref2a;
+b = db.dbref2b;
+
+a.drop();
+b.drop();
+
+a.save( { name : "eliot" } );
+b.save( { num : 1 , link : new DBRef( "dbref2a" , a.findOne()._id ) } );
+assert.eq( "eliot" , b.findOne().link.fetch().name , "A" );
+
+assert.eq( 1 , b.find( function(){ return this.link.fetch().name == "eliot"; } ).count() , "B" );
+assert.eq( 0 , b.find( function(){ return this.link.fetch().name == "el"; } ).count() , "C" );
diff --git a/jstests/disk/dbNoCreate.js b/jstests/disk/dbNoCreate.js
new file mode 100644
index 0000000..c93267b
--- /dev/null
+++ b/jstests/disk/dbNoCreate.js
@@ -0,0 +1,19 @@
+var baseName = "jstests_dbNoCreate";
+
+var m = startMongod( "--port", "27018", "--dbpath", "/data/db/" + baseName );
+
+var t = m.getDB( baseName ).t;
+
+var no = function( dbName ) {
+ assert.eq( -1, db.getMongo().getDBNames().indexOf( dbName ) );
+}
+
+assert.eq( 0, t.find().toArray().length );
+t.remove();
+t.update( {}, { a:1 } );
+t.drop();
+
+stopMongod( 27018 );
+
+var m = startMongoProgram( "mongod", "--port", "27018", "--dbpath", "/data/db/" + baseName );
+assert.eq( -1, m.getDBNames().indexOf( baseName ) );
diff --git a/jstests/disk/diskfull.js b/jstests/disk/diskfull.js
new file mode 100644
index 0000000..7f75266
--- /dev/null
+++ b/jstests/disk/diskfull.js
@@ -0,0 +1,20 @@
+doIt = false;
+files = listFiles( "/data/db" );
+for ( i in files ) {
+ if ( files[ i ].name == "/data/db/diskfulltest" ) {
+ doIt = true;
+ }
+}
+
+if ( !doIt ) {
+ print( "path /data/db/diskfulltest/ missing, skipping diskfull test" );
+ doIt = false;
+}
+
+if ( doIt ) {
+ port = allocatePorts( 1 )[ 0 ];
+ m = startMongoProgram( "mongod", "--port", port, "--dbpath", "/data/db/diskfulltest", "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+ m.getDB( "diskfulltest" ).getCollection( "diskfulltest" ).save( { a: 6 } );
+ assert.soon( function() { return rawMongoProgramOutput().match( /dbexit: really exiting now/ ); }, "didn't see 'really exiting now'" );
+ assert( !rawMongoProgramOutput().match( /Got signal/ ), "saw 'Got signal', not expected. Output: " + rawMongoProgramOutput() );
+}
diff --git a/jstests/disk/norepeat.js b/jstests/disk/norepeat.js
new file mode 100644
index 0000000..d9f1cd3
--- /dev/null
+++ b/jstests/disk/norepeat.js
@@ -0,0 +1,61 @@
+/*
+baseName = "jstests_disk_norepeat";
+
+ports = allocatePorts( 1 );
+m = startMongod( "--port", ports[ 0 ], "--deDupMem", "200", "--dbpath", "/data/db/" + baseName, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+
+t = m.getDB( baseName ).getCollection( baseName );
+
+t.drop();
+t.ensureIndex( { i: 1 } );
+for( i = 0; i < 3; ++i ) {
+ t.save( { i: i } );
+}
+
+c = t.find().hint( { i: 1 } ).limit( 2 );
+assert.eq( 0, c.next().i );
+t.update( { i: 0 }, { i: 3 } );
+assert.eq( 1, c.next().i );
+assert.eq( 2, c.next().i );
+assert.throws( function() { c.next() }, [], "unexpected: object found" );
+
+// now force upgrade to disk storage
+
+t.drop();
+t.ensureIndex( { i: 1 } );
+for( i = 0; i < 10; ++i ) {
+ t.save( { i: i } );
+}
+// apparently this means we also request 2 in subsequent getMore's
+c = t.find().hint( {i:1} ).limit( 2 );
+assert.eq( 0, c.next().i );
+t.update( { i: 0 }, { i: 10 } );
+for( i = 1; i < 10; ++i ) {
+ if ( i == 7 ) {
+ t.update( { i: 6 }, { i: 11 } );
+ t.update( { i: 9 }, { i: 12 } );
+ }
+ if ( i == 9 ) {
+ i = 12;
+ }
+ assert.eq( i, c.next().i );
+}
+assert.throws( function() { c.next() }, [], "unexpected: object found" );
+
+m.getDB( "local" ).getCollectionNames().forEach( function( x ) { assert( !x.match( /^temp/ ), "temp collection found" ); } );
+
+t.drop();
+m.getDB( baseName ).createCollection( baseName, { capped:true, size:100000, autoIdIndex:false } );
+t = m.getDB( baseName ).getCollection( baseName );
+t.insert( {_id:"a"} );
+t.insert( {_id:"a"} );
+t.insert( {_id:"a"} );
+
+c = t.find().limit( 2 );
+assert.eq( "a", c.next()._id );
+assert.eq( "a", c.next()._id );
+assert.eq( "a", c.next()._id );
+assert( !c.hasNext() );
+
+assert( t.validate().valid );
+*/
diff --git a/jstests/disk/preallocate.js b/jstests/disk/preallocate.js
new file mode 100644
index 0000000..69f9a47
--- /dev/null
+++ b/jstests/disk/preallocate.js
@@ -0,0 +1,21 @@
+port = allocatePorts( 1 )[ 0 ]
+
+var baseName = "jstests_preallocate";
+
+vsize = function() {
+ return m.getDB( "admin" ).runCommand( "serverStatus" ).mem.virtual;
+}
+
+var m = startMongod( "--port", port, "--dbpath", "/data/db/" + baseName );
+
+m.getDB( baseName ).createCollection( baseName + "1" );
+
+vs = vsize();
+
+stopMongod( port );
+
+var m = startMongoProgram( "mongod", "--port", port, "--dbpath", "/data/db/" + baseName );
+
+m.getDB( baseName ).createCollection( baseName + "2" );
+
+assert.eq( vs, vsize() );
diff --git a/jstests/distinct1.js b/jstests/distinct1.js
new file mode 100644
index 0000000..433e051
--- /dev/null
+++ b/jstests/distinct1.js
@@ -0,0 +1,25 @@
+
+t = db.distinct1;
+t.drop();
+
+t.save( { a : 1 } )
+t.save( { a : 2 } )
+t.save( { a : 2 } )
+t.save( { a : 2 } )
+t.save( { a : 3 } )
+
+
+res = t.distinct( "a" );
+assert.eq( "1,2,3" , res.toString() , "A1" );
+
+assert.eq( "1,2" , t.distinct( "a" , { a : { $lt : 3 } } ) , "A2" );
+
+t.drop();
+
+t.save( { a : { b : "a" } , c : 12 } );
+t.save( { a : { b : "b" } , c : 12 } );
+t.save( { a : { b : "c" } , c : 12 } );
+t.save( { a : { b : "c" } , c : 12 } );
+
+res = t.distinct( "a.b" );
+assert.eq( "a,b,c" , res.toString() , "B1" );
diff --git a/jstests/distinct2.js b/jstests/distinct2.js
new file mode 100644
index 0000000..41ee78c
--- /dev/null
+++ b/jstests/distinct2.js
@@ -0,0 +1,13 @@
+
+t = db.distinct2;
+t.drop();
+
+t.save({a:null});
+assert.eq( 0 , t.distinct('a.b').length , "A" );
+
+t.drop();
+t.save( { a : 1 } );
+assert.eq( [1] , t.distinct( "a" ) , "B" );
+t.save( {} )
+assert.eq( [1] , t.distinct( "a" ) , "C" );
+
diff --git a/jstests/drop.js b/jstests/drop.js
new file mode 100644
index 0000000..b233409
--- /dev/null
+++ b/jstests/drop.js
@@ -0,0 +1,21 @@
+f = db.jstests_drop;
+
+f.drop();
+
+assert.eq( 0, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "A" );
+f.save( {} );
+assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "B" );
+f.ensureIndex( {a:1} );
+assert.eq( 2, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "C" );
+assert.commandWorked( db.runCommand( {drop:"jstests_drop"} ) );
+assert.eq( 0, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "D" );
+
+f.resetIndexCache();
+f.ensureIndex( {a:1} );
+assert.eq( 2, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "E" );
+assert.commandWorked( db.runCommand( {deleteIndexes:"jstests_drop",index:"*"} ) );
+assert.eq( 1, db.system.indexes.find( {ns:"test.jstests_drop"} ).count() , "G" );
+
+// make sure we can still use it
+f.save( {} );
+assert.eq( 1, f.find().hint( {_id:new ObjectId( "000000000000000000000000" )} ).toArray().length , "H" );
diff --git a/jstests/error1.js b/jstests/error1.js
new file mode 100644
index 0000000..4043bff
--- /dev/null
+++ b/jstests/error1.js
@@ -0,0 +1,41 @@
+db.jstests_error1.drop();
+
+// test 1
+db.$cmd.findOne({reseterror:1});
+assert( db.$cmd.findOne({getlasterror:1}).err == null, "A" );
+assert( db.$cmd.findOne({getpreverror:1}).err == null, "B" );
+
+db.resetError();
+assert( db.getLastError() == null, "C" );
+assert( db.getPrevError().err == null , "preverror 1" );
+
+// test 2
+
+db.$cmd.findOne({forceerror:1});
+assert( db.$cmd.findOne({getlasterror:1}).err != null, "D" );
+assert( db.$cmd.findOne({getpreverror:1}).err != null, "E" );
+
+
+assert( db.getLastError() != null, "F" );
+assert( db.getPrevError().err != null , "preverror 2" );
+assert( db.getPrevError().nPrev == 1, "G" );
+
+db.jstests_error1.findOne();
+assert( db.$cmd.findOne({getlasterror:1}).err == null, "H" );
+assert( db.$cmd.findOne({getpreverror:1}).err != null, "I" );
+assert( db.$cmd.findOne({getpreverror:1}).nPrev == 2, "J" );
+
+db.jstests_error1.findOne();
+assert( db.$cmd.findOne({getlasterror:1}).err == null, "K" );
+assert( db.$cmd.findOne({getpreverror:1}).err != null, "L" );
+assert( db.$cmd.findOne({getpreverror:1}).nPrev == 3, "M" );
+
+db.resetError();
+db.forceError();
+db.jstests_error1.findOne();
+assert( db.getLastError() == null , "getLastError 5" );
+assert( db.getPrevError().err != null , "preverror 3" );
+
+// test 3
+db.$cmd.findOne({reseterror:1});
+assert( db.$cmd.findOne({getpreverror:1}).err == null, "N" );
diff --git a/jstests/error2.js b/jstests/error2.js
new file mode 100644
index 0000000..8c27d62
--- /dev/null
+++ b/jstests/error2.js
@@ -0,0 +1,21 @@
+// Test that client gets stack trace on failed invoke
+
+f = db.jstests_error2;
+
+f.drop();
+
+f.save( {a:1} );
+
+assert.throws(
+ function(){
+ c = f.find({$where : function(){ return a() }});
+ c.next();
+ }
+);
+
+assert.throws(
+ function(){
+ db.eval( function() { return a(); } );
+ }
+);
+
diff --git a/jstests/error3.js b/jstests/error3.js
new file mode 100644
index 0000000..9f7f298
--- /dev/null
+++ b/jstests/error3.js
@@ -0,0 +1,5 @@
+
+db.runCommand( "forceerror" );
+assert.eq( "forced error" , db.getLastError() );
+db.runCommand( "switchtoclienterrors" );
+assert.isnull( db.getLastError() );
diff --git a/jstests/error4.js b/jstests/error4.js
new file mode 100644
index 0000000..deb2eb2
--- /dev/null
+++ b/jstests/error4.js
@@ -0,0 +1,7 @@
+
+t = db.error4;
+t.drop()
+t.insert( { _id : 1 } )
+t.insert( { _id : 1 } )
+assert.eq( 11000 , db.getLastErrorCmd().code , "A" )
+
diff --git a/jstests/error5.js b/jstests/error5.js
new file mode 100644
index 0000000..ed8d922
--- /dev/null
+++ b/jstests/error5.js
@@ -0,0 +1,8 @@
+
+t = db.error5
+t.drop();
+
+assert.throws( function(){ t.save( 4 ); } , "A" );
+t.save( { a : 1 } )
+assert.eq( 1 , t.count() , "B" );
+
diff --git a/jstests/eval0.js b/jstests/eval0.js
new file mode 100644
index 0000000..1b9bd35
--- /dev/null
+++ b/jstests/eval0.js
@@ -0,0 +1,3 @@
+
+assert.eq( 17 , db.eval( function(){ return 11 + 6; } ) , "A" );
+assert.eq( 17 , db.eval( function( x ){ return 10 + x; } , 7 ) , "B" );
diff --git a/jstests/eval1.js b/jstests/eval1.js
new file mode 100644
index 0000000..4a5ca75
--- /dev/null
+++ b/jstests/eval1.js
@@ -0,0 +1,17 @@
+
+t = db.eval1;
+t.drop();
+
+t.save( { _id : 1 , name : "eliot" } );
+t.save( { _id : 2 , name : "sara" } );
+
+f = function(id){
+ return db["eval1"].findOne( { _id : id } ).name;
+}
+
+
+assert.eq( "eliot" , f( 1 ) , "A" );
+assert.eq( "sara" , f( 2 ) , "B" );
+assert.eq( "eliot" , db.eval( f , 1 ) , "C" );
+assert.eq( "sara" , db.eval( f , 2 ) , "D" );
+
diff --git a/jstests/eval2.js b/jstests/eval2.js
new file mode 100644
index 0000000..c3a7499
--- /dev/null
+++ b/jstests/eval2.js
@@ -0,0 +1,28 @@
+
+t = db.test;
+t.drop();
+t.save({a:1});
+t.save({a:1});
+
+var f = db.group(
+ {
+ ns: "test",
+ key: { a:true},
+ cond: { a:1 },
+ reduce: function(obj,prev) { prev.csum++; } ,
+ initial: { csum: 0}
+ }
+);
+
+assert(f[0].a == 1 && f[0].csum == 2 , "on db" );
+
+var f = t.group(
+ {
+ key: { a:true},
+ cond: { a:1 },
+ reduce: function(obj,prev) { prev.csum++; } ,
+ initial: { csum: 0}
+ }
+);
+
+assert(f[0].a == 1 && f[0].csum == 2 , "on coll" );
diff --git a/jstests/eval3.js b/jstests/eval3.js
new file mode 100644
index 0000000..404d4d8
--- /dev/null
+++ b/jstests/eval3.js
@@ -0,0 +1,21 @@
+
+t = db.eval3;
+t.drop();
+
+t.save( { _id : 1 , name : "eliot" } );
+assert.eq( 1 , t.count() , "A" );
+
+function z( a , b ){
+ db.eval3.save( { _id : a , name : b } );
+ return b;
+}
+
+z( 2 , "sara" );
+assert.eq( 2 , t.count() , "B" );
+
+assert.eq( "eliot,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
+
+assert.eq( "joe" , db.eval( z , 3 , "joe" ) , "C" );
+assert.eq( 3 , t.count() , "D" );
+
+assert.eq( "eliot,joe,sara" , t.find().toArray().map( function(z){ return z.name; } ).sort().toString() );
diff --git a/jstests/eval4.js b/jstests/eval4.js
new file mode 100644
index 0000000..31d6ef0
--- /dev/null
+++ b/jstests/eval4.js
@@ -0,0 +1,23 @@
+
+t = db.eval4;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 3 , t.count() , "A" );
+
+function f( x ){
+ db.eval4.remove( { a : x } );
+}
+
+f( 2 );
+assert.eq( 2 , t.count() , "B" );
+
+db.eval( f , 2 );
+assert.eq( 2 , t.count() , "C" );
+
+db.eval( f , 3 );
+assert.eq( 1 , t.count() , "D" );
+
diff --git a/jstests/eval5.js b/jstests/eval5.js
new file mode 100644
index 0000000..a9223a5
--- /dev/null
+++ b/jstests/eval5.js
@@ -0,0 +1,23 @@
+
+t = db.eval5;
+t.drop();
+
+t.save( { a : 1 , b : 2 , c : 3 } );
+
+assert.eq( 3 ,
+ db.eval(
+ function(z){
+ return db.eval5.find().toArray()[0].c;
+ }
+ ) ,
+ "something weird A"
+ );
+
+assert.isnull(
+ db.eval(
+ function(z){
+ return db.eval5.find( {} , { a : 1 } ).toArray()[0].c;
+ }
+ ),
+ "field spec didn't work"
+ );
diff --git a/jstests/eval6.js b/jstests/eval6.js
new file mode 100644
index 0000000..5fe0969
--- /dev/null
+++ b/jstests/eval6.js
@@ -0,0 +1,15 @@
+
+t = db.eval6;
+t.drop();
+
+t.save( { a : 1 } );
+
+db.eval(
+ function(){
+ o = db.eval6.findOne();
+ o.b = 2;
+ db.eval6.save( o );
+ }
+);
+
+assert.eq( 2 , t.findOne().b );
diff --git a/jstests/eval7.js b/jstests/eval7.js
new file mode 100644
index 0000000..45e06af
--- /dev/null
+++ b/jstests/eval7.js
@@ -0,0 +1,3 @@
+
+assert.eq( 6 , db.eval( "5 + 1" ) , "A" )
+assert.throws( function(z){ db.eval( "5 + function x; + 1" )} );
diff --git a/jstests/eval8.js b/jstests/eval8.js
new file mode 100644
index 0000000..072a890
--- /dev/null
+++ b/jstests/eval8.js
@@ -0,0 +1,19 @@
+
+t = db.eval8;
+t.drop();
+
+x = { a : 1 , b : 2 };
+t.save( x );
+x = t.findOne();
+
+assert( x.a && x.b , "A" );
+delete x.b;
+
+assert( x.a && ! x.b , "B" )
+x.b = 3;
+assert( x.a && x.b , "C" );
+assert.eq( 3 , x.b , "D" );
+
+t.save( x );
+y = t.findOne();
+assert.eq( tojson( x ) , tojson( y ) , "E" );
diff --git a/jstests/eval9.js b/jstests/eval9.js
new file mode 100644
index 0000000..cfa1f58
--- /dev/null
+++ b/jstests/eval9.js
@@ -0,0 +1,19 @@
+
+a = [ 1 , "asd" , null , [ 2 , 3 ] , new Date() , { x : 1 } ]
+
+for ( var i=0; i<a.length; i++ ){
+ var ret = db.eval( "function( a , i ){ return a[i]; }" , a , i );
+ assert.eq( typeof( a[i] ) , typeof( ret ) , "type test" );
+ assert.eq( a[i] , ret , "val test: " + typeof( a[i] ) );
+}
+
+db.eval9.drop();
+db.eval9.save( { a : 17 } );
+
+assert.eq( 1 , db.eval( "return db.eval9.find().toArray()" ).length , "A" );
+assert.eq( 17 , db.eval( "return db.eval9.find().toArray()" )[0].a , "B" );
+
+// just to make sure these things don't crash
+assert( db.eval( "return db.eval9.find()" ) );
+assert( db.eval( "return db.eval9" ) );
+assert( db.eval( "return db" ) );
diff --git a/jstests/evala.js b/jstests/evala.js
new file mode 100644
index 0000000..ed72582
--- /dev/null
+++ b/jstests/evala.js
@@ -0,0 +1,9 @@
+
+t = db.evala;
+t.drop()
+
+t.save( { x : 5 } )
+
+assert.eq( 5 , db.eval( "function(){ return db.evala.findOne().x; }" ) , "A" );
+assert.eq( 5 , db.eval( "/* abc */function(){ return db.evala.findOne().x; }" ) , "B" );
+
diff --git a/jstests/evalb.js b/jstests/evalb.js
new file mode 100644
index 0000000..3bc3db1
--- /dev/null
+++ b/jstests/evalb.js
@@ -0,0 +1,14 @@
+
+t = db.evalb;
+t.drop();
+
+t.save( { x : 3 } );
+
+assert.eq( 3, db.eval( function(){ return db.evalb.findOne().x; } ) , "A" );
+
+db.setProfilingLevel( 2 );
+
+assert.eq( 3, db.eval( function(){ return db.evalb.findOne().x; } ) , "B" );
+
+db.setProfilingLevel( 0 );
+
diff --git a/jstests/exists.js b/jstests/exists.js
new file mode 100644
index 0000000..28f69e8
--- /dev/null
+++ b/jstests/exists.js
@@ -0,0 +1,48 @@
+t = db.jstests_exists;
+t.drop();
+
+t.save( {} );
+t.save( {a:1} );
+t.save( {a:{b:1}} );
+t.save( {a:{b:{c:1}}} );
+t.save( {a:{b:{c:{d:null}}}} );
+
+function dotest( n ){
+
+ assert.eq( 5, t.count() , n );
+ assert.eq( 1, t.count( {a:null} ) , n );
+ assert.eq( 2, t.count( {'a.b':null} ) , n );
+ assert.eq( 3, t.count( {'a.b.c':null} ) , n );
+ assert.eq( 5, t.count( {'a.b.c.d':null} ) , n );
+
+ assert.eq( 5, t.count() , n );
+ assert.eq( 4, t.count( {a:{$ne:null}} ) , n );
+ assert.eq( 3, t.count( {'a.b':{$ne:null}} ) , n );
+ assert.eq( 2, t.count( {'a.b.c':{$ne:null}} ) , n );
+ assert.eq( 0, t.count( {'a.b.c.d':{$ne:null}} ) , n );
+
+ assert.eq( 4, t.count( {a: {$exists:true}} ) , n );
+ assert.eq( 3, t.count( {'a.b': {$exists:true}} ) , n );
+ assert.eq( 2, t.count( {'a.b.c': {$exists:true}} ) , n );
+ assert.eq( 1, t.count( {'a.b.c.d': {$exists:true}} ) , n );
+
+ assert.eq( 1, t.count( {a: {$exists:false}} ) , n );
+ assert.eq( 2, t.count( {'a.b': {$exists:false}} ) , n );
+ assert.eq( 3, t.count( {'a.b.c': {$exists:false}} ) , n );
+ assert.eq( 4, t.count( {'a.b.c.d': {$exists:false}} ) , n );
+}
+
+dotest( "before index" )
+t.ensureIndex( { "a" : 1 } )
+t.ensureIndex( { "a.b" : 1 } )
+t.ensureIndex( { "a.b.c" : 1 } )
+t.ensureIndex( { "a.b.c.d" : 1 } )
+dotest( "after index" )
+
+t.drop();
+
+t.save( {r:[{s:1}]} );
+assert( t.findOne( {'r.s':{$exists:true}} ) );
+assert( !t.findOne( {'r.s':{$exists:false}} ) );
+assert( !t.findOne( {'r.t':{$exists:true}} ) );
+assert( t.findOne( {'r.t':{$exists:false}} ) );
diff --git a/jstests/explain1.js b/jstests/explain1.js
new file mode 100644
index 0000000..6d5ac55
--- /dev/null
+++ b/jstests/explain1.js
@@ -0,0 +1,24 @@
+
+t = db.explain1;
+t.drop();
+
+for ( var i=0; i<100; i++ ){
+ t.save( { x : i } );
+}
+
+q = { x : { $gt : 50 } };
+
+assert.eq( 49 , t.find( q ).count() , "A" );
+assert.eq( 49 , t.find( q ).itcount() , "B" );
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "C" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 49 , t.find( q ).count() , "D" );
+assert.eq( 49 , t.find( q ).itcount() , "E" );
+assert.eq( 20 , t.find( q ).limit(20).itcount() , "F" );
+
+assert.eq( 49 , t.find(q).explain().n , "G" );
+assert.eq( 20 , t.find(q).limit(20).explain().n , "H" );
+assert.eq( 49 , t.find(q).limit(-20).explain().n , "I" );
+
diff --git a/jstests/extent.js b/jstests/extent.js
new file mode 100644
index 0000000..8fca699
--- /dev/null
+++ b/jstests/extent.js
@@ -0,0 +1,11 @@
+t = db.reclaimExtentsTest;
+t.drop();
+
+for ( var i=0; i<50; i++ ) { // enough iterations to break 32 bit.
+ db.createCollection('reclaimExtentsTest', { size : 100000000 });
+ t.insert({x:1});
+ assert( t.count() == 1 );
+ t.drop();
+}
+
+db.dropDatabase();
diff --git a/jstests/find1.js b/jstests/find1.js
new file mode 100644
index 0000000..93b8f60
--- /dev/null
+++ b/jstests/find1.js
@@ -0,0 +1,30 @@
+t = db.find1;
+t.drop();
+
+t.save( { a : 1 , b : "hi" } );
+t.save( { a : 2 , b : "hi" } );
+
+/* very basic test of $snapshot: just check that we get some result */
+// we are assuming here that $snapshot uses the _id index; if that ever changes, this test would need to change too
+assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 1" );
+var q = t.findOne();
+q.c = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz";
+t.save(q); // will move a:1 object to after a:2 in the file
+assert( t.find({$query:{},$snapshot:1})[0].a == 1 , "$snapshot simple test 2" );
+
+assert( t.findOne( { a : 1 } ).b != null , "A" );
+assert( t.findOne( { a : 1 } , { a : 1 } ).b == null , "B");
+
+assert( t.find( { a : 1 } )[0].b != null , "C" );
+assert( t.find( { a : 1 } , { a : 1 } )[0].b == null , "D" );
+
+id = t.findOne()._id;
+
+assert( t.findOne( id ) , "E" );
+assert( t.findOne( id ).a , "F" );
+assert( t.findOne( id ).b , "G" );
+
+assert( t.findOne( id , { a : 1 } ).a , "H" );
+assert( ! t.findOne( id , { a : 1 } ).b , "I" );
+
+assert(t.validate().valid,"not valid");
diff --git a/jstests/find2.js b/jstests/find2.js
new file mode 100644
index 0000000..f722034
--- /dev/null
+++ b/jstests/find2.js
@@ -0,0 +1,16 @@
+// Test object id sorting.
+
+function testObjectIdFind( db ) {
+ r = db.ed_db_find2_oif;
+ r.drop();
+
+ for( i = 0; i < 3; ++i )
+ r.save( {} );
+
+ f = r.find().sort( { _id: 1 } );
+ assert.eq( 3, f.count() );
+ assert( f[ 0 ]._id < f[ 1 ]._id );
+ assert( f[ 1 ]._id < f[ 2 ]._id );
+}
+
+testObjectIdFind( db );
diff --git a/jstests/find3.js b/jstests/find3.js
new file mode 100644
index 0000000..a5e4b7a
--- /dev/null
+++ b/jstests/find3.js
@@ -0,0 +1,10 @@
+t = db.find3;
+t.drop();
+
+for ( i=1; i<=50; i++)
+ t.save( { a : i } );
+
+assert.eq( 50 , t.find().toArray().length );
+assert.eq( 20 , t.find().limit(20).toArray().length );
+
+assert(t.validate().valid);
diff --git a/jstests/find4.js b/jstests/find4.js
new file mode 100644
index 0000000..17639d3
--- /dev/null
+++ b/jstests/find4.js
@@ -0,0 +1,26 @@
+
+t = db.find4;
+t.drop();
+
+t.save( { a : 1123 , b : 54332 } );
+
+o = t.find( {} , {} )[0];
+assert.eq( 1123 , o.a , "A" );
+assert.eq( 54332 , o.b , "B" );
+assert( o._id.str , "C" );
+
+o = t.find( {} , { a : 1 } )[0];
+assert.eq( 1123 , o.a , "D" );
+assert( o._id.str , "E" );
+assert( ! o.b , "F" );
+
+o = t.find( {} , { b : 1 } )[0];
+assert.eq( 54332 , o.b , "G" );
+assert( o._id.str , "H" );
+assert( ! o.a , "I" );
+
+t.drop();
+t.save( { a : 1 , b : 1 } );
+t.save( { a : 2 , b : 2 } );
+assert.eq( "1-1,2-2" , t.find().map( function(z){ return z.a + "-" + z.b } ).toString() );
+assert.eq( "1-undefined,2-undefined" , t.find( {} , { a : 1 }).map( function(z){ return z.a + "-" + z.b } ).toString() );
diff --git a/jstests/find5.js b/jstests/find5.js
new file mode 100644
index 0000000..b4a2c0f
--- /dev/null
+++ b/jstests/find5.js
@@ -0,0 +1,51 @@
+
+t = db.find5;
+t.drop();
+
+t.save({a: 1});
+t.save({b: 5});
+
+assert.eq( 2 , t.find({}, {b:1}).count(), "A");
+
+function getIds( f ){
+ return t.find( {} , f ).map( function(z){ return z._id; } );
+}
+
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( {} ) ) , "B1 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { a : 1 } ) ) , "B2 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { b : 1 } ) ) , "B3 " );
+assert.eq( Array.tojson( getIds( null ) ) , Array.tojson( getIds( { c : 1 } ) ) , "B4 " );
+
+x = t.find( {} , { a : 1 } )[0];
+assert.eq( 1 , x.a , "C1" );
+assert.isnull( x.b , "C2" );
+
+x = t.find( {} , { a : 1 } )[1];
+assert.isnull( x.a , "C3" );
+assert.isnull( x.b , "C4" );
+
+x = t.find( {} , { b : 1 } )[0];
+assert.isnull( x.a , "C5" );
+assert.isnull( x.b , "C6" );
+
+x = t.find( {} , { b : 1 } )[1];
+assert.isnull( x.a , "C7" );
+assert.eq( 5 , x.b , "C8" );
+
+t.drop();
+
+
+t.save( { a : 1 , b : { c : 2 , d : 3 , e : 4 } } );
+assert.eq( 2 , t.find( {} , { "b.c" : 1 } ).toArray()[0].b.c , "D" );
+
+o = t.find( {} , { "b.c" : 1 , "b.d" : 1 } ).toArray()[0];
+assert( o.b.c , "E 1" );
+assert( o.b.d , "E 2" );
+assert( !o.b.e , "E 3" );
+
+assert( ! t.find( {} , { "b.c" : 1 } ).toArray()[0].b.d , "F" );
+
+t.drop();
+t.save( { a : { b : { c : 1 } } } )
+assert.eq( 1 , t.find( {} , { "a.b.c" : 1 } )[0].a.b.c , "G" );
+
diff --git a/jstests/find6.js b/jstests/find6.js
new file mode 100644
index 0000000..baa5969
--- /dev/null
+++ b/jstests/find6.js
@@ -0,0 +1,11 @@
+
+t = db.find6;
+t.drop();
+
+t.save( { a : 1 } )
+t.save( { a : 1 , b : 1 } )
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { b : null } ).count() , "B" );
+assert.eq( 1 , t.find( "function() { return this.b == null; }" ).itcount() , "C" );
+assert.eq( 1 , t.find( "function() { return this.b == null; }" ).count() , "D" );
diff --git a/jstests/find_and_modify.js b/jstests/find_and_modify.js
new file mode 100644
index 0000000..5e10079
--- /dev/null
+++ b/jstests/find_and_modify.js
@@ -0,0 +1,38 @@
+t = db.find_and_modify;
+t.drop();
+
+// fill db
+for(var i=1; i<=10; i++) {
+ t.insert({priority:i, inprogress:false, value:0});
+}
+
+// returns old
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}});
+assert.eq(out.value, 0);
+assert.eq(out.inprogress, false);
+t.update({_id: out._id}, {$set: {inprogress: false}});
+
+// returns new
+out = t.findAndModify({update: {$set: {inprogress: true}, $inc: {value:1}}, 'new': true});
+assert.eq(out.value, 2);
+assert.eq(out.inprogress, true);
+t.update({_id: out._id}, {$set: {inprogress: false}});
+
+// update highest priority
+out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+assert.eq(out.priority, 10);
+// update next highest priority
+out = t.findAndModify({query: {inprogress:false}, sort:{priority:-1}, update: {$set: {inprogress: true}}});
+assert.eq(out.priority, 9);
+
+// remove lowest priority
+out = t.findAndModify({sort:{priority:1}, remove:true});
+assert.eq(out.priority, 1);
+
+// remove next lowest priority
+out = t.findAndModify({sort:{priority:1}, remove:1});
+assert.eq(out.priority, 2);
+
+// return empty obj if no matches (drivers may handle this differently)
+out = t.findAndModify({query:{no_such_field:1}, remove:1});
+assert.eq(out, {});
diff --git a/jstests/fm1.js b/jstests/fm1.js
new file mode 100644
index 0000000..bc60a3d
--- /dev/null
+++ b/jstests/fm1.js
@@ -0,0 +1,12 @@
+
+t = db.fm1;
+t.drop();
+
+t.insert({foo:{bar:1}})
+t.find({},{foo:1}).toArray();
+t.find({},{'foo.bar':1}).toArray();
+t.find({},{'baz':1}).toArray();
+t.find({},{'baz.qux':1}).toArray();
+t.find({},{'foo.qux':1}).toArray();
+
+
diff --git a/jstests/fm2.js b/jstests/fm2.js
new file mode 100644
index 0000000..00ccdf4
--- /dev/null
+++ b/jstests/fm2.js
@@ -0,0 +1,9 @@
+
+t = db.fm2
+t.drop();
+
+t.insert( { "one" : { "two" : {"three":"four"} } } );
+
+x = t.find({},{"one.two":1})[0]
+assert.eq( 1 , Object.keySet( x.one ).length , "ks l 1" );
+
diff --git a/jstests/fm3.js b/jstests/fm3.js
new file mode 100644
index 0000000..8ccde6d
--- /dev/null
+++ b/jstests/fm3.js
@@ -0,0 +1,37 @@
+t = db.fm3
+t.drop();
+
+t.insert( {a:[{c:{e:1, f:1}}, {d:2}, 'z'], b:1} );
+
+
+res = t.findOne({}, {a:1});
+assert.eq(res.a, [{c:{e:1, f:1}}, {d:2}, 'z'], "one a");
+assert.eq(res.b, undefined, "one b");
+
+res = t.findOne({}, {a:0});
+assert.eq(res.a, undefined, "two a");
+assert.eq(res.b, 1, "two b");
+
+res = t.findOne({}, {'a.d':1});
+assert.eq(res.a, [{}, {d:2}], "three a");
+assert.eq(res.b, undefined, "three b");
+
+res = t.findOne({}, {'a.d':0});
+assert.eq(res.a, [{c:{e:1, f:1}}, {}, 'z'], "four a");
+assert.eq(res.b, 1, "four b");
+
+res = t.findOne({}, {'a.c':1});
+assert.eq(res.a, [{c:{e:1, f:1}}, {}], "five a");
+assert.eq(res.b, undefined, "five b");
+
+res = t.findOne({}, {'a.c':0});
+assert.eq(res.a, [{}, {d:2}, 'z'], "six a");
+assert.eq(res.b, 1, "six b");
+
+res = t.findOne({}, {'a.c.e':1});
+assert.eq(res.a, [{c:{e:1}}, {}], "seven a");
+assert.eq(res.b, undefined, "seven b");
+
+res = t.findOne({}, {'a.c.e':0});
+assert.eq(res.a, [{c:{f:1}}, {d:2}, 'z'], "eight a");
+assert.eq(res.b, 1, "eight b");
diff --git a/jstests/fsync.js b/jstests/fsync.js
new file mode 100644
index 0000000..fccd623
--- /dev/null
+++ b/jstests/fsync.js
@@ -0,0 +1,22 @@
+// test the lock/unlock snapshotting feature a bit
+
+x=db.runCommand({fsync:1,lock:1});
+assert(!x.ok,"D");
+
+d=db.getSisterDB("admin");
+
+x=d.runCommand({fsync:1,lock:1});
+
+assert(x.ok,"C");
+
+y = d.currentOp();
+assert(y.fsyncLock,"B");
+
+z = d.$cmd.sys.unlock.findOne();
+
+// it will take some time to unlock, and unlock does not block and wait for that
+// doing a write will make us wait until db is writeable.
+db.jstests_fsync.insert({x:1});
+
+assert( d.currentOp().fsyncLock == null, "A" );
+
diff --git a/jstests/fsync2.js b/jstests/fsync2.js
new file mode 100644
index 0000000..2b5370b
--- /dev/null
+++ b/jstests/fsync2.js
@@ -0,0 +1,15 @@
+db.fsync2.drop();
+
+d = db.getSisterDB( "admin" );
+
+assert.commandWorked( d.runCommand( {fsync:1, lock: 1 } ) );
+
+// uncomment when fixed SERVER-519
+db.fsync2.save( {x:1} );
+
+m = new Mongo( db.getMongo().host );
+
+assert( m.getDB("admin").$cmd.sys.unlock.findOne().ok );
+
+// uncomment when fixed SERVER-519
+assert.eq( 1, db.fsync2.count() );
diff --git a/jstests/group1.js b/jstests/group1.js
new file mode 100644
index 0000000..c4147c0
--- /dev/null
+++ b/jstests/group1.js
@@ -0,0 +1,64 @@
+t = db.group1;
+t.drop();
+
+t.save( { n : 1 , a : 1 } );
+t.save( { n : 2 , a : 1 } );
+t.save( { n : 3 , a : 2 } );
+t.save( { n : 4 , a : 2 } );
+t.save( { n : 5 , a : 2 } );
+
+var p = { key : { a : true } ,
+ reduce : function(obj,prev) { prev.count++; },
+ initial: { count: 0 }
+ };
+
+res = t.group( p );
+
+assert( res.length == 2 , "A" );
+assert( res[0].a == 1 , "B" );
+assert( res[0].count == 2 , "C" );
+assert( res[1].a == 2 , "D" );
+assert( res[1].count == 3 , "E" );
+
+assert.eq( res , t.groupcmd( p ) , "ZZ" );
+
+ret = t.groupcmd( { key : {} , reduce : p.reduce , initial : p.initial } );
+assert.eq( 1 , ret.length , "ZZ 2" );
+assert.eq( 5 , ret[0].count , "ZZ 3" );
+
+ret = t.groupcmd( { key : {} , reduce : function(obj,prev){ prev.sum += obj.n } , initial : { sum : 0 } } );
+assert.eq( 1 , ret.length , "ZZ 4" );
+assert.eq( 15 , ret[0].sum , "ZZ 5" );
+
+t.drop();
+
+t.save( { "a" : 2 } );
+t.save( { "b" : 5 } );
+t.save( { "a" : 1 } );
+t.save( { "a" : 2 } );
+
+c = {key: {a:1}, cond: {}, initial: {"count": 0}, reduce: function(obj, prev) { prev.count++; } };
+
+assert.eq( t.group( c ) , t.groupcmd( c ) , "ZZZZ" );
+
+
+t.drop();
+
+t.save( { name : { first : "a" , last : "A" } } );
+t.save( { name : { first : "b" , last : "B" } } );
+t.save( { name : { first : "a" , last : "A" } } );
+
+
+p = { key : { 'name.first' : true } ,
+ reduce : function(obj,prev) { prev.count++; },
+ initial: { count: 0 }
+ };
+
+res = t.group( p );
+assert.eq( 2 , res.length , "Z1" );
+assert.eq( "a" , res[0]['name.first'] , "Z2" )
+assert.eq( "b" , res[1]['name.first'] , "Z3" )
+assert.eq( 2 , res[0].count , "Z4" )
+assert.eq( 1 , res[1].count , "Z5" )
+
+
diff --git a/jstests/group2.js b/jstests/group2.js
new file mode 100644
index 0000000..f687e88
--- /dev/null
+++ b/jstests/group2.js
@@ -0,0 +1,38 @@
+t = db.group2;
+t.drop();
+
+t.save({a: 2});
+t.save({b: 5});
+t.save({a: 1});
+
+cmd = { key: {a: 1},
+ initial: {count: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ }
+ };
+
+result = t.group(cmd);
+
+assert.eq(3, result.length, "A");
+assert.eq(null, result[1].a, "C");
+assert("a" in result[1], "D");
+assert.eq(1, result[2].a, "E");
+
+assert.eq(1, result[0].count, "F");
+assert.eq(1, result[1].count, "G");
+assert.eq(1, result[2].count, "H");
+
+
+delete cmd.key
+cmd["$keyf"] = function(x){ return { a : x.a }; };
+result2 = t.group( cmd );
+
+assert.eq( result , result2 );
+
+
+delete cmd.$keyf
+cmd["keyf"] = function(x){ return { a : x.a }; };
+result3 = t.group( cmd );
+
+assert.eq( result , result3 );
diff --git a/jstests/group3.js b/jstests/group3.js
new file mode 100644
index 0000000..afa32f1
--- /dev/null
+++ b/jstests/group3.js
@@ -0,0 +1,43 @@
+t = db.group2;
+t.drop();
+
+t.save({a: 1});
+t.save({a: 2});
+t.save({a: 3});
+t.save({a: 4});
+
+
+cmd = { initial: {count: 0, sum: 0},
+ reduce: function(obj, prev) {
+ prev.count++;
+ prev.sum += obj.a;
+ },
+ finalize: function(obj) {
+ if (obj.count){
+ obj.avg = obj.sum / obj.count;
+ }else{
+ obj.avg = 0;
+ }
+ },
+ };
+
+result1 = t.group(cmd);
+
+assert.eq(1, result1.length, "test1");
+assert.eq(10, result1[0].sum, "test1");
+assert.eq(4, result1[0].count, "test1");
+assert.eq(2.5, result1[0].avg, "test1");
+
+
+cmd['finalize'] = function(obj) {
+ if (obj.count){
+ return obj.sum / obj.count;
+ }else{
+ return 0;
+ }
+};
+
+result2 = t.group(cmd);
+
+assert.eq(1, result2.length, "test2");
+assert.eq(2.5, result2[0], "test2");
diff --git a/jstests/group4.js b/jstests/group4.js
new file mode 100644
index 0000000..e75c0d1
--- /dev/null
+++ b/jstests/group4.js
@@ -0,0 +1,45 @@
+
+t = db.group4
+t.drop();
+
+function test( c , n ){
+ var x = {};
+ c.forEach(
+ function(z){
+ assert.eq( z.count , z.values.length , n + "\t" + tojson( z ) );
+ }
+ );
+}
+
+t.insert({name:'bob',foo:1})
+t.insert({name:'bob',foo:2})
+t.insert({name:'alice',foo:1})
+t.insert({name:'alice',foo:3})
+t.insert({name:'fred',foo:3})
+t.insert({name:'fred',foo:4})
+
+x = t.group(
+ {
+ key: {foo:1},
+ initial: {count:0,values:[]},
+ reduce: function (obj, prev){
+ prev.count++
+ prev.values.push(obj.name)
+ }
+ }
+);
+test( x , "A" );
+
+x = t.group(
+ {
+ key: {foo:1},
+ initial: {count:0},
+ reduce: function (obj, prev){
+ if (!prev.values) {prev.values = [];}
+ prev.count++;
+ prev.values.push(obj.name);
+ }
+ }
+);
+test( x , "B" );
+
diff --git a/jstests/group5.js b/jstests/group5.js
new file mode 100644
index 0000000..3534fe5
--- /dev/null
+++ b/jstests/group5.js
@@ -0,0 +1,38 @@
+
+t = db.group5;
+t.drop();
+
+// group number g contains 5+g users (groups 0 through 9)
+for ( var group=0; group<10; group++ ){
+ for ( var i=0; i<5+group; i++ ){
+ t.save( { group : "group" + group , user : i } )
+ }
+}
+
+function c( group ){
+ return t.group(
+ {
+ key : { group : 1 } ,
+ q : { group : "group" + group } ,
+ initial : { users : {} },
+ reduce : function(obj,prev){
+ prev.users[obj.user] = true; // add this user to the hash
+ },
+ finalize : function(x){
+ var count = 0;
+ for (var key in x.users){
+ count++;
+ }
+
+ //replace user obj with count
+                     //could instead add a new field and keep users
+ x.users = count;
+ return x;
+ }
+ })[0]; // returns array
+}
+
+assert.eq( "group0" , c(0).group , "g0" );
+assert.eq( 5 , c(0).users , "g0 a" );
+assert.eq( "group5" , c(5).group , "g5" );
+assert.eq( 10 , c(5).users , "g5 a" );
diff --git a/jstests/hint1.js b/jstests/hint1.js
new file mode 100644
index 0000000..416eb4a
--- /dev/null
+++ b/jstests/hint1.js
@@ -0,0 +1,10 @@
+
+p = db.jstests_hint1;
+p.drop();
+
+p.save( { ts: new Date( 1 ), cls: "entry", verticals: "alleyinsider", live: true } );
+p.ensureIndex( { ts: 1 } );
+
+e = p.find( { live: true, ts: { $lt: new Date( 1234119308272 ) }, cls: "entry", verticals: " alleyinsider" } ).sort( { ts: -1 } ).hint( { ts: 1 } ).explain();
+assert.eq( e.startKey.ts.getTime(), new Date( 1234119308272 ).getTime() , "A" );
+assert.eq( 0 , e.endKey.ts.getTime() , "B" );
diff --git a/jstests/id1.js b/jstests/id1.js
new file mode 100644
index 0000000..9236340
--- /dev/null
+++ b/jstests/id1.js
@@ -0,0 +1,16 @@
+
+t = db.id1
+t.drop();
+
+t.save( { _id : { a : 1 , b : 2 } , x : "a" } );
+t.save( { _id : { a : 1 , b : 2 } , x : "b" } );
+t.save( { _id : { a : 3 , b : 2 } , x : "c" } );
+t.save( { _id : { a : 4 , b : 2 } , x : "d" } );
+t.save( { _id : { a : 4 , b : 2 } , x : "e" } );
+t.save( { _id : { a : 2 , b : 2 } , x : "f" } );
+
+assert.eq( 4 , t.find().count() , "A" );
+assert.eq( "b" , t.findOne( { _id : { a : 1 , b : 2 } } ).x );
+assert.eq( "c" , t.findOne( { _id : { a : 3 , b : 2 } } ).x );
+assert.eq( "e" , t.findOne( { _id : { a : 4 , b : 2 } } ).x );
+assert.eq( "f" , t.findOne( { _id : { a : 2 , b : 2 } } ).x );
diff --git a/jstests/in.js b/jstests/in.js
new file mode 100644
index 0000000..5442bbe
--- /dev/null
+++ b/jstests/in.js
@@ -0,0 +1,19 @@
+
+t = db.in1;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+
+assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount() , "A" );
+assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "B" );
+assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "C" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : { $in : [ 1 ] } } ).itcount(), "D" );
+assert.eq( 1 , t.find( { a : { $in : [ 2 ] } } ).itcount() , "E" );
+assert.eq( 2 , t.find( { a : { $in : [ 1 , 2 ] } } ).itcount() , "F" );
+
+assert.eq( 0 , t.find( { a : { $in : [] } } ).itcount() , "G" );
+
diff --git a/jstests/in2.js b/jstests/in2.js
new file mode 100644
index 0000000..66b90da
--- /dev/null
+++ b/jstests/in2.js
@@ -0,0 +1,33 @@
+
+t = db.in2;
+
+function go( name , index ){
+
+ t.drop();
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 1 , b : 2 } );
+ t.save( { a : 1 , b : 3 } );
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 2 , b : 2 } );
+ t.save( { a : 3 , b : 3 } );
+
+ t.save( { a : 1 , b : 1 } );
+ t.save( { a : 2 , b : 1 } );
+ t.save( { a : 3 , b : 1 } );
+
+ if ( index )
+ t.ensureIndex( index );
+
+ assert.eq( 7 , t.find( { a : { $in : [ 1 , 2 ] } } ).count() , name + " A" );
+
+ assert.eq( 6 , t.find( { a : { $in : [ 1 , 2 ] } , b : { $in : [ 1 , 2 ] } } ).count() , name + " B" );
+}
+
+go( "no index" );
+go( "index on a" , { a : 1 } );
+go( "index on b" , { b : 1 } );
+go( "index on a&b" , { a : 1 , b : 1 } );
+
+
diff --git a/jstests/inc1.js b/jstests/inc1.js
new file mode 100644
index 0000000..027f307
--- /dev/null
+++ b/jstests/inc1.js
@@ -0,0 +1,32 @@
+
+t = db.inc1;
+t.drop();
+
+function test( num , name ){
+ assert.eq( 1 , t.count() , name + " count" );
+ assert.eq( num , t.findOne().x , name + " value" );
+}
+
+t.save( { _id : 1 , x : 1 } );
+test( 1 , "A" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 2 , "B" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 3 , "C" );
+
+t.update( { _id : 2 } , { $inc : { x : 1 } } );
+test( 3 , "D" );
+
+t.update( { _id : 1 } , { $inc : { x : 2 } } );
+test( 5 , "E" );
+
+t.update( { _id : 1 } , { $inc : { x : -1 } } );
+test( 4 , "F" );
+
+t.ensureIndex( { x : 1 } );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+test( 5 , "G" );
+
diff --git a/jstests/inc2.js b/jstests/inc2.js
new file mode 100644
index 0000000..8442f14
--- /dev/null
+++ b/jstests/inc2.js
@@ -0,0 +1,22 @@
+
+t = db.inc1
+t.drop();
+
+t.save( { _id : 1 , x : 1 } );
+t.save( { _id : 2 , x : 2 } );
+t.save( { _id : 3 , x : 3 } );
+
+function order(){
+ return t.find().sort( { x : 1 } ).map( function(z){ return z._id; } );
+}
+
+assert.eq( "1,2,3" , order() , "A" );
+
+t.update( { _id : 1 } , { $inc : { x : 4 } } );
+assert.eq( "2,3,1" , order() , "B" );
+
+t.ensureIndex( { x : 1 } );
+assert.eq( "2,3,1" , order() , "C" );
+
+t.update( { _id : 3 } , { $inc : { x : 4 } } );
+assert.eq( "2,1,3" , order() , "D" );
diff --git a/jstests/inc3.js b/jstests/inc3.js
new file mode 100644
index 0000000..baeeb19
--- /dev/null
+++ b/jstests/inc3.js
@@ -0,0 +1,16 @@
+
+t = db.inc3;
+
+t.drop();
+t.save( { _id : 1 , z : 1 , a : 1 } );
+t.update( {} , { $inc : { z : 1 , a : 1 } } );
+t.update( {} , { $inc : { a : 1 , z : 1 } } );
+assert.eq( { _id : 1 , z : 3 , a : 3 } , t.findOne() , "A" )
+
+
+t.drop();
+t.save( { _id : 1 , a : 1 , z : 1 } );
+t.update( {} , { $inc : { z : 1 , a : 1 } } );
+t.update( {} , { $inc : { a : 1 , z : 1 } } );
+assert.eq( { _id : 1 , a : 3 , z : 3 } , t.findOne() , "B" )
+
diff --git a/jstests/index1.js b/jstests/index1.js
new file mode 100644
index 0000000..620f8bb
--- /dev/null
+++ b/jstests/index1.js
@@ -0,0 +1,33 @@
+
+t = db.embeddedIndexTest;
+
+t.remove( {} );
+
+o = { name : "foo" , z : { a : 17 , b : 4} };
+t.save( o );
+
+assert( t.findOne().z.a == 17 );
+assert( t.findOne( { z : { a : 17 } } ) == null);
+
+t.ensureIndex( { "z.a" : 1 } );
+
+assert( t.findOne().z.a == 17 );
+assert( t.findOne( { z : { a : 17 } } ) == null);
+
+o = { name : "bar" , z : { a : 18 } };
+t.save( o );
+
+assert( t.find().length() == 2 );
+assert( t.find().sort( { "z.a" : 1 } ).length() == 2 );
+assert( t.find().sort( { "z.a" : -1 } ).length() == 2 );
+// We are planning to phase out this syntax.
+assert( t.find().sort( { z : { a : 1 } } ).length() == 2 );
+assert( t.find().sort( { z : { a: -1 } } ).length() == 2 );
+
+//
+// TODO - these don't work yet as indexing on x.y doesn't work yet
+//
+//assert( t.find().sort( { z : { a : 1 } } )[0].name == "foo" );
+//assert( t.find().sort( { z : { a : -1 } } )[1].name == "bar" );
+
+assert(t.validate().valid);
diff --git a/jstests/index10.js b/jstests/index10.js
new file mode 100644
index 0000000..105fcc1
--- /dev/null
+++ b/jstests/index10.js
@@ -0,0 +1,24 @@
+// unique index, drop dups
+
+t = db.jstests_index10;
+t.drop();
+
+t.save( {i:1} );
+t.save( {i:2} );
+t.save( {i:1} );
+t.save( {i:3} );
+t.save( {i:1} );
+
+t.ensureIndex( {i:1} );
+assert.eq( 5, t.count() );
+t.dropIndexes();
+t.ensureIndex( {i:1}, true );
+assert.eq( 1, db.system.indexes.count( {ns:"test.jstests_index10" } ) ); // only id index
+// t.dropIndexes();
+
+t.ensureIndex( {i:1}, [ true, true ] );
+assert.eq( 3, t.count() );
+assert.eq( 1, t.count( {i:1} ) );
+
+t.ensureIndex( {j:1}, [ true, true ] );
+assert.eq( 1, t.count() );
diff --git a/jstests/index2.js b/jstests/index2.js
new file mode 100644
index 0000000..b54abca
--- /dev/null
+++ b/jstests/index2.js
@@ -0,0 +1,40 @@
+/* test indexing where the key is an embedded object.
+ */
+
+t = db.embeddedIndexTest2;
+
+t.drop();
+assert( t.findOne() == null );
+
+o = { name : "foo" , z : { a : 17 } };
+p = { name : "foo" , z : { a : 17 } };
+q = { name : "barrr" , z : { a : 18 } };
+r = { name : "barrr" , z : { k : "zzz", L:[1,2] } };
+
+t.save( o );
+
+assert( t.findOne().z.a == 17 );
+
+t.save( p );
+t.save( q );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+t.save( r );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+t.ensureIndex( { z : 1 } );
+
+assert( t.findOne({z:{a:17}}).z.a==17 );
+assert( t.find({z:{a:17}}).length() == 2 );
+assert( t.find({z:{a:18}}).length() == 1 );
+
+assert( t.find().sort( { z : 1 } ).length() == 4 );
+assert( t.find().sort( { z : -1 } ).length() == 4 );
+
+assert(t.validate().valid);
diff --git a/jstests/index3.js b/jstests/index3.js
new file mode 100644
index 0000000..8013946
--- /dev/null
+++ b/jstests/index3.js
@@ -0,0 +1,16 @@
+
+
+t = db.index3;
+t.drop();
+
+assert( t.getIndexes().length == 0 );
+
+t.ensureIndex( { name : 1 } );
+
+t.save( { name : "a" } );
+
+t.ensureIndex( { name : 1 } );
+
+assert( t.getIndexes().length == 2 );
+
+assert(t.validate().valid);
diff --git a/jstests/index4.js b/jstests/index4.js
new file mode 100644
index 0000000..9dd731c
--- /dev/null
+++ b/jstests/index4.js
@@ -0,0 +1,33 @@
+// index4.js
+
+
+t = db.index4;
+t.drop();
+
+t.save( { name : "alleyinsider" ,
+ instances : [
+ { pool : "prod1" } ,
+ { pool : "dev1" }
+ ]
+ } );
+
+t.save( { name : "clusterstock" ,
+ instances : [
+ { pool : "dev1" }
+ ]
+ } );
+
+
+// this should fail, not allowed -- we confirm that.
+t.ensureIndex( { instances : { pool : 1 } } );
+assert.eq( 0, db.system.indexes.find( {ns:"test.index4",name:{$ne:"_id_"}} ).count(), "no indexes should be here yet");
+
+t.ensureIndex( { "instances.pool" : 1 } );
+
+sleep( 10 );
+
+a = t.find( { instances : { pool : "prod1" } } );
+assert( a.length() == 1, "len1" );
+assert( a[0].name == "alleyinsider", "alley" );
+
+assert(t.validate().valid, "valid" );
diff --git a/jstests/index5.js b/jstests/index5.js
new file mode 100644
index 0000000..841ac12
--- /dev/null
+++ b/jstests/index5.js
@@ -0,0 +1,24 @@
+// index5.js - test reverse direction index
+
+function validate() {
+ assert.eq( 2, t.find().count() );
+ f = t.find().sort( { a: 1 } );
+ assert.eq( 2, t.count() );
+ assert.eq( 1, f[ 0 ].a );
+ assert.eq( 2, f[ 1 ].a );
+ r = t.find().sort( { a: -1 } );
+ assert.eq( 2, r.count() );
+ assert.eq( 2, r[ 0 ].a );
+ assert.eq( 1, r[ 1 ].a );
+}
+
+t = db.index5;
+t.drop();
+
+t.save( { a: 1 } );
+t.save( { a: 2 } );
+
+validate();
+
+t.ensureIndex( { a: -1 } );
+validate();
diff --git a/jstests/index6.js b/jstests/index6.js
new file mode 100644
index 0000000..7514aca
--- /dev/null
+++ b/jstests/index6.js
@@ -0,0 +1,8 @@
+// index6.js Test indexes on array subelements.
+
+r = db.ed.db.index5;
+r.drop();
+
+r.save( { comments : [ { name : "eliot", foo : 1 } ] } );
+r.ensureIndex( { "comments.name": 1 } );
+assert( r.findOne( { "comments.name": "eliot" } ) );
diff --git a/jstests/index7.js b/jstests/index7.js
new file mode 100644
index 0000000..cf5050b
--- /dev/null
+++ b/jstests/index7.js
@@ -0,0 +1,67 @@
+// index7.js Test that we use an index when and only when we expect to.
+
+function index( q ) {
+ assert( q.explain().cursor.match( /^BtreeCursor/ ) , "index assert" );
+}
+
+function noIndex( q ) {
+ assert( q.explain().cursor.match( /^BasicCursor/ ) , "noIndex assert" );
+}
+
+function start( k, q ) {
+ var s = q.explain().startKey;
+ assert.eq( k.a, s.a );
+ assert.eq( k.b, s.b );
+}
+
+function end( k, q ) {
+ var e = q.explain().endKey;
+ assert.eq( k.a, e.a );
+ assert.eq( k.b, e.b );
+}
+
+function both( k, q ) {
+ start( k, q );
+ end( k, q );
+}
+
+f = db.ed_db_index7;
+f.drop();
+
+f.save( { a : 5 } )
+f.ensureIndex( { a: 1 } );
+index( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { a: 1 } ) );
+noIndex( f.find( { a: 5 } ).sort( { a: 1 } ).hint( { $natural: 1 } ) );
+f.drop();
+
+f.ensureIndex( { a: 1, b: 1 } );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.a );
+assert.eq( 1, f.find( { a: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.a );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.a );
+assert.eq( 1, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.a );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().startKey.c );
+assert.eq( null, f.find( { a: 1, c: 1 } ).hint( { a: 1, b: 1 } ).explain().endKey.c );
+
+start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+start( { a: "a", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
+end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "b", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "a", b: 1 }, f.find( { a: /^a/, b: 1 } ).sort( { a: -1, b: -1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "b", b: 1 }, f.find( { b: 1, a: /^a/ } ).hint( { a: 1, b: 1 } ) );
+
+start( { a: "z", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "{", b: 1 }, f.find( { a: /^z/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+start( { a: "az", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+end( { a: "a{", b: 1 }, f.find( { a: /^az/, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { a: 1, b: 1 } ) );
+
+both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).hint( { a: 1, b: 1 } ) );
+both( { a: 1, b: 2 }, f.find( { a: { $gte: 1, $lte: 1 }, b: 2 } ).sort( { a: 1, b: 1 } ).hint( { a: 1, b: 1 } ) );
+
+f.drop();
+f.ensureIndex( { b: 1, a: 1 } );
+both( { a: 1, b: 3 }, f.find( { a: 1, b: 3 } ).hint( { b: 1, a: 1 } ) );
diff --git a/jstests/index8.js b/jstests/index8.js
new file mode 100644
index 0000000..09a0645
--- /dev/null
+++ b/jstests/index8.js
@@ -0,0 +1,59 @@
+// Test key uniqueness
+
+t = db.jstests_index8;
+t.drop();
+
+t.ensureIndex( { a: 1 } );
+t.ensureIndex( { b: 1 }, true );
+t.ensureIndex( { c: 1 }, [ false, "cIndex" ] );
+
+checkIndexes = function( num ) {
+// printjson( db.system.indexes.find( { ns: "test.jstests_index8" } ).toArray() );
+ indexes = db.system.indexes.find( { ns: "test.jstests_index8" } ).sort( { key: 1 } );
+ assert( !indexes[ 0 ].unique , "A" + num );
+ assert( indexes[ 1 ].unique , "B" + num );
+ assert( !indexes[ 2 ].unique , "C" + num );
+ assert.eq( "cIndex", indexes[ 2 ].name , "D" + num );
+}
+
+checkIndexes( 1 );
+
+t.reIndex();
+checkIndexes( 2 );
+
+t.save( { a: 2, b: 1 } );
+t.save( { a: 2 } );
+assert.eq( 2, t.find().count() );
+
+t.save( { b: 4 } );
+t.save( { b: 4 } );
+assert.eq( 3, t.find().count() );
+assert.eq( 3, t.find().hint( {c:1} ).toArray().length );
+assert.eq( 3, t.find().hint( {b:1} ).toArray().length );
+assert.eq( 3, t.find().hint( {a:1} ).toArray().length );
+
+t.drop();
+t.ensureIndex( { a: 1, b: -1 }, true );
+t.save( { a: 2, b: 3 } );
+t.save( { a: 2, b: 3 } );
+t.save( { a: 2, b: 4 } );
+t.save( { a: 1, b: 3 } );
+assert.eq( 3, t.find().count() );
+
+t.drop();
+t.ensureIndex( { a: 1 }, true );
+t.save( { a: [ 2, 3 ] } );
+t.save( { a: 2 } );
+assert.eq( 1, t.find().count() );
+
+t.drop();
+t.ensureIndex( { a: 1 }, true );
+t.save( { a: 2 } );
+t.save( { a: [ 1, 2, 3 ] } );
+t.save( { a: [ 3, 2, 1 ] } );
+assert.eq( 1, t.find().sort( { a: 1 } ).hint( { a: 1 } ).toArray().length );
+assert.eq( 1, t.find().sort( { a: -1 } ).hint( { a: 1 } ).toArray().length );
+
+assert.eq( t._indexSpec( { x : 1 } , true ) , t._indexSpec( { x : 1 } , [ true ] ) , "spec 1" );
+assert.eq( t._indexSpec( { x : 1 } , "eliot" ) , t._indexSpec( { x : 1 } , [ "eliot" ] ) , "spec 2" );
+
diff --git a/jstests/index9.js b/jstests/index9.js
new file mode 100644
index 0000000..c832783
--- /dev/null
+++ b/jstests/index9.js
@@ -0,0 +1,17 @@
+t = db.jstests_index9;
+
+t.drop();
+db.createCollection( "jstests_index9", {autoIndexId:false} );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+
+t.drop();
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
+
+t.drop();
+t.save( {a:1} );
+t.createIndex( { _id:1 } );
+assert.eq( 1, db.system.indexes.count( {ns: "test.jstests_index9"} ) );
diff --git a/jstests/index_check1.js b/jstests/index_check1.js
new file mode 100644
index 0000000..7113dff
--- /dev/null
+++ b/jstests/index_check1.js
@@ -0,0 +1,31 @@
+
+db.somecollection.drop();
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 1);
+
+db.somecollection.save({a:1});
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 2);
+
+db.somecollection.ensureIndex({a:1});
+
+var z = db.system.namespaces.find({name:/somecollection/}).length();
+assert( z >= 1 , 3 );
+
+if( z == 1 )
+ print("warning: z==1, should only happen with alternate storage engines");
+
+db.somecollection.drop();
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 0, 4);
+
+db.somecollection.save({a:1});
+
+assert(db.system.namespaces.find({name:/somecollection/}).length() == 2, 5);
+
+db.somecollection.ensureIndex({a:1});
+
+var x = db.system.namespaces.find({name:/somecollection/}).length();
+assert( x == 2 || x == z, 6);
+
+assert(db.somecollection.validate().valid, 7);
diff --git a/jstests/index_check2.js b/jstests/index_check2.js
new file mode 100644
index 0000000..56796ac
--- /dev/null
+++ b/jstests/index_check2.js
@@ -0,0 +1,41 @@
+
+t = db.index_check2;
+t.drop();
+
+for ( var i=0; i<1000; i++ ){
+ var a = [];
+ for ( var j=1; j<5; j++ ){
+ a.push( "tag" + ( i * j % 50 ));
+ }
+ t.save( { num : i , tags : a } );
+}
+
+q1 = { tags : "tag6" };
+q2 = { tags : "tag12" };
+q3 = { tags : { $all : [ "tag6" , "tag12" ] } }
+
+assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
+assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
+assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+
+t.ensureIndex( { tags : 1 } );
+
+assert.eq( 120 , t.find( q1 ).itcount() , "q1 a");
+assert.eq( 120 , t.find( q2 ).itcount() , "q2 a" );
+assert.eq( 60 , t.find( q3 ).itcount() , "q3 a");
+
+assert.eq( "BtreeCursor tags_1" , t.find( q1 ).explain().cursor , "e1" );
+assert.eq( "BtreeCursor tags_1" , t.find( q2 ).explain().cursor , "e2" );
+assert.eq( "BtreeCursor tags_1" , t.find( q3 ).explain().cursor , "e3" );
+
+scanned1 = t.find(q1).explain().nscanned;
+scanned2 = t.find(q2).explain().nscanned;
+scanned3 = t.find(q3).explain().nscanned;
+
+//print( "scanned1: " + scanned1 + " scanned2: " + scanned2 + " scanned3: " + scanned3 );
+
+// $all should just iterate either of the words
+assert( scanned3 <= Math.max( scanned1 , scanned2 ) , "$all makes query optimizer not work well" );
+
+exp3 = t.find( q3 ).explain();
+assert.eq( exp3.startKey, exp3.endKey, "$all range not a single key" );
diff --git a/jstests/index_check3.js b/jstests/index_check3.js
new file mode 100644
index 0000000..55515af
--- /dev/null
+++ b/jstests/index_check3.js
@@ -0,0 +1,63 @@
+
+
+t = db.index_check3;
+t.drop();
+
+
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+t.save( { a : "z" } );
+
+assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "A" );
+assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "B" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 1 , t.find( { a : { $lt : 2 } } ).itcount() , "C" );
+assert.eq( 1 , t.find( { a : { $gt : 2 } } ).itcount() , "D" );
+
+t.drop();
+
+for ( var i=0; i<100; i++ ){
+ var o = { i : i };
+ if ( i % 2 == 0 )
+ o.foo = i;
+ t.save( o );
+}
+
+t.ensureIndex( { foo : 1 } );
+
+//printjson( t.find( { foo : { $lt : 50 } } ).explain() );
+assert.gt( 30 , t.find( { foo : { $lt : 50 } } ).explain().nscanned , "lt" );
+//printjson( t.find( { foo : { $gt : 50 } } ).explain() );
+assert.gt( 30 , t.find( { foo : { $gt : 50 } } ).explain().nscanned , "gt" );
+
+
+t.drop();
+t.save( {i:'a'} );
+for( var i=0; i < 10; ++i ) {
+ t.save( {} );
+}
+
+t.ensureIndex( { i : 1 } );
+
+//printjson( t.find( { i : { $lte : 'a' } } ).explain() );
+assert.gt( 3 , t.find( { i : { $lte : 'a' } } ).explain().nscanned , "lte" );
+//printjson( t.find( { i : { $gte : 'a' } } ).explain() );
+// bug SERVER-99
+assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).count() , "gte a" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).count() , "gte c" );
+assert.eq( 1 , t.find( { i : { $gte : 'a' } } ).sort( { i : 1 } ).itcount() , "gte d" );
+
+t.save( { i : "b" } );
+
+assert.gt( 3 , t.find( { i : { $gte : 'a' } } ).explain().nscanned , "gte" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).count() , "gte a2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' } } ).itcount() , "gte b2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).itcount() , "gte c2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : -1 } ).itcount() , "gte d2" );
+assert.eq( 2 , t.find( { i : { $gte : 'a' , $lt : MaxKey } } ).sort( { i : 1 } ).itcount() , "gte e2" );
diff --git a/jstests/index_check5.js b/jstests/index_check5.js
new file mode 100644
index 0000000..90ac301
--- /dev/null
+++ b/jstests/index_check5.js
@@ -0,0 +1,17 @@
+
+t = db.index_check5
+t.drop();
+
+t.save( { "name" : "Player1" ,
+ "scores" : [{"level" : 1 , "score" : 100},
+ {"level" : 2 , "score" : 50}],
+ "total" : 150 } );
+t.save( { "name" : "Player2" ,
+ "total" : 90 ,
+ "scores" : [ {"level" : 1 , "score" : 90},
+ {"level" : 2 , "score" : 0} ]
+ } );
+
+assert.eq( 2 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "A" );
+t.ensureIndex( { "scores.level" : 1 , "scores.score" : 1 } );
+assert.eq( 1 , t.find( { "scores.level": 2, "scores.score": {$gt:30} } ).itcount() , "B" );
diff --git a/jstests/index_check6.js b/jstests/index_check6.js
new file mode 100644
index 0000000..71e6420
--- /dev/null
+++ b/jstests/index_check6.js
@@ -0,0 +1,17 @@
+
+t = db.index_check6;
+t.drop();
+
+t.ensureIndex( { age : 1 , rating : 1 } );
+
+for ( var age=10; age<50; age++ ){
+ for ( var rating=0; rating<10; rating++ ){
+ t.save( { age : age , rating : rating } );
+ }
+}
+
+assert.eq( 10 , t.find( { age : 30 } ).explain().nscanned , "A" );
+assert.eq( 20 , t.find( { age : { $gte : 29 , $lte : 30 } } ).explain().nscanned , "B" );
+
+//assert.eq( 2 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : 5 } ).explain().nscanned , "C" ); // SERVER-371
+//assert.eq( 4 , t.find( { age : { $gte : 29 , $lte : 30 } , rating : { $gte : 4 , $lte : 5 } } ).explain().nscanned , "D" ); // SERVER-371
diff --git a/jstests/index_check7.js b/jstests/index_check7.js
new file mode 100644
index 0000000..68102d6
--- /dev/null
+++ b/jstests/index_check7.js
@@ -0,0 +1,15 @@
+
+t = db.index_check7
+t.drop()
+
+for ( var i=0; i<100; i++ )
+ t.save( { x : i } )
+
+t.ensureIndex( { x : 1 } )
+assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" )
+
+t.ensureIndex( { x : -1 } )
+assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "B" )
+
+assert.eq( 41 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" );
+
diff --git a/jstests/index_many.js b/jstests/index_many.js
new file mode 100644
index 0000000..9960afa
--- /dev/null
+++ b/jstests/index_many.js
@@ -0,0 +1,34 @@
+t = db.many;
+
+t.drop();
+db.many2.drop();
+
+t.save({x:9});
+t.save({x:19});
+
+x = 2;
+while( x < 60 ) {
+ patt={};
+ patt[x] = 1;
+ if( x == 20 )
+ patt = { x : 1 };
+ t.ensureIndex(patt);
+ x++;
+}
+
+// print( tojson(db.getLastErrorObj()) );
+assert( db.getLastError(), "should have an error 'too many indexes'" );
+
+// 40 is the limit currently
+
+// print( t.getIndexes().length == 40, "40" );
+
+assert( t.getIndexes().length == 40, "40" );
+
+assert( t.find({x:9}).length() == 1, "b" ) ;
+
+t.renameCollection( "many2" );
+
+assert( t.find({x:9}).length() == 0, "c" ) ;
+
+assert( db.many2.find({x:9}).length() == 1, "d" ) ;
diff --git a/jstests/indexa.js b/jstests/indexa.js
new file mode 100644
index 0000000..7602183
--- /dev/null
+++ b/jstests/indexa.js
@@ -0,0 +1,22 @@
+// unique index constraint test for updates
+// case where object doesn't grow tested here
+
+t = db.indexa;
+t.drop();
+
+t.ensureIndex( { x:1 }, true );
+
+t.insert( { 'x':'A' } );
+t.insert( { 'x':'B' } );
+t.insert( { 'x':'A' } );
+
+assert.eq( 2 , t.count() , "indexa 1" );
+
+t.update( {x:'B'}, { x:'A' } );
+
+a = t.find().toArray();
+u = Array.unique( a.map( function(z){ return z.x } ) );
+assert.eq( 2 , t.count() , "indexa 2" );
+
+assert( a.length == u.length , "unique index update is broken" );
+
diff --git a/jstests/indexapi.js b/jstests/indexapi.js
new file mode 100644
index 0000000..ae76ec7
--- /dev/null
+++ b/jstests/indexapi.js
@@ -0,0 +1,40 @@
+
+t = db.indexapi;
+t.drop();
+
+key = { x : 1 };
+
+c = { ns : t._fullName , key : key , name : t._genIndexName( key ) };
+assert.eq( c , t._indexSpec( { x : 1 } ) , "A" );
+
+c.name = "bob";
+assert.eq( c , t._indexSpec( { x : 1 } , "bob" ) , "B" );
+
+c.name = t._genIndexName( key );
+assert.eq( c , t._indexSpec( { x : 1 } ) , "C" );
+
+c.unique = true;
+assert.eq( c , t._indexSpec( { x : 1 } , true ) , "D" );
+assert.eq( c , t._indexSpec( { x : 1 } , [ true ] ) , "E" );
+assert.eq( c , t._indexSpec( { x : 1 } , { unique : true } ) , "F" );
+
+c.dropDups = true;
+assert.eq( c , t._indexSpec( { x : 1 } , [ true , true ] ) , "G" );
+assert.eq( c , t._indexSpec( { x : 1 } , { unique : true , dropDups : true } ) , "F" );
+
+t.ensureIndex( { x : 1 } , { unique : true } );
+idx = t.getIndexes();
+assert.eq( 2 , idx.length , "M1" );
+assert.eq( key , idx[1].key , "M2" );
+assert( idx[1].unique , "M3" );
+
+t.drop();
+t.ensureIndex( { x : 1 } , { unique : 1 } );
+idx = t.getIndexes();
+assert.eq( 2 , idx.length , "M1" );
+assert.eq( key , idx[1].key , "M2" );
+assert( idx[1].unique , "M3" );
+printjson( idx );
+
+db.system.indexes.insert( { ns : "test" , key : { x : 1 } , name : "x" } );
+assert( db.getLastError().indexOf( "invalid" ) >= 0 , "Z1" );
diff --git a/jstests/indexb.js b/jstests/indexb.js
new file mode 100644
index 0000000..5507fee
--- /dev/null
+++ b/jstests/indexb.js
@@ -0,0 +1,30 @@
+// unique index test for a case where the object grows
+// and must move
+
+// see indexa.js for the test case for an update with dup id check
+// when it doesn't move
+
+
+t = db.indexb;t = db.indexb;
+db.dropDatabase();
+t.drop();
+t.ensureIndex({a:1},true);
+
+t.insert({a:1});
+
+x = { a : 2 };
+t.save(x);
+
+{
+
+ assert( t.count() == 2, "count wrong B");
+
+ x.a = 1;
+ x.filler = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ t.save(x); // should fail, not unique.
+
+ assert( t.count() == 2,"count wrong" );
+ assert( t.find({a:1}).count() == 1,"bfail1" );
+ assert( t.find({a:2}).count() == 1,"bfail2" );
+
+}
diff --git a/jstests/indexc.js b/jstests/indexc.js
new file mode 100644
index 0000000..b099e2d
--- /dev/null
+++ b/jstests/indexc.js
@@ -0,0 +1,20 @@
+
+t = db.indexc;
+t.drop();
+
+for ( var i=1; i<100; i++ ){
+ var d = new Date( ( new Date() ).getTime() + i );
+ t.save( { a : i , ts : d , cats : [ i , i + 1 , i + 2 ] } );
+ if ( i == 51 )
+ mid = d;
+}
+
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "A" );
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "B" );
+
+t.ensureIndex( { ts : 1 , cats : 1 } );
+t.ensureIndex( { cats : 1 } );
+
+// multi-key bug was firing here (related to getsetdup()):
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).itcount() , "C" );
+assert.eq( 50 , t.find( { ts : { $lt : mid } } ).sort( { ts : 1 } ).itcount() , "D" );
diff --git a/jstests/indexd.js b/jstests/indexd.js
new file mode 100644
index 0000000..33246ad
--- /dev/null
+++ b/jstests/indexd.js
@@ -0,0 +1,10 @@
+
+t = db.indexd;
+t.drop();
+
+t.save( { a : 1 } );
+t.ensureIndex( { a : 1 } );
+assert.throws( function(){ db.indexd.$_id_.drop(); } );
+assert( t.drop() );
+
+//db.indexd.$_id_.remove({});
diff --git a/jstests/indexe.js b/jstests/indexe.js
new file mode 100644
index 0000000..3170757
--- /dev/null
+++ b/jstests/indexe.js
@@ -0,0 +1,21 @@
+
+t = db.indexe;
+t.drop();
+
+num = 100000;
+
+for ( i=0; i<num; i++){
+ t.insert( { a : "b" } );
+}
+
+assert.eq( num , t.find().count() ,"A1" );
+assert.eq( num , t.find( { a : "b" } ).count() , "B1" );
+assert.eq( num , t.find( { a : "b" } ).itcount() , "C1" );
+
+t.ensureIndex( { a : "b" } );
+
+assert.eq( num , t.find().count() ,"A2" );
+assert.eq( num , t.find().sort( { a : 1 } ).count() , "A2a" );
+assert.eq( num , t.find().sort( { a : "b" } ).itcount() , "A2b" );
+assert.eq( num , t.find( { a : "b" } ).count() , "B2" );
+assert.eq( num , t.find( { a : "b" } ).itcount() , "C3" );
diff --git a/jstests/indexf.js b/jstests/indexf.js
new file mode 100644
index 0000000..d65e7b1
--- /dev/null
+++ b/jstests/indexf.js
@@ -0,0 +1,13 @@
+
+t = db.indexf
+t.drop();
+
+t.ensureIndex( { x : 1 } );
+
+t.save( { x : 2 } );
+t.save( { y : 3 } );
+t.save( { x : 4 } );
+
+assert.eq( 2 , t.findOne( { x : 2 } ).x , "A1" );
+assert.eq( 3 , t.findOne( { x : null } ).y , "A2" );
+assert.eq( 4 , t.findOne( { x : 4 } ).x , "A3" );
diff --git a/jstests/jni1.js b/jstests/jni1.js
new file mode 100644
index 0000000..9e33287
--- /dev/null
+++ b/jstests/jni1.js
@@ -0,0 +1,12 @@
+
+
+t = db.jni1;
+t.remove( {} );
+
+t.save( { z : 1 } );
+t.save( { z : 2 } );
+assert( 2 == t.find().length() );
+assert( 2 == t.find( { $where : function(){ return 1; } } ).length() );
+assert( 1 == t.find( { $where : function(){ return obj.z == 2; } } ).length() );
+
+assert(t.validate().valid);
diff --git a/jstests/jni2.js b/jstests/jni2.js
new file mode 100644
index 0000000..221780d
--- /dev/null
+++ b/jstests/jni2.js
@@ -0,0 +1,22 @@
+
+t = db.jni2;
+t.remove( {} );
+
+db.jni2t.remove( {} );
+
+assert.eq( 0 , db.jni2t.find().length() , "A" );
+
+t.save( { z : 1 } );
+t.save( { z : 2 } );
+assert.throws( function(){
+ t.find( { $where :
+ function(){
+ db.jni2t.save( { y : 1 } );
+ return 1;
+ }
+ } ).length();
+} , "can't save from $where" );
+
+assert.eq( 0 , db.jni2t.find().length() , "B" )
+
+assert(t.validate().valid , "E");
diff --git a/jstests/jni3.js b/jstests/jni3.js
new file mode 100644
index 0000000..e0f0d10
--- /dev/null
+++ b/jstests/jni3.js
@@ -0,0 +1,74 @@
+
+t = db.jni3;
+
+debug = function( s ){
+ //printjson( s );
+}
+
+for( z = 0; z < 2; z++ ) {
+ debug(z);
+
+ t.drop();
+
+ if( z > 0 ) {
+ t.ensureIndex({_id:1});
+ t.ensureIndex({i:1});
+ }
+
+ for( i = 0; i < 1000; i++ )
+ t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert( 33 == db.dbEval(function() { return 33; } ) );
+
+ db.dbEval( function() { db.jni3.save({i:-1, z:"server side"}) } );
+
+ assert( db.jni3.findOne({i:-1}) );
+
+ assert( 2 == t.find( { $where :
+ function(){
+ return obj.i == 7 || obj.i == 8;
+ }
+ } ).length() );
+
+
+ // NPE test
+ var ok = false;
+ try {
+ var x = t.find( { $where :
+ function(){
+ asdf.asdf.f.s.s();
+ }
+ } );
+ debug( x.length() );
+ debug( tojson( x ) );
+ }
+ catch(e) {
+ ok = true;
+ }
+ debug( ok );
+ assert(ok);
+
+ t.ensureIndex({z:1});
+ t.ensureIndex({q:1});
+
+ debug( "before indexed find" );
+
+ arr = t.find( { $where :
+ function(){
+ return obj.i == 7 || obj.i == 8;
+ }
+ } ).toArray();
+ debug( arr );
+ assert.eq( 2, arr.length );
+
+ debug( "after indexed find" );
+
+ for( i = 1000; i < 2000; i++ )
+ t.save( { i:i, z: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert( t.find().count() == 2001 );
+
+ assert( t.validate().valid );
+
+ debug( "done iter" );
+}
diff --git a/jstests/jni4.js b/jstests/jni4.js
new file mode 100644
index 0000000..b9f429e
--- /dev/null
+++ b/jstests/jni4.js
@@ -0,0 +1,49 @@
+t = db.jni4;
+t.drop();
+
+real = { a : 1 ,
+ b : "abc" ,
+ c : /abc/i ,
+ d : new Date(111911100111) ,
+ e : null ,
+ f : true
+ };
+
+t.save( real );
+
+assert.eq( "/abc/i" , real.c.toString() , "regex 1" );
+
+var cursor = t.find( { $where :
+ function(){
+ fullObject;
+        assert.eq( 7 , Object.keySet( obj ).length , "A" );
+ assert.eq( 1 , obj.a , "B" );
+ assert.eq( "abc" , obj.b , "C" );
+ assert.eq( "/abc/i" , obj.c.toString() , "D" );
+ assert.eq( 111911100111 , obj.d.getTime() , "E" );
+ assert( obj.f , "F" );
+ assert( ! obj.e , "G" );
+
+ return true;
+ }
+ } );
+assert.eq( 1 , cursor.toArray().length );
+assert.eq( "abc" , cursor[0].b );
+
+// ---
+
+t.drop();
+t.save( { a : 2 , b : { c : 7 , d : "d is good" } } );
+var cursor = t.find( { $where :
+ function(){
+ fullObject;
+        assert.eq( 3 , Object.keySet( obj ).length );
+ assert.eq( 2 , obj.a );
+ assert.eq( 7 , obj.b.c );
+ assert.eq( "d is good" , obj.b.d );
+ return true;
+ }
+ } );
+assert.eq( 1 , cursor.toArray().length );
+
+assert(t.validate().valid);
diff --git a/jstests/jni5.js b/jstests/jni5.js
new file mode 100644
index 0000000..c6e6b54
--- /dev/null
+++ b/jstests/jni5.js
@@ -0,0 +1,10 @@
+
+t = db.jni5
+t.drop();
+
+t.save( { a : 1 } )
+t.save( { a : 2 } )
+
+assert.eq( 2 , t.find( { "$where" : "this.a" } ).count() , "A" );
+assert.eq( 0 , t.find( { "$where" : "this.b" } ).count() , "B" );
+assert.eq( 0 , t.find( { "$where" : "this.b > 45" } ).count() , "C" );
diff --git a/jstests/jni7.js b/jstests/jni7.js
new file mode 100644
index 0000000..2685dce
--- /dev/null
+++ b/jstests/jni7.js
@@ -0,0 +1,7 @@
+t = db.jni7;
+t.drop();
+
+assert.eq( 17 , db.eval( function(){ return args[0]; } , 17 ) );
+
+assert.eq( 17 , db.eval( function( foo ){ return foo; } , 17 ) );
+
diff --git a/jstests/jni8.js b/jstests/jni8.js
new file mode 100644
index 0000000..afc7d83
--- /dev/null
+++ b/jstests/jni8.js
@@ -0,0 +1,14 @@
+t = db.jni8;
+t.drop();
+
+t.save( { a : 1 , b : [ 2 , 3 , 4 ] } );
+
+assert.eq( 1 , t.find().length() , "A" );
+assert.eq( 1 , t.find( function(){ return this.a == 1; } ).length() , "B" );
+assert.eq( 1 , t.find( function(){ if ( ! this.b.length ) return true; return this.b.length == 3; } ).length() , "B2" );
+assert.eq( 1 , t.find( function(){ return this.b[0] == 2; } ).length() , "C" );
+assert.eq( 0 , t.find( function(){ return this.b[0] == 3; } ).length() , "D" );
+assert.eq( 1 , t.find( function(){ return this.b[1] == 3; } ).length() , "E" );
+
+
+assert(t.validate().valid);
diff --git a/jstests/jni9.js b/jstests/jni9.js
new file mode 100644
index 0000000..940e36a
--- /dev/null
+++ b/jstests/jni9.js
@@ -0,0 +1,24 @@
+c = db.jni9;
+c.drop();
+
+c.save( { a : 1 } );
+c.save( { a : 2 } );
+
+
+assert.eq( 2 , c.find().length() );
+assert.eq( 2 , c.find().count() );
+
+
+assert.eq( 2 ,
+ db.eval(
+ function(){
+ num = 0;
+ db.jni9.find().forEach(
+ function(z){
+ num++;
+ }
+ );
+ return num;
+ }
+ )
+ )
diff --git a/jstests/json1.js b/jstests/json1.js
new file mode 100644
index 0000000..a3dc820
--- /dev/null
+++ b/jstests/json1.js
@@ -0,0 +1,20 @@
+
+x = { quotes:"a\"b" , nulls:null };
+eval( "y = " + tojson( x ) );
+assert.eq( tojson( x ) , tojson( y ) , "A" );
+assert.eq( typeof( x.nulls ) , typeof( y.nulls ) , "B" );
+
+// each type is parsed properly
+x = {"x" : null, "y" : true, "z" : 123, "w" : "foo"};
+assert.eq(tojson(x,"",false), '{\n\t"x" : null,\n\t"y" : true,\n\t"z" : 123,\n\t"w" : "foo"\n}' , "C" );
+
+x = {"x" : [], "y" : {}};
+assert.eq(tojson(x,"",false), '{\n\t"x" : [ ],\n\t"y" : {\n\t\t\n\t}\n}' , "D" );
+
+// nested
+x = {"x" : [{"x" : [1,2,[]], "z" : "ok", "y" : [[]]}, {"foo" : "bar"}], "y" : null};
+assert.eq(tojson(x), '{\n\t"x" : [\n\t\t{\n\t\t\t"x" : [\n\t\t\t\t1,\n\t\t\t\t2,\n\t\t\t\t[ ]\n\t\t\t],\n\t\t\t"z" : "ok",\n\t\t\t"y" : [\n\t\t\t\t[ ]\n\t\t\t]\n\t\t},\n\t\t{\n\t\t\t"foo" : "bar"\n\t\t}\n\t],\n\t"y" : null\n}' , "E" );
+
+// special types
+x = {"x" : ObjectId("4ad35a73d2e34eb4fc43579a"), 'z' : /xd?/ig};
+assert.eq(tojson(x,"",false), '{\n\t"x" : ObjectId("4ad35a73d2e34eb4fc43579a"),\n\t"z" : /xd?/gi\n}' , "F" );
diff --git a/jstests/map1.js b/jstests/map1.js
new file mode 100644
index 0000000..1db53cd
--- /dev/null
+++ b/jstests/map1.js
@@ -0,0 +1,24 @@
+
+function basic1( key , lookup , shouldFail){
+ var m = new Map();
+ m.put( key , 17 );
+
+ var out = m.get( lookup || key );
+
+ if ( ! shouldFail ){
+ assert.eq( 17 , out , "basic1 missing: " + tojson( key ) );
+ }
+ else {
+ assert.isnull( out , "basic1 not missing: " + tojson( key ) );
+ }
+
+}
+
+basic1( 6 )
+basic1( new Date() )
+basic1( "eliot" )
+basic1( { a : 1 } );
+basic1( { a : 1 , b : 1 } )
+basic1( { a : 1 } , { b : 1 } , true )
+basic1( { a : 1 , b : 1 } , { b : 1 , a : 1 } , true )
+basic1( { a : 1 } , { a : 2 } , true );
diff --git a/jstests/median.js b/jstests/median.js
new file mode 100644
index 0000000..b6ef7c4
--- /dev/null
+++ b/jstests/median.js
@@ -0,0 +1,74 @@
+f = db.jstests_median;
+f.drop();
+
+f.ensureIndex( {i:1} );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:0} } ).ok );
+
+f.save( {i:0} );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:1} } ).median.i );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:10} } ).median.i );
+
+f.save( {i:1} );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:1} } ).median.i );
+assert.eq( 1, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:10} } ).median.i );
+
+for( i = 2; i < 1000; ++i ) {
+ f.save( {i:i} );
+}
+
+assert.eq( 500, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:1000} } ).median.i );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:1} } ).median.i );
+assert.eq( 500, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:500}, max:{i:501} } ).median.i );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:1} } ).median.i );
+assert.eq( 1, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1}, min:{i:0}, max:{i:2} } ).median.i );
+
+f.drop();
+f.ensureIndex( {i:1,j:-1} );
+for( i = 0; i < 100; ++i ) {
+ for( j = 0; j < 100; ++j ) {
+ f.save( {i:i,j:j} );
+ }
+}
+
+assert.eq( 50, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:0}, max:{i:99,j:0} } ).median.i );
+assert.eq( 0, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:1}, max:{i:0,j:0} } ).median.i );
+assert.eq( 50, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:50,j:1}, max:{i:50,j:0} } ).median.i );
+assert.eq( 1, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:0}, max:{i:1,j:0} } ).median.i );
+assert.eq( 1, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:0}, max:{i:2,j:0} } ).median.i );
+
+assert.eq( 50, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:99}, max:{i:0,j:0} } ).median.j );
+assert.eq( 45, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:0,j:49}, max:{i:0,j:40} } ).median.j );
+
+assert.eq( 10, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:10,j:50}, max:{i:11,j:75} } ).median.i );
+assert.eq( 13, db.runCommand( {medianKey:"test.jstests_median", keyPattern:{i:1,j:-1}, min:{i:10,j:50}, max:{i:11,j:75} } ).median.j );
+
+f.drop();
+f.ensureIndex( {i:1,j:1} );
+f.save( {i:0,j:0} );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0}, max:{i:0} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0}, max:{i:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:1}, max:{i:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:0} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:1,j:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:1,j:1}, max:{i:0,j:0} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:1}, max:{i:0,j:0} } ).ok );
+
+f.drop();
+f.ensureIndex( {i:1,j:-1} );
+f.save( {i:0,j:0} );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:0} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:1,j:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:1,j:1}, max:{i:0,j:0} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:1} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:1}, max:{i:0,j:-1} } ).ok );
+
+f.drop();
+f.ensureIndex( {i:1,j:1} );
+f.ensureIndex( {i:1,j:-1} );
+f.save( {i:0,j:0} );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:0} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:1,j:1} } ).ok );
+assert.eq( false, db.runCommand( {medianKey:"test.jstests_median", min:{i:1,j:1}, max:{i:-1,j:-1} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:0}, max:{i:0,j:1} } ).ok );
+assert.eq( true, db.runCommand( {medianKey:"test.jstests_median", min:{i:0,j:1}, max:{i:0,j:-1} } ).ok );
diff --git a/jstests/minmax.js b/jstests/minmax.js
new file mode 100644
index 0000000..3723e33
--- /dev/null
+++ b/jstests/minmax.js
@@ -0,0 +1,40 @@
+// test min / max query parameters
+
+addData = function() {
+ t.save( { a: 1, b: 1 } );
+ t.save( { a: 1, b: 2 } );
+ t.save( { a: 2, b: 1 } );
+ t.save( { a: 2, b: 2 } );
+}
+
+t = db.jstests_minmax;
+t.drop();
+t.ensureIndex( { a: 1, b: 1 } );
+addData();
+
+assert.eq( 1, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1 } ).toArray().length );
+assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 1.5 } ).toArray().length );
+assert.eq( 2, t.find().min( { a: 1, b: 2 } ).max( { a: 2, b: 2 } ).toArray().length );
+
+// just one bound
+assert.eq( 3, t.find().min( { a: 1, b: 2 } ).toArray().length );
+assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).toArray().length );
+assert.eq( 3, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: 1 } ).toArray().length );
+assert.eq( 3, t.find().max( { a: 2, b: 1.5 } ).hint( { a: 1, b: 1 } ).toArray().length );
+
+t.drop();
+t.ensureIndex( { a: 1, b: -1 } );
+addData();
+assert.eq( 4, t.find().min( { a: 1, b: 2 } ).toArray().length );
+assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).toArray().length );
+assert.eq( 1, t.find().min( { a: 2, b: 1 } ).toArray().length );
+assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).toArray().length );
+assert.eq( 4, t.find().min( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 4, t.find().max( { a: 2, b: 0.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 1, t.find().min( { a: 2, b: 1 } ).hint( { a: 1, b: -1 } ).toArray().length );
+assert.eq( 1, t.find().max( { a: 1, b: 1.5 } ).hint( { a: 1, b: -1 } ).toArray().length );
+
+// hint doesn't match
+assert.throws( function() { t.find().min( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
+assert.throws( function() { t.find().min( { a: 1, b: 1 } ).max( { a: 1 } ).hint( { a: 1, b: -1 } ).toArray() } );
+assert.throws( function() { t.find().min( { b: 1 } ).max( { a: 1, b: 2 } ).hint( { a: 1, b: -1 } ).toArray() } ); \ No newline at end of file
diff --git a/jstests/mod1.js b/jstests/mod1.js
new file mode 100644
index 0000000..eca35b7
--- /dev/null
+++ b/jstests/mod1.js
@@ -0,0 +1,24 @@
+
+t = db.mod1;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 11 } );
+t.save( { a : 20 } );
+t.save( { a : "asd" } );
+t.save( { a : "adasdas" } );
+
+assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "A1" );
+assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "A2" );
+assert.eq( 6 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "A3" );
+
+t.ensureIndex( { a : 1 } );
+
+assert.eq( 2 , t.find( "this.a % 10 == 1" ).itcount() , "B1" );
+assert.eq( 2 , t.find( { a : { $mod : [ 10 , 1 ] } } ).itcount() , "B2" );
+
+assert.eq( 1 , t.find( "this.a % 10 == 0" ).itcount() , "B3" );
+assert.eq( 1 , t.find( { a : { $mod : [ 10 , 0 ] } } ).itcount() , "B4" );
+assert.eq( 4 , t.find( { a : { $mod : [ 10 , 1 ] } } ).explain().nscanned , "B5" );
+
diff --git a/jstests/mr1.js b/jstests/mr1.js
new file mode 100644
index 0000000..aacd69b
--- /dev/null
+++ b/jstests/mr1.js
@@ -0,0 +1,176 @@
+
+t = db.mr1;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+emit = printjson;
+
+function d( x ){
+ printjson( x );
+}
+
+ks = "_id";
+if ( db.version() == "1.1.1" )
+ ks = "key";
+
+
+m = function(){
+ this.tags.forEach(
+ function(z){
+ emit( z , { count : 1 } );
+ }
+ );
+};
+
+m2 = function(){
+ for ( var i=0; i<this.tags.length; i++ ){
+ emit( this.tags[i] , 1 );
+ }
+};
+
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+r2 = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i];
+ }
+ return total;
+};
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r } );
+d( res );
+if ( ks == "_id" ) assert( res.ok , "not ok" );
+assert.eq( 4 , res.counts.input , "A" );
+x = db[res.result];
+
+assert.eq( 3 , x.find().count() , "B" );
+x.find().forEach( d );
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+d( z );
+assert.eq( 3 , Object.keySet( z ).length , "C" );
+assert.eq( 2 , z.a , "D" );
+assert.eq( 3 , z.b , "E" );
+assert.eq( 3 , z.c , "F" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , query : { x : { "$gt" : 2 } } } );
+d( res );
+assert.eq( 2 , res.counts.input , "B" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+assert.eq( 1 , z.a , "C1" );
+assert.eq( 1 , z.b , "C2" );
+assert.eq( 2 , z.c , "C3" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , map : m2 , reduce : r2 , query : { x : { "$gt" : 2 } } } );
+d( res );
+assert.eq( 2 , res.counts.input , "B" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value; } );
+assert.eq( 1 , z.a , "C1z" );
+assert.eq( 1 , z.b , "C2z" );
+assert.eq( 2 , z.c , "C3z" );
+x.drop();
+
+res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r , query : { x : { "$gt" : 2 } } } );
+d( res );
+assert.eq( 2 , res.counts.input , "B2" );
+assert.eq( "mr1_foo" , res.result , "B2-c" );
+x = db[res.result];
+z = {};
+x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+assert.eq( 1 , z.a , "C1a" );
+assert.eq( 1 , z.b , "C2a" );
+assert.eq( 2 , z.c , "C3a" );
+x.drop();
+
+for ( i=5; i<1000; i++ ){
+ t.save( { x : i , tags : [ "b" , "d" ] } );
+}
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r } );
+d( res );
+assert.eq( 999 , res.counts.input , "Z1" );
+x = db[res.result];
+x.find().forEach( d )
+assert.eq( 4 , x.find().count() , "Z2" );
+assert.eq( "a,b,c,d" , x.distinct( ks ) , "Z3" );
+
+function getk( k ){
+ var o = {};
+ o[ks] = k;
+ return x.findOne( o );
+}
+
+assert.eq( 2 , getk( "a" ).value.count , "ZA" );
+assert.eq( 998 , getk( "b" ).value.count , "ZB" );
+assert.eq( 3 , getk( "c" ).value.count , "ZC" );
+assert.eq( 995 , getk( "d" ).value.count , "ZD" );
+x.drop();
+
+if ( true ){
+ printjson( db.runCommand( { mapreduce : "mr1" , map : m , reduce : r , verbose : true } ) );
+}
+
+print( "t1: " + Date.timeFunc(
+ function(){
+ var out = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r } );
+ if ( ks == "_id" ) assert( out.ok , "XXX : " + tojson( out ) );
+ db[out.result].drop();
+ } , 10 ) + " (~500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } , 10 ) );
+
+
+
+// test doesn't exist
+res = db.runCommand( { mapreduce : "lasjdlasjdlasjdjasldjalsdj12e" , map : m , reduce : r } );
+assert( ! res.ok , "should be not ok" );
+
+if ( true ){
+ correct = {};
+
+ for ( i=0; i<20000; i++ ){
+ k = "Z" + i % 10000;
+ if ( correct[k] )
+ correct[k]++;
+ else
+ correct[k] = 1;
+ t.save( { x : i , tags : [ k ] } );
+ }
+
+ res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m , reduce : r } );
+ d( res );
+ print( "t2: " + res.timeMillis + " (~3500 on 2.8ghz) - itcount: " + Date.timeFunc( function(){ db.mr1.find().itcount(); } ) );
+ x = db[res.result];
+ z = {};
+ x.find().forEach( function(a){ z[a[ks]] = a.value.count; } );
+ for ( zz in z ){
+ if ( zz.indexOf( "Z" ) == 0 ){
+ assert.eq( correct[zz] , z[zz] , "ZZ : " + zz );
+ }
+ }
+ x.drop();
+
+ res = db.runCommand( { mapreduce : "mr1" , out : "mr1_foo" , map : m2 , reduce : r2 } );
+ d(res);
+ print( "t3: " + res.timeMillis + " (~3500 on 2.8ghz)" );
+}
+
+
+res = db.runCommand( { mapreduce : "mr1" , map : m , reduce : r } );
+assert( res.ok , "should be ok" );
diff --git a/jstests/mr2.js b/jstests/mr2.js
new file mode 100644
index 0000000..0a8e9d6
--- /dev/null
+++ b/jstests/mr2.js
@@ -0,0 +1,50 @@
+
+
+t = db.mr2;
+t.drop();
+
+t.save( { comments : [ { who : "a" , txt : "asdasdasd" } ,
+ { who : "b" , txt : "asdasdasdasdasdasdas" } ] } );
+
+t.save( { comments : [ { who : "b" , txt : "asdasdasdaaa" } ,
+ { who : "c" , txt : "asdasdasdaasdasdas" } ] } );
+
+
+
+function m(){
+ for ( var i=0; i<this.comments.length; i++ ){
+ var c = this.comments[i];
+ emit( c.who , { totalSize : c.txt.length , num : 1 } );
+ }
+}
+
+function r( who , values ){
+ var n = { totalSize : 0 , num : 0 };
+ for ( var i=0; i<values.length; i++ ){
+ n.totalSize += values[i].totalSize;
+ n.num += values[i].num;
+ }
+ return n;
+}
+
+function reformat( r ){
+ var x = {};
+ r.find().forEach(
+ function(z){
+ x[z._id] = z.value;
+ }
+ );
+ return x;
+}
+
+function f( who , res ){
+ res.avg = res.totalSize / res.num;
+ return res;
+}
+res = t.mapReduce( m , r , { finalize : f } );
+x = reformat( res );
+assert.eq( 9 , x.a.avg , "A" );
+assert.eq( 16 , x.b.avg , "B" );
+assert.eq( 18 , x.c.avg , "C" );
+res.drop();
+
diff --git a/jstests/mr3.js b/jstests/mr3.js
new file mode 100644
index 0000000..e7d1f2c
--- /dev/null
+++ b/jstests/mr3.js
@@ -0,0 +1,73 @@
+
+t = db.mr3;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+m = function( n , x ){
+ x = x || 1;
+ this.tags.forEach(
+ function(z){
+ for ( var i=0; i<x; i++ )
+ emit( z , { count : n || 1 } );
+ }
+ );
+};
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+res = t.mapReduce( m , r );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "A1" );
+assert.eq( 2 , z.a.count , "A2" );
+assert.eq( 3 , z.b.count , "A3" );
+assert.eq( 3 , z.c.count , "A4" );
+
+res.drop();
+
+res = t.mapReduce( m , r , { mapparams : [ 2 , 2 ] } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "B1" );
+assert.eq( 8 , z.a.count , "B2" );
+assert.eq( 12 , z.b.count , "B3" );
+assert.eq( 12 , z.c.count , "B4" );
+
+res.drop();
+
+// -- just some random tests
+
+realm = m;
+
+m = function(){
+ emit( this._id , 1 );
+}
+res = t.mapReduce( m , r );
+res.drop();
+
+m = function(){
+ emit( this._id , this.xzz.a );
+}
+
+before = db.getCollectionNames().length;
+assert.throws( function(){ t.mapReduce( m , r ); } );
+assert.eq( before , db.getCollectionNames().length , "after throw crap" );
+
+
+m = realm;
+r = function( k , v ){
+ return v.x.x.x;
+}
+before = db.getCollectionNames().length;
+assert.throws( function(){ t.mapReduce( m , r ); } );
+assert.eq( before , db.getCollectionNames().length , "after throw crap" );
diff --git a/jstests/mr4.js b/jstests/mr4.js
new file mode 100644
index 0000000..b14cdfe
--- /dev/null
+++ b/jstests/mr4.js
@@ -0,0 +1,45 @@
+
+t = db.mr4;
+t.drop();
+
+t.save( { x : 1 , tags : [ "a" , "b" ] } );
+t.save( { x : 2 , tags : [ "b" , "c" ] } );
+t.save( { x : 3 , tags : [ "c" , "a" ] } );
+t.save( { x : 4 , tags : [ "b" , "c" ] } );
+
+m = function(){
+ this.tags.forEach(
+ function(z){
+ emit( z , { count : xx } );
+ }
+ );
+};
+
+r = function( key , values ){
+ var total = 0;
+ for ( var i=0; i<values.length; i++ ){
+ total += values[i].count;
+ }
+ return { count : total };
+};
+
+res = t.mapReduce( m , r , { scope : { xx : 1 } } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "A1" );
+assert.eq( 2 , z.a.count , "A2" );
+assert.eq( 3 , z.b.count , "A3" );
+assert.eq( 3 , z.c.count , "A4" );
+
+res.drop();
+
+
+res = t.mapReduce( m , r , { scope : { xx : 2 } } );
+z = res.convertToSingleObject()
+
+assert.eq( 3 , Object.keySet( z ).length , "B1" );
+assert.eq( 4 , z.a.count , "B2" );
+assert.eq( 6 , z.b.count , "B3" );
+assert.eq( 6 , z.c.count , "B4" );
+
+res.drop();
diff --git a/jstests/mr5.js b/jstests/mr5.js
new file mode 100644
index 0000000..50eb366
--- /dev/null
+++ b/jstests/mr5.js
@@ -0,0 +1,39 @@
+
+t = db.mr5;
+t.drop();
+
+t.save( { "partner" : 1, "visits" : 9 } )
+t.save( { "partner" : 2, "visits" : 9 } )
+t.save( { "partner" : 1, "visits" : 11 } )
+t.save( { "partner" : 1, "visits" : 30 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+t.save( { "partner" : 2, "visits" : 41 } )
+
+m = function(){
+ emit( this.partner , { stats : [ this.visits ] } )
+}
+
+r = function( k , v ){
+ var stats = [];
+ var total = 0;
+ for ( var i=0; i<v.length; i++ ){
+ for ( var j in v[i].stats ) {
+ stats.push( v[i].stats[j] )
+ total += v[i].stats[j];
+ }
+ }
+ return { stats : stats , total : total }
+}
+
+res = t.mapReduce( m , r , { scope : { xx : 1 } } );
+res.find().forEach( printjson )
+
+z = res.convertToSingleObject()
+assert.eq( 2 , Object.keySet( z ).length , "A" )
+assert.eq( [ 9 , 11 , 30 ] , z["1"].stats , "B" )
+assert.eq( [ 9 , 41 , 41 ] , z["2"].stats , "C" )
+
+
+res.drop()
+
+
diff --git a/jstests/multi.js b/jstests/multi.js
new file mode 100644
index 0000000..eb6cad3
--- /dev/null
+++ b/jstests/multi.js
@@ -0,0 +1,24 @@
+t = db.jstests_multi;
+t.drop();
+
+t.ensureIndex( { a: 1 } );
+t.save( { a: [ 1, 2 ] } );
+assert.eq( 1, t.find( { a: { $gt: 0 } } ).count() , "A" );
+assert.eq( 1, t.find( { a: { $gt: 0 } } ).toArray().length , "B" );
+
+t.drop();
+t.save( { a: [ [ [ 1 ] ] ] } );
+assert.eq( 0, t.find( { a:1 } ).count() , "C" );
+assert.eq( 0, t.find( { a: [ 1 ] } ).count() , "D" );
+assert.eq( 1, t.find( { a: [ [ 1 ] ] } ).count() , "E" );
+assert.eq( 1, t.find( { a: [ [ [ 1 ] ] ] } ).count() , "F" );
+
+t.drop();
+t.save( { a: [ 1, 2 ] } );
+assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , "G" );
+
+t.drop();
+t.save( { a: [ { b: 1 }, { b: 2 } ] } );
+assert.eq( 0, t.find( { 'a.b': { $ne: 1 } } ).count() , "H" );
+
+// TODO - run same tests with an index on a
diff --git a/jstests/multi2.js b/jstests/multi2.js
new file mode 100644
index 0000000..7c72722
--- /dev/null
+++ b/jstests/multi2.js
@@ -0,0 +1,23 @@
+
+t = db.multi2;
+t.drop();
+
+t.save( { x : 1 , a : [ 1 ] } );
+t.save( { x : 1 , a : [] } );
+t.save( { x : 1 , a : null } );
+t.save( {} );
+
+assert.eq( 3 , t.find( { x : 1 } ).count() , "A" );
+
+t.ensureIndex( { x : 1 } );
+assert.eq( 3 , t.find( { x : 1 } ).count() , "B" );
+assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s1" );
+assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "B2" );
+
+t.dropIndex( { x : 1 } );
+t.ensureIndex( { x : 1 , a : 1 } );
+assert.eq( 3 , t.find( { x : 1 } ).count() , "C" ); // SERVER-279
+assert.eq( 4 , t.find().sort( { x : 1 , a : 1 } ).count() , "s2" );
+assert.eq( 1 , t.find( { x : 1 , a : null } ).count() , "C2" );
+
+
diff --git a/jstests/ne1.js b/jstests/ne1.js
new file mode 100644
index 0000000..e1c5656
--- /dev/null
+++ b/jstests/ne1.js
@@ -0,0 +1,11 @@
+
+t = db.ne1;
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : 2 } );
+t.save( { x : 3 } );
+
+assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "A" );
+t.ensureIndex( { x : 1 } );
+assert.eq( 2 , t.find( { x : { $ne : 2 } } ).itcount() , "B" );
diff --git a/jstests/nin.js b/jstests/nin.js
new file mode 100644
index 0000000..4afd344
--- /dev/null
+++ b/jstests/nin.js
@@ -0,0 +1,57 @@
+t = db.jstests_nin;
+t.drop();
+
+function checkEqual( name , key , value ){
+ var o = {};
+ o[key] = { $in : [ value ] };
+ var i = t.find( o ).count();
+ o[key] = { $nin : [ value ] };
+ var n = t.find( o ).count();
+
+ assert.eq( t.find().count() , i + n ,
+ "checkEqual " + name + " $in + $nin != total | " + i + " + " + n + " != " + t.find().count() );
+}
+
+doTest = function( n ) {
+
+ t.save( { a:[ 1,2,3 ] } );
+ t.save( { a:[ 1,2,4 ] } );
+ t.save( { a:[ 1,8,5 ] } );
+ t.save( { a:[ 1,8,6 ] } );
+ t.save( { a:[ 1,9,7 ] } );
+
+ assert.eq( 5, t.find( { a: { $nin: [ 10 ] } } ).count() , n + " A" );
+ assert.eq( 0, t.find( { a: { $ne: 1 } } ).count() , n + " B" );
+ assert.eq( 0, t.find( { a: { $nin: [ 1 ] } } ).count() , n + " C" );
+ assert.eq( 0, t.find( { a: { $nin: [ 1, 2 ] } } ).count() , n + " D" );
+ assert.eq( 3, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " E" );
+ assert.eq( 3, t.find( { a: { $nin: [ 8 ] } } ).count() , n + " F" );
+ assert.eq( 4, t.find( { a: { $nin: [ 9 ] } } ).count() , n + " G" );
+ assert.eq( 4, t.find( { a: { $nin: [ 3 ] } } ).count() , n + " H" );
+ assert.eq( 3, t.find( { a: { $nin: [ 2, 3 ] } } ).count() , n + " I" );
+
+ checkEqual( n + " A" , "a" , 5 );
+
+ t.save( { a: [ 2, 2 ] } );
+ assert.eq( 3, t.find( { a: { $nin: [ 2, 2 ] } } ).count() , n + " J" );
+
+ t.save( { a: [ [ 2 ] ] } );
+ assert.eq( 4, t.find( { a: { $nin: [ 2 ] } } ).count() , n + " K" );
+
+ t.save( { a: [ { b: [ 10, 11 ] }, 11 ] } );
+ checkEqual( n + " B" , "a" , 5 );
+ checkEqual( n + " C" , "a.b" , 5 );
+
+ assert.eq( 7, t.find( { 'a.b': { $nin: [ 10 ] } } ).count() , n + " L" );
+ assert.eq( 8, t.find( { 'a.b': { $nin: [ [ 10, 11 ] ] } } ).count() , n + " M" );
+ assert.eq( 7, t.find( { a: { $nin: [ 11 ] } } ).count() , n + " N" );
+
+ t.save( { a: { b: [ 20, 30 ] } } );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20 ] } } ).count() , n + " O" );
+ assert.eq( 1, t.find( { 'a.b': { $all: [ 20, 30 ] } } ).count() , n + " P" );
+}
+
+doTest( "no index" );
+t.drop();
+t.ensureIndex( {a:1} );
+doTest( "with index" );
diff --git a/jstests/not1.js b/jstests/not1.js
new file mode 100644
index 0000000..f99a849
--- /dev/null
+++ b/jstests/not1.js
@@ -0,0 +1,20 @@
+
+t = db.not1;
+t.drop();
+
+
+t.insert({a:1})
+t.insert({a:2})
+t.insert({})
+
+function test( name ){
+ assert.eq( 3 , t.find().count() , name + "A" );
+ assert.eq( 1 , t.find( { a : 1 } ).count() , name + "B" );
+ assert.eq( 2 , t.find( { a : { $ne : 1 } } ).count() , name + "C" ); // SERVER-198
+ assert.eq( 1 , t.find({a:{$in:[1]}}).count() , name + "D" );
+ assert.eq( 2 , t.find({a:{$nin:[1]}}).count() , name + "E" ); // SERVER-198
+}
+
+test( "no index" );
+t.ensureIndex( { a : 1 } );
+test( "with index" );
diff --git a/jstests/null.js b/jstests/null.js
new file mode 100644
index 0000000..4fb663e
--- /dev/null
+++ b/jstests/null.js
@@ -0,0 +1,14 @@
+
+t = db.null1;
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : null } );
+
+assert.eq( 1 , t.find( { x : null } ).count() , "A" );
+assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "B" );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 1 , t.find( { x : null } ).count() , "C" );
+assert.eq( 1 , t.find( { x : { $ne : null } } ).count() , "D" );
diff --git a/jstests/objid1.js b/jstests/objid1.js
new file mode 100644
index 0000000..dea31ee
--- /dev/null
+++ b/jstests/objid1.js
@@ -0,0 +1,16 @@
+t = db.objid1;
+t.drop();
+
+b = new ObjectId();
+assert( b.str , "A" );
+
+a = new ObjectId( b.str );
+assert.eq( a.str , b.str , "B" );
+
+t.save( { a : a } )
+assert( t.findOne().a.isObjectId , "C" );
+assert.eq( a.str , t.findOne().a.str , "D" );
+
+x = { a : new ObjectId() };
+eval( " y = " + tojson( x ) );
+assert.eq( x.a.str , y.a.str , "E" );
diff --git a/jstests/objid2.js b/jstests/objid2.js
new file mode 100644
index 0000000..a28c18f
--- /dev/null
+++ b/jstests/objid2.js
@@ -0,0 +1,7 @@
+t = db.objid2;
+t.drop();
+
+t.save( { _id : 517 , a : "hello" } )
+
+assert.eq( t.findOne().a , "hello" );
+assert.eq( t.findOne()._id , 517 );
diff --git a/jstests/objid3.js b/jstests/objid3.js
new file mode 100644
index 0000000..ddf20d9
--- /dev/null
+++ b/jstests/objid3.js
@@ -0,0 +1,9 @@
+t = db.objid3;
+t.drop();
+
+t.save( { a : "bob" , _id : 517 } );
+for ( var k in t.findOne() ){
+ assert.eq( k , "_id" , "keys out of order" );
+ break;
+}
+
diff --git a/jstests/objid4.js b/jstests/objid4.js
new file mode 100644
index 0000000..23986b9
--- /dev/null
+++ b/jstests/objid4.js
@@ -0,0 +1,16 @@
+
+
+
+o = new ObjectId();
+assert( o.str );
+
+a = new ObjectId( o.str );
+assert.eq( o.str , a.str );
+assert.eq( a.str , a.str.toString() )
+
+b = ObjectId( o.str );
+assert.eq( o.str , b.str );
+assert.eq( b.str , b.str.toString() )
+
+assert.throws( function(z){ return new ObjectId( "a" ); } );
+assert.throws( function(z){ return new ObjectId( "12345678901234567890123z" ); } );
diff --git a/jstests/objid5.js b/jstests/objid5.js
new file mode 100644
index 0000000..ab883bc
--- /dev/null
+++ b/jstests/objid5.js
@@ -0,0 +1,6 @@
+
+t = db.objid5;
+t.drop();
+
+t.save( { _id : 5.5 } );
+assert.eq( 18 , Object.bsonsize( t.findOne() ) , "A" );
diff --git a/jstests/parallel/allops.js b/jstests/parallel/allops.js
new file mode 100644
index 0000000..7eb0cb2
--- /dev/null
+++ b/jstests/parallel/allops.js
@@ -0,0 +1,40 @@
+// test all operations in parallel
+
+f = db.jstests_parallel_allops;
+f.drop();
+
+Random.setRandomSeed();
+
+t = new ParallelTester();
+
+for( id = 0; id < 10; ++id ) {
+ var g = new EventGenerator( id, "jstests_parallel_allops", Random.randInt( 20 ) );
+ for( var j = 0; j < 1000; ++j ) {
+ var op = Random.randInt( 3 );
+ switch( op ) {
+ case 0: // insert
+ g.addInsert( { _id:Random.randInt( 1000 ) } );
+ break;
+ case 1: // remove
+ g.addRemove( { _id:Random.randInt( 1000 ) } );
+ break;
+ case 2: // update
+ g.addUpdate( {_id:{$lt:1000}}, { _id:Random.randInt( 1000 ) } );
+ break;
+ default:
+ assert( false, "Invalid op code" );
+ }
+ }
+ t.add( EventGenerator.dispatch, g.getEvents() );
+}
+
+var g = new EventGenerator( id, "jstests_parallel_allops", Random.randInt( 5 ) );
+for( var j = 1000; j < 3000; ++j ) {
+ g.addCheckCount( j - 1000, { _id: {$gte:1000} }, j % 100 == 0, j % 500 == 0 );
+ g.addInsert( {_id:j} );
+}
+t.add( EventGenerator.dispatch, g.getEvents() );
+
+t.run( "one or more tests failed" );
+
+assert( f.validate().valid );
diff --git a/jstests/parallel/basic.js b/jstests/parallel/basic.js
new file mode 100644
index 0000000..9c10306
--- /dev/null
+++ b/jstests/parallel/basic.js
@@ -0,0 +1,11 @@
+// perform basic js tests in parallel
+
+Random.setRandomSeed();
+
+var params = ParallelTester.createJstestsLists( 4 );
+var t = new ParallelTester();
+for( i in params ) {
+ t.add( ParallelTester.fileTester, params[ i ] );
+}
+
+t.run( "one or more tests failed", true );
diff --git a/jstests/parallel/basicPlus.js b/jstests/parallel/basicPlus.js
new file mode 100644
index 0000000..d6f9a4d
--- /dev/null
+++ b/jstests/parallel/basicPlus.js
@@ -0,0 +1,26 @@
+// perform basic js tests in parallel & some other tasks as well
+
+var c = db.jstests_parallel_basicPlus;
+c.drop();
+
+Random.setRandomSeed();
+
+var params = ParallelTester.createJstestsLists( 4 );
+var t = new ParallelTester();
+for( i in params ) {
+ t.add( ParallelTester.fileTester, params[ i ] );
+}
+
+for( var i = 4; i < 8; ++i ) {
+ var g = new EventGenerator( i, "jstests_parallel_basicPlus", Random.randInt( 20 ) );
+ for( var j = ( i - 4 ) * 3000; j < ( i - 3 ) * 3000; ++j ) {
+ var expected = j - ( ( i - 4 ) * 3000 );
+ g.addCheckCount( expected, {_id:{$gte:((i-4)*3000),$lt:((i-3)*3000)}}, expected % 1000 == 0, expected % 500 == 0 );
+ g.addInsert( {_id:j} );
+ }
+ t.add( EventGenerator.dispatch, g.getEvents() );
+}
+
+t.run( "one or more tests failed", true );
+
+assert( c.validate().valid, "validate failed" ); \ No newline at end of file
diff --git a/jstests/parallel/insert.js b/jstests/parallel/insert.js
new file mode 100644
index 0000000..fc1c750
--- /dev/null
+++ b/jstests/parallel/insert.js
@@ -0,0 +1,24 @@
+// perform inserts in parallel from several clients
+
+f = db.jstests_parallel_insert;
+f.drop();
+f.ensureIndex( {who:1} );
+
+Random.setRandomSeed();
+
+t = new ParallelTester();
+
+for( id = 0; id < 10; ++id ) {
+ var g = new EventGenerator( id, "jstests_parallel_insert", Random.randInt( 20 ) );
+ for( j = 0; j < 1000; ++j ) {
+ if ( j % 50 == 0 ) {
+ g.addCheckCount( j, {who:id} );
+ }
+ g.addInsert( { i:j, who:id } );
+ }
+ t.add( EventGenerator.dispatch, g.getEvents() );
+}
+
+t.run( "one or more tests failed" );
+
+assert( f.validate().valid );
diff --git a/jstests/parallel/manyclients.js b/jstests/parallel/manyclients.js
new file mode 100644
index 0000000..14cdec5
--- /dev/null
+++ b/jstests/parallel/manyclients.js
@@ -0,0 +1,26 @@
+// perform inserts in parallel from a large number of clients
+
+f = db.jstests_parallel_manyclients;
+f.drop();
+f.ensureIndex( {who:1} );
+
+Random.setRandomSeed();
+
+t = new ParallelTester();
+
+for( id = 0; id < 200; ++id ) {
+ var g = new EventGenerator( id, "jstests_parallel_manyclients", Random.randInt( 20 ) );
+ for( j = 0; j < 1000; ++j ) {
+ if ( j % 50 == 0 ) {
+ g.addCheckCount( j, {who:id}, true );
+ }
+ g.addInsert( { i:j, who:id } );
+ }
+ t.add( EventGenerator.dispatch, g.getEvents() );
+}
+
+print( "done preparing test" );
+
+t.run( "one or more tests failed" );
+
+assert( f.validate().valid );
diff --git a/jstests/parallel/shellfork.js b/jstests/parallel/shellfork.js
new file mode 100644
index 0000000..20a1d3d
--- /dev/null
+++ b/jstests/parallel/shellfork.js
@@ -0,0 +1,33 @@
+a = fork( function( a, b ) { return a / b; }, 10, 2 );
+a.start();
+b = fork( function( a, b, c ) { return a + b + c; }, 18, " is a ", "multiple of 3" );
+makeFunny = function( text ) {
+ return text + " ha ha!";
+}
+c = fork( makeFunny, "paisley" );
+c.start();
+b.start();
+b.join();
+assert.eq( 5, a.returnData() );
+assert.eq( "18 is a multiple of 3", b.returnData() );
+assert.eq( "paisley ha ha!", c.returnData() );
+
+z = fork( function( a ) {
+ var y = fork( function( a ) {
+ return a + 1; }, 5 );
+ y.start();
+ return y.returnData() + a;
+ }, 1 );
+z.start();
+assert.eq( 7, z.returnData() );
+
+
+t = 1;
+z = new ScopedThread( function() {
+ assert( typeof( t ) == "undefined", "t not undefined" );
+ t = 5;
+ return t;
+ } );
+z.start();
+assert.eq( 5, z.returnData() );
+assert.eq( 1, t ); \ No newline at end of file
diff --git a/jstests/perf/find1.js b/jstests/perf/find1.js
new file mode 100644
index 0000000..ecd94e5
--- /dev/null
+++ b/jstests/perf/find1.js
@@ -0,0 +1,90 @@
+/**
+ * Performance tests for various finders
+ */
+
+var calls = 100;
+var size = 500000;
+var collection_name = "sort2";
+
+function testSetup(dbConn) {
+ var t = dbConn[collection_name];
+ t.drop();
+
+ for (var i=0; i<size; i++){
+ t.save({ num : i });
+ if (i == 0 )
+ t.ensureIndex( { num : 1 } );
+ }
+}
+
+function resetQueryCache( db ) {
+ db[ collection_name ].createIndex( { a: 1 }, "dumbIndex" );
+ db[ collection_name ].dropIndex( "dumbIndex" );
+}
+
+function between( low, high, val, msg ) {
+ assert( low < val, msg );
+ assert( val < high, msg );
+}
+
+/**
+ * Tests fetching a set of 10 objects in sorted order, comparing getting
+ * from front of collection vs end, using $lt
+ */
+function testFindLTFrontBack(dbConn) {
+
+ var results = {};
+ var t = dbConn[collection_name];
+
+ resetQueryCache( dbConn );
+ results.oneInOrderLTFirst = Date.timeFunc(
+ function(){
+ assert( t.find( { num : {$lt : 20} } ).sort( { num : 1 } ).limit(10).toArray().length == 10);
+ } , calls );
+
+ resetQueryCache( dbConn );
+ results.oneInOrderLTLast = Date.timeFunc(
+ function(){
+ assert( t.find( { num : {$lt : size-20 }} ).sort( { num : 1 } ).limit(10).toArray().length == 10);
+ } , calls );
+
+
+ between( 0.9, 1.1, results.oneInOrderLTFirst / results.oneInOrderLTLast,
+ "first / last (" + results.oneInOrderLTFirst + " / " + results.oneInOrderLTLast + " ) = " +
+ results.oneInOrderLTFirst / results.oneInOrderLTLast + " not in [0.9, 1.1]" );
+}
+
+
+
+/**
+ * Tests fetching a set of 10 objects in sorted order, comparing getting
+ * from front of collection vs end
+ */
+function testFindGTFrontBack(dbConn) {
+
+ var results = {};
+ var t = dbConn[collection_name];
+
+ resetQueryCache( dbConn );
+ results.oneInOrderGTFirst = Date.timeFunc(
+ function(){
+ assert( t.find( { num : {$gt : 5} } ).sort( { num : 1 } ).limit(10).toArray().length == 10);
+ } , calls );
+
+ resetQueryCache( dbConn );
+ results.oneInOrderGTLast = Date.timeFunc(
+ function(){
+ assert( t.find( { num : {$gt : size-20 }} ).sort( { num : 1 } ).limit(10).toArray().length == 10);
+ } , calls );
+
+
+ between( 0.25, 4.0, results.oneInOrderGTFirst / results.oneInOrderGTLast,
+ "first / last (" + results.oneInOrderGTFirst + " / " + results.oneInOrderGTLast + " ) = " +
+ results.oneInOrderGTFirst / results.oneInOrderGTLast + " not in [0.25, 4.0]" );
+
+}
+
+testSetup(db);
+
+testFindLTFrontBack(db);
+testFindGTFrontBack(db); \ No newline at end of file
diff --git a/jstests/perf/index1.js b/jstests/perf/index1.js
new file mode 100644
index 0000000..7bcf4b7
--- /dev/null
+++ b/jstests/perf/index1.js
@@ -0,0 +1,20 @@
+
+t = db.perf.index1;
+t.drop();
+
+for ( var i=0; i<100000; i++ ){
+ t.save( { x : i } );
+}
+
+t.findOne();
+
+printjson( db.serverStatus().mem );
+
+for ( var i=0; i<5; i++ ){
+ nonu = Date.timeFunc( function(){ t.ensureIndex( { x : 1 } ); } );
+ t.dropIndex( { x : 1 } );
+ u = Date.timeFunc( function(){ t.ensureIndex( { x : 1 }, { unique : 1 } ); } );
+ t.dropIndex( { x : 1 } );
+ print( "non unique: " + nonu + " unique: " + u );
+ printjson( db.serverStatus().mem );
+}
diff --git a/jstests/perf/remove1.js b/jstests/perf/remove1.js
new file mode 100644
index 0000000..3e1a1a6
--- /dev/null
+++ b/jstests/perf/remove1.js
@@ -0,0 +1,68 @@
+/**
+ * Performance tests for removing objects
+ */
+
+var removals = 100;
+var size = 500000;
+var collection_name = "remove_test";
+var msg = "Hello from remove test";
+
+function testSetup(dbConn) {
+ var t = dbConn[collection_name];
+ t.drop();
+ t.ensureIndex( { num : 1 } );
+
+ for (var i=0; i<size; i++){
+ t.save({ num : i, msg : msg });
+ }
+}
+
+function between( low, high, val, msg ) {
+ assert( low < val, msg );
+ assert( val < high, msg );
+}
+
+/**
+ * Compares difference of removing objects from a collection if only includes
+ * field that's indexed, vs w/ additional other fields
+ *
+ * @param dbConn
+ */
+function testRemoveWithMultiField(dbConn) {
+
+ var results = {};
+ var t = dbConn[collection_name];
+
+ testSetup(dbConn);
+
+ t.remove( {num:0 } );
+ results.indexOnly = Date.timeFunc(
+ function(){
+ for (var i = 1; i < removals; i++) {
+ t.remove({num : i});
+ }
+
+ t.findOne();
+ }
+ );
+
+ testSetup(dbConn);
+
+ t.remove( {num: 0, msg: msg } );
+ results.withAnother = Date.timeFunc(
+ function(){
+ for (var i = 1; i < removals; i++) {
+ t.remove({num : i, msg : msg});
+ }
+
+ t.findOne();
+ }
+ );
+
+
+ between( 0.65, 1.35, (results.indexOnly / results.withAnother),
+ "indexOnly / withAnother (" + results.indexOnly + " / " + results.withAnother + " ) = " +
+ results.indexOnly / results.withAnother + " not in [0.65, 1.35]" );
+}
+
+testRemoveWithMultiField(db);
diff --git a/jstests/profile1.js b/jstests/profile1.js
new file mode 100644
index 0000000..ea53b09
--- /dev/null
+++ b/jstests/profile1.js
@@ -0,0 +1,40 @@
+
+/* With pre-created system.profile (capped) */
+db.runCommand({profile: 0});
+db.getCollection("system.profile").drop();
+assert(!db.getLastError(), "Z");
+assert.eq(0, db.runCommand({profile: -1}).was, "A");
+
+db.createCollection("system.profile", {capped: true, size: 1000});
+db.runCommand({profile: 2});
+assert.eq(2, db.runCommand({profile: -1}).was, "B");
+assert.eq(1, db.system.profile.stats().capped, "C");
+var capped_size = db.system.profile.storageSize();
+assert.gt(capped_size, 999, "D");
+assert.lt(capped_size, 2000, "E");
+
+assert.eq( 4 , db.system.profile.find().count() , "E2" );
+
+/* Make sure we can't drop if profiling is still on */
+assert.throws( function(z){ db.getCollection("system.profile").drop(); } )
+
+/* With pre-created system.profile (un-capped) */
+db.runCommand({profile: 0});
+db.getCollection("system.profile").drop();
+assert.eq(0, db.runCommand({profile: -1}).was, "F");
+
+db.createCollection("system.profile");
+db.runCommand({profile: 2});
+assert.eq(2, db.runCommand({profile: -1}).was, "G");
+assert.eq(null, db.system.profile.stats().capped, "G1");
+
+/* With no system.profile collection */
+db.runCommand({profile: 0});
+db.getCollection("system.profile").drop();
+assert.eq(0, db.runCommand({profile: -1}).was, "H");
+
+db.runCommand({profile: 2});
+assert.eq(2, db.runCommand({profile: -1}).was, "I");
+assert.eq(1, db.system.profile.stats().capped, "J");
+var auto_size = db.system.profile.storageSize();
+assert.gt(auto_size, capped_size, "K");
diff --git a/jstests/pull.js b/jstests/pull.js
new file mode 100644
index 0000000..cf8147a
--- /dev/null
+++ b/jstests/pull.js
@@ -0,0 +1,19 @@
+t = db.jstests_pull;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 2 } } );
+assert.eq( [ 1, 3 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 2 ] } );
+t.update( {}, { $pull: { a: 2 } } );
+t.update( {}, { $pull: { a: 6 } } );
+assert.eq( [], t.findOne().a );
diff --git a/jstests/pull2.js b/jstests/pull2.js
new file mode 100644
index 0000000..ca13fc2
--- /dev/null
+++ b/jstests/pull2.js
@@ -0,0 +1,31 @@
+
+t = db.pull2;
+t.drop();
+
+t.save( { a : [ { x : 1 } , { x : 1 , b : 2 } ] } );
+assert.eq( 2 , t.findOne().a.length , "A" );
+
+t.update( {} , { $pull : { a : { x : 1 } } } );
+assert.eq( 0 , t.findOne().a.length , "B" );
+
+assert.eq( 1 , t.find().count() , "C1" )
+
+t.update( {} , { $push : { a : { x : 1 } } } )
+t.update( {} , { $push : { a : { x : 1 , b : 2 } } } )
+assert.eq( 2 , t.findOne().a.length , "C" );
+
+t.update( {} , { $pullAll : { a : [ { x : 1 } ] } } );
+assert.eq( 1 , t.findOne().a.length , "D" );
+
+t.update( {} , { $push : { a : { x : 2 , b : 2 } } } )
+t.update( {} , { $push : { a : { x : 3 , b : 2 } } } )
+t.update( {} , { $push : { a : { x : 4 , b : 2 } } } )
+assert.eq( 4 , t.findOne().a.length , "E" );
+
+assert.eq( 1 , t.find().count() , "C2" )
+
+
+t.update( {} , { $pull : { a : { x : { $lt : 3 } } } } );
+assert.eq( 2 , t.findOne().a.length , "F" );
+assert.eq( [ 3 , 4 ] , t.findOne().a.map( function(z){ return z.x; } ) , "G" )
+
diff --git a/jstests/pullall.js b/jstests/pullall.js
new file mode 100644
index 0000000..b720ce5
--- /dev/null
+++ b/jstests/pullall.js
@@ -0,0 +1,18 @@
+t = db.jstests_pushall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 3 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pullAll: { a: [ 2, 3 ] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [] } } );
+assert.eq( [ 1 ], t.findOne().a );
+t.update( {}, { $pullAll: { a: [ 1, 5 ] } } );
+assert.eq( [], t.findOne().a );
+
diff --git a/jstests/push.js b/jstests/push.js
new file mode 100644
index 0000000..2cdd91c
--- /dev/null
+++ b/jstests/push.js
@@ -0,0 +1,22 @@
+
+t = db.push
+t.drop();
+
+t.save( { _id : 2 , a : [ 1 ] } );
+t.update( { _id : 2 } , { $push : { a : 2 } } );
+assert.eq( "1,2" , t.findOne().a.toString() , "A" );
+t.update( { _id : 2 } , { $push : { a : 3 } } );
+assert.eq( "1,2,3" , t.findOne().a.toString() , "B" );
+
+t.update( { _id : 2 } , { $pop : { a : 1 } } );
+assert.eq( "1,2" , t.findOne().a.toString() , "C" );
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.eq( "2" , t.findOne().a.toString() , "D" );
+
+
+t.update( { _id : 2 } , { $push : { a : 3 } } );
+t.update( { _id : 2 } , { $push : { a : 4 } } );
+t.update( { _id : 2 } , { $push : { a : 5 } } );
+assert.eq( "2,3,4,5" , t.findOne().a.toString() , "D" );
+t.update( { _id : 2 } , { $pop : { a : -1 } } );
+assert.eq( "3,4,5" , t.findOne().a.toString() , "D" );
diff --git a/jstests/push2.js b/jstests/push2.js
new file mode 100644
index 0000000..943ec11
--- /dev/null
+++ b/jstests/push2.js
@@ -0,0 +1,20 @@
+
+t = db.push2
+t.drop()
+
+t.save( { _id : 1 , a : [] } )
+
+var s = "";
+while ( s.length < 100000 )
+ s += "asdasdasdasdasdasdasasdasdasdasdasdasdasasdasdasdasdasdasdasasdasdasdasdasdasdasasdasdasdasdasdasdas";
+
+gotError = null;
+
+for ( x=0; x<200; x++ ){
+ t.update( {} , { $push : { a : s } } )
+ gotError = db.getLastError();
+ if ( gotError )
+ break;
+}
+
+assert( gotError , "should have gotten error" );
diff --git a/jstests/pushall.js b/jstests/pushall.js
new file mode 100644
index 0000000..eda6820
--- /dev/null
+++ b/jstests/pushall.js
@@ -0,0 +1,20 @@
+t = db.jstests_pushall;
+t.drop();
+
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [ 4 ] } } );
+assert.eq( [ 1, 2, 3, 4, 4 ], t.findOne().a );
+
+t.drop();
+t.save( { a: [ 1, 2, 3 ] } );
+t.update( {}, { $pushAll: { a: [ 4, 5 ] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+t.update( {}, { $pushAll: { a: [] } } );
+assert.eq( [ 1, 2, 3, 4, 5 ], t.findOne().a );
+
+t.drop();
+t.save( {} );
+t.update( {}, { $pushAll: { a: [ 1, 2 ] } } );
+assert.eq( [ 1, 2 ], t.findOne().a );
diff --git a/jstests/query1.js b/jstests/query1.js
new file mode 100644
index 0000000..9b40054
--- /dev/null
+++ b/jstests/query1.js
@@ -0,0 +1,20 @@
+
+t = db.query1;
+t.drop();
+
+t.save( { num : 1 } );
+t.save( { num : 3 } )
+t.save( { num : 4 } );
+
+num = 0;
+total = 0;
+
+t.find().forEach(
+ function(z){
+ num++;
+ total += z.num;
+ }
+);
+
+assert.eq( num , 3 , "num" )
+assert.eq( total , 8 , "total" )
diff --git a/jstests/queryoptimizer1.js b/jstests/queryoptimizer1.js
new file mode 100644
index 0000000..d65d4d2
--- /dev/null
+++ b/jstests/queryoptimizer1.js
@@ -0,0 +1,26 @@
+
+t = db.queryoptimizer1;
+t.drop()
+
+for ( i=0; i<1000; i++ )
+ for ( j=0; j<20; j++ )
+ t.save( { a : i , b : i , c : j } )
+
+
+t.ensureIndex( { a : 1 } )
+t.ensureIndex( { b : 1 } )
+
+for ( ; i<2000; i++ )
+ for ( j=0; j<20; j++ )
+ t.save( { a : i , b : i , c : j } )
+
+
+printjson( t.find( { a : 50 , b : 50 , c : 6 } ).explain() );
+
+for ( var i=0; i<10000; i++ ){
+ a = t.find( { a : 50 , b : 50 , c : i % 20 } ).toArray();
+}
+
+printjson( t.find( { a : 50 , b : 50 , c : 6 } ).explain() );
+assert.eq( 1 , t.find( { a : 50 , b : 50 , c : 6 } ).count() )
+
diff --git a/jstests/quota/quota1.js b/jstests/quota/quota1.js
new file mode 100644
index 0000000..d8f4c42
--- /dev/null
+++ b/jstests/quota/quota1.js
@@ -0,0 +1,48 @@
+t = db.quota1;
+
+print( "starting quota1.a" );
+assert.throws(
+ function(z){
+ db.eval(
+ function(){
+ db.quota1a.save( { a : 1 } );
+ var a = 5;
+ while ( true ){
+ a += 2;
+ }
+ }
+ )
+ }
+);
+print( "done quota1.a" );
+
+//print( "starting quota1.b" );
+//assert.throws(
+// function(z){
+// db.eval(
+// function(){
+// db.quota1b.save( { a : 1 } );
+// var a = 5;
+// assert( sleep( 150000 ) );
+// }
+// )
+// }
+//);
+//print( "done quota1.b" );
+//
+//print( "starting quota1.c" );
+//assert.throws(
+// function(z){
+// db.eval(
+// function(){
+// db.quota1c.save( { a : 1 } );
+// var a = 1;
+// while ( true ){
+// a += 1;
+// assert( sleep( 1000 ) );
+// }
+// }
+// )
+// }
+//);
+//print( "done quota1.c" );
diff --git a/jstests/recstore.js b/jstests/recstore.js
new file mode 100644
index 0000000..f2e78e2
--- /dev/null
+++ b/jstests/recstore.js
@@ -0,0 +1,24 @@
+// recstore.js
+// this is a simple test for new recstores (see reci.h)
+// it is probably redundant with other tests but is a convenient starting point
+// for testing such things.
+
+t = db.storetest;
+
+t.drop();
+
+t.save({z:3});
+t.save({z:2});
+
+t.ensureIndex({z:1});
+t.ensureIndex({q:1});
+assert( t.find().sort({z:1})[0].z == 2 );
+
+t.dropIndexes();
+
+assert( t.find().sort({z:1})[0].z == 2 );
+
+t.ensureIndex({z:1});
+t.ensureIndex({q:1});
+
+db.getSisterDB('admin').$cmd.findOne({closeAllDatabases:1});
diff --git a/jstests/ref.js b/jstests/ref.js
new file mode 100644
index 0000000..20fd6ca
--- /dev/null
+++ b/jstests/ref.js
@@ -0,0 +1,19 @@
+// to run:
+// ./mongo jstests/ref.js
+
+db.otherthings.drop();
+db.things.drop();
+
+var other = { s : "other thing", n : 1};
+db.otherthings.save(other);
+
+db.things.save( { name : "abc" } );
+x = db.things.findOne();
+x.o = new DBPointer( "otherthings" , other._id );
+db.things.save(x);
+
+assert( db.things.findOne().o.fetch().n == 1, "dbref broken 2" );
+
+other.n++;
+db.otherthings.save(other);
+assert( db.things.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref2.js b/jstests/ref2.js
new file mode 100644
index 0000000..29640cd
--- /dev/null
+++ b/jstests/ref2.js
@@ -0,0 +1,14 @@
+
+t = db.ref2;
+t.drop();
+
+a = { $ref : "foo" , $id : 1 };
+b = { $ref : "foo" , $id : 2 };
+
+
+t.save( { name : "a" , r : a } );
+t.save( { name : "b" , r : b } );
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { r : a } ).count() , "B" );
+assert.eq( 1 , t.find( { r : b } ).count() , "C" );
diff --git a/jstests/ref3.js b/jstests/ref3.js
new file mode 100644
index 0000000..77d6038
--- /dev/null
+++ b/jstests/ref3.js
@@ -0,0 +1,19 @@
+// to run:
+// ./mongo jstests/ref3.js
+
+db.otherthings.drop();
+db.things.drop();
+
+var other = { s : "other thing", n : 1};
+db.otherthings.save(other);
+
+db.things.save( { name : "abc" } );
+x = db.things.findOne();
+x.o = new DBRef( "otherthings" , other._id );
+db.things.save(x);
+
+assert( db.things.findOne().o.fetch().n == 1, "dbref broken 2" );
+
+other.n++;
+db.otherthings.save(other);
+assert( db.things.findOne().o.fetch().n == 2, "dbrefs broken" );
diff --git a/jstests/ref4.js b/jstests/ref4.js
new file mode 100644
index 0000000..6e4cd95
--- /dev/null
+++ b/jstests/ref4.js
@@ -0,0 +1,23 @@
+
+a = db.ref4a;
+b = db.ref4b;
+
+a.drop();
+b.drop();
+
+db.otherthings.drop();
+db.things.drop();
+
+var other = { s : "other thing", n : 17 };
+b.save(other);
+
+a.save( { name : "abc" , others : [ new DBRef( "ref4b" , other._id ) , new DBPointer( "ref4b" , other._id ) ] } );
+assert( a.findOne().others[0].fetch().n == 17 , "dbref broken 1" );
+
+x = Array.fetchRefs( a.findOne().others );
+assert.eq( 2 , x.length , "A" );
+assert.eq( 17 , x[0].n , "B" );
+assert.eq( 17 , x[1].n , "C" );
+
+
+assert.eq( 0 , Array.fetchRefs( a.findOne().others , "z" ).length , "D" );
diff --git a/jstests/regex.js b/jstests/regex.js
new file mode 100644
index 0000000..f431d50
--- /dev/null
+++ b/jstests/regex.js
@@ -0,0 +1,24 @@
+t = db.jstests_regex;
+
+t.drop();
+t.save( { a: "bcd" } );
+assert.eq( 1, t.count( { a: /b/ } ) , "A" );
+assert.eq( 1, t.count( { a: /bc/ } ) , "B" );
+assert.eq( 1, t.count( { a: /bcd/ } ) , "C" );
+assert.eq( 0, t.count( { a: /bcde/ } ) , "D" );
+
+t.drop();
+t.save( { a: { b: "cde" } } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "E" );
+
+t.drop();
+t.save( { a: { b: [ "cde" ] } } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "F" );
+
+t.drop();
+t.save( { a: [ { b: "cde" } ] } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "G" );
+
+t.drop();
+t.save( { a: [ { b: [ "cde" ] } ] } );
+assert.eq( 1, t.count( { 'a.b': /de/ } ) , "H" );
diff --git a/jstests/regex2.js b/jstests/regex2.js
new file mode 100644
index 0000000..b6a21f5
--- /dev/null
+++ b/jstests/regex2.js
@@ -0,0 +1,62 @@
+
+t = db.regex2;
+t.drop();
+
+t.save( { a : "test" } );
+t.save( { a : "Test" } );
+
+assert.eq( 2 , t.find().count() , "A" );
+assert.eq( 1 , t.find( { a : "Test" } ).count() , "B" );
+assert.eq( 1 , t.find( { a : "test" } ).count() , "C" );
+assert.eq( 1 , t.find( { a : /Test/ } ).count() , "D" );
+assert.eq( 1 , t.find( { a : /test/ } ).count() , "E" );
+assert.eq( 2 , t.find( { a : /test/i } ).count() , "F" );
+
+
+t.drop();
+
+a = "\u0442\u0435\u0441\u0442";
+b = "\u0422\u0435\u0441\u0442";
+
+assert( ( new RegExp( a ) ).test( a ) , "B 1" );
+assert( ! ( new RegExp( a ) ).test( b ) , "B 2" );
+assert( ( new RegExp( a , "i" ) ).test( b ) , "B 3 " );
+
+t.save( { a : a } );
+t.save( { a : b } );
+
+
+assert.eq( 2 , t.find().count() , "C A" );
+assert.eq( 1 , t.find( { a : a } ).count() , "C B" );
+assert.eq( 1 , t.find( { a : b } ).count() , "C C" );
+assert.eq( 1 , t.find( { a : new RegExp( a ) } ).count() , "C D" );
+assert.eq( 1 , t.find( { a : new RegExp( b ) } ).count() , "C E" );
+assert.eq( 2 , t.find( { a : new RegExp( a , "i" ) } ).count() , "C F is spidermonkey built with UTF-8 support?" );
+
+
+// same tests as above but using {$regex: "a|b", $options: "imx"} syntax.
+t.drop();
+
+t.save( { a : "test" } );
+t.save( { a : "Test" } );
+
+assert.eq( 2 , t.find().count() , "obj A" );
+assert.eq( 1 , t.find( { a : {$regex:"Test"} } ).count() , "obj D" );
+assert.eq( 1 , t.find( { a : {$regex:"test"} } ).count() , "obj E" );
+assert.eq( 2 , t.find( { a : {$regex:"test", $options:"i"} } ).count() , "obj F" );
+assert.eq( 2 , t.find( { a : {$options:"i", $regex:"test"} } ).count() , "obj F rev" ); // both orders should work
+
+
+t.drop();
+
+a = "\u0442\u0435\u0441\u0442";
+b = "\u0422\u0435\u0441\u0442";
+
+t.save( { a : a } );
+t.save( { a : b } );
+
+
+assert.eq( 1 , t.find( { a : {$regex: a} } ).count() , "obj C D" );
+assert.eq( 1 , t.find( { a : {$regex: b} } ).count() , "obj C E" );
+assert.eq( 2 , t.find( { a : {$regex: a , $options: "i" } } ).count() , "obj C F is spidermonkey built with UTF-8 support?" );
+
diff --git a/jstests/regex3.js b/jstests/regex3.js
new file mode 100644
index 0000000..ee8d9cf
--- /dev/null
+++ b/jstests/regex3.js
@@ -0,0 +1,36 @@
+
+t = db.regex3;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "no index count" );
+assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
+t.ensureIndex( { name : 1 } );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "index count" );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
+
+t.drop();
+
+t.save( { name : "aa" } );
+t.save( { name : "ab" } );
+t.save( { name : "ac" } );
+t.save( { name : "c" } );
+
+assert.eq( 3 , t.find( { name : /^aa*/ } ).count() , "B ni" );
+t.ensureIndex( { name : 1 } );
+assert.eq( 3 , t.find( { name : /^aa*/ } ).count() , "B i 1" );
+assert.eq( 3 , t.find( { name : /^aa*/ } ).explain().nscanned , "B i 1 e" );
+
+assert.eq( 2 , t.find( { name : /^a[ab]/ } ).count() , "B i 2" );
+assert.eq( 2 , t.find( { name : /^a[bc]/ } ).count() , "B i 3" );
+
+t.drop();
+
+t.save( { name: "" } );
+assert.eq( 1, t.count( { name: /^a?/ } ) , "C 1" );
+t.ensureIndex( { name: 1 } );
+assert.eq( 1, t.count( { name: /^a?/ } ) , "C 2");
diff --git a/jstests/regex4.js b/jstests/regex4.js
new file mode 100644
index 0000000..568c937
--- /dev/null
+++ b/jstests/regex4.js
@@ -0,0 +1,18 @@
+
+t = db.regex3;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "no index count" );
+assert.eq( 4 , t.find( { name : /^e.*/ } ).explain().nscanned , "no index explain" );
+//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "no index count ne" ); // SERVER-251
+
+t.ensureIndex( { name : 1 } );
+
+assert.eq( 2 , t.find( { name : /^e.*/ } ).count() , "index count" );
+assert.eq( 2 , t.find( { name : /^e.*/ } ).explain().nscanned , "index explain" ); // SERVER-239
+//assert.eq( 2 , t.find( { name : { $ne : /^e.*/ } } ).count() , "index count ne" ); // SERVER-251
diff --git a/jstests/regex5.js b/jstests/regex5.js
new file mode 100644
index 0000000..7fe39d5
--- /dev/null
+++ b/jstests/regex5.js
@@ -0,0 +1,13 @@
+
+t = db.regex5
+t.drop()
+
+t.save( { x : [ "abc" , "xyz" ] } )
+t.save( { x : [ "ac" , "xyz" ] } )
+
+a = /.*b.*c/
+x = /.*y.*/
+
+assert.eq( 1 , t.find( { x : a } ).count() , "A" )
+assert.eq( 2 , t.find( { x : x } ).count() , "B" )
+// assert.eq( 1 , t.find( { x : { $all : [ a , x ] } } ).count() , "C" ) // SERVER-505
diff --git a/jstests/regex6.js b/jstests/regex6.js
new file mode 100644
index 0000000..d25367c
--- /dev/null
+++ b/jstests/regex6.js
@@ -0,0 +1,19 @@
+// contributed by Andrew Kempe
+t = db.regex6;
+t.drop();
+
+t.save( { name : "eliot" } );
+t.save( { name : "emily" } );
+t.save( { name : "bob" } );
+t.save( { name : "aaron" } );
+
+t.ensureIndex( { name : 1 } );
+
+assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
+assert.eq( 0 , t.find( { name : /^\// } ).explain().nscanned , "index explain" );
+assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain" );
+assert.eq( 0 , t.find( { name : /^\é/ } ).explain().nscanned , "index explain" );
+assert.eq( 0 , t.find( { name : /^\./ } ).explain().nscanned , "index explain" );
+assert.eq( 4 , t.find( { name : /^./ } ).explain().nscanned , "index explain" );
+
+assert.eq( 4 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain" );
diff --git a/jstests/remove.js b/jstests/remove.js
new file mode 100644
index 0000000..bec015c
--- /dev/null
+++ b/jstests/remove.js
@@ -0,0 +1,25 @@
+// remove.js
+// unit test for db remove
+
+t = db.removetest;
+
+function f(n,dir) {
+ t.ensureIndex({x:dir||1});
+ for( i = 0; i < n; i++ ) t.save( { x:3, z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ assert.eq( n , t.find().count() );
+ t.remove({x:3});
+
+ assert.eq( 0 , t.find().count() );
+
+ assert( t.findOne() == null , "A:" + tojson( t.findOne() ) );
+ assert( t.validate().valid , "B" );
+}
+
+t.drop();
+f(300, 1);
+
+f(500, -1);
+
+assert(t.validate().valid , "C" );
+
diff --git a/jstests/remove2.js b/jstests/remove2.js
new file mode 100644
index 0000000..ff122a0
--- /dev/null
+++ b/jstests/remove2.js
@@ -0,0 +1,41 @@
+// remove2.js
+// a unit test for db remove
+
+t = db.removetest2;
+
+function f() {
+ t.save( { x:[3,3,3,3,3,3,3,3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+ t.save( { x: 9 } );
+ t.save( { x: 1 } );
+
+ t.remove({x:3});
+
+ assert( t.findOne({x:3}) == null );
+ assert( t.validate().valid );
+}
+
+x = 0;
+
+function g() {
+ t.save( { x:[3,4,5,6], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+ t.save( { x:[7,8,9], z:"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } );
+
+ t.remove( {x : {$gte:3}, $atomic:x++ } );
+
+ assert( t.findOne({x:3}) == null );
+ assert( t.findOne({x:8}) == null );
+ assert( t.validate().valid );
+}
+
+t.drop();
+f();
+t.drop();
+g();
+
+t.ensureIndex({x:1});
+t.remove({});
+f();
+t.drop();
+t.ensureIndex({x:1});
+g();
+
diff --git a/jstests/remove3.js b/jstests/remove3.js
new file mode 100644
index 0000000..fe1a754
--- /dev/null
+++ b/jstests/remove3.js
@@ -0,0 +1,18 @@
+
+t = db.remove3;
+t.drop();
+
+for ( i=1; i<=8; i++){
+ t.save( { _id : i , x : i } );
+}
+
+assert.eq( 8 , t.count() , "A" );
+
+t.remove( { x : { $lt : 5 } } );
+assert.eq( 4 , t.count() , "B" );
+
+t.remove( { _id : 5 } );
+assert.eq( 3 , t.count() , "C" );
+
+t.remove( { _id : { $lt : 8 } } , "D" );
+assert.eq( 1 , t.count() , "D" );
diff --git a/jstests/remove4.js b/jstests/remove4.js
new file mode 100644
index 0000000..bd007ed
--- /dev/null
+++ b/jstests/remove4.js
@@ -0,0 +1,10 @@
+t = db.remove4;
+t.drop();
+
+t.save ( { a : 1 , b : 1 } );
+t.save ( { a : 2 , b : 1 } );
+t.save ( { a : 3 , b : 1 } );
+
+assert.eq( 3 , t.find().length() );
+t.remove( { b : 1 } );
+assert.eq( 0 , t.find().length() );
diff --git a/jstests/remove5.js b/jstests/remove5.js
new file mode 100644
index 0000000..be4f0b4
--- /dev/null
+++ b/jstests/remove5.js
@@ -0,0 +1,24 @@
+f = db.jstests_remove5;
+f.drop();
+
+getLastError = function() {
+ return db.runCommand( { getlasterror : 1 } );
+}
+
+f.remove( {} );
+assert.eq( 0, getLastError().n );
+f.save( {a:1} );
+f.remove( {} );
+assert.eq( 1, getLastError().n );
+for( i = 0; i < 10; ++i ) {
+ f.save( {i:i} );
+}
+f.remove( {} );
+assert.eq( 10, getLastError().n );
+assert.eq( 10, db.getPrevError().n );
+assert.eq( 1, db.getPrevError().nPrev );
+
+f.findOne();
+assert.eq( 0, getLastError().n );
+assert.eq( 10, db.getPrevError().n );
+assert.eq( 2, db.getPrevError().nPrev );
diff --git a/jstests/remove6.js b/jstests/remove6.js
new file mode 100644
index 0000000..d843aee
--- /dev/null
+++ b/jstests/remove6.js
@@ -0,0 +1,38 @@
+
+t = db.remove6;
+t.drop();
+
+N = 1000;
+
+function pop(){
+ t.drop();
+ for ( var i=0; i<N; i++ ){
+ t.save( { x : 1 , tags : [ "a" , "b" , "c" ] } );
+ }
+}
+
+function del(){
+ t.remove( { tags : { $in : [ "a" , "c" ] } } );
+}
+
+function test( n , idx ){
+ pop();
+ assert.eq( N , t.count() , n + " A " + idx );
+ if ( idx )
+ t.ensureIndex( idx );
+ del();
+ var e = db.getLastError();
+ assert( e == null , "error deleting: " + e );
+ assert.eq( 0 , t.count() , n + " B " + idx );
+}
+
+test( "a" );
+test( "b" , { x : 1 } );
+test( "c" , { tags : 1 } );
+
+N = 5000
+
+test( "a2" );
+test( "b2" , { x : 1 } );
+test( "c2" , { tags : 1 } );
+
diff --git a/jstests/remove7.js b/jstests/remove7.js
new file mode 100644
index 0000000..50c6ac1
--- /dev/null
+++ b/jstests/remove7.js
@@ -0,0 +1,35 @@
+
+t = db.remove7
+t.drop();
+
+
+
+function getTags( n ){
+ n = n || 5;
+ var a = [];
+ for ( var i=0; i<n; i++ ){
+ var v = Math.ceil( 20 * Math.random() );
+ a.push( v );
+ }
+
+ return a;
+}
+
+for ( i=0; i<1000; i++ ){
+ t.save( { tags : getTags() } );
+}
+
+t.ensureIndex( { tags : 1 } );
+
+for ( i=0; i<200; i++ ){
+ for ( var j=0; j<10; j++ )
+ t.save( { tags : getTags( 100 ) } );
+ var q = { tags : { $in : getTags( 10 ) } };
+ var before = t.find( q ).count();
+ t.remove( q );
+ var o = db.getLastErrorObj();
+ var after = t.find( q ).count();
+ assert.eq( 0 , after , "not zero after!" );
+ assert.isnull( o.err , "error: " + tojson( o ) );
+}
+
diff --git a/jstests/remove8.js b/jstests/remove8.js
new file mode 100644
index 0000000..3ab53f3
--- /dev/null
+++ b/jstests/remove8.js
@@ -0,0 +1,21 @@
+
+t = db.remove8;
+t.drop();
+
+N = 1000;
+
+function fill(){
+ for ( var i=0; i<N; i++ ){
+ t.save( { x : i } );
+ }
+}
+
+fill();
+assert.eq( N , t.count() , "A" );
+t.remove( {} )
+assert.eq( 0 , t.count() , "B" );
+
+fill();
+assert.eq( N , t.count() , "C" );
+db.eval( function(){ db.remove8.remove( {} ); } )
+assert.eq( 0 , t.count() , "D" );
diff --git a/jstests/rename.js b/jstests/rename.js
new file mode 100644
index 0000000..3ace968
--- /dev/null
+++ b/jstests/rename.js
@@ -0,0 +1,48 @@
+admin = db.getMongo().getDB( "admin" );
+
+a = db.jstests_rename_a;
+b = db.jstests_rename_b;
+c = db.jstests_rename_c;
+
+a.drop();
+b.drop();
+c.drop();
+
+a.save( {a: 1} );
+a.save( {a: 2} );
+a.ensureIndex( {a:1} );
+a.ensureIndex( {b:1} );
+
+c.save( {a: 100} );
+assert.commandFailed( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_c"} ) );
+
+assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
+assert.eq( 0, a.find().count() );
+
+assert.eq( 2, b.find().count() );
+assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
+assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
+assert.eq( 3, db.system.indexes.find( {ns:"test.jstests_rename_b"} ).count() );
+assert( b.find( {a:1} ).explain().cursor.match( /^BtreeCursor/ ) );
+
+// now try renaming a capped collection
+
+a.drop();
+b.drop();
+c.drop();
+
+db.createCollection( "jstests_rename_a", {capped:true,size:100} );
+for( i = 0; i < 10; ++i ) {
+ a.save( { i: i } );
+}
+assert.commandWorked( admin.runCommand( {renameCollection:"test.jstests_rename_a", to:"test.jstests_rename_b"} ) );
+assert.eq( 1, b.count( {i:9} ) );
+for( i = 10; i < 20; ++i ) {
+ b.save( { i: i } );
+}
+assert.eq( 0, b.count( {i:9} ) );
+assert.eq( 1, b.count( {i:19} ) );
+
+assert( db.system.namespaces.findOne( {name:"test.jstests_rename_b" } ) );
+assert( !db.system.namespaces.findOne( {name:"test.jstests_rename_a" } ) );
+assert.eq( true, db.system.namespaces.findOne( {name:"test.jstests_rename_b"} ).options.capped );
diff --git a/jstests/rename2.js b/jstests/rename2.js
new file mode 100644
index 0000000..a06268f
--- /dev/null
+++ b/jstests/rename2.js
@@ -0,0 +1,19 @@
+
+
+a = db.rename2a;
+b = db.rename2b;
+
+a.drop();
+b.drop();
+
+a.save( { x : 1 } )
+a.save( { x : 2 } )
+a.save( { x : 3 } )
+
+assert.eq( 3 , a.count() , "A" )
+assert.eq( 0 , b.count() , "B" )
+
+assert( a.renameCollection( "rename2b" ) , "the command" );
+
+assert.eq( 0 , a.count() , "C" )
+assert.eq( 3 , b.count() , "D" )
diff --git a/jstests/rename3.js b/jstests/rename3.js
new file mode 100644
index 0000000..5e1005f
--- /dev/null
+++ b/jstests/rename3.js
@@ -0,0 +1,25 @@
+
+
+a = db.rename3a
+b = db.rename3b
+
+a.drop();
+b.drop()
+
+a.save( { x : 1 } );
+b.save( { x : 2 } );
+
+assert.eq( 1 , a.findOne().x , "before 1a" );
+assert.eq( 2 , b.findOne().x , "before 2a" );
+
+res = b.renameCollection( a._shortName );
+assert.eq( 0 , res.ok , "should fail: " + tojson( res ) );
+
+assert.eq( 1 , a.findOne().x , "before 1b" );
+assert.eq( 2 , b.findOne().x , "before 2b" );
+
+res = b.renameCollection( a._shortName , true )
+assert.eq( 1 , res.ok , "should succeed:" + tojson( res ) );
+
+assert.eq( 2 , a.findOne().x , "after 1" );
+assert.isnull( b.findOne() , "after 2" );
diff --git a/jstests/repair.js b/jstests/repair.js
new file mode 100644
index 0000000..5548c2b
--- /dev/null
+++ b/jstests/repair.js
@@ -0,0 +1,6 @@
+t = db.jstests_repair;
+t.drop();
+t.save( { i:1 } );
+db.repairDatabase();
+v = t.validate();
+assert( v.valid , "not valid! " + tojson( v ) );
diff --git a/jstests/repl/basic1.js b/jstests/repl/basic1.js
new file mode 100644
index 0000000..9668a91
--- /dev/null
+++ b/jstests/repl/basic1.js
@@ -0,0 +1,59 @@
+
+// test repl basics
+// data on master/slave is the same
+
+var rt = new ReplTest( "basic1" );
+
+m = rt.start( true );
+s = rt.start( false );
+
+function hash( db ){
+ var s = "";
+ var a = db.getCollectionNames();
+ a = a.sort();
+ a.forEach(
+ function(cn){
+ var c = db.getCollection( cn );
+ s += cn + "\t" + c.find().count() + "\n";
+ c.find().sort( { _id : 1 } ).forEach(
+ function(o){
+ s += tojson( o , "" , true ) + "\n";
+ }
+ );
+ }
+ );
+ return s;
+}
+
+am = m.getDB( "foo" );
+as = s.getDB( "foo" );
+
+function check( note ){
+ var start = new Date();
+ var x,y;
+ while ( (new Date()).getTime() - start.getTime() < 30000 ){
+ x = hash( am );
+ y = hash( as );
+ if ( x == y )
+ return;
+ sleep( 200 );
+ }
+ assert.eq( x , y , note );
+}
+
+am.a.save( { x : 1 } );
+check( "A" );
+
+am.a.save( { x : 5 } );
+
+am.a.update( {} , { $inc : { x : 1 } } );
+check( "B" );
+
+am.a.update( {} , { $inc : { x : 1 } } , false , true );
+check( "C" );
+
+rt.stop();
+
+
+
+
diff --git a/jstests/repl/pair1.js b/jstests/repl/pair1.js
new file mode 100644
index 0000000..7004048
--- /dev/null
+++ b/jstests/repl/pair1.js
@@ -0,0 +1,99 @@
+// Basic pairing test
+
+var baseName = "jstests_pair1test";
+
+debug = function( p ) {
+// print( p );
+}
+
+ismaster = function( n ) {
+ var im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+// print( "ismaster: " + tojson( im ) );
+ assert( im, "command ismaster failed" );
+ return im.ismaster;
+}
+
+var writeOneIdx = 0;
+
+writeOne = function( n ) {
+ n.getDB( baseName ).z.save( { _id: new ObjectId(), i: ++writeOneIdx } );
+}
+
+getCount = function( n ) {
+ return n.getDB( baseName ).z.find( { i: writeOneIdx } ).toArray().length;
+}
+
+checkWrite = function( m, s ) {
+ writeOne( m );
+ assert.eq( 1, getCount( m ) );
+ check( s );
+}
+
+check = function( s ) {
+ s.setSlaveOk();
+ assert.soon( function() {
+ return 1 == getCount( s );
+ } );
+}
+
+// check that slave reads and writes are guarded
+checkSlaveGuard = function( s ) {
+ var t = s.getDB( baseName + "-temp" ).temp;
+ assert.throws( t.find().count, {}, "not master" );
+ assert.throws( t.find(), {}, "not master", "find did not assert" );
+
+ checkError = function() {
+ assert.eq( "not master", s.getDB( "admin" ).getLastError() );
+ s.getDB( "admin" ).resetError();
+ }
+ s.getDB( "admin" ).resetError();
+ t.save( {x:1} );
+ checkError();
+ t.update( {}, {x:2}, true );
+ checkError();
+ t.remove( {x:0} );
+ checkError();
+}
+
+doTest = function( signal ) {
+
+ ports = allocatePorts( 3 );
+
+ a = new MongodRunner( ports[ 0 ], "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ r = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+
+ rp = new ReplPair( l, r, a );
+ rp.start();
+ rp.waitForSteadyState();
+
+ checkSlaveGuard( rp.slave() );
+
+ checkWrite( rp.master(), rp.slave() );
+
+ debug( "kill first" );
+ rp.killNode( rp.master(), signal );
+ rp.waitForSteadyState( [ 1, null ], rp.slave().host );
+ writeOne( rp.master() );
+
+ debug( "restart first" );
+ rp.start( true );
+ rp.waitForSteadyState();
+ check( rp.slave() );
+ checkWrite( rp.master(), rp.slave() );
+
+ debug( "kill second" );
+ rp.killNode( rp.master(), signal );
+ rp.waitForSteadyState( [ 1, null ], rp.slave().host );
+
+ debug( "restart second" );
+ rp.start( true );
+ rp.waitForSteadyState( [ 1, 0 ], rp.master().host );
+ checkWrite( rp.master(), rp.slave() );
+
+ ports.forEach( function( x ) { stopMongod( x ); } );
+
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/pair2.js b/jstests/repl/pair2.js
new file mode 100644
index 0000000..2491fb2
--- /dev/null
+++ b/jstests/repl/pair2.js
@@ -0,0 +1,71 @@
+// Pairing resync
+
+var baseName = "jstests_pair2test";
+
+ismaster = function( n ) {
+ im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+ assert( im );
+ return im.ismaster;
+}
+
+soonCount = function( m, count ) {
+ assert.soon( function() {
+// print( "counting" );
+//// print( "counted: " + l.getDB( baseName ).z.find().count() );
+ return m.getDB( baseName ).z.find().count() == count;
+ } );
+}
+
+doTest = function( signal ) {
+
+ ports = allocatePorts( 3 );
+
+ a = new MongodRunner( ports[ 0 ], "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
+ r = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );
+
+ rp = new ReplPair( l, r, a );
+ rp.start();
+ rp.waitForSteadyState();
+
+ rp.slave().setSlaveOk();
+ mz = rp.master().getDB( baseName ).z;
+
+ mz.save( { _id: new ObjectId() } );
+ soonCount( rp.slave(), 1 );
+ assert.eq( 0, rp.slave().getDB( "admin" ).runCommand( { "resync" : 1 } ).ok );
+
+ sleep( 3000 ); // allow time to finish clone and save ReplSource
+ rp.killNode( rp.slave(), signal );
+ rp.waitForSteadyState( [ 1, null ], rp.master().host );
+
+ big = new Array( 2000 ).toString();
+ for( i = 0; i < 1000; ++i )
+ mz.save( { _id: new ObjectId(), i: i, b: big } );
+
+ rp.start( true );
+ rp.waitForSteadyState( [ 1, 0 ], rp.master().host );
+
+ sleep( 15000 );
+
+ rp.slave().setSlaveOk();
+ assert.soon( function() {
+ ret = rp.slave().getDB( "admin" ).runCommand( { "resync" : 1 } );
+// printjson( ret );
+ return 1 == ret.ok;
+ } );
+
+ sleep( 8000 );
+ soonCount( rp.slave(), 1001 );
+ sz = rp.slave().getDB( baseName ).z
+ assert.eq( 1, sz.find( { i: 0 } ).count() );
+ assert.eq( 1, sz.find( { i: 999 } ).count() );
+
+ assert.eq( 0, rp.slave().getDB( "admin" ).runCommand( { "resync" : 1 } ).ok );
+
+ ports.forEach( function( x ) { stopMongod( x ); } );
+
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/pair3.js b/jstests/repl/pair3.js
new file mode 100644
index 0000000..506e173
--- /dev/null
+++ b/jstests/repl/pair3.js
@@ -0,0 +1,235 @@
+// test arbitration
+
+var baseName = "jstests_pair3test";
+
+ismaster = function( n ) {
+ var im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+ print( "ismaster: " + tojson( im ) );
+ assert( im, "command ismaster failed" );
+ return im.ismaster;
+}
+
+// bring up node connections before arbiter connections so that arb can forward to node when expected
+connect = function() {
+ if ( lp == null ) {
+ lp = startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ }
+ if ( rp == null ) {
+ rp = startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+ }
+ if ( al == null ) {
+ al = startMongoProgram( "mongobridge", "--port", alPort, "--dest", "localhost:" + aPort );
+ }
+ if ( ar == null ) {
+ ar = startMongoProgram( "mongobridge", "--port", arPort, "--dest", "localhost:" + aPort );
+ }
+}
+
+disconnectNode = function( mongo ) {
+ if ( lp ) {
+ stopMongoProgram( lpPort );
+ lp = null;
+ }
+ if ( rp ) {
+ stopMongoProgram( rpPort );
+ rp = null;
+ }
+ if ( mongo.host.match( new RegExp( "^127.0.0.1:" + lPort + "$" ) ) ) {
+ stopMongoProgram( alPort );
+ al = null;
+ } else if ( mongo.host.match( new RegExp( "^127.0.0.1:" + rPort + "$" ) ) ) {
+ stopMongoProgram( arPort );
+ ar = null;
+ } else {
+ assert( false, "don't know how to disconnect node: " + mongo );
+ }
+}
+
+doTest1 = function() {
+ al = ar = lp = rp = null;
+ ports = allocatePorts( 7 );
+ aPort = ports[ 0 ];
+ alPort = ports[ 1 ];
+ arPort = ports[ 2 ];
+ lPort = ports[ 3 ];
+ lpPort = ports[ 4 ];
+ rPort = ports[ 5 ];
+ rpPort = ports[ 6 ];
+
+ connect();
+
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + alPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + arPort );
+
+ pair = new ReplPair( l, r, a );
+
+ // normal startup
+ pair.start();
+ pair.waitForSteadyState();
+
+ // disconnect slave
+ disconnectNode( pair.slave() );
+ pair.waitForSteadyState( [ 1, -3 ], pair.master().host );
+
+ // disconnect master
+ disconnectNode( pair.master() );
+ pair.waitForSteadyState( [ -3, -3 ] );
+
+ // reconnect
+ connect();
+ pair.waitForSteadyState();
+
+ // disconnect master
+ disconnectNode( pair.master() );
+ pair.waitForSteadyState( [ 1, -3 ], pair.slave().host, true );
+
+ // disconnect new master
+ disconnectNode( pair.master() );
+ pair.waitForSteadyState( [ -3, -3 ] );
+
+ // reconnect
+ connect();
+ pair.waitForSteadyState();
+
+ // disconnect slave
+ disconnectNode( pair.slave() );
+ pair.waitForSteadyState( [ 1, -3 ], pair.master().host );
+
+ // reconnect slave
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.master().host );
+
+ // disconnect master
+ disconnectNode( pair.master() );
+ pair.waitForSteadyState( [ 1, -3 ], pair.slave().host, true );
+
+ // reconnect old master
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.master().host );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+}
+
+// this time don't start connected
+doTest2 = function() {
+ al = ar = lp = rp = null;
+ ports = allocatePorts( 7 );
+ aPort = ports[ 0 ];
+ alPort = ports[ 1 ];
+ arPort = ports[ 2 ];
+ lPort = ports[ 3 ];
+ lpPort = ports[ 4 ];
+ rPort = ports[ 5 ];
+ rpPort = ports[ 6 ];
+
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + alPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + arPort );
+
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState( [ -3, -3 ] );
+
+ startMongoProgram( "mongobridge", "--port", arPort, "--dest", "localhost:" + aPort );
+
+ // there hasn't been an initial sync, so no node will become master
+
+ for( i = 0; i < 10; ++i ) {
+ assert( pair.isMaster( pair.right() ) == -3 && pair.isMaster( pair.left() ) == -3 );
+ sleep( 500 );
+ }
+
+ stopMongoProgram( arPort );
+
+ startMongoProgram( "mongobridge", "--port", alPort, "--dest", "localhost:" + aPort );
+
+ for( i = 0; i < 10; ++i ) {
+ assert( pair.isMaster( pair.right() ) == -3 && pair.isMaster( pair.left() ) == -3 );
+ sleep( 500 );
+ }
+
+ stopMongoProgram( alPort );
+
+ // connect l and r without a
+
+ startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+
+ pair.waitForSteadyState( [ 1, 0 ] );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+}
+
+// recover from master - master setup
+doTest3 = function() {
+ al = ar = lp = rp = null;
+ ports = allocatePorts( 7 );
+ aPort = ports[ 0 ];
+ alPort = ports[ 1 ];
+ arPort = ports[ 2 ];
+ lPort = ports[ 3 ];
+ lpPort = ports[ 4 ];
+ rPort = ports[ 5 ];
+ rpPort = ports[ 6 ];
+
+ connect();
+
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + alPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + arPort );
+
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState();
+
+ // now can only talk to arbiter
+ stopMongoProgram( lpPort );
+ stopMongoProgram( rpPort );
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ // recover
+ startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+ pair.waitForSteadyState( [ 1, 0 ], null, true );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+}
+
+// check that initial sync is persistent
+doTest4 = function( signal ) {
+ al = ar = lp = rp = null;
+ ports = allocatePorts( 7 );
+ aPort = ports[ 0 ];
+ alPort = ports[ 1 ];
+ arPort = ports[ 2 ];
+ lPort = ports[ 3 ];
+ lpPort = ports[ 4 ];
+ rPort = ports[ 5 ];
+ rpPort = ports[ 6 ];
+
+ connect();
+
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + alPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + arPort );
+
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState();
+
+ pair.killNode( pair.left(), signal );
+ pair.killNode( pair.right(), signal );
+ stopMongoProgram( rpPort );
+ stopMongoProgram( lpPort );
+
+ // now can only talk to arbiter
+ pair.start( true );
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+}
+
+doTest1();
+doTest2();
+doTest3();
+doTest4( 15 );
+doTest4( 9 );
diff --git a/jstests/repl/pair4.js b/jstests/repl/pair4.js
new file mode 100644
index 0000000..5a59c16
--- /dev/null
+++ b/jstests/repl/pair4.js
@@ -0,0 +1,159 @@
+// data consistency after master-master
+
+var baseName = "jstests_pair4test";
+
+debug = function( o ) {
+ printjson( o );
+}
+
+ismaster = function( n ) {
+ var im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+ print( "ismaster: " + tojson( im ) );
+ assert( im, "command ismaster failed" );
+ return im.ismaster;
+}
+
+connect = function() {
+ startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+}
+
+disconnect = function() {
+ stopMongoProgram( lpPort );
+ stopMongoProgram( rpPort );
+}
+
+write = function( m, n, id ) {
+ if ( id ) {
+ save = { _id:id, n:n };
+ } else {
+ save = { n:n };
+ }
+ m.getDB( baseName ).getCollection( baseName ).save( save );
+}
+
+check = function( m, n, id ) {
+ m.setSlaveOk();
+ if ( id ) {
+ find = { _id:id, n:n };
+ } else {
+ find = { n:n };
+ }
+ assert.soon( function() { return m.getDB( baseName ).getCollection( baseName ).find( find ).count() > 0; },
+ "failed waiting for " + m + " value of n to be " + n );
+}
+
+checkCount = function( m, c ) {
+ m.setSlaveOk();
+ assert.soon( function() {
+ actual = m.getDB( baseName ).getCollection( baseName ).find().count();
+ print( actual );
+ return c == actual; },
+ "count failed for " + m );
+}
+
+coll = function( m ) {
+ return m.getDB( baseName ).getCollection( baseName );
+}
+
+db2Coll = function( m ) {
+ return m.getDB( baseName + "_second" ).getCollection( baseName );
+}
+
+doTest = function( recover, newMaster, newSlave ) {
+ ports = allocatePorts( 5 );
+ aPort = ports[ 0 ];
+ lPort = ports[ 1 ];
+ lpPort = ports[ 2 ];
+ rPort = ports[ 3 ];
+ rpPort = ports[ 4 ];
+
+ // start normally
+ connect();
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + aPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + aPort );
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState();
+
+ firstMaster = pair.master();
+ firstSlave = pair.slave();
+
+ write( pair.master(), 0 );
+ write( pair.master(), 1 );
+ check( pair.slave(), 0 );
+ check( pair.slave(), 1 );
+
+ // now each can only talk to arbiter
+ disconnect();
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ m = newMaster();
+ write( m, 10 );
+ write( m, 100, "a" );
+ coll( m ).update( {n:1}, {$set:{n:2}} );
+ db2Coll( m ).save( {n:500} );
+ db2Coll( m ).findOne();
+
+ s = newSlave();
+ write( s, 20 );
+ write( s, 200, "a" );
+ coll( s ).update( {n:1}, {n:1,m:3} );
+ db2Coll( s ).save( {_id:"a",n:600} );
+ db2Coll( s ).findOne();
+
+ // recover
+ recover();
+
+ nodes = [ pair.right(), pair.left() ];
+
+ nodes.forEach( function( x ) { checkCount( x, 5 ); } );
+ nodes.forEach( function( x ) { [ 0, 10, 20, 100 ].forEach( function( y ) { check( x, y ); } ); } );
+
+ checkM = function( c ) {
+ assert.soon( function() {
+ obj = coll( c ).findOne( {n:2} );
+ printjson( obj );
+ return obj.m == undefined;
+ }, "n:2 test for " + c + " failed" );
+ };
+ nodes.forEach( function( x ) { checkM( x ); } );
+
+ // check separate database
+ nodes.forEach( function( x ) { assert.soon( function() {
+ r = db2Coll( x ).findOne( {_id:"a"} );
+ debug( r );
+ if ( r == null ) {
+ return false;
+ }
+ return 600 == r.n;
+ } ) } );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+
+}
+
+debug( "basic test" );
+doTest( function() {
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.right().host, true );
+ }, function() { return pair.right(); }, function() { return pair.left(); } );
+
+doRestartTest = function( signal ) {
+ doTest( function() {
+ if ( signal == 9 ) {
+ sleep( 3000 );
+ }
+ pair.killNode( firstMaster, signal );
+ connect();
+ pair.start( true );
+ pair.waitForSteadyState( [ 1, 0 ], firstSlave.host, true );
+ }, function() { return firstSlave; }, function() { return firstMaster; } );
+}
+
+debug( "sigterm restart test" );
+doRestartTest( 15 ) // SIGTERM
+
+debug( "sigkill restart test" );
+doRestartTest( 9 ) // SIGKILL
diff --git a/jstests/repl/pair5.js b/jstests/repl/pair5.js
new file mode 100644
index 0000000..ed8c72d
--- /dev/null
+++ b/jstests/repl/pair5.js
@@ -0,0 +1,95 @@
+// writes to new master while making master-master logs consistent
+
+var baseName = "jstests_pair5test";
+
+debug = function( p ) {
+ print( p );
+}
+
+ismaster = function( n ) {
+ var im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+ print( "ismaster: " + tojson( im ) );
+ assert( im, "command ismaster failed" );
+ return im.ismaster;
+}
+
+connect = function() {
+ startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+}
+
+disconnect = function() {
+ stopMongoProgram( lpPort );
+ stopMongoProgram( rpPort );
+}
+
+write = function( m, n, id ) {
+ if ( id ) {
+ save = { _id:id, n:n };
+ } else {
+ save = { n:n };
+ }
+ m.getDB( baseName ).getCollection( baseName ).save( save );
+}
+
+checkCount = function( m, c ) {
+ m.setSlaveOk();
+ assert.soon( function() {
+ actual = m.getDB( baseName ).getCollection( baseName ).find().count();
+ print( actual );
+ return c == actual; },
+ "count failed for " + m );
+}
+
+doTest = function( nSlave, opIdMem ) {
+ ports = allocatePorts( 5 );
+ aPort = ports[ 0 ];
+ lPort = ports[ 1 ];
+ lpPort = ports[ 2 ];
+ rPort = ports[ 3 ];
+ rpPort = ports[ 4 ];
+
+ // start normally
+ connect();
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + aPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + aPort );
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState();
+
+ // now each can only talk to arbiter
+ disconnect();
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ // left will become slave
+ for( i = 0; i < nSlave; ++i ) {
+ write( pair.left(), i, i );
+ }
+ pair.left().getDB( baseName ).getCollection( baseName ).findOne();
+
+ for( i = 10000; i < 15000; ++i ) {
+ write( pair.right(), i, i );
+ }
+ pair.right().getDB( baseName ).getCollection( baseName ).findOne();
+
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.right().host, true );
+
+ pair.master().getDB( baseName ).getCollection( baseName ).update( {_id:nSlave - 1}, {_id:nSlave - 1,n:-1}, true );
+ assert.eq( -1, pair.master().getDB( baseName ).getCollection( baseName ).findOne( {_id:nSlave - 1} ).n );
+ checkCount( pair.master(), 5000 + nSlave );
+ assert.eq( -1, pair.master().getDB( baseName ).getCollection( baseName ).findOne( {_id:nSlave - 1} ).n );
+ pair.slave().setSlaveOk();
+ assert.soon( function() {
+ n = pair.slave().getDB( baseName ).getCollection( baseName ).findOne( {_id:nSlave - 1} ).n;
+ print( n );
+ return -1 == n;
+ } );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+
+}
+
+doTest( 5000, 100000000 );
+doTest( 5000, 100 ); // force op id conversion to collection based storage
diff --git a/jstests/repl/pair6.js b/jstests/repl/pair6.js
new file mode 100644
index 0000000..b249fc0
--- /dev/null
+++ b/jstests/repl/pair6.js
@@ -0,0 +1,115 @@
+// pairing cases where oplogs run out of space
+
+var baseName = "jstests_pair6test";
+
+debug = function( p ) {
+ print( p );
+}
+
+ismaster = function( n ) {
+ var im = n.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
+ print( "ismaster: " + tojson( im ) );
+ assert( im, "command ismaster failed" );
+ return im.ismaster;
+}
+
+connect = function() {
+ startMongoProgram( "mongobridge", "--port", lpPort, "--dest", "localhost:" + lPort );
+ startMongoProgram( "mongobridge", "--port", rpPort, "--dest", "localhost:" + rPort );
+}
+
+disconnect = function() {
+ stopMongoProgram( lpPort );
+ stopMongoProgram( rpPort );
+}
+
+checkCount = function( m, c ) {
+ m.setSlaveOk();
+ assert.soon( function() {
+ actual = m.getDB( baseName ).getCollection( baseName ).find().count();
+ print( actual );
+ return c == actual; },
+ "expected count " + c + " for " + m );
+}
+
+resetSlave = function( s ) {
+ s.setSlaveOk();
+ assert.soon( function() {
+ ret = s.getDB( "admin" ).runCommand( { "resync" : 1 } );
+ // printjson( ret );
+ return 1 == ret.ok;
+ } );
+}
+
+big = new Array( 2000 ).toString();
+
+doTest = function() {
+ ports = allocatePorts( 5 );
+ aPort = ports[ 0 ];
+ lPort = ports[ 1 ];
+ lpPort = ports[ 2 ];
+ rPort = ports[ 3 ];
+ rpPort = ports[ 4 ];
+
+ // start normally
+ connect();
+ a = new MongodRunner( aPort, "/data/db/" + baseName + "-arbiter" );
+ l = new MongodRunner( lPort, "/data/db/" + baseName + "-left", "127.0.0.1:" + rpPort, "127.0.0.1:" + aPort );
+ r = new MongodRunner( rPort, "/data/db/" + baseName + "-right", "127.0.0.1:" + lpPort, "127.0.0.1:" + aPort );
+ pair = new ReplPair( l, r, a );
+ pair.start();
+ pair.waitForSteadyState();
+
+ disconnect();
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ print( "test one" );
+
+ // fill new slave oplog
+ for( i = 0; i < 1000; ++i ) {
+ pair.left().getDB( baseName ).getCollection( baseName ).save( {b:big} );
+ }
+ pair.left().getDB( baseName ).getCollection( baseName ).findOne();
+
+ // write single to new master
+ pair.right().getDB( baseName ).getCollection( baseName ).save( {} );
+
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.right().host, true );
+
+ resetSlave( pair.left() );
+
+ checkCount( pair.left(), 1 );
+ checkCount( pair.right(), 1 );
+
+ pair.right().getDB( baseName ).getCollection( baseName ).remove( {} );
+ checkCount( pair.left(), 0 );
+
+ disconnect();
+ pair.waitForSteadyState( [ 1, 1 ], null, true );
+
+ print( "test two" );
+
+ // fill new master oplog
+ for( i = 0; i < 1000; ++i ) {
+ pair.right().getDB( baseName ).getCollection( baseName ).save( {b:big} );
+ }
+
+ pair.left().getDB( baseName ).getCollection( baseName ).save( {_id:"abcde"} );
+
+ connect();
+ pair.waitForSteadyState( [ 1, 0 ], pair.right().host, true );
+
+ sleep( 15000 );
+
+ resetSlave( pair.left() );
+
+ checkCount( pair.left(), 1000 );
+ checkCount( pair.right(), 1000 );
+ assert.eq( 0, pair.left().getDB( baseName ).getCollection( baseName ).find( {_id:"abcde"} ).count() );
+
+ ports.forEach( function( x ) { stopMongoProgram( x ); } );
+
+}
+
+doTest(); \ No newline at end of file
diff --git a/jstests/repl/repl1.js b/jstests/repl/repl1.js
new file mode 100644
index 0000000..60f3942
--- /dev/null
+++ b/jstests/repl/repl1.js
@@ -0,0 +1,55 @@
+// Test basic replication functionality
+
+var baseName = "jstests_repl1test";
+
+soonCount = function( count ) {
+ assert.soon( function() {
+// print( "check count" );
+// print( "count: " + s.getDB( baseName ).z.find().count() );
+ return s.getDB( baseName ).a.find().count() == count;
+ } );
+}
+
+doTest = function( signal ) {
+
+ rt = new ReplTest( "repl1tests" );
+
+ m = rt.start( true );
+ s = rt.start( false );
+
+ am = m.getDB( baseName ).a
+
+ for( i = 0; i < 1000; ++i )
+ am.save( { _id: new ObjectId(), i: i } );
+
+ soonCount( 1000 );
+ as = s.getDB( baseName ).a
+ assert.eq( 1, as.find( { i: 0 } ).count() );
+ assert.eq( 1, as.find( { i: 999 } ).count() );
+
+ rt.stop( false, signal );
+
+ for( i = 1000; i < 1010; ++i )
+ am.save( { _id: new ObjectId(), i: i } );
+
+ s = rt.start( false, null, true );
+ soonCount( 1010 );
+ as = s.getDB( baseName ).a
+ assert.eq( 1, as.find( { i: 1009 } ).count() );
+
+ rt.stop( true, signal );
+
+ m = rt.start( true, null, true );
+ am = m.getDB( baseName ).a
+
+ for( i = 1010; i < 1020; ++i )
+ am.save( { _id: new ObjectId(), i: i } );
+
+ assert.soon( function() { return as.find().count() == 1020; } );
+ assert.eq( 1, as.find( { i: 1019 } ).count() );
+
+ rt.stop();
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl2.js b/jstests/repl/repl2.js
new file mode 100644
index 0000000..c9fe6b9
--- /dev/null
+++ b/jstests/repl/repl2.js
@@ -0,0 +1,45 @@
+// Test resync command
+
+soonCount = function( count ) {
+ assert.soon( function() {
+// print( "check count" );
+// print( "count: " + s.getDB( baseName ).z.find().count() );
+ return s.getDB("foo").a.find().count() == count;
+ } );
+}
+
+doTest = function( signal ) {
+
+ var rt = new ReplTest( "repl2tests" );
+
+ // implicit small oplog makes slave get out of sync
+ m = rt.start( true );
+ s = rt.start( false );
+
+ am = m.getDB("foo").a
+
+ am.save( { _id: new ObjectId() } );
+ soonCount( 1 );
+ assert.eq( 0, s.getDB( "admin" ).runCommand( { "resync" : 1 } ).ok );
+ rt.stop( false , signal );
+
+ big = new Array( 2000 ).toString();
+ for( i = 0; i < 1000; ++i )
+ am.save( { _id: new ObjectId(), i: i, b: big } );
+
+ s = rt.start( false , null , true );
+ assert.soon( function() { return 1 == s.getDB( "admin" ).runCommand( { "resync" : 1 } ).ok; } );
+
+ soonCount( 1001 );
+ as = s.getDB("foo").a
+ assert.eq( 1, as.find( { i: 0 } ).count() );
+ assert.eq( 1, as.find( { i: 999 } ).count() );
+
+ assert.eq( 0, s.getDB( "admin" ).runCommand( { "resync" : 1 } ).ok );
+
+ rt.stop();
+
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl3.js b/jstests/repl/repl3.js
new file mode 100644
index 0000000..d3c3848
--- /dev/null
+++ b/jstests/repl/repl3.js
@@ -0,0 +1,47 @@
+// Test autoresync
+
+var baseName = "jstests_repl3test";
+
+soonCount = function( count ) {
+ assert.soon( function() {
+// print( "check count" );
+// print( "count: " + s.getDB( baseName ).z.find().count() + ", expected: " + count );
+ return s.getDB( baseName ).a.find().count() == count;
+ } );
+}
+
+doTest = function( signal ) {
+
+ rt = new ReplTest( "repl3tests" );
+
+ m = rt.start( true );
+ s = rt.start( false );
+
+ am = m.getDB( baseName ).a
+
+ am.save( { _id: new ObjectId() } );
+ soonCount( 1 );
+ rt.stop( false, signal );
+
+ big = new Array( 2000 ).toString();
+ for( i = 0; i < 1000; ++i )
+ am.save( { _id: new ObjectId(), i: i, b: big } );
+
+ s = rt.start( false, { autoresync: null }, true );
+
+ // after SyncException, mongod waits 10 secs.
+ sleep( 15000 );
+
+ // Need the 2 additional seconds timeout, since commands don't work on an 'allDead' node.
+ soonCount( 1001 );
+ as = s.getDB( baseName ).a
+ assert.eq( 1, as.find( { i: 0 } ).count() );
+ assert.eq( 1, as.find( { i: 999 } ).count() );
+
+ assert.commandFailed( s.getDB( "admin" ).runCommand( { "resync" : 1 } ) );
+
+ rt.stop();
+}
+
+doTest( 15 ); // SIGTERM
+doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl4.js b/jstests/repl/repl4.js
new file mode 100644
index 0000000..de7ca43
--- /dev/null
+++ b/jstests/repl/repl4.js
@@ -0,0 +1,30 @@
+// Test replication 'only' mode
+
+soonCount = function( db, coll, count ) {
+ assert.soon( function() {
+ return s.getDB( db )[ coll ].find().count() == count;
+ } );
+}
+
+doTest = function() {
+
+ rt = new ReplTest( "repl4tests" );
+
+ m = rt.start( true );
+ s = rt.start( false, { only: "c" } );
+
+ cm = m.getDB( "c" ).c
+ bm = m.getDB( "b" ).b
+
+ cm.save( { x:1 } );
+ bm.save( { x:2 } );
+
+ soonCount( "c", "c", 1 );
+ assert.eq( 1, s.getDB( "c" ).c.findOne().x );
+ sleep( 10000 );
+ printjson( s.getDBNames() );
+ assert.eq( -1, s.getDBNames().indexOf( "b" ) );
+ assert.eq( 0, s.getDB( "b" ).b.find().count() );
+}
+
+doTest();
diff --git a/jstests/repl/repl5.js b/jstests/repl/repl5.js
new file mode 100644
index 0000000..b9bcef9
--- /dev/null
+++ b/jstests/repl/repl5.js
@@ -0,0 +1,32 @@
+// Test auto reclone after failed initial clone
+
// Wait until <db>.<coll> on the global slave handle `s` holds at least
// `count` documents.
soonCountAtLeast = function( db, coll, count ) {
    assert.soon( function() {
        return s.getDB( db )[ coll ].find().count() >= count;
    } );
}

// Kill the slave mid-initial-clone, then restart it reusing its data files;
// it must automatically reclone and end up with the full data set.
// `signal`: 15 = SIGTERM, 9 = SIGKILL.
doTest = function( signal ) {

    rt = new ReplTest( "repl5tests" );

    m = rt.start( true );

    ma = m.getDB( "a" ).a;
    for( i = 0; i < 10000; ++i )
        ma.save( { i:i } );

    // Start the slave and kill it as soon as cloning has visibly begun.
    s = rt.start( false );
    soonCountAtLeast( "a", "a", 1 );
    rt.stop( false, signal );

    // Restart reusing the partially cloned data files; the reclone must
    // eventually produce all 10000 documents.
    s = rt.start( false, null, true );
    sleep( 1000 );
    soonCountAtLeast( "a", "a", 10000 );

    rt.stop();
}

doTest( 15 ); // SIGTERM
doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl6.js b/jstests/repl/repl6.js
new file mode 100644
index 0000000..f4fdc9b
--- /dev/null
+++ b/jstests/repl/repl6.js
@@ -0,0 +1,73 @@
+// Test one master replicating to two slaves
+
+var baseName = "jstests_repl6test";
+
// Block until `node` reports exactly `expected` documents in <baseName>.a.
// The failure message identifies which node timed out and at what count.
soonCount = function( node, expected ) {
    var reached = function() {
        return expected == node.getDB( baseName ).a.find().count();
    };
    assert.soon( reached, "expected count: " + expected + " from : " + node );
}
+
// One master replicating to two slaves (two ReplTest harnesses sharing the
// master's port). Verifies replication to both slaves, catch-up after a
// slave restart, and catch-up after a master restart.
// `signal`: 15 = SIGTERM, 9 = SIGKILL.
doTest = function( signal ) {

    ports = allocatePorts( 3 );

    // Both harnesses use ports[0] for the master; slaves on ports[1] / [2].
    ms1 = new ReplTest( "repl6tests-1", [ ports[ 0 ], ports[ 1 ] ] );
    ms2 = new ReplTest( "repl6tests-2", [ ports[ 0 ], ports[ 2 ] ] );

    m = ms1.start( true );
    s1 = ms1.start( false );
    s2 = ms2.start( false );

    am = m.getDB( baseName ).a

    for( i = 0; i < 1000; ++i )
        am.save( { _id: new ObjectId(), i: i } );

    soonCount( s1, 1000 );
    soonCount( s2, 1000 );

    // Spot-check first and last document on a slave collection.
    check = function( as ) {
        assert.eq( 1, as.find( { i: 0 } ).count() );
        assert.eq( 1, as.find( { i: 999 } ).count() );
    }

    as = s1.getDB( baseName ).a
    check( as );
    as = s2.getDB( baseName ).a
    check( as );

    // Stop both slaves, write more on the master, then restart slave 1
    // (reusing its data files) -- it must catch up.
    ms1.stop( false, signal );
    ms2.stop( false, signal );

    for( i = 1000; i < 1010; ++i )
        am.save( { _id: new ObjectId(), i: i } );

    s1 = ms1.start( false, null, true );
    soonCount( s1, 1010 );
    as = s1.getDB( baseName ).a
    assert.eq( 1, as.find( { i: 1009 } ).count() );

    // Restart the master (reusing data) and write again; slave 1 follows.
    ms1.stop( true, signal );

    m = ms1.start( true, null, true );
    am = m.getDB( baseName ).a

    for( i = 1010; i < 1020; ++i )
        am.save( { _id: new ObjectId(), i: i } );

    soonCount( s1, 1020 );
    assert.eq( 1, as.find( { i: 1019 } ).count() );

    // Slave 2 was down through both extra batches; after restart it must
    // recover everything, including the writes made across the master restart.
    s2 = ms2.start( false, null, true );
    soonCount( s2, 1020 );
    as = s2.getDB( baseName ).a
    assert.eq( 1, as.find( { i: 1009 } ).count() );
    assert.eq( 1, as.find( { i: 1019 } ).count() );

    ms1.stop();
    ms2.stop( false );
}

doTest( 15 ); // SIGTERM
doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl7.js b/jstests/repl/repl7.js
new file mode 100644
index 0000000..e3fdee9
--- /dev/null
+++ b/jstests/repl/repl7.js
@@ -0,0 +1,45 @@
// Test persistence of list of dbs to add.
// The master creates databases "a".."aaaa" before the slave connects; the
// slave must record the pending database list and finish adding all of them
// even across a restart. `signal`: 15 = SIGTERM, 9 = SIGKILL.
doTest = function( signal ) {

    rt = new ReplTest( "repl7tests" );

    m = rt.start( true );

    // Create four databases ("a", "aa", "aaa", "aaaa") on the master.
    for( n = "a"; n != "aaaaa"; n += "a" ) {
        m.getDB( n ).a.save( {x:1} );
    }

    s = rt.start( false );

    // Wait until the slave has begun cloning (db "aa" present), then kill it
    // mid-way so some databases are still pending.
    assert.soon( function() {
        return -1 != s.getDBNames().indexOf( "aa" );
    }, "aa timeout", 60000, 1000 );

    rt.stop( false, signal );

    // BUG FIX: ReplTest.start's third argument is the restart/reuse-data
    // flag (cf. repl3/repl5/repl6, which pass `true`); previously the kill
    // signal number was passed here, which only worked because 9 and 15
    // happen to be truthy.
    s = rt.start( false, null, true );

    // The restarted slave must eventually hold every pending database.
    assert.soon( function() {
        for( n = "a"; n != "aaaaa"; n += "a" ) {
            if ( -1 == s.getDBNames().indexOf( n ) )
                return false;
        }
        return true;
    }, "a-aaaa timeout", 60000, 1000 );

    // NOTE(review): this polls the master `m`, whose counts are 1 by
    // construction -- possibly the slave `s` was intended; left unchanged.
    assert.soon( function() {
        for( n = "a"; n != "aaaaa"; n += "a" ) {
            if ( 1 != m.getDB( n ).a.find().count() ) {
                return false;
            }
        }
        return true; }, "a-aaaa count timeout" );

    sleep( 300 );

    rt.stop();
}

doTest( 15 ); // SIGTERM
doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/repl8.js b/jstests/repl/repl8.js
new file mode 100644
index 0000000..64e65cc
--- /dev/null
+++ b/jstests/repl/repl8.js
@@ -0,0 +1,30 @@
+// Test cloning of capped collections
+
// Test cloning of capped collections.

baseName = "jstests_repl_repl8";

rt = new ReplTest( "repl8tests" );

master = rt.start( true );
masterDb = master.getDB( baseName );

// Capped collection created before the slave exists: must arrive capped via
// the initial clone.
masterDb.createCollection( "first", {capped:true,size:1000} );
assert( masterDb.getCollection( "first" ).isCapped() );

slave = rt.start( false );
slaveDb = slave.getDB( baseName );

assert.soon( function() { return slaveDb.getCollection( "first" ).isCapped(); } );

// Capped collection created after the slave is online: replicated as capped.
masterDb.createCollection( "second", {capped:true,size:1000} );
assert.soon( function() { return slaveDb.getCollection( "second" ).isCapped(); } );

// renameCollection must replicate: the new name appears on the slave and the
// old one disappears.
masterDb.getCollection( "third" ).save( { a: 1 } );
assert.soon( function() { return slaveDb.getCollection( "third" ).exists(); } );
assert.commandWorked( master.getDB( "admin" ).runCommand( {renameCollection:"jstests_repl_repl8.third", to:"jstests_repl_repl8.third_rename"} ) );
assert( masterDb.getCollection( "third_rename" ).exists() );
assert( !masterDb.getCollection( "third" ).exists() );
assert.soon( function() { return slaveDb.getCollection( "third_rename" ).exists(); } );
assert.soon( function() { return !slaveDb.getCollection( "third" ).exists(); } );

// convertToCapped must replicate as well.
masterDb.getCollection( "fourth" ).save( {a:1} );
assert.commandWorked( masterDb.getCollection( "fourth" ).convertToCapped( 1000 ) );
assert( masterDb.getCollection( "fourth" ).isCapped() );
assert.soon( function() { return slaveDb.getCollection( "fourth" ).isCapped(); } );
diff --git a/jstests/repl/repl9.js b/jstests/repl/repl9.js
new file mode 100644
index 0000000..be06e08
--- /dev/null
+++ b/jstests/repl/repl9.js
@@ -0,0 +1,48 @@
// Test replication of collection renaming

baseName = "jstests_repl_repl9";

rt = new ReplTest( "repl9tests" );

m = rt.start( true );  // master
s = rt.start( false ); // slave

admin = m.getDB( "admin" );

debug = function( foo ) {} // no-op; swap in print( foo ) when debugging

// rename within db

m.getDB( baseName ).one.save( { a: 1 } );
assert.soon( function() { v = s.getDB( baseName ).one.findOne(); return v && 1 == v.a } );

// Rename one -> two on the master; the slave must pick up the new name
// (with the document intact) and drop the old one.
assert.commandWorked( admin.runCommand( {renameCollection:"jstests_repl_repl9.one", to:"jstests_repl_repl9.two"} ) );
assert.soon( function() {
    if ( -1 == s.getDB( baseName ).getCollectionNames().indexOf( "two" ) ) {
        debug( "no two coll" );
        debug( tojson( s.getDB( baseName ).getCollectionNames() ) );
        return false;
    }
    if ( !s.getDB( baseName ).two.findOne() ) {
        debug( "no two object" );
        return false;
    }
    return 1 == s.getDB( baseName ).two.findOne().a; });
assert.eq( -1, s.getDB( baseName ).getCollectionNames().indexOf( "one" ) );

// rename to new db

first = baseName + "_first";
second = baseName + "_second";

m.getDB( first ).one.save( { a: 1 } );
assert.soon( function() { return s.getDB( first ).one.findOne() && 1 == s.getDB( first ).one.findOne().a; } );

// Cross-database rename must replicate too: the document appears under the
// new db/collection on the slave and disappears from the old one.
assert.commandWorked( admin.runCommand( {renameCollection:"jstests_repl_repl9_first.one", to:"jstests_repl_repl9_second.two"} ) );
assert.soon( function() {
    return -1 != s.getDBNames().indexOf( second ) &&
        -1 != s.getDB( second ).getCollectionNames().indexOf( "two" ) &&
        s.getDB( second ).two.findOne() &&
        1 == s.getDB( second ).two.findOne().a; } );
assert.eq( -1, s.getDB( first ).getCollectionNames().indexOf( "one" ) );
diff --git a/jstests/repl/replacePeer1.js b/jstests/repl/replacePeer1.js
new file mode 100644
index 0000000..45ee544
--- /dev/null
+++ b/jstests/repl/replacePeer1.js
@@ -0,0 +1,71 @@
+// test replace peer on master
+
var baseName = "jstests_replacepeer1test";

// Return the `ismaster` field reported by node `conn`.
ismaster = function( conn ) {
    var res = conn.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
    assert( res );
    return res.ismaster;
}

// Monotonically increasing id shared by writeOne()/getCount().
var writeOneIdx = 0;

// Write one uniquely numbered document to <baseName>.z on `conn`.
writeOne = function( conn ) {
    conn.getDB( baseName ).z.save( { _id: new ObjectId(), i: ++writeOneIdx } );
}

// Count copies of the most recently written document on `conn`.
getCount = function( conn ) {
    return conn.getDB( baseName ).z.find( { i: writeOneIdx } ).toArray().length;
}

// Write to the master and wait until the write becomes visible on the slave.
checkWrite = function( master, slave ) {
    writeOne( master );
    assert.eq( 1, getCount( master ) );
    slave.setSlaveOk();
    assert.soon( function() {
        return 1 == getCount( slave );
    } );
}
+
// Replace the dead slave of a replica pair via the `replacepeer` command run
// on the master, then bring up a fresh node in its place.
// `signal` is used to kill nodes: 15 = SIGTERM, 9 = SIGKILL.
doTest = function( signal ) {

    ports = allocatePorts( 4 );

    // Arbiter on ports[0]; initial pair: left on ports[1], right on ports[3].
    a = new MongodRunner( ports[ 0 ], "/data/db/" + baseName + "-arbiter" );
    l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
    r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );

    rp = new ReplPair( l, r, a );
    rp.start();
    // Steady state with the right node as master.
    rp.waitForSteadyState( [ 1, 0 ], rp.right().host );

    checkWrite( rp.master(), rp.slave() );

    // Kill the slave, write again on the master, then declare the dead peer
    // replaced.
    rp.killNode( rp.slave(), signal );

    writeOne( rp.master() );

    assert.commandWorked( rp.master().getDB( "admin" ).runCommand( {replacepeer:1} ) );

    rp.killNode( rp.master(), signal );
    rp.killNode( rp.arbiter(), signal );

    // Replacement left node on a fresh port (ports[2]) with a wiped dbpath;
    // the surviving right node (which holds both writes) now points at it.
    o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
    r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );

    rp = new ReplPair( o, r, a );
    resetDbpath( "/data/db/" + baseName + "-left" );
    rp.start( true );
    rp.waitForSteadyState( [ 1, 0 ], rp.right().host );

    // All three writes (two from checkWrite, one direct) must be present on
    // the replacement slave.
    checkWrite( rp.master(), rp.slave() );
    rp.slave().setSlaveOk();
    assert.eq( 3, rp.slave().getDB( baseName ).z.find().toArray().length );

    ports.forEach( function( x ) { stopMongod( x ); } );

}

doTest( 15 ); // SIGTERM
doTest( 9 ); // SIGKILL
diff --git a/jstests/repl/replacePeer2.js b/jstests/repl/replacePeer2.js
new file mode 100644
index 0000000..09c8177
--- /dev/null
+++ b/jstests/repl/replacePeer2.js
@@ -0,0 +1,72 @@
+// test replace peer on slave
+
var baseName = "jstests_replacepeer2test";

// Fetch the `ismaster` value from node `node`'s admin database.
ismaster = function( node ) {
    var reply = node.getDB( "admin" ).runCommand( { "ismaster" : 1 } );
    assert( reply );
    return reply.ismaster;
}

// Counter shared by writeOne()/getCount() so each write is unique.
var writeOneIdx = 0;

// Save one numbered document into <baseName>.z on `node`.
writeOne = function( node ) {
    node.getDB( baseName ).z.save( { _id: new ObjectId(), i: ++writeOneIdx } );
}

// How many copies of the latest numbered document `node` holds.
getCount = function( node ) {
    return node.getDB( baseName ).z.find( { i: writeOneIdx } ).toArray().length;
}

// Perform a master write and poll until it replicates to the slave.
checkWrite = function( master, slave ) {
    writeOne( master );
    assert.eq( 1, getCount( master ) );
    slave.setSlaveOk();
    assert.soon( function() {
        return 1 == getCount( slave );
    } );
}
+
// Replace peer while running the `replacepeer` command on the SLAVE: the
// slave must stop replicating immediately, and can then be paired with a
// fresh replacement node. `signal`: 15 = SIGTERM, 9 = SIGKILL.
doTest = function( signal ) {

    ports = allocatePorts( 4 );

    a = new MongodRunner( ports[ 0 ], "/data/db/" + baseName + "-arbiter" );
    l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 3 ], "127.0.0.1:" + ports[ 0 ] );
    r = new MongodRunner( ports[ 3 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );

    rp = new ReplPair( l, r, a );
    rp.start();
    // Steady state with the right node as master (so the left node is the
    // slave we will run replacepeer on).
    rp.waitForSteadyState( [ 1, 0 ], rp.right().host );

    checkWrite( rp.master(), rp.slave() );

    // allow slave to finish initial sync
    assert.soon( function() { return 1 == rp.slave().getDB( "admin" ).runCommand( {replacepeer:1} ).ok; } );

    // Should not be saved to slave.
    writeOne( rp.master() );
    // Make sure there would be enough time to save to l if we hadn't called replacepeer.
    sleep( 10000 );

    ports.forEach( function( x ) { stopMongod( x, signal ); } );

    // New pair: the old left node keeps its data (only the first write) and
    // a replacement right node starts on a fresh port (ports[2]) with a
    // wiped dbpath.
    l = new MongodRunner( ports[ 1 ], "/data/db/" + baseName + "-left", "127.0.0.1:" + ports[ 2 ], "127.0.0.1:" + ports[ 0 ] );
    o = new MongodRunner( ports[ 2 ], "/data/db/" + baseName + "-right", "127.0.0.1:" + ports[ 1 ], "127.0.0.1:" + ports[ 0 ] );

    rp = new ReplPair( l, o, a );
    resetDbpath( "/data/db/" + baseName + "-right" );
    rp.start( true );
    rp.waitForSteadyState( [ 1, 0 ], rp.left().host );

    // Exactly two documents: the first checkWrite's and the one below.
    // The write made after replacepeer must NOT have reached this pair.
    checkWrite( rp.master(), rp.slave() );
    rp.slave().setSlaveOk();
    assert.eq( 2, rp.slave().getDB( baseName ).z.find().toArray().length );

    ports.forEach( function( x ) { stopMongod( x ); } );

}

doTest( 15 ); // SIGTERM
doTest( 9 ); // SIGKILL
diff --git a/jstests/set1.js b/jstests/set1.js
new file mode 100644
index 0000000..d741387
--- /dev/null
+++ b/jstests/set1.js
@@ -0,0 +1,9 @@
+
// $set of an object value containing a dotted field name must be rejected,
// leaving the embedded document untouched.

var coll = db.set1;
coll.drop();

coll.insert( { _id : 1, emb : {} });

// The value assigned to `emb` contains the illegal key 'a.dot'.
coll.update( { _id : 1 }, { $set : { emb : { 'a.dot' : 'data'} }});

assert.eq( { _id : 1 , emb : {} } , coll.findOne() , "A" );
+
diff --git a/jstests/set2.js b/jstests/set2.js
new file mode 100644
index 0000000..221ee40
--- /dev/null
+++ b/jstests/set2.js
@@ -0,0 +1,18 @@
+
// $set behavior: overwrite a top-level field, update a dotted path into a
// subdocument, and set several fields at once.

var c = db.set2;
c.drop();

c.save( { _id : 1 , x : true , y : { x : true } } );
assert.eq( true , c.findOne().x );

// Replace a boolean with a number via $set.
c.update( { _id : 1 } , { $set : { x : 17 } } );
assert.eq( 17 , c.findOne().x );

// A dotted path updates only y.x, not the whole subdocument.
assert.eq( true , c.findOne().y.x );
c.update( { _id : 1 } , { $set : { "y.x" : 17 } } );
assert.eq( 17 , c.findOne().y.x );

// Multiple fields in one $set.
c.update( { _id : 1 } , { $set : { a : 2 , b : 3 } } );
var doc = c.findOne();
assert.eq( 2 , doc.a );
assert.eq( 3 , doc.b );
diff --git a/jstests/set3.js b/jstests/set3.js
new file mode 100644
index 0000000..611abc4
--- /dev/null
+++ b/jstests/set3.js
@@ -0,0 +1,11 @@
+
// $set creating a long field name that shares a prefix with an existing
// sibling must not clobber the existing field.

var coll = db.set3;
coll.drop();

coll.insert( { "test1" : { "test2" : { "abcdefghijklmnopqrstu" : {"id":1} } } } );
coll.update( {}, {"$set":{"test1.test2.abcdefghijklmnopqrstuvwxyz":{"id":2}}})

var found = coll.findOne();
// Both the original short-named field and the newly set long-named one exist.
assert.eq( 1 , found.test1.test2.abcdefghijklmnopqrstu.id , "A" );
assert.eq( 2 , found.test1.test2.abcdefghijklmnopqrstuvwxyz.id , "B" );
diff --git a/jstests/set4.js b/jstests/set4.js
new file mode 100644
index 0000000..b37366c
--- /dev/null
+++ b/jstests/set4.js
@@ -0,0 +1,15 @@
+
// $set on an array element by index ('a.0.x') and implicit creation of a
// nested path ('foo.bar'), for both the create and the in-place update case.

var t4 = db.set4;
t4.drop();

var expected = { _id:1 , a : [ { x : 1 } ]}
t4.insert( expected );

// First update creates foo as a subdocument.
t4.update( {}, { $set : { 'a.0.x' : 2, 'foo.bar' : 3 } } );
expected.a[0].x = 2;
expected.foo = { bar : 3 }
assert.eq( expected , t4.findOne() , "A" );

// Second update modifies the now-existing paths in place.
t4.update( {}, { $set : { 'a.0.x' : 4, 'foo.bar' : 5 } } );
expected.a[0].x = 4;
expected.foo.bar = 5;
assert.eq( expected , t4.findOne() , "B" );
diff --git a/jstests/sharding/auto1.js b/jstests/sharding/auto1.js
new file mode 100644
index 0000000..92a4ce8
--- /dev/null
+++ b/jstests/sharding/auto1.js
@@ -0,0 +1,51 @@
// auto1.js -- auto-splitting: chunks should split as the data volume grows.

s = new ShardingTest( "auto1" , 2 , 1 , 1 );

s.adminCommand( { enablesharding : "test" } );
s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );

// ~50KB filler per document so a modest number of inserts reaches the
// split threshold.
bigString = "";
while ( bigString.length < 1024 * 50 )
    bigString += "asocsancdnsjfnsdnfsjdhfasdfasdfasdfnsadofnsadlkfnsaldknfsad";

db = s.getDB( "test" )
coll = db.foo;

var i=0;

for ( ; i<500; i++ ){
    coll.save( { num : i , s : bigString } );
}

// Flush mongos->shard connections so all writes are visible before counting.
s.adminCommand( "connpoolsync" );

primary = s.getServer( "test" ).getDB( "test" );

// 500 docs: still a single chunk, all on the primary shard.
assert.eq( 1 , s.config.chunks.count() );
assert.eq( 500 , primary.foo.count() );

print( "datasize: " + tojson( s.getServer( "test" ).getDB( "admin" ).runCommand( { datasize : "test.foo" } ) ) );

for ( ; i<800; i++ ){
    coll.save( { num : i , s : bigString } );
}

// 800 docs: still expected to be below the split point.
assert.eq( 1 , s.config.chunks.count() );

for ( ; i<1500; i++ ){
    coll.save( { num : i , s : bigString } );
}

// By 1500 docs the collection must have split into 3 chunks.
assert.eq( 3 , s.config.chunks.count() , "shard didn't split A " );
s.printChunks();

for ( ; i<3000; i++ ){
    coll.save( { num : i , s : bigString } );
}

// And again by 3000 docs.
assert.eq( 4 , s.config.chunks.count() , "shard didn't split B " );
s.printChunks();


s.stop();
diff --git a/jstests/sharding/auto2.js b/jstests/sharding/auto2.js
new file mode 100644
index 0000000..c6ec374
--- /dev/null
+++ b/jstests/sharding/auto2.js
@@ -0,0 +1,44 @@
// auto2.js -- bulk inserts through mongos: totals must agree between the
// per-shard counts, the mongos cursor, and the number of inserts performed.

s = new ShardingTest( "auto2" , 2 , 1 , 1 );

s.adminCommand( { enablesharding : "test" } );
s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );

// ~50KB filler so the load triggers splits/migrations along the way.
bigString = "";
while ( bigString.length < 1024 * 50 )
    bigString += "asocsancdnsjfnsdnfsjdhfasdfasdfasdfnsadofnsadlkfnsaldknfsad";

db = s.getDB( "test" )
coll = db.foo;

var i=0;

// 30 batches of 100 documents, printing per-batch insert timing.
for ( j=0; j<30; j++ ){
    print( "j:" + j + " : " +
           Date.timeFunc(
               function(){
                   for ( var k=0; k<100; k++ ){
                       coll.save( { num : i , s : bigString } );
                       i++;
                   }
               }
           ) );

}
s.adminCommand( "connpoolsync" );

print( "done inserting data" );

print( "datasize: " + tojson( s.getServer( "test" ).getDB( "admin" ).runCommand( { datasize : "test.foo" } ) ) );
s.printChunks();

// The two shards' counts must sum to the number of inserts, and the mongos
// cursor must iterate every document.
counta = s._connections[0].getDB( "test" ).foo.count();
countb = s._connections[1].getDB( "test" ).foo.count();

assert.eq( j * 100 , counta + countb , "from each a:" + counta + " b:" + countb + " i:" + i );
assert.eq( j * 100 , coll.find().limit(100000000).itcount() , "itcount A" );

// Chunks must have been spread over both shards by the end of the load.
assert( Array.unique( s.config.chunks.find().toArray().map( function(z){ return z.shard; } ) ).length == 2 , "should be using both servers" );

s.stop();
diff --git a/jstests/sharding/count1.js b/jstests/sharding/count1.js
new file mode 100644
index 0000000..a697162
--- /dev/null
+++ b/jstests/sharding/count1.js
@@ -0,0 +1,55 @@
// count1.js -- counts on unsharded and sharded collections, across splits
// and a chunk migration.

s = new ShardingTest( "count1" , 2 );

db = s.getDB( "test" );

// Counts on a collection that is never sharded.
db.bar.save( { n : 1 } )
db.bar.save( { n : 2 } )
db.bar.save( { n : 3 } )

assert.eq( 3 , db.bar.find().count() , "bar 1" );
assert.eq( 1 , db.bar.find( { n : 1 } ).count() , "bar 2" );

s.adminCommand( { enablesharding : "test" } )
s.adminCommand( { shardcollection : "test.foo" , key : { name : 1 } } );

primary = s.getServer( "test" ).getDB( "test" );
secondary = s.getOther( primary ).getDB( "test" );

assert.eq( 1 , s.config.chunks.count() , "sanity check A" );

[ "eliot" , "sara" , "bob" , "joe" , "mark" , "allan" ].forEach( function( name ){
    db.foo.save( { name : name } );
} );

assert.eq( 6 , db.foo.find().count() , "basic count" );

// Split repeatedly around "joe", then migrate that chunk to the other shard.
s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );
s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );
s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );

assert.eq( 6 , db.foo.find().count() , "basic count after split " );
assert.eq( 6 , db.foo.find().sort( { name : 1 } ).count() , "basic count after split sorted " );

s.adminCommand( { movechunk : "test.foo" , find : { name : "joe" } , to : secondary.getMongo().name } );

// After the migration each shard should hold three documents.
assert.eq( 3 , primary.foo.find().toArray().length , "primary count" );
assert.eq( 3 , secondary.foo.find().toArray().length , "secondary count" );
assert.eq( 3 , primary.foo.find().sort( { name : 1 } ).toArray().length , "primary count sorted" );
assert.eq( 3 , secondary.foo.find().sort( { name : 1 } ).toArray().length , "secondary count sorted" );

// The mongos view still returns all six, sorted or not.
assert.eq( 6 , db.foo.find().toArray().length , "total count after move" );
assert.eq( 6 , db.foo.find().sort( { name : 1 } ).toArray().length , "total count() sorted" );

assert.eq( 6 , db.foo.find().sort( { name : 1 } ).count() , "total count with count() after move" );

assert.eq( "allan,bob,eliot,joe,mark,sara" , db.foo.find().sort( { name : 1 } ).toArray().map( function(z){ return z.name; } ) , "sort 1" );
assert.eq( "sara,mark,joe,eliot,bob,allan" , db.foo.find().sort( { name : -1 } ).toArray().map( function(z){ return z.name; } ) , "sort 2" );

s.stop();
+
diff --git a/jstests/sharding/diffservers1.js b/jstests/sharding/diffservers1.js
new file mode 100644
index 0000000..6497bc0
--- /dev/null
+++ b/jstests/sharding/diffservers1.js
@@ -0,0 +1,20 @@
+
+
// Basic shard-registration sanity plus addshard argument validation.

s = new ShardingTest( "diffservers1" , 2 );

// Both shards are registered in config, and only the first connection's
// config database carries the shard documents.
assert.eq( 2 , s.config.shards.count() , "server count wrong" );
assert.eq( 2 , s._connections[0].getDB( "config" ).shards.count() , "where are servers!" );
assert.eq( 0 , s._connections[1].getDB( "config" ).shards.count() , "shouldn't be here" );

test1 = s.getDB( "test1" ).foo;
test1.save( { a : 1 } );
test1.save( { a : 2 } );
test1.save( { a : 3 } );
// BUG FIX: this used to be `assert( 3 , test1.count() )`, which always
// passes because it only checks that the literal 3 is truthy (the second
// argument is the failure message) -- the count was never verified.
assert.eq( 3 , test1.count() , "getDB count" );

// addshard must reject malformed host names and unreachable hosts.
assert( ! s.admin.runCommand( { addshard: "sdd$%" } ).ok , "bad hostname" );
assert( ! s.admin.runCommand( { addshard: "127.0.0.1:43415" } ).ok , "host not up" );
assert( ! s.admin.runCommand( { addshard: "127.0.0.1:43415" , allowLocal : true } ).ok , "host not up" );

s.stop();
+
diff --git a/jstests/sharding/error1.js b/jstests/sharding/error1.js
new file mode 100644
index 0000000..b4db9c3
--- /dev/null
+++ b/jstests/sharding/error1.js
@@ -0,0 +1,47 @@
+
// getLastError behavior through mongos, on unsharded and sharded collections.

s = new ShardingTest( "error1" , 2 , 1 , 1 );
s.adminCommand( { enablesharding : "test" } );

a = s._connections[0].getDB( "test" );
b = s._connections[1].getDB( "test" );

// ---- simple getLastError ----

db = s.getDB( "test" );
db.foo.insert( { _id : 1 } );
assert.isnull( db.getLastError() , "gle 1" );
// Duplicate _id: the error must be reported, and must still be readable on
// a second getLastError call.
db.foo.insert( { _id : 1 } );
assert( db.getLastError() , "gle21" );
assert( db.getLastError() , "gle22" );

// --- sharded getlasterror

s.adminCommand( { shardcollection : "test.foo2" , key : { num : 1 } } );

db.foo2.insert( { _id : 1 , num : 5 } );
db.foo2.insert( { _id : 2 , num : 10 } );
db.foo2.insert( { _id : 3 , num : 15 } );
db.foo2.insert( { _id : 4 , num : 20 } );

s.adminCommand( { split : "test.foo2" , middle : { num : 10 } } );
s.adminCommand( { movechunk : "test.foo2" , find : { num : 20 } , to : s.getOther( s.getServer( "test" ) ).name } );

// Documents are spread over both shards, while mongos sees all four.
assert( a.foo2.count() > 0 && a.foo2.count() < 4 , "se1" );
assert( b.foo2.count() > 0 && b.foo2.count() < 4 , "se2" );
assert.eq( 4 , db.foo2.count() , "se3" );

db.foo2.insert( { _id : 5 , num : 25 } );
assert( ! db.getLastError() , "se3.5" );
s.sync();
assert.eq( 5 , db.foo2.count() , "se4" );



// Duplicate _id on the sharded collection: the error must surface through
// mongos, and remain readable twice.
db.foo2.insert( { _id : 5 , num : 30 } );
assert( db.getLastError() , "se5" );
assert( db.getLastError() , "se6" );

// BUG FIX: this assertion's label used to be "se5", colliding with the
// assertion above and making failure reports ambiguous.
assert.eq( 5 , db.foo2.count() , "se7" );

// ----
s.stop();
diff --git a/jstests/sharding/features1.js b/jstests/sharding/features1.js
new file mode 100644
index 0000000..d2f692a
--- /dev/null
+++ b/jstests/sharding/features1.js
@@ -0,0 +1,139 @@
// features1.js -- assorted sharding feature checks: system namespaces,
// index propagation, unique indexes, eval, unique shard keys,
// convertToCapped, capped collections, and group().

s = new ShardingTest( "features1" , 2 , 1 , 1 );

s.adminCommand( { enablesharding : "test" } );

// ---- can't shard system namespaces ----

assert( ! s.admin.runCommand( { shardcollection : "test.system.blah" , key : { num : 1 } } ).ok , "shard system namespace" );

// ---- setup test.foo -----

s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );

db = s.getDB( "test" );

a = s._connections[0].getDB( "test" );
b = s._connections[1].getDB( "test" );

db.foo.ensureIndex( { y : 1 } );

s.adminCommand( { split : "test.foo" , middle : { num : 10 } } );
s.adminCommand( { movechunk : "test.foo" , find : { num : 20 } , to : s.getOther( s.getServer( "test" ) ).name } );

db.foo.save( { num : 5 } );
db.foo.save( { num : 15 } );

s.sync();

// ---- make sure shard key index is everywhere ----

// 3 indexes expected on each shard -- presumably _id, the shard key
// { num : 1 }, and { y : 1 }.
assert.eq( 3 , a.foo.getIndexKeys().length , "a index 1" );
assert.eq( 3 , b.foo.getIndexKeys().length , "b index 1" );

// ---- make sure if you add an index it goes everywhere ------

db.foo.ensureIndex( { x : 1 } );

s.sync();

assert.eq( 4 , a.foo.getIndexKeys().length , "a index 2" );
assert.eq( 4 , b.foo.getIndexKeys().length , "b index 2" );

// ---- no unique indexes ------

// A unique index on a non-shard-key pattern must be refused: the index
// count stays at 4 on both shards.
db.foo.ensureIndex( { z : 1 } , true );

s.sync();

assert.eq( 4 , a.foo.getIndexKeys().length , "a index 3" );
assert.eq( 4 , b.foo.getIndexKeys().length , "b index 3" );

// ---- can't shard thing with unique indexes

// A plain (non-unique) secondary index does not block sharding...
db.foo2.ensureIndex( { a : 1 } );
s.sync();
assert( s.admin.runCommand( { shardcollection : "test.foo2" , key : { num : 1 } } ).ok , "shard with index" );

// ...but a unique index on a field other than the shard key does.
db.foo3.ensureIndex( { a : 1 } , true );
s.sync();
printjson( db.system.indexes.find( { ns : "test.foo3" } ).toArray() );
assert( ! s.admin.runCommand( { shardcollection : "test.foo3" , key : { num : 1 } } ).ok , "shard with unique index" );

// ----- eval -----

db.foo2.save( { num : 5 , a : 7 } );
db.foo3.save( { num : 5 , a : 8 } );

assert.eq( 1 , db.foo3.count() , "eval pre1" );
assert.eq( 1 , db.foo2.count() , "eval pre2" );

// eval works against unsharded foo3 but must throw for sharded foo2.
assert.eq( 8 , db.eval( function(){ return db.foo3.findOne().a; } ), "eval 1 " );
assert.throws( function(){ db.eval( function(){ return db.foo2.findOne().a; } ) } , "eval 2" )

assert.eq( 1 , db.eval( function(){ return db.foo3.count(); } ), "eval 3 " );
assert.throws( function(){ db.eval( function(){ return db.foo2.count(); } ) } , "eval 4" )


// ---- unique shard key ----

// Uniqueness on the shard key itself IS allowed, and the unique index must
// exist on both shards after a split + migration.
assert( s.admin.runCommand( { shardcollection : "test.foo4" , key : { num : 1 } , unique : true } ).ok , "shard with index and unique" );
s.adminCommand( { split : "test.foo4" , middle : { num : 10 } } );
s.adminCommand( { movechunk : "test.foo4" , find : { num : 20 } , to : s.getOther( s.getServer( "test" ) ).name } );
db.foo4.save( { num : 5 } );
db.foo4.save( { num : 15 } );
s.sync();
assert.eq( 1 , a.foo4.count() , "ua1" );
assert.eq( 1 , b.foo4.count() , "ub1" );

assert.eq( 2 , a.foo4.getIndexes().length , "ua2" );
assert.eq( 2 , b.foo4.getIndexes().length , "ub2" );

assert( a.foo4.getIndexes()[1].unique , "ua3" );
assert( b.foo4.getIndexes()[1].unique , "ub3" );

// --- don't let you convertToCapped ----
assert( ! db.foo4.isCapped() , "ca1" );
assert( ! a.foo4.isCapped() , "ca2" );
assert( ! b.foo4.isCapped() , "ca3" );
assert( ! db.foo4.convertToCapped( 30000 ).ok , "ca30" );
assert( ! db.foo4.isCapped() , "ca4" );
assert( ! a.foo4.isCapped() , "ca5" );
assert( ! b.foo4.isCapped() , "ca6" );

// make sure i didn't break anything -- convertToCapped on an UNSHARDED
// collection must still work
db.foo4a.save( { a : 1 } );
assert( ! db.foo4a.isCapped() , "ca7" );
db.foo4a.convertToCapped( 30000 );
assert( db.foo4a.isCapped() , "ca8" );

// --- don't let you shard a capped collection

db.createCollection("foo5", {capped:true, size:30000});
assert( db.foo5.isCapped() , "cb1" );
assert( ! s.admin.runCommand( { shardcollection : "test.foo5" , key : { num : 1 } } ).ok , "shard capped" );


// ----- group ----

db.foo6.save( { a : 1 } );
db.foo6.save( { a : 3 } );
db.foo6.save( { a : 3 } );
s.sync();

// group() works while foo6 lives on a single shard...
assert.eq( 2 , db.foo6.group( { key : { a : 1 } , initial : { count : 0 } ,
                                reduce : function(z,prev){ prev.count++; } } ).length );

assert.eq( 3 , db.foo6.find().count() );
assert( s.admin.runCommand( { shardcollection : "test.foo6" , key : { a : 2 } } ).ok );
assert.eq( 3 , db.foo6.find().count() );

s.adminCommand( { split : "test.foo6" , middle : { a : 2 } } );
s.adminCommand( { movechunk : "test.foo6" , find : { a : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );

// ...but must throw once the data is spread across shards.
assert.throws( function(){ db.foo6.group( { key : { a : 1 } , initial : { count : 0 } , reduce : function(z,prev){ prev.count++; } } ); } );;


s.stop()
diff --git a/jstests/sharding/features2.js b/jstests/sharding/features2.js
new file mode 100644
index 0000000..47fedc8
--- /dev/null
+++ b/jstests/sharding/features2.js
@@ -0,0 +1,114 @@
// features2.js -- sharded distinct(), remove(), _id-keyed sharding, and the
// data set for the map/reduce checks that follow below.

s = new ShardingTest( "features2" , 2 , 1 , 1 );
s.adminCommand( { enablesharding : "test" } );

a = s._connections[0].getDB( "test" );
b = s._connections[1].getDB( "test" );

db = s.getDB( "test" );

// ---- distinct ----

db.foo.save( { x : 1 } );
db.foo.save( { x : 2 } );
db.foo.save( { x : 3 } );

// Everything starts on one shard: one of a/b sees all values, the other none.
assert.eq( "1,2,3" , db.foo.distinct( "x" ) , "distinct 1" );
assert( a.foo.distinct("x").length == 3 || b.foo.distinct("x").length == 3 , "distinct 2" );
assert( a.foo.distinct("x").length == 0 || b.foo.distinct("x").length == 0 , "distinct 3" );

assert.eq( 1 , s.onNumShards( "foo" ) , "A1" );

// Spread foo across both shards; distinct must still merge correctly.
s.shardGo( "foo" , { x : 1 } , { x : 2 } , { x : 3 } );

assert.eq( 2 , s.onNumShards( "foo" ) , "A2" );

assert.eq( "1,2,3" , db.foo.distinct( "x" ) , "distinct 4" );

// ----- delete ---

// Removes by equality, by range, and unconditionally on the sharded coll.
assert.eq( 3 , db.foo.count() , "D1" );

db.foo.remove( { x : 3 } );
assert.eq( 2 , db.foo.count() , "D2" );

db.foo.save( { x : 3 } );
assert.eq( 3 , db.foo.count() , "D3" );

db.foo.remove( { x : { $gt : 2 } } );
assert.eq( 2 , db.foo.count() , "D4" );

db.foo.remove( { x : { $gt : -1 } } );
assert.eq( 0 , db.foo.count() , "D5" );

db.foo.save( { x : 1 } );
db.foo.save( { x : 2 } );
db.foo.save( { x : 3 } );
assert.eq( 3 , db.foo.count() , "D6" );
db.foo.remove( {} );
assert.eq( 0 , db.foo.count() , "D7" );

// --- _id key ---

db.foo2.insert( { _id : new ObjectId() } );
db.foo2.insert( { _id : new ObjectId() } );
db.foo2.insert( { _id : new ObjectId() } );

assert.eq( 1 , s.onNumShards( "foo2" ) , "F1" );

// Sharding on _id: the pre-existing documents survive and inserts still work.
s.adminCommand( { shardcollection : "test.foo2" , key : { _id : 1 } } );

assert.eq( 3 , db.foo2.count() , "F2" )
db.foo2.insert( {} );
assert.eq( 4 , db.foo2.count() , "F3" )


// --- map/reduce

// Tag data for the map/reduce below: "a" appears 2x, "b" 3x, "c" 3x.
db.mr.save( { x : 1 , tags : [ "a" , "b" ] } );
db.mr.save( { x : 2 , tags : [ "b" , "c" ] } );
db.mr.save( { x : 3 , tags : [ "c" , "a" ] } );
db.mr.save( { x : 4 , tags : [ "b" , "c" ] } );
+
// Map function: emit a partial count of 1 for every tag on the document.
m = function(){
    var tags = this.tags;
    for ( var idx = 0; idx < tags.length; idx++ ){
        emit( tags[idx] , { count : 1 } );
    }
};
+
// Reduce function: sum the partial counts emitted for one key.
r = function( key , values ){
    var total = 0;
    values.forEach( function( v ){
        total += v.count;
    } );
    return { count : total };
};
+
// Run the tag-count map/reduce and verify the aggregated counts
// (a: 2, b: 3, c: 3 -- matching the data inserted above).
// `n` labels the assertion messages ("before"/"after" sharding).
// BUG FIX: the last three assertions all reused the label "MR T2", making
// a failure impossible to attribute; they now carry distinct labels.
doMR = function( n ){
    var res = db.mr.mapReduce( m , r );
    printjson( res );
    var x = db[res.result];
    assert.eq( 3 , x.find().count() , "MR T1 " + n );

    var z = {};
    x.find().forEach( function(a){ z[a._id] = a.value.count; } );
    assert.eq( 3 , Object.keySet( z ).length , "MR T2 " + n );
    assert.eq( 2 , z.a , "MR T3 " + n );
    assert.eq( 3 , z.b , "MR T4 " + n );
    assert.eq( 3 , z.c , "MR T5 " + n );

    x.drop();
}
+
// Map/reduce must produce identical results before and after the collection
// is spread over both shards.
doMR( "before" );

assert.eq( 1 , s.onNumShards( "mr" ) , "E1" );
s.shardGo( "mr" , { x : 1 } , { x : 2 } , { x : 3 } );
// BUG FIX: this assertion reused the label "E1"; renamed so the two
// onNumShards checks are distinguishable in failure output.
assert.eq( 2 , s.onNumShards( "mr" ) , "E2" );

doMR( "after" );

s.stop();
diff --git a/jstests/sharding/key_many.js b/jstests/sharding/key_many.js
new file mode 100644
index 0000000..43e7cc5
--- /dev/null
+++ b/jstests/sharding/key_many.js
@@ -0,0 +1,121 @@
+// key_many.js
+
// Shard-key type matrix: each entry names a key type, six values of that
// type, and the field the key lives in (dotted for embedded keys).
// NOTE: values have to be listed in ascending order -- the sort assertions
// in the main loop below depend on it.
types =
    [ { name : "string" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield: "k" } ,
      { name : "double" , values : [ 1.2 , 3.5 , 4.5 , 4.6 , 6.7 , 9.9 ] , keyfield : "a" } ,
      { name : "string_id" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "_id" },
      { name : "embedded" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b" } ,
      { name : "embedded 2" , values : [ "allan" , "bob" , "eliot" , "joe" , "mark" , "sara" ] , keyfield : "a.b.c" } ,
      { name : "object" , values : [ {a:1, b:1.2}, {a:1, b:3.5}, {a:1, b:4.5}, {a:2, b:1.2}, {a:2, b:3.5}, {a:2, b:4.5} ] , keyfield : "o" } ,
    ]

s = new ShardingTest( "key_many" , 2 );

s.adminCommand( { enablesharding : "test" } )
db = s.getDB( "test" );
primary = s.getServer( "test" ).getDB( "test" );
// "seconday" [sic] -- the misspelling is kept because the main loop below
// refers to this global by that name.
seconday = s.getOther( primary ).getDB( "test" );
+
// Build { <curT.keyfield>: v } with the key kept in dotted form,
// e.g. { "a.b": v } -- the shape used for shard-key specs and split/find
// documents. Reads the global `curT` set by the main loop.
function makeObjectDotted( v ){
    var doc = {};
    doc[ curT.keyfield ] = v;
    return doc;
}
+
// Build a document with `v` nested under the dotted path curT.keyfield,
// e.g. keyfield "a.b.c" -> { a: { b: { c: v } } }.
function makeObject( v ){
    var keys = curT.keyfield.split('.');
    var doc = {};
    var cursor = doc;

    // Descend, creating one level per path segment except the last.
    for ( var k = 0; k + 1 < keys.length; k++ ){
        cursor = cursor[ keys[ k ] ] = {};
    }
    cursor[ keys[ keys.length - 1 ] ] = v;

    return doc;
}
+
// Extract the value at the dotted path curT.keyfield from document `o`
// (inverse of makeObject).
function getKey( o ){
    var value = o;
    curT.keyfield.split('.').forEach( function( field ){
        value = value[ field ];
    } );
    return value;
}
+
+
+
// Run the full split/migrate/query battery once per shard-key type.
for ( var i=0; i<types.length; i++ ){
    curT = types[i]; //global - read by the makeObject*/getKey helpers above

    print("\n\n#### Now Testing " + curT.name + " ####\n\n");

    var shortName = "foo_" + curT.name;
    var longName = "test." + shortName;

    var c = db[shortName];
    s.adminCommand( { shardcollection : longName , key : makeObjectDotted( 1 ) } );

    assert.eq( 1 , s.config.chunks.find( { ns : longName } ).count() , curT.name + " sanity check A" );

    // Insert the six values in shuffled order (one insert(), five save()s).
    var unsorted = Array.shuffle( Object.extend( [] , curT.values ) );
    c.insert( makeObject( unsorted[0] ) );
    for ( var x=1; x<unsorted.length; x++ )
        c.save( makeObject( unsorted[x] ) );

    assert.eq( 6 , c.find().count() , curT.name + " basic count" );

    // Split at the 4th value three times, then hand that chunk to the
    // other shard.
    s.adminCommand( { split : longName , find : makeObjectDotted( curT.values[3] ) } );
    s.adminCommand( { split : longName , find : makeObjectDotted( curT.values[3] ) } );
    s.adminCommand( { split : longName , find : makeObjectDotted( curT.values[3] ) } );

    s.adminCommand( { movechunk : longName , find : makeObjectDotted( curT.values[3] ) , to : seconday.getMongo().name } );

    s.printChunks();

    // Three documents on each shard after the migration.
    assert.eq( 3 , primary[shortName].find().toArray().length , curT.name + " primary count" );
    assert.eq( 3 , seconday[shortName].find().toArray().length , curT.name + " secondary count" );

    assert.eq( 6 , c.find().toArray().length , curT.name + " total count" );
    assert.eq( 6 , c.find().sort( makeObjectDotted( 1 ) ).toArray().length , curT.name + " total count sorted" );

    assert.eq( 6 , c.find().sort( makeObjectDotted( 1 ) ).count() , curT.name + " total count with count()" );

    // Merged sort order across shards must match the (reversed) value list.
    assert.eq( curT.values , c.find().sort( makeObjectDotted( 1 ) ).toArray().map( getKey ) , curT.name + " sort 1" );
    assert.eq( curT.values.reverse() , c.find().sort( makeObjectDotted( -1 ) ).toArray().map( getKey ) , curT.name + " sort 2" );


    // Queries on a non-key field must return nothing, sorted or not.
    assert.eq( 0 , c.find( { xx : 17 } ).sort( { zz : 1 } ).count() , curT.name + " xx 0a " );
    assert.eq( 0 , c.find( { xx : 17 } ).sort( makeObjectDotted( 1 ) ).count() , curT.name + " xx 0b " );
    assert.eq( 0 , c.find( { xx : 17 } ).count() , curT.name + " xx 0c " );
    assert.eq( 0 , c.find( { xx : { $exists : true } } ).count() , curT.name + " xx 1 " );

    // Targeted update selected by shard key.
    c.update( makeObjectDotted( curT.values[3] ) , { $set : { xx : 17 } } );
    assert.eq( 1 , c.find( { xx : { $exists : true } } ).count() , curT.name + " xx 2 " );
    assert.eq( curT.values[3] , getKey( c.findOne( { xx : 17 } ) ) , curT.name + " xx 3 " );

    c.update( makeObjectDotted( curT.values[3] ) , { $set : { xx : 17 } } , {upsert: true});
    assert.eq( null , db.getLastError() , curT.name + " upserts should work if they include the shard key in the query" );

    c.ensureIndex( { _id : 1 } , { unique : true } );
    assert.eq( null , db.getLastError() , curT.name + " creating _id index should be ok" );

    // multi update: $inc creates xx as 1 where missing, so the sum goes
    // from 17 to (17+1) + 5*1 = 23.
    var mysum = 0;
    c.find().forEach( function(z){ mysum += z.xx || 0; } );
    assert.eq( 17 , mysum, curT.name + " multi update pre" );
    c.update( {} , { $inc : { xx : 1 } } , false , true );
    var mysum = 0;
    c.find().forEach( function(z){ mysum += z.xx || 0; } );
    assert.eq( 23 , mysum, curT.name + " multi update" );

    // TODO remove
}


s.stop();
+
+
diff --git a/jstests/sharding/key_string.js b/jstests/sharding/key_string.js
new file mode 100644
index 0000000..8ee1c70
--- /dev/null
+++ b/jstests/sharding/key_string.js
@@ -0,0 +1,44 @@
+// key_string.js
+
+s = new ShardingTest( "keystring" , 2 );
+
+db = s.getDB( "test" );
+s.adminCommand( { enablesharding : "test" } )
+s.adminCommand( { shardcollection : "test.foo" , key : { name : 1 } } );
+
+primary = s.getServer( "test" ).getDB( "test" );
+seconday = s.getOther( primary ).getDB( "test" );
+
+assert.eq( 1 , s.config.chunks.count() , "sanity check A" );
+
+db.foo.save( { name : "eliot" } )
+db.foo.save( { name : "sara" } )
+db.foo.save( { name : "bob" } )
+db.foo.save( { name : "joe" } )
+db.foo.save( { name : "mark" } )
+db.foo.save( { name : "allan" } )
+
+assert.eq( 6 , db.foo.find().count() , "basic count" );
+
+s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );
+s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );
+s.adminCommand( { split : "test.foo" , find : { name : "joe" } } );
+
+s.adminCommand( { movechunk : "test.foo" , find : { name : "joe" } , to : seconday.getMongo().name } );
+
+s.printChunks();
+
+assert.eq( 3 , primary.foo.find().toArray().length , "primary count" );
+assert.eq( 3 , seconday.foo.find().toArray().length , "secondary count" );
+
+assert.eq( 6 , db.foo.find().toArray().length , "total count" );
+assert.eq( 6 , db.foo.find().sort( { name : 1 } ).toArray().length , "total count sorted" );
+
+assert.eq( 6 , db.foo.find().sort( { name : 1 } ).count() , "total count with count()" );
+
+assert.eq( "allan,bob,eliot,joe,mark,sara" , db.foo.find().sort( { name : 1 } ).toArray().map( function(z){ return z.name; } ) , "sort 1" );
+assert.eq( "sara,mark,joe,eliot,bob,allan" , db.foo.find().sort( { name : -1 } ).toArray().map( function(z){ return z.name; } ) , "sort 2" );
+
+s.stop();
+
+
diff --git a/jstests/sharding/movePrimary1.js b/jstests/sharding/movePrimary1.js
new file mode 100644
index 0000000..20dc6c1
--- /dev/null
+++ b/jstests/sharding/movePrimary1.js
@@ -0,0 +1,31 @@
+
+
+s = new ShardingTest( "movePrimary1" , 2 );
+
+initDB = function( name ){
+ var db = s.getDB( name );
+ var c = db.foo;
+ c.save( { a : 1 } );
+ c.save( { a : 2 } );
+ c.save( { a : 3 } );
+ assert( 3 , c.count() );
+
+ return s.getServer( name );
+}
+
+from = initDB( "test1" );
+to = s.getOther( from );
+
+assert.eq( 3 , from.getDB( "test1" ).foo.count() , "from doesn't have data before move" );
+assert.eq( 0 , to.getDB( "test1" ).foo.count() , "to has data before move" );
+
+assert.eq( s.config.databases.findOne( { name : "test1" } ).primary , from.name , "not in db correctly to start" );
+s.admin.runCommand( { moveprimary : "test1" , to : to.name } );
+assert.eq( s.config.databases.findOne( { name : "test1" } ).primary , to.name , "to in config db didn't change" );
+
+
+assert.eq( 0 , from.getDB( "test1" ).foo.count() , "from still has data after move" );
+assert.eq( 3 , to.getDB( "test1" ).foo.count() , "to doesn't have data after move" );
+
+s.stop();
+
diff --git a/jstests/sharding/moveshard1.js b/jstests/sharding/moveshard1.js
new file mode 100644
index 0000000..b074b4c
--- /dev/null
+++ b/jstests/sharding/moveshard1.js
@@ -0,0 +1,39 @@
+// movechunk1.js
+
+s = new ShardingTest( "movechunk1" , 2 );
+
+l = s._connections[0];
+r = s._connections[1];
+
+ldb = l.getDB( "foo" );
+rdb = r.getDB( "foo" );
+
+ldb.things.save( { a : 1 } )
+ldb.things.save( { a : 2 } )
+ldb.things.save( { a : 3 } )
+
+assert.eq( ldb.things.count() , 3 );
+assert.eq( rdb.things.count() , 0 );
+
+startResult = l.getDB( "admin" ).runCommand( { "movechunk.start" : "foo.things" ,
+ "to" : s._serverNames[1] ,
+ "from" : s._serverNames[0] ,
+ filter : { a : { $gt : 2 } }
+ } );
+print( "movechunk.start: " + tojson( startResult ) );
+assert( startResult.ok == 1 , "start failed!" );
+
+finishResult = l.getDB( "admin" ).runCommand( { "movechunk.finish" : "foo.things" ,
+ finishToken : startResult.finishToken ,
+ to : s._serverNames[1] ,
+ newVersion : 1 } );
+print( "movechunk.finish: " + tojson( finishResult ) );
+assert( finishResult.ok == 1 , "finishResult failed!" );
+
+assert.eq( rdb.things.count() , 1 , "right has wrong size after move" );
+assert.eq( ldb.things.count() , 2 , "left has wrong size after move" );
+
+
+s.stop();
+
+
diff --git a/jstests/sharding/passthrough1.js b/jstests/sharding/passthrough1.js
new file mode 100644
index 0000000..d5df0d2
--- /dev/null
+++ b/jstests/sharding/passthrough1.js
@@ -0,0 +1,10 @@
+
+s = new ShardingTest( "passthrough1" , 2 )
+
+db = s.getDB( "test" );
+db.foo.insert( { num : 1 , name : "eliot" } );
+db.foo.insert( { num : 2 , name : "sara" } );
+db.foo.insert( { num : -1 , name : "joe" } );
+assert.eq( 3 , db.foo.find().length() );
+
+s.stop();
diff --git a/jstests/sharding/shard1.js b/jstests/sharding/shard1.js
new file mode 100644
index 0000000..bbe1144
--- /dev/null
+++ b/jstests/sharding/shard1.js
@@ -0,0 +1,32 @@
+/**
+* this tests some of the ground work
+*/
+
+s = new ShardingTest( "shard1" , 2 );
+
+db = s.getDB( "test" );
+db.foo.insert( { num : 1 , name : "eliot" } );
+db.foo.insert( { num : 2 , name : "sara" } );
+db.foo.insert( { num : -1 , name : "joe" } );
+assert.eq( 3 , db.foo.find().length() );
+
+shardCommand = { shardcollection : "test.foo" , key : { num : 1 } };
+
+assert.throws( function(){ s.adminCommand( shardCommand ); } );
+
+s.adminCommand( { enablesharding : "test" } );
+assert.eq( 3 , db.foo.find().length() , "after partitioning count failed" );
+
+s.adminCommand( shardCommand );
+dbconfig = s.config.databases.findOne( { name : "test" } );
+assert.eq( dbconfig.sharded["test.foo"] , { key : { num : 1 } , unique : false } , "Sharded content" );
+
+assert.eq( 1 , s.config.chunks.count() );
+si = s.config.chunks.findOne();
+assert( si );
+assert.eq( si.ns , "test.foo" );
+
+assert.eq( 3 , db.foo.find().length() , "after sharding, no split count failed" );
+
+
+s.stop();
diff --git a/jstests/sharding/shard2.js b/jstests/sharding/shard2.js
new file mode 100644
index 0000000..566a0db
--- /dev/null
+++ b/jstests/sharding/shard2.js
@@ -0,0 +1,194 @@
+// shard2.js
+
+/**
+* test basic sharding
+*/
+
+placeCheck = function( num ){
+ print("shard2 step: " + num );
+}
+
+s = new ShardingTest( "shard2" , 2 , 6 );
+
+db = s.getDB( "test" );
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+assert.eq( 1 , s.config.chunks.count() , "sanity check 1" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 0 } } );
+assert.eq( 2 , s.config.chunks.count() , "should be 2 shards" );
+chunks = s.config.chunks.find().toArray();
+assert.eq( chunks[0].shard , chunks[1].shard , "server should be the same after a split" );
+
+
+db.foo.save( { num : 1 , name : "eliot" } );
+db.foo.save( { num : 2 , name : "sara" } );
+db.foo.save( { num : -1 , name : "joe" } );
+
+s.adminCommand( "connpoolsync" );
+
+assert.eq( 3 , s.getServer( "test" ).getDB( "test" ).foo.find().length() , "not right directly to db A" );
+assert.eq( 3 , db.foo.find().length() , "not right on shard" );
+
+primary = s.getServer( "test" ).getDB( "test" );
+secondary = s.getOther( primary ).getDB( "test" );
+
+assert.eq( 3 , primary.foo.find().length() , "primary wrong B" );
+assert.eq( 0 , secondary.foo.find().length() , "secondary wrong C" );
+assert.eq( 3 , db.foo.find().sort( { num : 1 } ).length() );
+
+placeCheck( 2 );
+
+// NOTE: at this point we have 2 shards on 1 server
+
+// test move shard
+assert.throws( function(){ s.adminCommand( { movechunk : "test.foo" , find : { num : 1 } , to : primary.getMongo().name } ); } );
+assert.throws( function(){ s.adminCommand( { movechunk : "test.foo" , find : { num : 1 } , to : "adasd" } ) } );
+
+s.adminCommand( { movechunk : "test.foo" , find : { num : 1 } , to : secondary.getMongo().name } );
+assert.eq( 2 , secondary.foo.find().length() , "secondary should have 2 after move shard" );
+assert.eq( 1 , primary.foo.find().length() , "primary should only have 1 after move shard" );
+
+assert.eq( 2 , s.config.chunks.count() , "still should have 2 shards after move not:" + s.getChunksString() );
+chunks = s.config.chunks.find().toArray();
+assert.neq( chunks[0].shard , chunks[1].shard , "servers should NOT be the same after the move" );
+
+placeCheck( 3 );
+
+// test inserts go to right server/shard
+
+db.foo.save( { num : 3 , name : "bob" } );
+s.adminCommand( "connpoolsync" );
+assert.eq( 1 , primary.foo.find().length() , "after move insert go wrong place?" );
+assert.eq( 3 , secondary.foo.find().length() , "after move insert go wrong place?" );
+
+db.foo.save( { num : -2 , name : "funny man" } );
+s.adminCommand( "connpoolsync" );
+assert.eq( 2 , primary.foo.find().length() , "after move insert go wrong place?" );
+assert.eq( 3 , secondary.foo.find().length() , "after move insert go wrong place?" );
+
+
+db.foo.save( { num : 0 , name : "funny guy" } );
+s.adminCommand( "connpoolsync" );
+assert.eq( 2 , primary.foo.find().length() , "boundary A" );
+assert.eq( 4 , secondary.foo.find().length() , "boundary B" );
+
+placeCheck( 4 );
+
+// findOne
+assert.eq( "eliot" , db.foo.findOne( { num : 1 } ).name );
+assert.eq( "funny man" , db.foo.findOne( { num : -2 } ).name );
+
+// getAll
+function sumQuery( c ){
+ var sum = 0;
+ c.toArray().forEach(
+ function(z){
+ sum += z.num;
+ }
+ );
+ return sum;
+}
+assert.eq( 6 , db.foo.find().length() , "sharded query 1" );
+assert.eq( 3 , sumQuery( db.foo.find() ) , "sharded query 2" );
+
+placeCheck( 5 );
+
+// sort by num
+
+assert.eq( 3 , sumQuery( db.foo.find().sort( { num : 1 } ) ) , "sharding query w/sort 1" );
+assert.eq( 3 , sumQuery( db.foo.find().sort( { num : -1 } ) ) , "sharding query w/sort 2" );
+
+assert.eq( "funny man" , db.foo.find().sort( { num : 1 } )[0].name , "sharding query w/sort 3 order wrong" );
+assert.eq( -2 , db.foo.find().sort( { num : 1 } )[0].num , "sharding query w/sort 4 order wrong" );
+
+assert.eq( "bob" , db.foo.find().sort( { num : -1 } )[0].name , "sharding query w/sort 5 order wrong" );
+assert.eq( 3 , db.foo.find().sort( { num : -1 } )[0].num , "sharding query w/sort 6 order wrong" );
+
+placeCheck( 6 );
+// sort by name
+
+function getNames( c ){
+ return c.toArray().map( function(z){ return z.name; } );
+}
+correct = getNames( db.foo.find() ).sort();
+assert.eq( correct , getNames( db.foo.find().sort( { name : 1 } ) ) );
+correct = correct.reverse();
+assert.eq( correct , getNames( db.foo.find().sort( { name : -1 } ) ) );
+
+assert.eq( 3 , sumQuery( db.foo.find().sort( { name : 1 } ) ) , "sharding query w/non-shard sort 1" );
+assert.eq( 3 , sumQuery( db.foo.find().sort( { name : -1 } ) ) , "sharding query w/non-shard sort 2" );
+
+
+// sort by num multiple shards per server
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+assert.eq( "funny man" , db.foo.find().sort( { num : 1 } )[0].name , "sharding query w/sort and another split 1 order wrong" );
+assert.eq( "bob" , db.foo.find().sort( { num : -1 } )[0].name , "sharding query w/sort and another split 2 order wrong" );
+assert.eq( "funny man" , db.foo.find( { num : { $lt : 100 } } ).sort( { num : 1 } ).arrayAccess(0).name , "sharding query w/sort and another split 3 order wrong" );
+
+placeCheck( 7 );
+
+// getMore
+assert.eq( 4 , db.foo.find().limit(-4).toArray().length , "getMore 1" );
+function countCursor( c ){
+ var num = 0;
+ while ( c.hasNext() ){
+ c.next();
+ num++;
+ }
+ return num;
+}
+assert.eq( 6 , countCursor( db.foo.find()._exec() ) , "getMore 2" );
+assert.eq( 6 , countCursor( db.foo.find().limit(1)._exec() ) , "getMore 3" );
+
+// find by non-shard-key
+db.foo.find().forEach(
+ function(z){
+ var y = db.foo.findOne( { _id : z._id } );
+ assert( y , "_id check 1 : " + tojson( z ) );
+ assert.eq( z.num , y.num , "_id check 2 : " + tojson( z ) );
+ }
+);
+
+// update
+person = db.foo.findOne( { num : 3 } );
+assert.eq( "bob" , person.name , "update setup 1" );
+person.name = "bob is gone";
+db.foo.update( { num : 3 } , person );
+person = db.foo.findOne( { num : 3 } );
+assert.eq( "bob is gone" , person.name , "update test B" );
+
+// remove
+assert( db.foo.findOne( { num : 3 } ) != null , "remove test A" );
+db.foo.remove( { num : 3 } );
+assert.isnull( db.foo.findOne( { num : 3 } ) , "remove test B" );
+
+db.foo.save( { num : 3 , name : "eliot2" } );
+person = db.foo.findOne( { num : 3 } );
+assert( person , "remove test C" );
+assert.eq( person.name , "eliot2" );
+
+db.foo.remove( { _id : person._id } );
+assert.isnull( db.foo.findOne( { num : 3 } ) , "remove test E" );
+
+placeCheck( 8 );
+
+// TODO: getLastError
+db.getLastError();
+db.getPrevError();
+
+// ---- move all to the secondary
+
+assert.eq( 2 , s.onNumShards( "foo" ) , "on 2 shards" );
+
+secondary.foo.insert( { num : -3 } );
+
+s.adminCommand( { movechunk : "test.foo" , find : { num : -2 } , to : secondary.getMongo().name } );
+assert.eq( 1 , s.onNumShards( "foo" ) , "on 1 shards" );
+
+s.adminCommand( { movechunk : "test.foo" , find : { num : -2 } , to : primary.getMongo().name } );
+assert.eq( 2 , s.onNumShards( "foo" ) , "on 2 shards again" );
+assert.eq( 3 , s.config.chunks.count() , "only 3 chunks" );
+
+s.stop();
diff --git a/jstests/sharding/shard3.js b/jstests/sharding/shard3.js
new file mode 100644
index 0000000..8c5b184
--- /dev/null
+++ b/jstests/sharding/shard3.js
@@ -0,0 +1,130 @@
+// shard3.js
+
+s = new ShardingTest( "shard3" , 2 , 50 , 2 );
+
+s2 = s._mongos[1];
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+
+a = s.getDB( "test" ).foo;
+b = s2.getDB( "test" ).foo;
+
+primary = s.getServer( "test" ).getDB( "test" ).foo;
+secondary = s.getOther( primary.name ).getDB( "test" ).foo;
+
+a.save( { num : 1 } );
+a.save( { num : 2 } );
+a.save( { num : 3 } );
+
+assert.eq( 3 , a.find().toArray().length , "normal A" );
+assert.eq( 3 , b.find().toArray().length , "other A" );
+
+assert.eq( 3 , primary.count() , "p1" )
+assert.eq( 0 , secondary.count() , "s1" )
+
+assert.eq( 1 , s.onNumShards( "foo" ) , "on 1 shards" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+
+assert( primary.find().toArray().length > 0 , "blah 1" );
+assert( secondary.find().toArray().length > 0 , "blah 2" );
+assert.eq( 3 , primary.find().itcount() + secondary.find().itcount() , "blah 3" )
+
+assert.eq( 3 , a.find().toArray().length , "normal B" );
+assert.eq( 3 , b.find().toArray().length , "other B" );
+
+// --- filtering ---
+
+function doCounts( name , total ){
+ total = total || ( primary.count() + secondary.count() );
+ assert.eq( total , a.count() , name + " count" );
+ assert.eq( total , a.find().sort( { n : 1 } ).itcount() , name + " itcount - sort n" );
+ assert.eq( total , a.find().itcount() , name + " itcount" );
+ assert.eq( total , a.find().sort( { _id : 1 } ).itcount() , name + " itcount - sort _id" );
+ return total;
+}
+
+var total = doCounts( "before wrong save" )
+secondary.save( { num : -3 } );
+doCounts( "after wrong save" , total )
+
+// --- move all to 1 ---
+print( "MOVE ALL TO 1" );
+
+assert.eq( 2 , s.onNumShards( "foo" ) , "on 2 shards" );
+s.printCollectionInfo( "test.foo" );
+
+assert( a.findOne( { num : 1 } ) )
+assert( b.findOne( { num : 1 } ) )
+
+print( "GOING TO MOVE" );
+s.printCollectionInfo( "test.foo" );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 1 } , to : s.getOther( s.getServer( "test" ) ).name } );
+s.printCollectionInfo( "test.foo" );
+assert.eq( 1 , s.onNumShards( "foo" ) , "on 1 shard again" );
+assert( a.findOne( { num : 1 } ) )
+assert( b.findOne( { num : 1 } ) )
+
+print( "*** drop" );
+
+s.printCollectionInfo( "test.foo" , "before drop" );
+a.drop();
+s.printCollectionInfo( "test.foo" , "after drop" );
+
+assert.eq( 0 , a.count() , "a count after drop" )
+assert.eq( 0 , b.count() , "b count after drop" )
+
+s.printCollectionInfo( "test.foo" , "after counts" );
+
+assert.eq( 0 , primary.count() , "p count after drop" )
+assert.eq( 0 , secondary.count() , "s count after drop" )
+
+primary.save( { num : 1 } );
+secondary.save( { num : 4 } );
+
+assert.eq( 1 , primary.count() , "p count after drop adn save" )
+assert.eq( 1 , secondary.count() , "s count after drop save " )
+
+
+print("*** makes sure that sharding knows where things live" );
+
+assert.eq( 1 , a.count() , "a count after drop and save" )
+s.printCollectionInfo( "test.foo" , "after a count" );
+assert.eq( 1 , b.count() , "b count after drop and save" )
+s.printCollectionInfo( "test.foo" , "after b count" );
+
+assert( a.findOne( { num : 1 } ) , "a drop1" );
+assert.isnull( a.findOne( { num : 4 } ) , "a drop1" );
+
+s.printCollectionInfo( "test.foo" , "after a findOne tests" );
+
+assert( b.findOne( { num : 1 } ) , "b drop1" );
+assert.isnull( b.findOne( { num : 4 } ) , "b drop1" );
+
+s.printCollectionInfo( "test.foo" , "after b findOne tests" );
+
+print( "*** dropDatabase setup" )
+
+s.printShardingStatus()
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+a.save( { num : 2 } );
+a.save( { num : 3 } );
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+s.printShardingStatus();
+
+s.printCollectionInfo( "test.foo" , "after dropDatabase setup" );
+doCounts( "after dropDatabase setup2" )
+s.printCollectionInfo( "test.foo" , "after dropDatabase setup3" );
+
+print( "*** ready to call dropDatabase" )
+res = s.getDB( "test" ).dropDatabase();
+assert.eq( 1 , res.ok , "dropDatabase failed : " + tojson( res ) );
+
+s.printShardingStatus();
+s.printCollectionInfo( "test.foo" , "after dropDatabase call 1" );
+assert.eq( 0 , doCounts( "after dropDatabase called" ) )
+
+s.stop();
diff --git a/jstests/sharding/shard4.js b/jstests/sharding/shard4.js
new file mode 100644
index 0000000..2d7a0df
--- /dev/null
+++ b/jstests/sharding/shard4.js
@@ -0,0 +1,49 @@
+// shard4.js
+
+s = new ShardingTest( "shard4" , 2 , 50 , 2 );
+
+s2 = s._mongos[1];
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+
+s.getDB( "test" ).foo.save( { num : 1 } );
+s.getDB( "test" ).foo.save( { num : 2 } );
+s.getDB( "test" ).foo.save( { num : 3 } );
+s.getDB( "test" ).foo.save( { num : 4 } );
+s.getDB( "test" ).foo.save( { num : 5 } );
+s.getDB( "test" ).foo.save( { num : 6 } );
+s.getDB( "test" ).foo.save( { num : 7 } );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal A" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other A" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 4 } } );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+
+assert( s._connections[0].getDB( "test" ).foo.find().toArray().length > 0 , "blah 1" );
+assert( s._connections[1].getDB( "test" ).foo.find().toArray().length > 0 , "blah 2" );
+assert.eq( 7 , s._connections[0].getDB( "test" ).foo.find().toArray().length +
+ s._connections[1].getDB( "test" ).foo.find().toArray().length , "blah 3" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+//s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+s.printChunks();
+
+print( "* A" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B 1" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B 2" );
+print( "* B" );
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B 3" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B 4" );
+
+for ( var i=0; i<10; i++ ){
+ print( "* C " + i );
+ assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B " + i );
+}
+
+s.stop();
diff --git a/jstests/sharding/shard5.js b/jstests/sharding/shard5.js
new file mode 100644
index 0000000..050a7d7
--- /dev/null
+++ b/jstests/sharding/shard5.js
@@ -0,0 +1,52 @@
+// shard5.js
+
+// tests write passthrough
+
+s = new ShardingTest( "shard5" , 2 , 50 , 2 );
+
+s2 = s._mongos[1];
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { num : 1 } } );
+
+s.getDB( "test" ).foo.save( { num : 1 } );
+s.getDB( "test" ).foo.save( { num : 2 } );
+s.getDB( "test" ).foo.save( { num : 3 } );
+s.getDB( "test" ).foo.save( { num : 4 } );
+s.getDB( "test" ).foo.save( { num : 5 } );
+s.getDB( "test" ).foo.save( { num : 6 } );
+s.getDB( "test" ).foo.save( { num : 7 } );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal A" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other A" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 4 } } );
+s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+
+assert( s._connections[0].getDB( "test" ).foo.find().toArray().length > 0 , "blah 1" );
+assert( s._connections[1].getDB( "test" ).foo.find().toArray().length > 0 , "blah 2" );
+assert.eq( 7 , s._connections[0].getDB( "test" ).foo.find().toArray().length +
+ s._connections[1].getDB( "test" ).foo.find().toArray().length , "blah 3" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B" );
+assert.eq( 7 , s2.getDB( "test" ).foo.find().toArray().length , "other B" );
+
+s.adminCommand( { split : "test.foo" , middle : { num : 2 } } );
+//s.adminCommand( { movechunk : "test.foo" , find : { num : 3 } , to : s.getOther( s.getServer( "test" ) ).name } );
+s.printChunks()
+
+print( "* A" );
+
+assert.eq( 7 , s.getDB( "test" ).foo.find().toArray().length , "normal B 1" );
+
+s2.getDB( "test" ).foo.save( { num : 2 } );
+
+assert.soon(
+ function(){
+ return 8 == s2.getDB( "test" ).foo.find().toArray().length;
+ } , "other B 2" , 5000 , 100 )
+
+assert.eq( 2 , s.onNumShards( "foo" ) , "on 2 shards" );
+
+
+s.stop();
diff --git a/jstests/sharding/shard6.js b/jstests/sharding/shard6.js
new file mode 100644
index 0000000..e15d74c
--- /dev/null
+++ b/jstests/sharding/shard6.js
@@ -0,0 +1,39 @@
+// shard6.js
+
+s = new ShardingTest( "shard6" , 2 , 0 , 1 );
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.data" , key : { num : 1 } } );
+
+db = s.getDB( "test" );
+
+// we want a lot of data, so let's make a 50k string to cheat :)
+bigString = "";
+while ( bigString.length < 50000 )
+ bigString += "this is a big string. ";
+
+// ok, now let's insert some data
+var num = 0;
+for ( ; num<100; num++ ){
+ db.data.save( { num : num , bigString : bigString } );
+}
+
+assert.eq( 100 , db.data.find().toArray().length );
+
+// limit
+
+assert.eq( 77 , db.data.find().limit(77).itcount() , "limit test 1" );
+assert.eq( 1 , db.data.find().limit(1).itcount() , "limit test 2" );
+for ( var i=1; i<10; i++ ){
+ assert.eq( i , db.data.find().limit(i).itcount() , "limit test 3 : " + i );
+}
+
+
+// --- test save support ---
+
+o = db.data.findOne();
+o.x = 16;
+db.data.save( o );
+assert.eq( 16 , db.data.findOne( { _id : o._id } ).x , "x1 - did save fail?" );
+
+s.stop();
diff --git a/jstests/sharding/splitpick.js b/jstests/sharding/splitpick.js
new file mode 100644
index 0000000..ad27645
--- /dev/null
+++ b/jstests/sharding/splitpick.js
@@ -0,0 +1,33 @@
+// splitpick.js
+
+/**
+* tests picking the middle to split on
+*/
+
+s = new ShardingTest( "splitpick" , 2 );
+
+db = s.getDB( "test" );
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.foo" , key : { a : 1 } } );
+
+c = db.foo;
+
+for ( var i=1; i<20; i++ ){
+ c.save( { a : i } );
+}
+c.save( { a : 99 } );
+
+assert.eq( s.admin.runCommand( { splitvalue : "test.foo" , find : { a : 1 } } ).middle.a , 1 , "splitvalue 1" );
+assert.eq( s.admin.runCommand( { splitvalue : "test.foo" , find : { a : 3 } } ).middle.a , 1 , "splitvalue 2" );
+
+s.adminCommand( { split : "test.foo" , find : { a : 1 } } );
+assert.eq( s.admin.runCommand( { splitvalue : "test.foo" , find : { a : 3 } } ).middle.a , 99 , "splitvalue 3" );
+s.adminCommand( { split : "test.foo" , find : { a : 99 } } );
+
+assert.eq( s.config.chunks.count() , 3 );
+s.printChunks();
+
+assert.eq( s.admin.runCommand( { splitvalue : "test.foo" , find : { a : 50 } } ).middle.a , 10 , "splitvalue 4 " );
+
+s.stop();
diff --git a/jstests/sharding/update1.js b/jstests/sharding/update1.js
new file mode 100644
index 0000000..82c3d8a
--- /dev/null
+++ b/jstests/sharding/update1.js
@@ -0,0 +1,33 @@
+s = new ShardingTest( "auto1" , 2 , 1 , 1 );
+
+s.adminCommand( { enablesharding : "test" } );
+s.adminCommand( { shardcollection : "test.update1" , key : { key : 1 } } );
+
+db = s.getDB( "test" )
+coll = db.update1;
+
+coll.insert({_id:1, key:1});
+
+// these are upserts
+coll.save({_id:2, key:2});
+coll.save({_id:3, key:3});
+
+assert.eq(coll.count(), 3, "count A")
+
+// update existing using save()
+coll.save({_id:1, key:1, other:1});
+
+// update existing using update()
+coll.update({_id:2}, {key:2, other:2});
+//coll.update({_id:3, key:3}, {other:3}); //should add key to new object (doesn't work yet)
+coll.update({_id:3}, {key:3, other:3});
+
+assert.eq(coll.count(), 3, "count B")
+coll.find().forEach(function(x){
+ assert.eq(x._id, x.key, "_id == key");
+ assert.eq(x._id, x.other, "_id == other");
+});
+
+
+s.stop()
+
diff --git a/jstests/sharding/version1.js b/jstests/sharding/version1.js
new file mode 100644
index 0000000..0516aff
--- /dev/null
+++ b/jstests/sharding/version1.js
@@ -0,0 +1,23 @@
+// version1.js
+
+s = new ShardingTest( "version1" , 1 , 2 )
+
+a = s._connections[0].getDB( "admin" );
+
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).ok == 0 );
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : "a" } ).ok == 0 );
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , authoritative : true } ).ok == 0 );
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 2 } ).ok == 0 , "should have failed b/c no auth" );
+
+assert.commandWorked( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 2 , authoritative : true } ) , "should have worked" );
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : "a" , version : 2 } ).ok == 0 );
+
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 2 } ).ok == 1 );
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 1 } ).ok == 0 );
+
+assert.eq( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 3 } ).oldVersion.i , 2 , "oldVersion" );
+
+assert.eq( a.runCommand( { "getShardVersion" : "alleyinsider.foo" } ).mine.i , 3 , "my get version A" );
+assert.eq( a.runCommand( { "getShardVersion" : "alleyinsider.foo" } ).global.i , 3 , "my get version B" );
+
+s.stop();
diff --git a/jstests/sharding/version2.js b/jstests/sharding/version2.js
new file mode 100644
index 0000000..9683c92
--- /dev/null
+++ b/jstests/sharding/version2.js
@@ -0,0 +1,36 @@
+// version2.js
+
+s = new ShardingTest( "version2" , 1 , 2 )
+
+a = s._connections[0].getDB( "admin" );
+
+// setup from one client
+
+assert( a.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).mine.i == 0 );
+assert( a.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).global.i == 0 );
+
+assert( a.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 2 , authoritative : true } ).ok == 1 );
+
+assert( a.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).mine.i == 2 );
+assert( a.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).global.i == 2 );
+
+// from another client
+
+a2 = connect( s._connections[0].name + "/admin" );
+
+assert.eq( a2.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).global.i , 2 , "a2 global 1" );
+assert.eq( a2.runCommand( { "getShardVersion" : "alleyinsider.foo" , configdb : s._configDB } ).mine.i , 0 , "a2 mine 1" );
+
+function simpleFindOne(){
+ return a2.getMongo().getDB( "alleyinsider" ).foo.findOne();
+}
+
+assert.commandWorked( a2.runCommand( { "setShardVersion" : "alleyinsider.bar" , configdb : s._configDB , version : 2 , authoritative : true } ) , "setShardVersion bar temp");
+assert.throws( simpleFindOne , [] , "should complain about not in sharded mode 1" );
+assert( a2.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 2 } ).ok == 1 , "setShardVersion a2-1");
+simpleFindOne(); // now should run ok
+assert( a2.runCommand( { "setShardVersion" : "alleyinsider.foo" , configdb : s._configDB , version : 3 } ).ok == 1 , "setShardVersion a2-2");
+simpleFindOne(); // newer version is ok
+
+
+s.stop();
diff --git a/jstests/shellspawn.js b/jstests/shellspawn.js
new file mode 100644
index 0000000..ea2b671
--- /dev/null
+++ b/jstests/shellspawn.js
@@ -0,0 +1,24 @@
+baseName = "jstests_shellspawn";
+t = db.getCollection( baseName );
+t.drop();
+
+if ( typeof( _startMongoProgram ) == "undefined" ){
+ print( "no fork support" );
+}
+else {
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "sleep( 2000 ); db.getCollection( \"" + baseName + "\" ).save( {a:1} );" );
+
+ assert.soon( function() { return 1 == t.count(); } );
+
+ stopMongoProgramByPid( spawn );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort(), "--eval", "print( \"I am a shell\" );" );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort() );
+
+ spawn = startMongoProgramNoConnect( "mongo", "--port", myPort() );
+
+ stopMongoProgramByPid( spawn );
+
+ // all these shells should be killed
+} \ No newline at end of file
diff --git a/jstests/slow/ns1.js b/jstests/slow/ns1.js
new file mode 100644
index 0000000..f51db01
--- /dev/null
+++ b/jstests/slow/ns1.js
@@ -0,0 +1,49 @@
+
+mydb = db.getSisterDB( "test_ns1" );
+mydb.dropDatabase();
+
+check = function( n , isNew ){
+ var coll = mydb["x" + n];
+ if ( isNew ){
+ assert.eq( 0 , coll.count() , "pop a: " + n );
+ coll.insert( { _id : n } );
+ }
+ assert.eq( 1 , coll.count() , "pop b: " + n );
+ assert.eq( n , coll.findOne()._id , "pop c: " + n );
+ return coll;
+}
+
+max = 0;
+
+for ( ; max<1000; max++ ){
+ check(max,true);
+}
+
+function checkall( removed ){
+ for ( var i=0; i<max; i++ ){
+ if ( removed == i ){
+ assert.eq( 0 , mydb["x"+i].count() , "should be 0 : " + removed );
+ }
+ else {
+ check( i , false );
+ }
+ }
+}
+
+checkall();
+
+Random.srand( 123124 );
+its = max / 2;
+print( "its: " + its );
+for ( i=0; i<its; i++ ){
+ x = Random.randInt( max );
+ check( x , false ).drop();
+ checkall( x );
+ check( x , true );
+ if ( ( i + 1 ) % 20 == 0 ){
+ print( i + "/" + its );
+ }
+}
+print( "yay" )
+
+mydb.dropDatabase();
diff --git a/jstests/sort1.js b/jstests/sort1.js
new file mode 100644
index 0000000..341b343
--- /dev/null
+++ b/jstests/sort1.js
@@ -0,0 +1,50 @@
+// test sorting, mainly a very simple test with no index
+
+debug = function( s ){
+ //print( s );
+}
+
+t = db.sorrrt;
+t.drop();
+
+t.save({x:3,z:33});
+t.save({x:5,z:33});
+t.save({x:2,z:33});
+t.save({x:3,z:33});
+t.save({x:1,z:33});
+
+debug( "a" )
+for( var pass = 0; pass < 2; pass++ ) {
+ assert( t.find().sort({x:1})[0].x == 1 );
+ assert( t.find().sort({x:1}).skip(1)[0].x == 2 );
+ assert( t.find().sort({x:-1})[0].x == 5 );
+ assert( t.find().sort({x:-1})[1].x == 3 );
+ assert.eq( t.find().sort({x:-1}).skip(0)[0].x , 5 );
+ assert.eq( t.find().sort({x:-1}).skip(1)[0].x , 3 );
+ t.ensureIndex({x:1});
+
+}
+
+debug( "b" )
+assert(t.validate().valid);
+
+
+db.sorrrt2.drop();
+db.sorrrt2.save({x:'a'});
+db.sorrrt2.save({x:'aba'});
+db.sorrrt2.save({x:'zed'});
+db.sorrrt2.save({x:'foo'});
+
+debug( "c" )
+
+for( var pass = 0; pass < 2; pass++ ) {
+ debug( tojson( db.sorrrt2.find().sort( { "x" : 1 } ).limit(1).next() ) );
+ assert.eq( "a" , db.sorrrt2.find().sort({'x': 1}).limit(1).next().x , "c.1" );
+ assert.eq( "a" , db.sorrrt2.find().sort({'x': 1}).next().x , "c.2" );
+ assert.eq( "zed" , db.sorrrt2.find().sort({'x': -1}).limit(1).next().x , "c.3" );
+ assert.eq( "zed" , db.sorrrt2.find().sort({'x': -1}).next().x , "c.4" );
+}
+
+debug( "d" )
+
+assert(db.sorrrt2.validate().valid);
diff --git a/jstests/sort2.js b/jstests/sort2.js
new file mode 100644
index 0000000..facd64c
--- /dev/null
+++ b/jstests/sort2.js
@@ -0,0 +1,22 @@
+// test sorting, mainly a very simple test with no index
+
+t = db.sorrrt2;
+t.drop();
+
+t.save({x:1, y:{a:5,b:4}});
+t.save({x:1, y:{a:7,b:3}});
+t.save({x:1, y:{a:2,b:3}});
+t.save({x:1, y:{a:9,b:3}});
+
+for( var pass = 0; pass < 2; pass++ ) {
+
+ var res = t.find().sort({'y.a':1}).toArray();
+ assert( res[0].y.a == 2 );
+ assert( res[1].y.a == 5 );
+ assert( res.length == 4 );
+
+ t.ensureIndex({"y.a":1});
+
+}
+
+assert(t.validate().valid);
diff --git a/jstests/sort3.js b/jstests/sort3.js
new file mode 100644
index 0000000..b79f1f6
--- /dev/null
+++ b/jstests/sort3.js
@@ -0,0 +1,16 @@
+
+t = db.sort3;
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 5 } );
+t.save( { a : 3 } );
+
+assert.eq( "1,5,3" , t.find().toArray().map( function(z){ return z.a; } ) );
+
+assert.eq( "1,3,5" , t.find().sort( { a : 1 } ).toArray().map( function(z){ return z.a; } ) );
+assert.eq( "5,3,1" , t.find().sort( { a : -1 } ).toArray().map( function(z){ return z.a; } ) );
+
+assert.eq( "1,3,5" , t.find( { query : {} , orderby : { a : 1 } } ).toArray().map( function(z){ return z.a; } ) );
+assert.eq( "5,3,1" , t.find( { query : {} , orderby : { a : -1 } } ).toArray().map( function(z){ return z.a; } ) );
+
diff --git a/jstests/sort4.js b/jstests/sort4.js
new file mode 100644
index 0000000..5174b46
--- /dev/null
+++ b/jstests/sort4.js
@@ -0,0 +1,43 @@
+t = db.sort4;
+t.drop();
+
+
+function nice( sort , correct , extra ){
+ var c = t.find().sort( sort );
+ var s = "";
+ c.forEach(
+ function(z){
+ if ( s.length )
+ s += ",";
+ s += z.name;
+ if ( z.prename )
+ s += z.prename;
+ }
+ );
+ print( tojson( sort ) + "\t" + s );
+ if ( correct )
+ assert.eq( correct , s , tojson( sort ) + "(" + extra + ")" );
+ return s;
+}
+
+t.save({name: 'A', prename: 'B'})
+t.save({name: 'A', prename: 'C'})
+t.save({name: 'B', prename: 'B'})
+t.save({name: 'B', prename: 'D'})
+
+nice( { name:1 } , "AB,AC,BB,BD" , "s1" );
+nice( { prename : 1 } , "AB,BB,AC,BD" , "s2" );
+nice( {name:1, prename:1} , "AB,AC,BB,BD" , "s3" );
+
+t.save({name: 'A'})
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD" , "e1" );
+
+t.save({name: 'C'})
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2" ); // SERVER-282
+
+t.ensureIndex( { name : 1 , prename : 1 } );
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ia" ); // SERVER-282
+
+t.dropIndexes();
+t.ensureIndex( { name : 1 } );
+nice( {name:1, prename:1} , "A,AB,AC,BB,BD,C" , "e2ib" ); // SERVER-282
diff --git a/jstests/sort5.js b/jstests/sort5.js
new file mode 100644
index 0000000..a589355
--- /dev/null
+++ b/jstests/sort5.js
@@ -0,0 +1,21 @@
+var t = db.sort5;
+t.drop();
+
+t.save({_id: 5, x: 1, y: {a: 5, b: 4}});
+t.save({_id: 7, x: 2, y: {a: 7, b: 3}});
+t.save({_id: 2, x: 3, y: {a: 2, b: 3}});
+t.save({_id: 9, x: 4, y: {a: 9, b: 3}});
+
+// test compound sorting
+
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A no index" );
+t.ensureIndex({"y.b": 1, "y.a": -1});
+assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , "y.a" : -1 }).map( function(z){ return z.x; } ) , "A index" );
+assert(t.validate().valid, "A valid");
+
+// test sorting on compound key involving _id
+
+// assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B no index" );
+// t.ensureIndex({"y.b": 1, "_id": -1});
+// assert.eq( [4,2,3,1] , t.find().sort({"y.b": 1 , _id : -1 }).map( function(z){ return z.x; } ) , "B index" );
+// assert(t.validate().valid, "B valid");
diff --git a/jstests/sort_numeric.js b/jstests/sort_numeric.js
new file mode 100644
index 0000000..807f23d
--- /dev/null
+++ b/jstests/sort_numeric.js
@@ -0,0 +1,35 @@
+
+t = db.sort_numeric;
+t.drop();
+
+// there are two numeric types in the db; make sure it handles them right
+// for comparisons.
+
+t.save( { a : 3 } );
+t.save( { a : 3.1 } );
+t.save( { a : 2.9 } );
+t.save( { a : 1 } );
+t.save( { a : 1.9 } );
+t.save( { a : 5 } );
+t.save( { a : 4.9 } );
+t.save( { a : 2.91 } );
+
+for( var pass = 0; pass < 2; pass++ ) {
+
+ var c = t.find().sort({a:1});
+ var last = 0;
+ while( c.hasNext() ) {
+ current = c.next();
+ assert( current.a > last );
+ last = current.a;
+ }
+
+ assert( t.find({a:3}).count() == 1 );
+ assert( t.find({a:3.0}).count() == 1 );
+ assert( t.find({a:3.0}).length() == 1 );
+
+ t.ensureIndex({a:1});
+}
+
+assert(t.validate().valid);
+
diff --git a/jstests/stats.js b/jstests/stats.js
new file mode 100644
index 0000000..26de644
--- /dev/null
+++ b/jstests/stats.js
@@ -0,0 +1,9 @@
+
+t = db.stats1;
+t.drop();
+
+t.save( { a : 1 } );
+
+assert.lt( 0 , t.dataSize() , "A" );
+assert.lt( t.dataSize() , t.storageSize() , "B" );
+assert.lt( 0 , t.totalIndexSize() , "C" );
diff --git a/jstests/storefunc.js b/jstests/storefunc.js
new file mode 100644
index 0000000..bae1090
--- /dev/null
+++ b/jstests/storefunc.js
@@ -0,0 +1,31 @@
+
+s = db.system.js;
+s.remove({});
+assert.eq( 0 , s.count() , "setup - A" );
+
+s.save( { _id : "x" , value : "3" } );
+assert.isnull( db.getLastError() , "setup - B" );
+assert.eq( 1 , s.count() , "setup - C" );
+
+s.remove( { _id : "x" } );
+assert.eq( 0 , s.count() , "setup - D" );
+s.save( { _id : "x" , value : "4" } );
+assert.eq( 1 , s.count() , "setup - E" );
+
+assert.eq( 4 , s.findOne().value , "setup - F" );
+s.update( { _id : "x" } , { $set : { value : 5 } } );
+assert.eq( 1 , s.count() , "setup - G" );
+assert.eq( 5 , s.findOne().value , "setup - H" );
+
+assert.eq( 5 , db.eval( "return x" ) , "exec - 1 " );
+
+s.update( { _id : "x" } , { $set : { value : 6 } } );
+assert.eq( 1 , s.count() , "setup2 - A" );
+assert.eq( 6 , s.findOne().value , "setup - B" );
+
+assert.eq( 6 , db.eval( "return x" ) , "exec - 2 " );
+
+
+
+s.insert( { _id : "bar" , value : function( z ){ return 17 + z; } } );
+assert.eq( 22 , db.eval( "return bar(5);" ) , "exec - 3 " );
diff --git a/jstests/sub1.js b/jstests/sub1.js
new file mode 100644
index 0000000..9e643f8
--- /dev/null
+++ b/jstests/sub1.js
@@ -0,0 +1,14 @@
+// sub1.js
+
+t = db.sub1;
+t.drop();
+
+x = { a : 1 , b : { c : { d : 2 } } }
+
+t.save( x );
+
+y = t.findOne();
+
+assert.eq( 1 , y.a );
+assert.eq( 2 , y.b.c.d );
+print( tojson( y ) );
diff --git a/jstests/tool/csv1.js b/jstests/tool/csv1.js
new file mode 100644
index 0000000..df8aa10
--- /dev/null
+++ b/jstests/tool/csv1.js
@@ -0,0 +1,43 @@
+// csv1.js
+
+t = new ToolTest( "csv1" )
+
+c = t.startDB( "foo" );
+
+base = { a : 1 , b : "foo,bar" , c: 5 };
+
+assert.eq( 0 , c.count() , "setup1" );
+c.insert( base );
+delete base._id
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--csv" , "-f" , "a,b,c" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "-f" , "a,b,c" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 2 , c.count() , "after restore 2" );
+
+a = c.find().sort( { a : 1 } ).toArray();
+delete a[0]._id
+delete a[1]._id
+assert.eq( tojson( { a : "a" , b : "b" , c : "c" } ) , tojson( a[1] ) , "csv parse 1" );
+assert.eq( tojson( base ) , tojson(a[0]) , "csv parse 0" )
+
+c.drop()
+assert.eq( 0 , c.count() , "after drop 2" )
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" , "--type" , "csv" , "--headerline" )
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+
+x = c.findOne()
+delete x._id;
+assert.eq( tojson( base ) , tojson(x) , "csv parse 2" )
+
+
+
+
+t.stop()
diff --git a/jstests/tool/dumprestore1.js b/jstests/tool/dumprestore1.js
new file mode 100644
index 0000000..73f8fea
--- /dev/null
+++ b/jstests/tool/dumprestore1.js
@@ -0,0 +1,20 @@
+// dumprestore1.js
+
+t = new ToolTest( "dumprestore1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "dump" , "--out" , t.ext );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" );
+
+t.runTool( "restore" , "--dir" , t.ext );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+t.stop();
diff --git a/jstests/tool/dumprestore2.js b/jstests/tool/dumprestore2.js
new file mode 100644
index 0000000..86e65ae
--- /dev/null
+++ b/jstests/tool/dumprestore2.js
@@ -0,0 +1,26 @@
+// dumprestore2.js
+
+t = new ToolTest( "dumprestore2" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+t.stop();
+
+t.runTool( "dump" , "--dbpath" , t.dbpath , "--out" , t.ext );
+
+resetDbpath( t.dbpath );
+assert.eq( 0 , listFiles( t.dbpath ).length , "clear" );
+
+t.runTool( "restore" , "--dbpath" , t.dbpath , "--dir" , t.ext );
+
+listFiles( t.dbpath ).forEach( printjson )
+
+c = t.startDB( "foo" );
+assert.soon( "c.findOne()" , "no data after startup" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+t.stop();
+
diff --git a/jstests/tool/exportimport1.js b/jstests/tool/exportimport1.js
new file mode 100644
index 0000000..22934fe
--- /dev/null
+++ b/jstests/tool/exportimport1.js
@@ -0,0 +1,20 @@
+// exportimport1.js
+
+t = new ToolTest( "exportimport1" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+
+t.runTool( "export" , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c.drop();
+assert.eq( 0 , c.count() , "after drop" , "-d" , t.baseName , "-c" , "foo" );;
+
+t.runTool( "import" , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+assert.soon( "c.findOne()" , "no data after sleep" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+t.stop();
diff --git a/jstests/tool/exportimport2.js b/jstests/tool/exportimport2.js
new file mode 100644
index 0000000..fbcf239
--- /dev/null
+++ b/jstests/tool/exportimport2.js
@@ -0,0 +1,24 @@
+// exportimport2.js
+
+t = new ToolTest( "exportimport2" );
+
+c = t.startDB( "foo" );
+assert.eq( 0 , c.count() , "setup1" );
+c.save( { a : 22 } );
+assert.eq( 1 , c.count() , "setup2" );
+t.stop();
+
+t.runTool( "export" , "--dbpath" , t.dbpath , "--out" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+resetDbpath( t.dbpath );
+assert.eq( 0 , listFiles( t.dbpath ).length , "clear" );
+
+t.runTool( "import" , "--dbpath" , t.dbpath , "--file" , t.extFile , "-d" , t.baseName , "-c" , "foo" );
+
+c = t.startDB( "foo" );
+assert.soon( "c.findOne()" , "no data after startup" );
+assert.eq( 1 , c.count() , "after restore 2" );
+assert.eq( 22 , c.findOne().a , "after restore 2" );
+
+t.stop();
+
diff --git a/jstests/tool/tool1.js b/jstests/tool/tool1.js
new file mode 100644
index 0000000..00e92e7
--- /dev/null
+++ b/jstests/tool/tool1.js
@@ -0,0 +1,64 @@
+// mongo tool tests, very basic to start with
+
+baseName = "jstests_tool_tool1";
+dbPath = "/data/db/" + baseName + "/";
+externalPath = "/data/db/" + baseName + "_external/"
+externalFile = externalPath + "export.json"
+
+function fileSize(){
+ var l = listFiles( externalPath );
+ for ( var i=0; i<l.length; i++ ){
+ if ( l[i].name == externalFile )
+ return l[i].size;
+ }
+ return -1;
+}
+
+
+port = allocatePorts( 1 )[ 0 ];
+resetDbpath( externalPath );
+
+m = startMongod( "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+c = m.getDB( baseName ).getCollection( baseName );
+c.save( { a: 1 } );
+assert( c.findOne() );
+
+runMongoProgram( "mongodump", "--host", "127.0.0.1:" + port, "--out", externalPath );
+c.drop();
+runMongoProgram( "mongorestore", "--host", "127.0.0.1:" + port, "--dir", externalPath );
+assert.soon( "c.findOne()" , "mongodump then restore has no data w/sleep" );
+assert( c.findOne() , "mongodump then restore has no data" );
+assert.eq( 1 , c.findOne().a , "mongodump then restore has no broken data" );
+
+resetDbpath( externalPath );
+
+assert.eq( -1 , fileSize() , "mongoexport prep invalid" );
+runMongoProgram( "mongoexport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--out", externalFile );
+assert.lt( 10 , fileSize() , "file size changed" );
+
+c.drop();
+runMongoProgram( "mongoimport", "--host", "127.0.0.1:" + port, "-d", baseName, "-c", baseName, "--file", externalFile );
+assert.soon( "c.findOne()" , "mongo import json A" );
+assert( c.findOne() && 1 == c.findOne().a , "mongo import json B" );
+
+stopMongod( port );
+resetDbpath( externalPath );
+
+runMongoProgram( "mongodump", "--dbpath", dbPath, "--out", externalPath );
+resetDbpath( dbPath );
+runMongoProgram( "mongorestore", "--dbpath", dbPath, "--dir", externalPath );
+m = startMongoProgram( "mongod", "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+c = m.getDB( baseName ).getCollection( baseName );
+assert.soon( "c.findOne()" , "object missing a" );
+assert( 1 == c.findOne().a, "object wrong" );
+
+stopMongod( port );
+resetDbpath( externalPath );
+
+runMongoProgram( "mongoexport", "--dbpath", dbPath, "-d", baseName, "-c", baseName, "--out", externalFile );
+resetDbpath( dbPath );
+runMongoProgram( "mongoimport", "--dbpath", dbPath, "-d", baseName, "-c", baseName, "--file", externalFile );
+m = startMongoProgram( "mongod", "--port", port, "--dbpath", dbPath, "--nohttpinterface", "--bind_ip", "127.0.0.1" );
+c = m.getDB( baseName ).getCollection( baseName );
+assert.soon( "c.findOne()" , "object missing b" );
+assert( 1 == c.findOne().a, "object wrong" );
diff --git a/jstests/type1.js b/jstests/type1.js
new file mode 100644
index 0000000..94385fa
--- /dev/null
+++ b/jstests/type1.js
@@ -0,0 +1,23 @@
+
+t = db.type1;
+t.drop();
+
+t.save( { x : 1.1 } );
+t.save( { x : "3" } );
+t.save( { x : "asd" } );
+t.save( { x : "foo" } );
+
+assert.eq( 4 , t.find().count() , "A1" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "A2" );
+assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "A3" );
+assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "A4" );
+assert.eq( 4 , t.find( { x : { $type : 1 } } ).explain().nscanned , "A5" );
+
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( 4 , t.find().count() , "B1" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).count() , "B2" );
+assert.eq( 3 , t.find( { x : { $type : 2 } } ).count() , "B3" );
+assert.eq( 0 , t.find( { x : { $type : 3 } } ).count() , "B4" );
+assert.eq( 1 , t.find( { x : { $type : 1 } } ).explain().nscanned , "B5" );
diff --git a/jstests/unique2.js b/jstests/unique2.js
new file mode 100644
index 0000000..42cf9fb
--- /dev/null
+++ b/jstests/unique2.js
@@ -0,0 +1,41 @@
+
+t = db.jstests_unique2;
+
+t.drop();
+
+/* test for good behavior when indexing multikeys */
+
+t.insert({k:3});
+t.insert({k:[2,3]});
+t.insert({k:[4,3]});
+
+t.ensureIndex({k:1}, {unique:true, dropDups:true});
+
+assert( t.count() == 1 ) ;
+assert( t.find().sort({k:1}).toArray().length == 1 ) ;
+assert( t.find().sort({k:1}).count() == 1 ) ;
+
+t.drop();
+
+t.ensureIndex({k:1}, {unique:true});
+
+t.insert({k:3});
+t.insert({k:[2,3]});
+t.insert({k:[4,3]});
+
+assert( t.count() == 1 ) ;
+assert( t.find().sort({k:1}).toArray().length == 1 ) ;
+assert( t.find().sort({k:1}).count() == 1 ) ;
+
+t.dropIndexes();
+
+t.insert({k:[2,3]});
+t.insert({k:[4,3]});
+assert( t.count() == 3 ) ;
+
+t.ensureIndex({k:1}, {unique:true, dropDups:true});
+
+assert( t.count() == 1 ) ;
+assert( t.find().sort({k:1}).toArray().length == 1 ) ;
+assert( t.find().sort({k:1}).count() == 1 ) ;
+
diff --git a/jstests/uniqueness.js b/jstests/uniqueness.js
new file mode 100644
index 0000000..f1651b3
--- /dev/null
+++ b/jstests/uniqueness.js
@@ -0,0 +1,45 @@
+
+t = db.jstests_uniqueness;
+
+t.drop();
+
+// test uniqueness of _id
+
+t.save( { _id : 3 } );
+assert( !db.getLastError(), 1 );
+
+// this should yield an error
+t.insert( { _id : 3 } );
+assert( db.getLastError() , 2);
+assert( t.count() == 1, "hmmm");
+
+t.insert( { _id : 4, x : 99 } );
+assert( !db.getLastError() , 3);
+
+// this should yield an error
+t.update( { _id : 4 } , { _id : 3, x : 99 } );
+assert( db.getLastError() , 4);
+assert( t.findOne( {_id:4} ), 5 );
+
+// Check for an error message when we index and there are dups
+db.jstests_uniqueness2.drop();
+db.jstests_uniqueness2.insert({a:3});
+db.jstests_uniqueness2.insert({a:3});
+assert( db.jstests_uniqueness2.count() == 2 , 6) ;
+db.jstests_uniqueness2.ensureIndex({a:1}, true);
+assert( db.getLastError() , 7);
+
+/* Check that if we update and remove _id, it gets added back by the DB */
+
+/* - test when object grows */
+t.drop();
+t.save( { _id : 'Z' } );
+t.update( {}, { k : 2 } );
+assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id" );
+
+/* - test when doesn't grow */
+t.drop();
+t.save( { _id : 'Z', k : 3 } );
+t.update( {}, { k : 2 } );
+assert( t.findOne()._id == 'Z', "uniqueness.js problem with adding back _id (2)" );
+
diff --git a/jstests/unset.js b/jstests/unset.js
new file mode 100644
index 0000000..f3cdcf0
--- /dev/null
+++ b/jstests/unset.js
@@ -0,0 +1,19 @@
+t = db.unset;
+t.drop();
+
+orig = { _id : 1, emb : {} };
+t.insert(orig);
+
+t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
+assert.eq( orig , t.findOne() , "A" );
+
+t.update( { _id : 1 }, { $set : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $set : { 'z' : 1 }});
+
+t.update( { _id : 1 }, { $unset : { 'emb.a' : 1 }});
+t.update( { _id : 1 }, { $unset : { 'z' : 1 }});
+assert.eq( orig , t.findOne() , "B" ); // note that emb isn't removed
+
+t.update( { _id : 1 }, { $unset : { 'emb' : 1 }});
+assert.eq( {_id :1} , t.findOne() , "C" );
diff --git a/jstests/update.js b/jstests/update.js
new file mode 100644
index 0000000..70f9f15
--- /dev/null
+++ b/jstests/update.js
@@ -0,0 +1,25 @@
+
+
+asdf = db.getCollection( "asdf" );
+asdf.drop();
+
+var txt = "asdf";
+for(var i=0; i<10; i++) {
+ txt = txt + txt;
+}
+
+// fill db
+for(var i=1; i<=5000; i++) {
+ var obj = {txt : txt};
+ asdf.save(obj);
+
+ var obj2 = {txt: txt, comments: [{num: i, txt: txt}, {num: [], txt: txt}, {num: true, txt: txt}]};
+ asdf.update(obj, obj2);
+
+ if(i%100 == 0) {
+ var c = asdf.count();
+ assert.eq(c , i);
+ }
+}
+
+assert(asdf.validate().valid);
diff --git a/jstests/update2.js b/jstests/update2.js
new file mode 100644
index 0000000..654914c
--- /dev/null
+++ b/jstests/update2.js
@@ -0,0 +1,18 @@
+f = db.ed_db_update2;
+
+f.drop();
+f.save( { a: 4 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
+
+f.drop();
+f.save( { a: 4 } );
+f.ensureIndex( { a: 1 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
+
+// Verify that drop clears the index
+f.drop();
+f.save( { a: 4 } );
+f.update( { a: 4 }, { $inc: { a: 2 } } );
+assert.eq( 6, f.findOne().a );
diff --git a/jstests/update3.js b/jstests/update3.js
new file mode 100644
index 0000000..4dfeb90
--- /dev/null
+++ b/jstests/update3.js
@@ -0,0 +1,23 @@
+// Update with mods corner cases.
+
+f = db.jstests_update3;
+
+f.drop();
+f.save( { a:1 } );
+f.update( {}, {$inc:{ a:1 }} );
+assert.eq( 2, f.findOne().a , "A" );
+
+f.drop();
+f.save( { a:{ b: 1 } } );
+f.update( {}, {$inc:{ "a.b":1 }} );
+assert.eq( 2, f.findOne().a.b , "B" );
+
+f.drop();
+f.save( { a:{ b: 1 } } );
+f.update( {}, {$set:{ "a.b":5 }} );
+assert.eq( 5, f.findOne().a.b , "C" );
+
+f.drop();
+f.save( {'_id':0} );
+f.update( {}, {$set:{'_id':5}} );
+assert.eq( 0, f.findOne()._id , "D" );
diff --git a/jstests/update4.js b/jstests/update4.js
new file mode 100644
index 0000000..1502f67
--- /dev/null
+++ b/jstests/update4.js
@@ -0,0 +1,33 @@
+f = db.jstests_update4;
+f.drop();
+
+getLastError = function() {
+ ret = db.runCommand( { getlasterror : 1 } );
+// printjson( ret );
+ return ret;
+}
+
+f.save( {a:1} );
+f.update( {a:1}, {a:2} );
+assert.eq( true, getLastError().updatedExisting , "A" );
+assert.eq( 1, getLastError().n , "B" );
+f.update( {a:1}, {a:2} );
+assert.eq( false, getLastError().updatedExisting , "C" );
+assert.eq( 0, getLastError().n , "D" );
+
+f.update( {a:1}, {a:1}, true );
+assert.eq( false, getLastError().updatedExisting , "E" );
+assert.eq( 1, getLastError().n , "F" );
+f.update( {a:1}, {a:1}, true );
+assert.eq( true, getLastError().updatedExisting , "G" );
+assert.eq( 1, getLastError().n , "H" );
+assert.eq( true, db.getPrevError().updatedExisting , "I" );
+assert.eq( 1, db.getPrevError().nPrev , "J" );
+
+f.findOne();
+assert.eq( undefined, getLastError().updatedExisting , "K" );
+assert.eq( true, db.getPrevError().updatedExisting , "L" );
+assert.eq( 2, db.getPrevError().nPrev , "M" );
+
+db.forceError();
+assert.eq( undefined, getLastError().updatedExisting , "N" );
diff --git a/jstests/update5.js b/jstests/update5.js
new file mode 100644
index 0000000..2728000
--- /dev/null
+++ b/jstests/update5.js
@@ -0,0 +1,41 @@
+
+t = db.update5;
+
+function go( key ){
+
+ t.drop();
+
+ function check( num , name ){
+ assert.eq( 1 , t.find().count() , tojson( key ) + " count " + name );
+ assert.eq( num , t.findOne().n , tojson( key ) + " value " + name );
+ }
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 1 , "A" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 2 , "B" );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 3 , "C" );
+
+ var ik = {};
+ for ( k in key )
+ ik[k] = 1;
+ t.ensureIndex( ik );
+
+ t.update( key , { $inc : { n : 1 } } , true );
+ check( 4 , "D" );
+
+}
+
+go( { a : 5 } );
+go( { a : 5 } );
+
+go( { a : 5 , b : 7 } );
+go( { a : null , b : 7 } );
+
+go( { referer: 'blah' } );
+go( { referer: 'blah', lame: 'bar' } );
+go( { referer: 'blah', name: 'bar' } );
+go( { date: null, referer: 'blah', name: 'bar' } );
diff --git a/jstests/update6.js b/jstests/update6.js
new file mode 100644
index 0000000..1f42fe5
--- /dev/null
+++ b/jstests/update6.js
@@ -0,0 +1,46 @@
+
+t = db.update6;
+t.drop();
+
+t.save( { a : 1 , b : { c : 1 , d : 1 } } );
+
+t.update( { a : 1 } , { $inc : { "b.c" : 1 } } );
+assert.eq( 2 , t.findOne().b.c , "A" );
+assert.eq( "c,d" , Object.keySet( t.findOne().b ).toString() , "B" );
+
+t.update( { a : 1 } , { $inc : { "b.0e" : 1 } } );
+assert.eq( 1 , t.findOne().b["0e"] , "C" );
+assert.eq( "0e,c,d" , Object.keySet( t.findOne().b ).toString() , "D" );
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ //"b323" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks before" );
+t.update({_id:2},{$inc: { 'b3.0719' : 1}},true)
+assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 1 : ks after" );
+
+
+// -----
+
+t.drop();
+
+t.save( {"_id" : 2 ,
+ "b3" : {"0720" : 5 , "0721" : 12 , "0722" : 11 , "0723" : 3 , "0721" : 12} ,
+ "b324" : {"0720" : 1} ,
+ }
+ );
+
+
+assert.eq( 4 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks before" );
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+t.update({_id:2},{$inc: { 'b3.0719' : 1}} )
+printjson( t.find({_id:2},{b3:1})[0].b3 )
+assert.eq( 5 , Object.keySet( t.find({_id:2},{b3:1})[0].b3 ).length , "test 2 : ks after" );
diff --git a/jstests/update7.js b/jstests/update7.js
new file mode 100644
index 0000000..b893121
--- /dev/null
+++ b/jstests/update7.js
@@ -0,0 +1,138 @@
+
+t = db.update7;
+t.drop();
+
+function s(){
+ return t.find().sort( { _id : 1 } ).map( function(z){ return z.x; } );
+}
+
+t.save( { _id : 1 , x : 1 } );
+t.save( { _id : 2 , x : 5 } );
+
+assert.eq( "1,5" , s() , "A" );
+
+t.update( {} , { $inc : { x : 1 } } );
+assert.eq( "2,5" , s() , "B" );
+
+t.update( { _id : 1 } , { $inc : { x : 1 } } );
+assert.eq( "3,5" , s() , "C" );
+
+t.update( { _id : 2 } , { $inc : { x : 1 } } );
+assert.eq( "3,6" , s() , "D" );
+
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "E" );
+
+t.update( {} , { $set : { x : 2 } } , false , true );
+assert.eq( "2,2" , s() , "F" );
+
+// non-matching in cursor
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : 1 , b : 1 } );
+t.save( { _id : 2 , x : 5 , a : 1 , b : 2 } );
+assert.eq( "1,5" , s() , "B1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,6" , s() , "B2" );
+
+t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "B3" );
+
+t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "B4" );
+
+t.ensureIndex( { a : 1 } );
+t.ensureIndex( { b : 1 } );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "B5" );
+
+t.update( { b : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,7" , s() , "B6" );
+
+t.update( { b : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,7" , s() , "B7" );
+
+t.update( { b : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "B7" );
+
+
+// multi-key
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
+t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
+assert.eq( "1,5" , s() , "C1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,5" , s() , "C2" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,5" , s() , "C3" );
+
+t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "C4" );
+
+t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "C5" );
+
+t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "C6" );
+
+
+t.drop();
+
+t.save( { _id : 1 , x : 1 , a : [ 1 , 2 ] } );
+t.save( { _id : 2 , x : 5 , a : [ 2 , 3 ] } );
+t.ensureIndex( { a : 1 } );
+assert.eq( "1,5" , s() , "D1" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "2,5" , s() , "D2" );
+
+t.update( { a : 1 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,5" , s() , "D3" );
+
+t.update( { a : 3 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "3,6" , s() , "D4" );
+
+t.update( { a : 2 } , { $inc : { x : 1 } } , false , true );
+assert.eq( "4,7" , s() , "D5" );
+
+t.update( { a : { $gt : 0 } } , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8" , s() , "D6" );
+
+t.update( { a : { $lt : 10 } } , { $inc : { x : -1 } } , false , true );
+assert.eq( "4,7" , s() , "D7" );
+
+// ---
+
+t.save( { _id : 3 } );
+assert.eq( "4,7," , s() , "E1" );
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "5,8,1" , s() , "E2" );
+
+for ( i = 4; i<8; i++ )
+ t.save( { _id : i } );
+t.save( { _id : i , x : 1 } );
+assert.eq( "5,8,1,,,,,1" , s() , "E4" );
+t.update( {} , { $inc : { x : 1 } } , false , true );
+assert.eq( "6,9,2,1,1,1,1,2" , s() , "E5" );
+
+
+// --- $inc indexed field
+
+t.drop();
+
+t.save( { x : 1 } );
+t.save( { x : 2 } );
+t.save( { x : 3 } );
+
+t.ensureIndex( { x : 1 } );
+
+assert.eq( "1,2,3" , s() , "F1" )
+t.update( { x : { $gt : 0 } } , { $inc : { x : 5 } } , false , true );
+assert.eq( "6,7,8" , s() , "F1" )
diff --git a/jstests/update8.js b/jstests/update8.js
new file mode 100644
index 0000000..2388ff8
--- /dev/null
+++ b/jstests/update8.js
@@ -0,0 +1,11 @@
+
+t = db.update8;
+t.drop();
+
+t.update( { _id : 1 , tags: {"$ne": "a"}}, {"$push": { tags : "a" } } , true )
+assert.eq( { _id : 1 , tags : [ "a" ] } , t.findOne() , "A" );
+
+t.drop()
+//SERVER-390
+//t.update( { "x.y" : 1 } , { $inc : { i : 1 } } , true );
+//printjson( t.findOne() );
diff --git a/jstests/update9.js b/jstests/update9.js
new file mode 100644
index 0000000..45b9e2d
--- /dev/null
+++ b/jstests/update9.js
@@ -0,0 +1,19 @@
+
+t = db.update9;
+t.drop()
+
+orig = { "_id" : 1 ,
+ "question" : "a",
+ "choices" : { "1" : { "choice" : "b" },
+ "0" : { "choice" : "c" } } ,
+
+ }
+
+t.save( orig );
+assert.eq( orig , t.findOne() , "A" );
+
+t.update({_id: 1, 'choices.0.votes': {$ne: 1}}, {$push: {'choices.0.votes': 1}})
+
+orig.choices["0"].votes = [ 1 ] ;
+assert.eq( orig.choices["0"] , t.findOne().choices["0"] , "B" );
+
diff --git a/jstests/updatea.js b/jstests/updatea.js
new file mode 100644
index 0000000..9864aa6
--- /dev/null
+++ b/jstests/updatea.js
@@ -0,0 +1,50 @@
+
+t = db.updatea;
+t.drop();
+
+orig = { _id : 1 , a : [ { x : 1 , y : 2 } , { x : 10 , y : 11 } ] }
+
+t.save( orig )
+
+// SERVER-181
+t.update( {} , { $set : { "a.0.x" : 3 } } )
+orig.a[0].x = 3;
+assert.eq( orig , t.findOne() , "A1" );
+
+t.update( {} , { $set : { "a.1.z" : 17 } } )
+orig.a[1].z = 17;
+assert.eq( orig , t.findOne() , "A2" );
+
+// SERVER-273
+t.update( {} , { $unset : { "a.1.y" : 1 } } )
+delete orig.a[1].y
+assert.eq( orig , t.findOne() , "A3" );
+
+// SERVER-333
+t.drop();
+orig = { _id : 1 , comments : [ { name : "blah" , rate_up : 0 , rate_ups : [] } ] }
+t.save( orig );
+
+t.update( {} , { $inc: { "comments.0.rate_up" : 1 } , $push: { "comments.0.rate_ups" : 99 } } )
+orig.comments[0].rate_up++;
+orig.comments[0].rate_ups.push( 99 )
+assert.eq( orig , t.findOne() , "B1" )
+
+t.drop();
+orig = { _id : 1 , a : [] }
+for ( i=0; i<12; i++ )
+ orig.a.push( i );
+
+
+t.save( orig );
+assert.eq( orig , t.findOne() , "C1" );
+
+t.update( {} , { $inc: { "a.0" : 1 } } );
+orig.a[0]++;
+assert.eq( orig , t.findOne() , "C2" );
+
+t.update( {} , { $inc: { "a.10" : 1 } } );
+orig.a[10]++;
+
+
+
diff --git a/jstests/updateb.js b/jstests/updateb.js
new file mode 100644
index 0000000..ee7c531
--- /dev/null
+++ b/jstests/updateb.js
@@ -0,0 +1,11 @@
+
+t = db.updateb;
+t.drop();
+
+t.update( { "x.y" : 2 } , { $inc : { a : 7 } } , true );
+
+correct = { a : 7 , x : { y : 2 } };
+got = t.findOne();
+delete got._id;
+assert.eq( correct , got , "A" )
+
diff --git a/jstests/where1.js b/jstests/where1.js
new file mode 100644
index 0000000..017d1f3
--- /dev/null
+++ b/jstests/where1.js
@@ -0,0 +1,14 @@
+
+t = db.getCollection( "where1" );
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 1 , t.find( function(){ return this.a == 2; } ).length() , "A" );
+
+assert.eq( 1 , t.find( { $where : "return this.a == 2" } ).toArray().length , "B" );
+assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "C" );
+
+assert.eq( 1 , t.find( "this.a == 2" ).toArray().length , "D" );
diff --git a/jstests/where2.js b/jstests/where2.js
new file mode 100644
index 0000000..9262b30
--- /dev/null
+++ b/jstests/where2.js
@@ -0,0 +1,10 @@
+
+t = db.getCollection( "where2" );
+t.drop();
+
+t.save( { a : 1 } );
+t.save( { a : 2 } );
+t.save( { a : 3 } );
+
+assert.eq( 1 , t.find( { $where : "this.a == 2" } ).toArray().length , "A" );
+assert.eq( 1 , t.find( { $where : "\nthis.a == 2" } ).toArray().length , "B" );